/* isomorphic-git bundle (retrieved from unpkg "View Raw"; ~442 kB). */
1import AsyncLock from 'async-lock';
2import Hash from 'sha.js/sha1.js';
3import crc32 from 'crc-32';
4import pako from 'pako';
5import pify from 'pify';
6import ignore from 'ignore';
7import cleanGitRef from 'clean-git-ref';
8import diff3Merge from 'diff3';
9
10/**
11 * @typedef {Object} GitProgressEvent
12 * @property {string} phase
13 * @property {number} loaded
14 * @property {number} total
15 */
16
17/**
18 * @callback ProgressCallback
19 * @param {GitProgressEvent} progress
20 * @returns {void | Promise<void>}
21 */
22
23/**
24 * @typedef {Object} GitHttpRequest
25 * @property {string} url - The URL to request
26 * @property {string} [method='GET'] - The HTTP method to use
27 * @property {Object<string, string>} [headers={}] - Headers to include in the HTTP request
28 * @property {Object} [agent] - An HTTP or HTTPS agent that manages connections for the HTTP client (Node.js only)
29 * @property {AsyncIterableIterator<Uint8Array>} [body] - An async iterator of Uint8Arrays that make up the body of POST requests
30 * @property {ProgressCallback} [onProgress] - Reserved for future use (emitting `GitProgressEvent`s)
31 * @property {object} [signal] - Reserved for future use (canceling a request)
32 */
33
34/**
35 * @typedef {Object} GitHttpResponse
36 * @property {string} url - The final URL that was fetched after any redirects
37 * @property {string} [method] - The HTTP method that was used
38 * @property {Object<string, string>} [headers] - HTTP response headers
39 * @property {AsyncIterableIterator<Uint8Array>} [body] - An async iterator of Uint8Arrays that make up the body of the response
40 * @property {number} statusCode - The HTTP status code
41 * @property {string} statusMessage - The HTTP status message
42 */
43
44/**
45 * @callback HttpFetch
46 * @param {GitHttpRequest} request
47 * @returns {Promise<GitHttpResponse>}
48 */
49
50/**
51 * @typedef {Object} HttpClient
52 * @property {HttpFetch} request
53 */
54
55/**
56 * A git commit object.
57 *
58 * @typedef {Object} CommitObject
59 * @property {string} message Commit message
60 * @property {string} tree SHA-1 object id of corresponding file tree
61 * @property {string[]} parent an array of zero or more SHA-1 object ids
62 * @property {Object} author
63 * @property {string} author.name The author's name
64 * @property {string} author.email The author's email
65 * @property {number} author.timestamp UTC Unix timestamp in seconds
66 * @property {number} author.timezoneOffset Timezone difference from UTC in minutes
67 * @property {Object} committer
68 * @property {string} committer.name The committer's name
69 * @property {string} committer.email The committer's email
70 * @property {number} committer.timestamp UTC Unix timestamp in seconds
71 * @property {number} committer.timezoneOffset Timezone difference from UTC in minutes
72 * @property {string} [gpgsig] PGP signature (if present)
73 */
74
75/**
76 * An entry from a git tree object. Files are called 'blobs' and directories are called 'trees'.
77 *
78 * @typedef {Object} TreeEntry
79 * @property {string} mode the 6 digit hexadecimal mode
80 * @property {string} path the name of the file or directory
81 * @property {string} oid the SHA-1 object id of the blob or tree
82 * @property {'commit'|'blob'|'tree'} type the type of object
83 */
84
85/**
86 * A git tree object. Trees represent a directory snapshot.
87 *
88 * @typedef {TreeEntry[]} TreeObject
89 */
90
91/**
92 * A git annotated tag object.
93 *
94 * @typedef {Object} TagObject
95 * @property {string} object SHA-1 object id of object being tagged
96 * @property {'blob' | 'tree' | 'commit' | 'tag'} type the type of the object being tagged
97 * @property {string} tag the tag name
98 * @property {Object} tagger
99 * @property {string} tagger.name the tagger's name
100 * @property {string} tagger.email the tagger's email
101 * @property {number} tagger.timestamp UTC Unix timestamp in seconds
102 * @property {number} tagger.timezoneOffset timezone difference from UTC in minutes
103 * @property {string} message tag message
104 * @property {string} [gpgsig] PGP signature (if present)
105 */
106
107/**
108 * @typedef {Object} ReadCommitResult
109 * @property {string} oid - SHA-1 object id of this commit
110 * @property {CommitObject} commit - the parsed commit object
111 * @property {string} payload - PGP signing payload
112 */
113
114/**
115 * @typedef {Object} ServerRef - This object has the following schema:
116 * @property {string} ref - The name of the ref
117 * @property {string} oid - The SHA-1 object id the ref points to
118 * @property {string} [target] - The target ref pointed to by a symbolic ref
119 * @property {string} [peeled] - If the oid is the SHA-1 object id of an annotated tag, this is the SHA-1 object id that the annotated tag points to
120 */
121
122/**
123 * @typedef Walker
124 * @property {Symbol} Symbol('GitWalkerSymbol')
125 */
126
127/**
128 * Normalized subset of filesystem `stat` data:
129 *
130 * @typedef {Object} Stat
131 * @property {number} ctimeSeconds
132 * @property {number} ctimeNanoseconds
133 * @property {number} mtimeSeconds
134 * @property {number} mtimeNanoseconds
135 * @property {number} dev
136 * @property {number} ino
137 * @property {number} mode
138 * @property {number} uid
139 * @property {number} gid
140 * @property {number} size
141 */
142
143/**
144 * The `WalkerEntry` is an interface that abstracts computing many common tree / blob stats.
145 *
146 * @typedef {Object} WalkerEntry
147 * @property {function(): Promise<'tree'|'blob'|'special'|'commit'>} type
148 * @property {function(): Promise<number>} mode
149 * @property {function(): Promise<string>} oid
150 * @property {function(): Promise<Uint8Array|void>} content
151 * @property {function(): Promise<Stat>} stat
152 */
153
154/**
155 * @typedef {Object} CallbackFsClient
156 * @property {function} readFile - https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback
157 * @property {function} writeFile - https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback
158 * @property {function} unlink - https://nodejs.org/api/fs.html#fs_fs_unlink_path_callback
159 * @property {function} readdir - https://nodejs.org/api/fs.html#fs_fs_readdir_path_options_callback
160 * @property {function} mkdir - https://nodejs.org/api/fs.html#fs_fs_mkdir_path_mode_callback
161 * @property {function} rmdir - https://nodejs.org/api/fs.html#fs_fs_rmdir_path_callback
162 * @property {function} stat - https://nodejs.org/api/fs.html#fs_fs_stat_path_options_callback
163 * @property {function} lstat - https://nodejs.org/api/fs.html#fs_fs_lstat_path_options_callback
164 * @property {function} [readlink] - https://nodejs.org/api/fs.html#fs_fs_readlink_path_options_callback
165 * @property {function} [symlink] - https://nodejs.org/api/fs.html#fs_fs_symlink_target_path_type_callback
166 * @property {function} [chmod] - https://nodejs.org/api/fs.html#fs_fs_chmod_path_mode_callback
167 */
168
169/**
170 * @typedef {Object} PromiseFsClient
171 * @property {Object} promises
172 * @property {function} promises.readFile - https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options
173 * @property {function} promises.writeFile - https://nodejs.org/api/fs.html#fs_fspromises_writefile_file_data_options
174 * @property {function} promises.unlink - https://nodejs.org/api/fs.html#fs_fspromises_unlink_path
175 * @property {function} promises.readdir - https://nodejs.org/api/fs.html#fs_fspromises_readdir_path_options
176 * @property {function} promises.mkdir - https://nodejs.org/api/fs.html#fs_fspromises_mkdir_path_options
177 * @property {function} promises.rmdir - https://nodejs.org/api/fs.html#fs_fspromises_rmdir_path
178 * @property {function} promises.stat - https://nodejs.org/api/fs.html#fs_fspromises_stat_path_options
179 * @property {function} promises.lstat - https://nodejs.org/api/fs.html#fs_fspromises_lstat_path_options
180 * @property {function} [promises.readlink] - https://nodejs.org/api/fs.html#fs_fspromises_readlink_path_options
181 * @property {function} [promises.symlink] - https://nodejs.org/api/fs.html#fs_fspromises_symlink_target_path_type
182 * @property {function} [promises.chmod] - https://nodejs.org/api/fs.html#fs_fspromises_chmod_path_mode
183 */
184
185/**
186 * @typedef {CallbackFsClient | PromiseFsClient} FsClient
187 */
188
189/**
190 * @callback MessageCallback
191 * @param {string} message
192 * @returns {void | Promise<void>}
193 */
194
195/**
196 * @typedef {Object} GitAuth
197 * @property {string} [username]
198 * @property {string} [password]
199 * @property {Object<string, string>} [headers]
200 * @property {boolean} [cancel] Tells git to throw a `UserCanceledError` (instead of an `HttpError`).
201 */
202
203/**
204 * @callback AuthCallback
205 * @param {string} url
206 * @param {GitAuth} auth Might have some values if the URL itself originally contained a username or password.
207 * @returns {GitAuth | void | Promise<GitAuth | void>}
208 */
209
210/**
211 * @callback AuthFailureCallback
212 * @param {string} url
213 * @param {GitAuth} auth The credentials that failed
214 * @returns {GitAuth | void | Promise<GitAuth | void>}
215 */
216
217/**
218 * @callback AuthSuccessCallback
219 * @param {string} url
220 * @param {GitAuth} auth
221 * @returns {void | Promise<void>}
222 */
223
224/**
225 * @typedef {Object} SignParams
226 * @property {string} payload - a plaintext message
227 * @property {string} secretKey - an 'ASCII armor' encoded PGP key (technically can actually contain _multiple_ keys)
228 */
229
230/**
231 * @callback SignCallback
232 * @param {SignParams} args
233 * @return {{signature: string} | Promise<{signature: string}>} - an 'ASCII armor' encoded "detached" signature
234 */
235
236/**
237 * @typedef {Object} MergeDriverParams
238 * @property {Array<string>} branches
239 * @property {Array<string>} contents
240 * @property {string} path
241 */
242
243/**
244 * @callback MergeDriverCallback
245 * @param {MergeDriverParams} args
246 * @return {{cleanMerge: boolean, mergedText: string} | Promise<{cleanMerge: boolean, mergedText: string}>}
247 */
248
249/**
250 * @callback WalkerMap
251 * @param {string} filename
252 * @param {WalkerEntry[]} entries
253 * @returns {Promise<any>}
254 */
255
256/**
257 * @callback WalkerReduce
258 * @param {any} parent
259 * @param {any[]} children
260 * @returns {Promise<any>}
261 */
262
263/**
264 * @callback WalkerIterateCallback
265 * @param {WalkerEntry[]} entries
266 * @returns {Promise<any[]>}
267 */
268
269/**
270 * @callback WalkerIterate
271 * @param {WalkerIterateCallback} walk
272 * @param {IterableIterator<WalkerEntry[]>} children
273 * @returns {Promise<any[]>}
274 */
275
276/**
277 * @typedef {Object} RefUpdateStatus
278 * @property {boolean} ok
279 * @property {string} error
280 */
281
282/**
283 * @typedef {Object} PushResult
284 * @property {boolean} ok
285 * @property {?string} error
286 * @property {Object<string, RefUpdateStatus>} refs
287 * @property {Object<string, string>} [headers]
288 */
289
290/**
291 * @typedef {0|1} HeadStatus
292 */
293
294/**
295 * @typedef {0|1|2} WorkdirStatus
296 */
297
298/**
299 * @typedef {0|1|2|3} StageStatus
300 */
301
302/**
303 * @typedef {[string, HeadStatus, WorkdirStatus, StageStatus]} StatusRow
304 */
305
class BaseError extends Error {
  /**
   * Base class for every isomorphic-git error.
   * @param {string} message
   */
  constructor(message) {
    super(message);
    // Initialized to '' so TS can infer that all git errors have a `caller`
    // property and that its type is string.
    this.caller = '';
  }

  /**
   * Error objects aren't normally serializable, so expose the fields we care
   * about as a plain object.
   */
  toJSON() {
    const { code, data, caller, message, stack } = this;
    return { code, data, caller, message, stack }
  }

  /**
   * Rehydrate an error from its toJSON() representation.
   * @param {{message: string, code: string, data: any, caller: string, stack: string}} json
   * @returns {BaseError}
   */
  fromJSON(json) {
    const err = new BaseError(json.message);
    Object.assign(err, {
      code: json.code,
      data: json.data,
      caller: json.caller,
      stack: json.stack,
    });
    return err
  }

  /** Marker so errors can be identified across module boundaries. */
  get isIsomorphicGitError() {
    return true
  }
}
338
class UnmergedPathsError extends BaseError {
  /**
   * Thrown when an index-modifying operation is attempted while unmerged
   * (conflicted) paths are present and `allowUnmerged` is false.
   * @param {Array<string>} filepaths - the paths currently in an unmerged state
   */
  constructor(filepaths) {
    super(
      // Bug fix: the previous code interpolated `filepaths.toString` — the
      // function object itself, not a call — which rendered the function's
      // source text instead of the list of paths.
      `Modifying the index is not possible because you have unmerged files: ${filepaths.join(
        ', '
      )}. Fix them up in the work tree, and then use 'git add/rm as appropriate to mark resolution and make a commit.`
    );
    this.code = this.name = UnmergedPathsError.code;
    this.data = { filepaths };
  }
}
/** @type {'UnmergedPathsError'} */
UnmergedPathsError.code = 'UnmergedPathsError';
353
class InternalError extends BaseError {
  /**
   * Thrown for unexpected conditions that indicate a bug in isomorphic-git.
   * @param {string} message - description of the unexpected condition
   */
  constructor(message) {
    const explanation = `An internal error caused this command to fail. Please file a bug report at https://github.com/isomorphic-git/isomorphic-git/issues with this error message: ${message}`;
    super(explanation);
    this.code = this.name = InternalError.code;
    this.data = { message };
  }
}
/** @type {'InternalError'} */
InternalError.code = 'InternalError';
368
class UnsafeFilepathError extends BaseError {
  /**
   * Thrown when a path read from git data contains sequences (like "../")
   * that could escape the working tree.
   * @param {string} filepath - the offending path
   */
  constructor(filepath) {
    const reason = `The filepath "${filepath}" contains unsafe character sequences`;
    super(reason);
    this.code = this.name = UnsafeFilepathError.code;
    this.data = { filepath };
  }
}
/** @type {'UnsafeFilepathError'} */
UnsafeFilepathError.code = 'UnsafeFilepathError';
381
// Modeled after https://github.com/tjfontaine/node-buffercursor
// but with the goal of being much lighter weight.
class BufferCursor {
  /**
   * Wraps a Buffer with a read/write position that advances automatically.
   * @param {Buffer} buffer
   */
  constructor(buffer) {
    this.buffer = buffer;
    this._start = 0;
  }

  /**
   * Internal: advance the cursor by `n` bytes AFTER `result` has been
   * produced, then pass `result` through. Because the argument is evaluated
   * before the advance, a throwing read/write leaves the cursor untouched.
   */
  _advance(result, n) {
    this._start += n;
    return result
  }

  /** True once the cursor has consumed the whole buffer. */
  eof() {
    return this._start >= this.buffer.length
  }

  /** Current byte offset. */
  tell() {
    return this._start
  }

  /** Move the cursor to absolute offset `n`. */
  seek(n) {
    this._start = n;
  }

  /** Read the next `n` bytes as a Buffer slice (shares memory). */
  slice(n) {
    return this._advance(this.buffer.slice(this._start, this._start + n), n)
  }

  /** Decode the next `length` bytes using encoding `enc`. */
  toString(enc, length) {
    return this._advance(
      this.buffer.toString(enc, this._start, this._start + length),
      length
    )
  }

  /** Encode `value` into the next `length` bytes using encoding `enc`. */
  write(value, length, enc) {
    return this._advance(
      this.buffer.write(value, this._start, length, enc),
      length
    )
  }

  /** Copy bytes [start, end) of `source` into the buffer at the cursor. */
  copy(source, start, end) {
    const copied = source.copy(this.buffer, this._start, start, end);
    return this._advance(copied, copied)
  }

  readUInt8() {
    return this._advance(this.buffer.readUInt8(this._start), 1)
  }

  writeUInt8(value) {
    return this._advance(this.buffer.writeUInt8(value, this._start), 1)
  }

  readUInt16BE() {
    return this._advance(this.buffer.readUInt16BE(this._start), 2)
  }

  writeUInt16BE(value) {
    return this._advance(this.buffer.writeUInt16BE(value, this._start), 2)
  }

  readUInt32BE() {
    return this._advance(this.buffer.readUInt32BE(this._start), 4)
  }

  writeUInt32BE(value) {
    return this._advance(this.buffer.writeUInt32BE(value, this._start), 4)
  }
}
462
/**
 * Total ordering for strings: -1, 0, or 1.
 * (See https://stackoverflow.com/a/40355107/2168416)
 */
function compareStrings(a, b) {
  if (a < b) return -1
  if (a > b) return 1
  return 0
}
467
/**
 * Order two index entries by their `path` property.
 * (Same trick as compareStrings: https://stackoverflow.com/a/40355107/2168416)
 */
function comparePath(a, b) {
  const left = a.path;
  const right = b.path;
  return -(left < right) || +(left > right)
}
472
/**
 * From https://github.com/git/git/blob/master/Documentation/technical/index-format.txt
 *
 * 32-bit mode, split into (high to low bits)
 *
 * 4-bit object type
 *   valid values in binary are 1000 (regular file), 1010 (symbolic link)
 *   and 1110 (gitlink)
 *
 * 3-bit unused
 *
 * 9-bit unix permission. Only 0755 and 0644 are valid for regular files.
 * Symbolic links and gitlinks have value 0 in this field.
 */
function normalizeMode(mode) {
  // BrowserFS reports -1 for "unknown"; clamp negatives to 0 so the
  // bit shifts behave.
  const rawType = mode > 0 ? mode >> 12 : 0;
  // 0100 = directory, 1000 = regular file, 1010 = symlink, 1110 = gitlink.
  // Anything else is coerced to "regular file".
  const validTypes = [0b0100, 0b1000, 0b1010, 0b1110];
  const type = validTypes.includes(rawType) ? rawType : 0b1000;
  // Only regular files carry permission bits: 0755 when any execute bit is
  // set, otherwise 0644. Everything else gets 0.
  let permissions = 0;
  if (type === 0b1000) {
    permissions = mode & 0o111 ? 0o755 : 0o644;
  }
  return (type << 12) + permissions
}
515
// Modulus used by normalizeStats to squeeze stat fields into unsigned 32-bit range.
const MAX_UINT32 = 2 ** 32;

/**
 * Resolve a (seconds, nanoseconds) pair from whichever timestamp fields a
 * filesystem implementation provides: explicit seconds/nanoseconds win,
 * then milliseconds, then a Date object.
 * @returns {[number, number]} seconds and nanoseconds
 */
function SecondsNanoseconds(
  givenSeconds,
  givenNanoseconds,
  milliseconds,
  date
) {
  if (givenSeconds !== undefined && givenNanoseconds !== undefined) {
    return [givenSeconds, givenNanoseconds]
  }
  const ms = milliseconds === undefined ? date.valueOf() : milliseconds;
  const seconds = Math.floor(ms / 1000);
  const nanoseconds = (ms - seconds * 1000) * 1000000;
  return [seconds, nanoseconds]
}
534
/**
 * Normalize a raw fs stat-like object into the fixed 32-bit fields stored in
 * the git index (see the Stat typedef above).
 */
function normalizeStats(e) {
  // Truncate every numeric field into unsigned 32-bit range.
  const wrap = value => value % MAX_UINT32;
  const [ctimeSeconds, ctimeNanoseconds] = SecondsNanoseconds(
    e.ctimeSeconds,
    e.ctimeNanoseconds,
    e.ctimeMs,
    e.ctime
  );
  const [mtimeSeconds, mtimeNanoseconds] = SecondsNanoseconds(
    e.mtimeSeconds,
    e.mtimeNanoseconds,
    e.mtimeMs,
    e.mtime
  );

  return {
    ctimeSeconds: wrap(ctimeSeconds),
    ctimeNanoseconds: wrap(ctimeNanoseconds),
    mtimeSeconds: wrap(mtimeSeconds),
    mtimeNanoseconds: wrap(mtimeNanoseconds),
    dev: wrap(e.dev),
    ino: wrap(e.ino),
    mode: normalizeMode(wrap(e.mode)),
    uid: wrap(e.uid),
    gid: wrap(e.gid),
    // size of -1 happens over a BrowserFS HTTP Backend that doesn't serve Content-Length headers
    // (like the Karma webserver) because BrowserFS HTTP Backend uses HTTP HEAD requests to do fs.stat
    size: e.size > -1 ? wrap(e.size) : 0,
  }
}
564
/**
 * Render a buffer (ArrayBuffer or typed array) as a lowercase hex string,
 * two zero-padded digits per byte.
 */
function toHex(buffer) {
  return Array.from(new Uint8Array(buffer), byte =>
    byte.toString(16).padStart(2, '0')
  ).join('')
}
573
/* eslint-env node, browser */

// Memoized result of the SubtleCrypto SHA-1 probe (null = not yet probed).
let supportsSubtleSHA1 = null;

/**
 * Compute the SHA-1 hex digest of a buffer, preferring the native
 * SubtleCrypto implementation when it is available and actually works.
 */
async function shasum(buffer) {
  // `??=` only evaluates the probe when the cache is still null.
  supportsSubtleSHA1 ??= await testSubtleSHA1();
  return supportsSubtleSHA1 ? subtleSHA1(buffer) : shasumSync(buffer)
}

// This is modeled after @dominictarr's "shasum" module,
// but without the 'json-stable-stringify' dependency and
// extra type-casting features.
function shasumSync(buffer) {
  const hasher = new Hash();
  hasher.update(buffer);
  return hasher.digest('hex')
}

/** SHA-1 via the Web Crypto API, rendered as a hex string. */
async function subtleSHA1(buffer) {
  const digest = await crypto.subtle.digest('SHA-1', buffer);
  return toHex(digest)
}

/**
 * Progressive-enhancement probe: some browsers expose crypto.subtle.digest
 * but don't actually implement SHA-1, so verify against the known digest of
 * the empty input.
 */
async function testSubtleSHA1() {
  try {
    const emptyDigest = await subtleSHA1(new Uint8Array([]));
    return emptyDigest === 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
  } catch (_) {
    return false
  }
}
608
// Unpack the 16-bit cache-entry flag word: 1-bit assume-valid, 1-bit
// extended flag, 2-bit merge stage, 12-bit path length.
function parseCacheEntryFlags(bits) {
  const assumeValid = (bits & 0x8000) !== 0;
  const extended = (bits & 0x4000) !== 0;
  const stage = (bits >> 12) & 0b11;
  const nameLength = bits & 0xfff;
  return { assumeValid, extended, stage, nameLength }
}
618
/**
 * Pack an entry's flags back into the 16-bit on-disk flag word.
 * Note: deliberately mutates `entry.flags` — the extended bit is forced to
 * zero (required by index version 2) and nameLength is recomputed from the
 * UTF-8 byte length of the path, before both are serialized.
 */
function renderCacheEntryFlags(entry) {
  const flags = entry.flags;
  // 1-bit extended flag (must be zero in version 2)
  flags.extended = false;
  // 12-bit name length if the length is less than 0xFFF; otherwise 0xFFF
  // is stored in this field.
  flags.nameLength = Math.min(Buffer.from(entry.path).length, 0xfff);
  const assumeValidBit = flags.assumeValid ? 0x8000 : 0;
  const extendedBit = flags.extended ? 0x4000 : 0;
  const stageBits = (flags.stage & 0b11) << 12;
  const nameBits = flags.nameLength & 0xfff;
  return assumeValidBit + extendedBit + stageBits + nameBits
}
633
/**
 * In-memory model of the `.git/index` file (dircache, version 2).
 * Holds cache entries keyed by path, tracks unmerged (conflicted) paths, and
 * knows how to parse/serialize the binary index format.
 */
class GitIndex {
  /*::
   _entries: Map<string, CacheEntry>
   _dirty: boolean // Used to determine if index needs to be saved to filesystem
   */
  constructor(entries, unmergedPaths) {
    this._dirty = false;
    this._unmergedPaths = unmergedPaths || new Set();
    this._entries = entries || new Map();
  }

  /**
   * Insert a parsed cache entry. A stage-0 entry replaces any conflict state
   * for its path; entries with stage 1-3 are collected on a single entry
   * object's `stages` array and mark the path as unmerged.
   */
  _addEntry(entry) {
    if (entry.flags.stage === 0) {
      entry.stages = [entry];
      this._entries.set(entry.path, entry);
      this._unmergedPaths.delete(entry.path);
    } else {
      let existingEntry = this._entries.get(entry.path);
      if (!existingEntry) {
        this._entries.set(entry.path, entry);
        existingEntry = entry;
      }
      existingEntry.stages[entry.flags.stage] = entry;
      this._unmergedPaths.add(entry.path);
    }
  }

  /**
   * Construct a GitIndex from a raw Buffer, or an empty index when `null`
   * is passed (e.g. the index file does not exist yet).
   * @throws {InternalError} for any other input type
   */
  static async from(buffer) {
    if (Buffer.isBuffer(buffer)) {
      return GitIndex.fromBuffer(buffer)
    } else if (buffer === null) {
      return new GitIndex(null)
    } else {
      throw new InternalError('invalid type passed to GitIndex.from')
    }
  }

  /**
   * Parse the binary index format: 12-byte header ("DIRC", version, entry
   * count), fixed-width entries with null padding, and a trailing SHA-1
   * checksum over everything before it.
   * @throws {InternalError} on empty/corrupt/unsupported input
   * @throws {UnsafeFilepathError} on paths containing "../" or "..\"
   */
  static async fromBuffer(buffer) {
    if (buffer.length === 0) {
      throw new InternalError('Index file is empty (.git/index)')
    }

    const index = new GitIndex();
    const reader = new BufferCursor(buffer);
    const magic = reader.toString('utf8', 4);
    if (magic !== 'DIRC') {
      throw new InternalError(`Invalid dircache magic file number: ${magic}`)
    }

    // Verify shasum after we ensured that the file has a magic number
    const shaComputed = await shasum(buffer.slice(0, -20));
    const shaClaimed = buffer.slice(-20).toString('hex');
    if (shaClaimed !== shaComputed) {
      throw new InternalError(
        `Invalid checksum in GitIndex buffer: expected ${shaClaimed} but saw ${shaComputed}`
      )
    }

    const version = reader.readUInt32BE();
    if (version !== 2) {
      throw new InternalError(`Unsupported dircache version: ${version}`)
    }
    const numEntries = reader.readUInt32BE();
    let i = 0;
    while (!reader.eof() && i < numEntries) {
      const entry = {};
      // 10 fixed 32-bit stat fields, in on-disk order.
      entry.ctimeSeconds = reader.readUInt32BE();
      entry.ctimeNanoseconds = reader.readUInt32BE();
      entry.mtimeSeconds = reader.readUInt32BE();
      entry.mtimeNanoseconds = reader.readUInt32BE();
      entry.dev = reader.readUInt32BE();
      entry.ino = reader.readUInt32BE();
      entry.mode = reader.readUInt32BE();
      entry.uid = reader.readUInt32BE();
      entry.gid = reader.readUInt32BE();
      entry.size = reader.readUInt32BE();
      entry.oid = reader.slice(20).toString('hex');
      const flags = reader.readUInt16BE();
      entry.flags = parseCacheEntryFlags(flags);
      // TODO: handle if (version === 3 && entry.flags.extended)
      // Path is null-terminated; measure it by scanning for the next NUL.
      const pathlength = buffer.indexOf(0, reader.tell() + 1) - reader.tell();
      if (pathlength < 1) {
        throw new InternalError(`Got a path length of: ${pathlength}`)
      }
      // TODO: handle pathnames larger than 12 bits
      entry.path = reader.toString('utf8', pathlength);

      // Prevent malicious paths like "..\foo"
      if (entry.path.includes('..\\') || entry.path.includes('../')) {
        throw new UnsafeFilepathError(entry.path)
      }

      // The next bit is awkward. We expect 1 to 8 null characters
      // such that the total size of the entry is a multiple of 8 bytes.
      // (Hence subtract 12 bytes for the header.)
      let padding = 8 - ((reader.tell() - 12) % 8);
      if (padding === 0) padding = 8;
      while (padding--) {
        const tmp = reader.readUInt8();
        if (tmp !== 0) {
          throw new InternalError(
            `Expected 1-8 null characters but got '${tmp}' after ${entry.path}`
          )
        } else if (reader.eof()) {
          throw new InternalError('Unexpected end of file')
        }
      }
      // end of awkward part
      entry.stages = [];

      index._addEntry(entry);

      i++;
    }
    return index
  }

  /** Array copy of the currently-unmerged (conflicted) paths. */
  get unmergedPaths() {
    return [...this._unmergedPaths]
  }

  /** All entries, sorted by path. */
  get entries() {
    return [...this._entries.values()].sort(comparePath)
  }

  /** The raw path → entry Map (not a copy). */
  get entriesMap() {
    return this._entries
  }

  /**
   * All entries with conflicted entries expanded: each non-empty stage of a
   * conflicted path appears as its own element.
   */
  get entriesFlat() {
    return [...this.entries].flatMap(entry => {
      return entry.stages.length > 1 ? entry.stages.filter(x => x) : entry
    })
  }

  // Iterating a GitIndex yields its sorted entries.
  *[Symbol.iterator]() {
    for (const entry of this.entries) {
      yield entry;
    }
  }

  /**
   * Add or replace an entry for `filepath`, marking the index dirty.
   * Missing stats are zero-filled (useful for entries created from objects
   * rather than the working tree).
   */
  insert({ filepath, stats, oid, stage = 0 }) {
    if (!stats) {
      stats = {
        ctimeSeconds: 0,
        ctimeNanoseconds: 0,
        mtimeSeconds: 0,
        mtimeNanoseconds: 0,
        dev: 0,
        ino: 0,
        mode: 0,
        uid: 0,
        gid: 0,
        size: 0,
      };
    }
    stats = normalizeStats(stats);
    const bfilepath = Buffer.from(filepath);
    const entry = {
      ctimeSeconds: stats.ctimeSeconds,
      ctimeNanoseconds: stats.ctimeNanoseconds,
      mtimeSeconds: stats.mtimeSeconds,
      mtimeNanoseconds: stats.mtimeNanoseconds,
      dev: stats.dev,
      ino: stats.ino,
      // We provide a fallback value for `mode` here because not all fs
      // implementations assign it, but we use it in GitTree.
      // '100644' is for a "regular non-executable file"
      mode: stats.mode || 0o100644,
      uid: stats.uid,
      gid: stats.gid,
      size: stats.size,
      path: filepath,
      oid: oid,
      flags: {
        assumeValid: false,
        extended: false,
        stage,
        nameLength: bfilepath.length < 0xfff ? bfilepath.length : 0xfff,
      },
      stages: [],
    };

    this._addEntry(entry);

    this._dirty = true;
  }

  /**
   * Remove the entry for `filepath`, or — if no exact match exists — every
   * entry under the directory `filepath/`. Marks the index dirty.
   */
  delete({ filepath }) {
    if (this._entries.has(filepath)) {
      this._entries.delete(filepath);
    } else {
      for (const key of this._entries.keys()) {
        if (key.startsWith(filepath + '/')) {
          this._entries.delete(key);
        }
      }
    }

    if (this._unmergedPaths.has(filepath)) {
      this._unmergedPaths.delete(filepath);
    }
    this._dirty = true;
  }

  /** Remove every entry and mark the index dirty. */
  clear() {
    this._entries.clear();
    this._dirty = true;
  }

  /** True if an entry exists for exactly `filepath`. */
  has({ filepath }) {
    return this._entries.has(filepath)
  }

  /** Human-readable "<mode> <oid> <path>" listing, one entry per line. */
  render() {
    return this.entries
      .map(entry => `${entry.mode.toString(8)} ${entry.oid} ${entry.path}`)
      .join('\n')
  }

  /**
   * Serialize one entry into its on-disk form: 62 fixed bytes + path,
   * null-padded up to a multiple of 8 bytes.
   */
  static async _entryToBuffer(entry) {
    const bpath = Buffer.from(entry.path);
    // the fixed length + the filename + at least one null char => align by 8
    const length = Math.ceil((62 + bpath.length + 1) / 8) * 8;
    const written = Buffer.alloc(length);
    const writer = new BufferCursor(written);
    const stat = normalizeStats(entry);
    writer.writeUInt32BE(stat.ctimeSeconds);
    writer.writeUInt32BE(stat.ctimeNanoseconds);
    writer.writeUInt32BE(stat.mtimeSeconds);
    writer.writeUInt32BE(stat.mtimeNanoseconds);
    writer.writeUInt32BE(stat.dev);
    writer.writeUInt32BE(stat.ino);
    writer.writeUInt32BE(stat.mode);
    writer.writeUInt32BE(stat.uid);
    writer.writeUInt32BE(stat.gid);
    writer.writeUInt32BE(stat.size);
    writer.write(entry.oid, 20, 'hex');
    writer.writeUInt16BE(renderCacheEntryFlags(entry));
    writer.write(entry.path, bpath.length, 'utf8');
    return written
  }

  /**
   * Serialize the whole index: DIRC header, all entries (including every
   * stage of conflicted paths), then a trailing SHA-1 checksum.
   * @returns {Promise<Buffer>}
   */
  async toObject() {
    const header = Buffer.alloc(12);
    const writer = new BufferCursor(header);
    writer.write('DIRC', 4, 'utf8');
    writer.writeUInt32BE(2);
    writer.writeUInt32BE(this.entriesFlat.length);

    let entryBuffers = [];
    for (const entry of this.entries) {
      entryBuffers.push(GitIndex._entryToBuffer(entry));
      if (entry.stages.length > 1) {
        for (const stage of entry.stages) {
          if (stage && stage !== entry) {
            entryBuffers.push(GitIndex._entryToBuffer(stage));
          }
        }
      }
    }
    entryBuffers = await Promise.all(entryBuffers);

    const body = Buffer.concat(entryBuffers);
    const main = Buffer.concat([header, body]);
    const sum = await shasum(main);
    return Buffer.concat([main, Buffer.from(sum, 'hex')])
  }
}
903
/**
 * Returns true when a cached index entry looks stale relative to fresh stat
 * data — i.e. any of the compared fields differ after normalization.
 *
 * Comparison based on the description in Paragraph 4 of
 * https://www.kernel.org/pub/software/scm/git/docs/technical/racy-git.txt
 */
function compareStats(entry, stats) {
  const a = normalizeStats(entry);
  const b = normalizeStats(stats);
  return (
    a.mode !== b.mode ||
    a.mtimeSeconds !== b.mtimeSeconds ||
    a.ctimeSeconds !== b.ctimeSeconds ||
    a.uid !== b.uid ||
    a.gid !== b.gid ||
    a.ino !== b.ino ||
    a.size !== b.size
  )
}
919
// Module-level async lock shared by all GitIndexManager.acquire calls;
// created lazily on first use.
let lock = null;

// Symbol key under which the per-`cache` index data is stashed, so it can't
// collide with any of the caller's own cache keys.
const IndexCache = Symbol('IndexCache');

/**
 * Fresh index cache: parsed GitIndex objects by filepath, plus the stat data
 * each was read under (used for staleness detection).
 */
function createCache() {
  const map = new Map();
  const stats = new Map();
  return { map, stats }
}
935
/**
 * Re-read the index file from disk and refresh both halves of the cache:
 * the parsed GitIndex object and the stat data it was read under.
 * @param {object} fs - filesystem wrapper providing lstat/read
 * @param {string} filepath - path to the index file
 * @param {{map: Map, stats: Map}} cache - per-repo index cache
 */
async function updateCachedIndexFile(fs, filepath, cache) {
  const stat = await fs.lstat(filepath);
  const rawIndexFile = await fs.read(filepath);
  const index = await GitIndex.from(rawIndexFile);
  // cache the GitIndex object so we don't need to re-read it every time.
  cache.map.set(filepath, index);
  // Save the stat data for the index so we know whether the cached file is stale (modified by an outside process).
  cache.stats.set(filepath, stat);
}
945
// Determine whether our copy of the index file is stale: no cached stats
// means "stale" (never read); a null stat on either side means "not stale"
// (nothing meaningful to compare); otherwise defer to compareStats.
async function isIndexStale(fs, filepath, cache) {
  const cachedStats = cache.stats.get(filepath);
  if (cachedStats === undefined) return true
  const currentStats = await fs.lstat(filepath);
  if (cachedStats === null || currentStats === null) return false
  return compareStats(cachedStats, currentStats)
}
955
/**
 * Serializes all access to the `.git/index` file: callers pass a closure
 * that receives the (cached, possibly re-read) GitIndex, and any mutations
 * the closure makes are flushed back to disk before the lock is released.
 */
class GitIndexManager {
  /**
   *
   * @param {object} opts
   * @param {import('../models/FileSystem.js').FileSystem} opts.fs
   * @param {string} opts.gitdir
   * @param {object} opts.cache
   * @param {boolean} opts.allowUnmerged - when false, throws UnmergedPathsError if conflicted paths exist
   * @param {function(GitIndex): any} closure - receives the index while the lock is held; its return value is returned from acquire
   */
  static async acquire({ fs, gitdir, cache, allowUnmerged = true }, closure) {
    // Lazily attach the index cache to the caller-provided `cache` object,
    // keyed by a Symbol so it can't collide with the caller's own keys.
    if (!cache[IndexCache]) cache[IndexCache] = createCache();

    const filepath = `${gitdir}/index`;
    // The module-level AsyncLock is created on first use and serializes
    // access per index filepath.
    if (lock === null) lock = new AsyncLock({ maxPending: Infinity });
    let result;
    let unmergedPaths = [];
    await lock.acquire(filepath, async () => {
      // Acquire a file lock while we're reading the index
      // to make sure other processes aren't writing to it
      // simultaneously, which could result in a corrupted index.
      // const fileLock = await Lock(filepath)
      if (await isIndexStale(fs, filepath, cache[IndexCache])) {
        await updateCachedIndexFile(fs, filepath, cache[IndexCache]);
      }
      const index = cache[IndexCache].map.get(filepath);
      unmergedPaths = index.unmergedPaths;

      if (unmergedPaths.length && !allowUnmerged)
        throw new UnmergedPathsError(unmergedPaths)

      result = await closure(index);
      if (index._dirty) {
        // Acquire a file lock while we're writing the index file
        // let fileLock = await Lock(filepath)
        const buffer = await index.toObject();
        await fs.write(filepath, buffer);
        // Update cached stat value
        cache[IndexCache].stats.set(filepath, await fs.lstat(filepath));
        index._dirty = false;
      }
    });

    return result
  }
}
1002
/**
 * Last path segment, honoring both '/' and '\' separators.
 * Returns the input unchanged when no separator is present.
 */
function basename(path) {
  const cut = Math.max(path.lastIndexOf('/'), path.lastIndexOf('\\'));
  return cut > -1 ? path.slice(cut + 1) : path
}
1010
/**
 * Parent directory of a path, honoring both '/' and '\' separators.
 * Returns '.' when there is no separator, '/' when the only separator is
 * the leading root.
 */
function dirname(path) {
  const cut = Math.max(path.lastIndexOf('/'), path.lastIndexOf('\\'));
  switch (cut) {
    case -1:
      return '.'
    case 0:
      return '/'
    default:
      return path.slice(0, cut)
  }
}
1017
1018/*::
1019type Node = {
1020 type: string,
1021 fullpath: string,
1022 basename: string,
1023 metadata: Object, // mode, oid
1024 parent?: Node,
1025 children: Array<Node>
1026}
1027*/
1028
/**
 * Expand a flat list of `{ path, ... }` records into a tree of linked
 * tree/blob nodes, returned as a Map keyed by fullpath (including '.').
 */
function flatFileListToDirectoryStructure(files) {
  const inodes = new Map();

  // Return the tree node for `name`, creating it (and any missing
  // ancestors) on first use.
  function mkdir(name) {
    if (!inodes.has(name)) {
      const dir = {
        type: 'tree',
        fullpath: name,
        basename: basename(name),
        metadata: {},
        children: [],
      };
      // Register before recursing so mkdir('.') terminates: dirname('.')
      // is '.' itself, which is already present in the map by then.
      inodes.set(name, dir);
      dir.parent = mkdir(dirname(name));
      if (dir.parent && dir.parent !== dir) dir.parent.children.push(dir);
    }
    return inodes.get(name)
  }

  // Create a blob node for `name` (idempotent), wiring up its parent chain.
  function mkfile(name, metadata) {
    if (!inodes.has(name)) {
      const file = {
        type: 'blob',
        fullpath: name,
        basename: basename(name),
        metadata,
        // Creating the parent also creates any missing grandparents.
        parent: mkdir(dirname(name)),
        children: [],
      };
      if (file.parent) file.parent.children.push(file);
      inodes.set(name, file);
    }
    return inodes.get(name)
  }

  mkdir('.');
  for (const file of files) mkfile(file.path, file);
  return inodes
}
1073
1074/**
1075 *
1076 * @param {number} mode
1077 */
function mode2type(mode) {
  // Map the numeric on-disk file mode to the object type stored in a tree.
  const MODE_TO_TYPE = new Map([
    [0o040000, 'tree'],
    [0o100644, 'blob'],
    [0o100755, 'blob'],
    [0o120000, 'blob'],
    [0o160000, 'commit'],
  ]);
  const type = MODE_TO_TYPE.get(mode);
  if (type === undefined) {
    throw new InternalError(`Unexpected GitTree entry mode: ${mode.toString(8)}`)
  }
  return type
}
1089
class GitWalkerIndex {
  /**
   * Walker backend over the git index (staging area).
   *
   * The index's flat entry list is expanded once into a directory tree via
   * flatFileListToDirectoryStructure; the resulting promise is shared by
   * all entry methods.
   */
  constructor({ fs, gitdir, cache }) {
    this.treePromise = GitIndexManager.acquire(
      { fs, gitdir, cache },
      async function(index) {
        return flatFileListToDirectoryStructure(index.entries)
      }
    );
    const walker = this;
    // Lazily-evaluated entry class handed out by walk(); `false` marks a
    // field as "not computed yet" (so even an `undefined` result is cached).
    this.ConstructEntry = class StageEntry {
      constructor(fullpath) {
        this._fullpath = fullpath;
        this._type = false;
        this._mode = false;
        this._stat = false;
        this._oid = false;
      }

      async type() {
        return walker.type(this)
      }

      async mode() {
        return walker.mode(this)
      }

      async stat() {
        return walker.stat(this)
      }

      async content() {
        return walker.content(this)
      }

      async oid() {
        return walker.oid(this)
      }
    };
  }

  // List the sorted child paths of a directory entry. Returns null for
  // blobs and for paths not present in the index tree.
  async readdir(entry) {
    const filepath = entry._fullpath;
    const tree = await this.treePromise;
    const inode = tree.get(filepath);
    if (!inode) return null
    if (inode.type === 'blob') return null
    if (inode.type !== 'tree') {
      throw new Error(`ENOTDIR: not a directory, scandir '${filepath}'`)
    }
    const names = inode.children.map(inode => inode.fullpath);
    names.sort(compareStrings);
    return names
  }

  // Type is derived as a side effect of stat(), then cached on the entry.
  async type(entry) {
    if (entry._type === false) {
      await entry.stat();
    }
    return entry._type
  }

  // Mode is derived as a side effect of stat(), then cached on the entry.
  async mode(entry) {
    if (entry._mode === false) {
      await entry.stat();
    }
    return entry._mode
  }

  // Populate _type/_mode/_stat from the index tree. Tree entries get no
  // stats (entry._stat becomes undefined); blobs get normalized stats.
  async stat(entry) {
    if (entry._stat === false) {
      const tree = await this.treePromise;
      const inode = tree.get(entry._fullpath);
      if (!inode) {
        throw new Error(
          `ENOENT: no such file or directory, lstat '${entry._fullpath}'`
        )
      }
      const stats = inode.type === 'tree' ? {} : normalizeStats(inode.metadata);
      entry._type = inode.type === 'tree' ? 'tree' : mode2type(stats.mode);
      entry._mode = stats.mode;
      if (inode.type === 'tree') {
        entry._stat = undefined;
      } else {
        entry._stat = stats;
      }
    }
    return entry._stat
  }

  async content(_entry) {
    // Cannot get content for an index entry
  }

  // The oid comes straight from the index entry's metadata (no hashing).
  async oid(entry) {
    if (entry._oid === false) {
      const tree = await this.treePromise;
      const inode = tree.get(entry._fullpath);
      entry._oid = inode.metadata.oid;
    }
    return entry._oid
  }
}
1192
// This is part of an elaborate system to facilitate code-splitting / tree-shaking.
// commands/walk.js can depend on only this, and the actual Walker classes exported
// can be opaque - only having a single property (this symbol) that is not enumerable,
// and thus the constructor can be passed as an argument to walk while being "unusable"
// outside of it.
// (See STAGE() below for how a Walker token carries this symbol.)
const GitWalkSymbol = Symbol('GitWalkSymbol');
1199
1200// @ts-check
1201
1202/**
1203 * @returns {Walker}
1204 */
function STAGE() {
  // An opaque Walker token: its only (non-enumerable) property is the
  // GitWalkSymbol-keyed factory, so it is unusable outside of walk().
  const token = Object.create(null);
  Object.defineProperty(token, GitWalkSymbol, {
    value({ fs, gitdir, cache }) {
      return new GitWalkerIndex({ fs, gitdir, cache })
    },
  });
  Object.freeze(token);
  return token
}
1215
1216// @ts-check
1217
class NotFoundError extends BaseError {
  /**
   * @param {string} what - description of the missing thing
   */
  constructor(what) {
    super(`Could not find ${what}.`);
    this.name = NotFoundError.code;
    this.code = NotFoundError.code;
    this.data = { what };
  }
}
/** @type {'NotFoundError'} */
NotFoundError.code = 'NotFoundError';
1230
class ObjectTypeError extends BaseError {
  /**
   * @param {string} oid
   * @param {'blob'|'commit'|'tag'|'tree'} actual
   * @param {'blob'|'commit'|'tag'|'tree'} expected
   * @param {string} [filepath]
   */
  constructor(oid, actual, expected, filepath) {
    // Bug fix: the optional location segment now carries its own trailing
    // space, so the message is correctly spaced whether or not `filepath`
    // is given (previously: "at <filepath>was anticipated...").
    super(
      `Object ${oid} ${
        filepath ? `at ${filepath} ` : ''
      }was anticipated to be a ${expected} but it is a ${actual}.`
    );
    this.code = this.name = ObjectTypeError.code;
    this.data = { oid, actual, expected, filepath };
  }
}
/** @type {'ObjectTypeError'} */
ObjectTypeError.code = 'ObjectTypeError';
1250
class InvalidOidError extends BaseError {
  /**
   * @param {string} value - the string that failed 40-char SHA-1 validation
   */
  constructor(value) {
    super(`Expected a 40-char hex object id but saw "${value}".`);
    this.name = InvalidOidError.code;
    this.code = InvalidOidError.code;
    this.data = { value };
  }
}
/** @type {'InvalidOidError'} */
InvalidOidError.code = 'InvalidOidError';
1263
class NoRefspecError extends BaseError {
  /**
   * @param {string} remote - name of the remote that has no fetch refspec
   */
  constructor(remote) {
    // NOTE: the message embeds literal newlines and a tab (config syntax).
    super(`Could not find a fetch refspec for remote "${remote}". Make sure the config file has an entry like the following:
[remote "${remote}"]
\tfetch = +refs/heads/*:refs/remotes/origin/*
`);
    this.code = this.name = NoRefspecError.code;
    this.data = { remote };
  }
}
/** @type {'NoRefspecError'} */
NoRefspecError.code = 'NoRefspecError';
1279
class GitPackedRefs {
  /**
   * Parse the contents of a packed-refs file.
   * @param {string} [text] - raw file contents; may be empty/undefined
   */
  constructor(text) {
    // ref name -> oid; peeled annotated tags get an extra '<name>^{}' entry.
    this.refs = new Map();
    // One record per input line, preserved so toString() can round-trip.
    this.parsedConfig = [];
    if (text) {
      let currentRef = null;
      this.parsedConfig = text
        .trim()
        .split('\n')
        .map(line => {
          if (/^\s*#/.test(line)) {
            // Header line, e.g. '# pack-refs with: peeled fully-peeled'
            return { line, comment: true }
          }
          if (line.startsWith('^')) {
            // Peeled oid for the annotated tag on the preceding line;
            // the '<tag>^{}' key matches `git show-ref --tags -d` output.
            const peeled = line.slice(1);
            this.refs.set(currentRef + '^{}', peeled);
            return { line, ref: currentRef, peeled }
          }
          // Ordinary '<oid> <refname>' line.
          const sp = line.indexOf(' ');
          const oid = line.slice(0, sp);
          currentRef = line.slice(sp + 1);
          this.refs.set(currentRef, oid);
          return { line, ref: currentRef, oid }
        });
    }
    return this
  }

  static from(text) {
    return new GitPackedRefs(text)
  }

  /** Remove `ref` from both the line list and the lookup map. */
  delete(ref) {
    this.parsedConfig = this.parsedConfig.filter(entry => entry.ref !== ref);
    this.refs.delete(ref);
  }

  /** Serialize back to packed-refs format (with trailing newline). */
  toString() {
    const lines = this.parsedConfig.map(({ line }) => line);
    return `${lines.join('\n')}\n`
  }
}
1326
class GitRefSpec {
  constructor({ remotePath, localPath, force, matchPrefix }) {
    this.remotePath = remotePath;
    this.localPath = localPath;
    this.force = force;
    this.matchPrefix = matchPrefix;
  }

  /**
   * Parse a refspec string such as '+refs/heads/*:refs/remotes/origin/*'.
   * @throws {InternalError} when only one side of the refspec is a glob
   */
  static from(refspec) {
    const parts = refspec.match(/^(\+?)(.*?)(\*?):(.*?)(\*?)$/).slice(1);
    const [forceMatch, remotePath, remoteGlobMatch, localPath, localGlobMatch] = parts;
    const remoteIsGlob = remoteGlobMatch === '*';
    const localIsGlob = localGlobMatch === '*';
    // validate: globs must appear on both sides or neither
    // TODO: Make this check more nuanced, and depend on whether this is a fetch refspec or a push refspec
    if (remoteIsGlob !== localIsGlob) {
      throw new InternalError('Invalid refspec')
    }
    return new GitRefSpec({
      remotePath,
      localPath,
      force: forceMatch === '+',
      matchPrefix: remoteIsGlob,
    })
    // TODO: We need to run resolveRef on both paths to expand them to their full name.
  }

  /** Map a remote ref name to its local name, or null when no match. */
  translate(remoteBranch) {
    if (this.matchPrefix) {
      return remoteBranch.startsWith(this.remotePath)
        ? this.localPath + remoteBranch.slice(this.remotePath.length)
        : null
    }
    return remoteBranch === this.remotePath ? this.localPath : null
  }

  /** Map a local ref name back to its remote name, or null when no match. */
  reverseTranslate(localBranch) {
    if (this.matchPrefix) {
      return localBranch.startsWith(this.localPath)
        ? this.remotePath + localBranch.slice(this.localPath.length)
        : null
    }
    return localBranch === this.localPath ? this.remotePath : null
  }
}
1384
class GitRefSpecSet {
  /**
   * An ordered collection of GitRefSpec rules.
   * @param {GitRefSpec[]} [rules]
   */
  constructor(rules = []) {
    this.rules = rules;
  }

  static from(refspecs) {
    const rules = refspecs.map(refspec => GitRefSpec.from(refspec)); // might throw
    return new GitRefSpecSet(rules)
  }

  add(refspec) {
    this.rules.push(GitRefSpec.from(refspec)); // might throw
  }

  /**
   * Map remote ref names through every rule.
   * @returns {Array<[string, string]>} [remoteRef, localRef] pairs
   */
  translate(remoteRefs) {
    const pairs = [];
    for (const rule of this.rules) {
      for (const remoteRef of remoteRefs) {
        const localRef = rule.translate(remoteRef);
        if (localRef) pairs.push([remoteRef, localRef]);
      }
    }
    return pairs
  }

  /** Local name for a single remote ref; the last matching rule wins. */
  translateOne(remoteRef) {
    let winner = null;
    for (const rule of this.rules) {
      const candidate = rule.translate(remoteRef);
      if (candidate) winner = candidate;
    }
    return winner
  }

  /** Local prefixes (without trailing '/') claimed by glob rules. */
  localNamespaces() {
    return this.rules
      .filter(rule => rule.matchPrefix)
      .map(rule => rule.localPath.replace(/\/$/, ''))
  }
}
1433
// Sort ref names ignoring a '^{}' suffix, but place the peeled variant
// ('x^{}') immediately after its base name when the bases are equal.
// https://stackoverflow.com/a/40355107/2168416
function compareRefNames(a, b) {
  const baseA = a.replace(/\^\{\}$/, '');
  const baseB = b.replace(/\^\{\}$/, '');
  if (baseA < baseB) return -1
  if (baseA > baseB) return 1
  // Equal bases: the peeled entry sorts after the plain one.
  return a.endsWith('^{}') ? 1 : -1
}
1444
// Collapse '.' segments and duplicate/trailing slashes, yielding a canonical
// relative ('.', 'a/b') or absolute ('/') path string. The substitutions are
// order-sensitive and applied in sequence.
function normalizePath(path) {
  const steps = [
    [/\/\.\//g, '/'], // Replace '/./' with '/'
    [/\/{2,}/g, '/'], // Replace consecutive '/'
    [/^\/\.$/, '/'], // if path === '/.' return '/'
    [/^\.\/$/, '.'], // if path === './' return '.'
    [/^\.\//, ''], // Remove leading './'
    [/\/\.$/, ''], // Remove trailing '/.'
    [/(.+)\/$/, '$1'], // Remove trailing '/'
    [/^$/, '.'], // if path === '' return '.'
  ];
  return steps.reduce(
    (result, [pattern, replacement]) => result.replace(pattern, replacement),
    path
  )
}

// For some reason path.posix.join is undefined in webpack

/** Join path segments and normalize the result. */
function join(...parts) {
  const joined = parts.map(normalizePath).join('/');
  return normalizePath(joined)
}
1462
// This is straight from parse_unit_factor in config.c of canonical git
// Accepts plain integers plus case-insensitive k/m/g binary-unit suffixes.
const num = val => {
  val = val.toLowerCase();
  let n = parseInt(val);
  const factors = { k: 1024, m: 1024 ** 2, g: 1024 ** 3 };
  const suffix = val.slice(-1);
  if (suffix in factors) n *= factors[suffix];
  return n
};
1472
// This is straight from git_parse_maybe_bool_text in config.c of canonical git
// Accepts git's boolean spellings (case/whitespace-insensitive); throws otherwise.
const bool = val => {
  const v = val.trim().toLowerCase();
  switch (v) {
    case 'true':
    case 'yes':
    case 'on':
      return true
    case 'false':
    case 'no':
    case 'off':
      return false
    default:
      throw Error(
        `Expected 'true', 'false', 'yes', 'no', 'on', or 'off', but got ${v}`
      )
  }
};
1482
// Schema of config values that need coercion from their raw string form.
// Keyed by section, then variable name; the leaf is the parser to apply
// (see GitConfig.get, which consults this table).
const schema = {
  core: {
    filemode: bool,
    bare: bool,
    logallrefupdates: bool,
    symlinks: bool,
    ignorecase: bool,
    bigFileThreshold: num,
  },
};
1493
// https://git-scm.com/docs/git-config#_syntax

// section starts with [ and ends with ]
// section is alphanumeric (ASCII) with - and .
// section is case insensitive
// subsection is optional
// subsection is specified after section and one or more spaces
// subsection is specified between double quotes
const SECTION_LINE_REGEX = /^\[([A-Za-z0-9-.]+)(?: "(.*)")?\]$/;
const SECTION_REGEX = /^[A-Za-z0-9-.]+$/;

// variable lines contain a name, and equal sign and then a value
// variable lines can also only contain a name (the implicit value is a boolean true)
// variable name is alphanumeric (ASCII) with -
// variable name starts with an alphabetic character
// variable name is case insensitive
const VARIABLE_LINE_REGEX = /^([A-Za-z][A-Za-z-]*)(?: *= *(.*))?$/;
const VARIABLE_NAME_REGEX = /^[A-Za-z][A-Za-z-]*$/;

// Splits a value into (value, trailing '#'/';' comment) capture groups.
const VARIABLE_VALUE_COMMENT_REGEX = /^(.*?)( *[#;].*)$/;
1514
// Parse '[section]' or '[section "subsection"]' into [section, subsection]
// (subsection is undefined when absent); returns null for non-section lines.
const extractSectionLine = line => {
  const matches = /^\[([A-Za-z0-9-.]+)(?: "(.*)")?\]$/.exec(line);
  if (matches == null) return null
  const [section, subsection] = matches.slice(1);
  return [section, subsection]
};
1523
// Parse 'name = value' (or a bare 'name', implying the value 'true') into
// [name, cleanedValue]; returns null for lines that are not variables.
const extractVariableLine = line => {
  const matches = VARIABLE_LINE_REGEX.exec(line);
  if (matches == null) return null
  const [name, rawValue = 'true'] = matches.slice(1);
  // Strip any trailing comment first, then unescape/remove quotes.
  const value = removeQuotes(removeComments(rawValue));
  return [name, value]
};
1534
// Strip a trailing '#'/';' comment from a raw value — unless the comment
// marker sits inside a quoted span (odd number of unescaped quotes on both
// sides), in which case the whole text is kept intact.
const removeComments = rawValue => {
  const commentMatches = VARIABLE_VALUE_COMMENT_REGEX.exec(rawValue);
  if (commentMatches == null) return rawValue
  const [valueWithoutComment, comment] = commentMatches.slice(1);
  const markerIsQuoted =
    hasOddNumberOfQuotes(valueWithoutComment) && hasOddNumberOfQuotes(comment);
  return markerIsQuoted ? `${valueWithoutComment}${comment}` : valueWithoutComment
};
1550
// True when `text` contains an odd count of double quotes that are not
// backslash-escaped (i.e. an unterminated quoted span).
const hasOddNumberOfQuotes = text => {
  const quoteMatches = text.match(/(?:^|[^\\])"/g) || [];
  return quoteMatches.length % 2 === 1
};
1555
// Drop unescaped double quotes and the backslashes that escape quotes,
// so '"x"' becomes 'x' and '\"' becomes '"'.
const removeQuotes = text => {
  let out = '';
  const chars = text.split('');
  chars.forEach((c, idx) => {
    const isQuote = c === '"' && chars[idx - 1] !== '\\';
    const isEscapeForQuote = c === '\\' && chars[idx + 1] === '"';
    if (!isQuote && !isEscapeForQuote) out += c;
  });
  return out
};
1566
// Lowercase helper that passes null/undefined through as null.
const lower = text => (text == null ? null : text.toLowerCase());
1570
// Canonical dotted lookup key: section and name are case-insensitive
// (lowered), subsection is case-sensitive; null/undefined parts are omitted.
const getPath = (section, subsection, name) => {
  const parts = [lower(section), subsection, lower(name)];
  return parts.filter(part => part != null).join('.')
};
1576
// Split 'section[.subsection...].name' into its parts: the first segment is
// the section, the last is the name, and anything between is the (possibly
// dotted) subsection. Also precomputes the canonical lookup keys.
const normalizePath$1 = path => {
  const segments = path.split('.');
  const section = segments.shift();
  const name = segments.pop();
  const subsection = segments.length ? segments.join('.') : undefined;
  return {
    section,
    subsection,
    name,
    path: getPath(section, subsection, name),
    sectionPath: getPath(section, subsection, null),
  }
};
1591
// Index of the last element satisfying `callback`, or -1 when none does.
// (Scans the whole array left-to-right; later matches win.)
const findLastIndex = (array, callback) =>
  array.reduce((best, item, index) => (callback(item) ? index : best), -1);
1597
1598// Note: there are a LOT of edge cases that aren't covered (e.g. keys in sections that also
1599// have subsections, [include] directives, etc.
class GitConfig {
  /**
   * Parse a git config file into one record per line.
   *
   * The section/subsection trackers carry forward across lines, so each
   * variable record knows which section it belongs to; `path` is the
   * canonical dotted lookup key (e.g. 'remote.origin.url').
   */
  constructor(text) {
    let section = null;
    let subsection = null;
    this.parsedConfig = text.split('\n').map(line => {
      let name = null;
      let value = null;

      const trimmedLine = line.trim();
      const extractedSection = extractSectionLine(trimmedLine);
      const isSection = extractedSection != null;
      if (isSection) {
        ;[section, subsection] = extractedSection;
      } else {
        const extractedVariable = extractVariableLine(trimmedLine);
        const isVariable = extractedVariable != null;
        if (isVariable) {
          ;[name, value] = extractedVariable;
        }
      }

      const path = getPath(section, subsection, name);
      return { line, isSection, section, subsection, name, value, path }
    });
  }

  static from(text) {
    return new GitConfig(text)
  }

  // Get the value at `path`, coerced via the schema table when applicable.
  // With getall=true, returns every value; otherwise the last one wins
  // (matching canonical git's behavior for repeated keys).
  async get(path, getall = false) {
    const normalizedPath = normalizePath$1(path).path;
    const allValues = this.parsedConfig
      .filter(config => config.path === normalizedPath)
      .map(({ section, name, value }) => {
        const fn = schema[section] && schema[section][name];
        return fn ? fn(value) : value
      });
    return getall ? allValues : allValues.pop()
  }

  async getall(path) {
    return this.get(path, true)
  }

  // List the subsection names declared under `section` (e.g. remote names).
  async getSubsections(section) {
    return this.parsedConfig
      .filter(config => config.section === section && config.isSection)
      .map(config => config.subsection)
  }

  // Drop a whole [section "subsection"] header plus its variable lines
  // (every record carries its section/subsection, so one filter suffices).
  async deleteSection(section, subsection) {
    this.parsedConfig = this.parsedConfig.filter(
      config =>
        !(config.section === section && config.subsection === subsection)
    );
  }

  async append(path, value) {
    return this.set(path, value, true)
  }

  // Set the value at `path`; value == null deletes the entry, append=true
  // inserts an additional entry after the last existing one.
  async set(path, value, append = false) {
    const {
      section,
      subsection,
      name,
      path: normalizedPath,
      sectionPath,
    } = normalizePath$1(path);
    const configIndex = findLastIndex(
      this.parsedConfig,
      config => config.path === normalizedPath
    );
    if (value == null) {
      if (configIndex !== -1) {
        this.parsedConfig.splice(configIndex, 1);
      }
    } else {
      if (configIndex !== -1) {
        const config = this.parsedConfig[configIndex];
        // Name should be overwritten in case the casing changed
        const modifiedConfig = Object.assign({}, config, {
          name,
          value,
          modified: true,
        });
        if (append) {
          this.parsedConfig.splice(configIndex + 1, 0, modifiedConfig);
        } else {
          this.parsedConfig[configIndex] = modifiedConfig;
        }
      } else {
        // No existing entry: insert into the existing section if there is
        // one, otherwise append a brand-new section header plus the entry.
        const sectionIndex = this.parsedConfig.findIndex(
          config => config.path === sectionPath
        );
        const newConfig = {
          section,
          subsection,
          name,
          value,
          modified: true,
          path: normalizedPath,
        };
        // Only write entries whose section/name satisfy git's syntax rules.
        if (SECTION_REGEX.test(section) && VARIABLE_NAME_REGEX.test(name)) {
          if (sectionIndex >= 0) {
            // Reuse existing section
            this.parsedConfig.splice(sectionIndex + 1, 0, newConfig);
          } else {
            // Add a new section
            const newSection = {
              section,
              subsection,
              modified: true,
              path: sectionPath,
            };
            this.parsedConfig.push(newSection, newConfig);
          }
        }
      }
    }
  }

  // Serialize back to config-file text. Untouched records emit their
  // original line verbatim; modified records are re-rendered.
  toString() {
    return this.parsedConfig
      .map(({ line, section, subsection, name, value, modified = false }) => {
        if (!modified) {
          return line
        }
        if (name != null && value != null) {
          if (typeof value === 'string' && /[#;]/.test(value)) {
            // A `#` or `;` symbol denotes a comment, so we have to wrap it in double quotes
            return `\t${name} = "${value}"`
          }
          return `\t${name} = ${value}`
        }
        if (subsection != null) {
          return `[${section} "${subsection}"]`
        }
        return `[${section}]`
      })
      .join('\n')
  }
}
1744
class GitConfigManager {
  /** Read and parse `$GIT_DIR/config`. */
  static async get({ fs, gitdir }) {
    // We can improve efficiency later if needed.
    // TODO: read from full list of git config files
    const text = await fs.read(`${gitdir}/config`, { encoding: 'utf8' });
    return GitConfig.from(text)
  }

  /** Serialize `config` back to `$GIT_DIR/config`. */
  static async save({ fs, gitdir, config }) {
    // We can improve efficiency later if needed.
    // TODO: handle saving to the correct global/user/repo location
    const serialized = config.toString();
    await fs.write(`${gitdir}/config`, serialized, { encoding: 'utf8' });
  }
}
1761
// This is a convenience wrapper for reading and writing files in the 'refs' directory.

// @see https://git-scm.com/docs/git-rev-parse.html#_specifying_revisions
// Candidate locations for a short ref name, in lookup-priority order.
const refpaths = ref =>
  ['', 'refs/', 'refs/tags/', 'refs/heads/', 'refs/remotes/']
    .map(prefix => `${prefix}${ref}`)
    .concat(`refs/remotes/${ref}/HEAD`);

// @see https://git-scm.com/docs/gitrepository-layout
const GIT_FILES = ['config', 'description', 'index', 'shallow', 'commondir'];
1776
class GitRefManager {
  // Reads and writes refs, both "loose" (one file per ref under .git/refs)
  // and "packed" (the consolidated .git/packed-refs file).

  /**
   * Apply the refs advertised by a remote to the local refs namespace,
   * honoring the remote's fetch refspecs, optional tag fetching, and
   * optional pruning of stale/removed refs.
   * @returns {Promise<{pruned: string[]}>} the refs deleted by pruning
   */
  static async updateRemoteRefs({
    fs,
    gitdir,
    remote,
    refs,
    symrefs,
    tags,
    refspecs = undefined,
    prune = false,
    pruneTags = false,
  }) {
    // Validate input
    for (const value of refs.values()) {
      if (!value.match(/[0-9a-f]{40}/)) {
        throw new InvalidOidError(value)
      }
    }
    const config = await GitConfigManager.get({ fs, gitdir });
    if (!refspecs) {
      refspecs = await config.getall(`remote.${remote}.fetch`);
      if (refspecs.length === 0) {
        throw new NoRefspecError(remote)
      }
      // There's some interesting behavior with HEAD that doesn't follow the refspec.
      refspecs.unshift(`+HEAD:refs/remotes/${remote}/HEAD`);
    }
    const refspec = GitRefSpecSet.from(refspecs);
    const actualRefsToWrite = new Map();
    // Delete all current tags if the pruneTags argument is true.
    if (pruneTags) {
      const tags = await GitRefManager.listRefs({
        fs,
        gitdir,
        filepath: 'refs/tags',
      });
      await GitRefManager.deleteRefs({
        fs,
        gitdir,
        refs: tags.map(tag => `refs/tags/${tag}`),
      });
    }
    // Add all tags if the fetch tags argument is true.
    if (tags) {
      for (const serverRef of refs.keys()) {
        if (serverRef.startsWith('refs/tags') && !serverRef.endsWith('^{}')) {
          // Git's behavior is to only fetch tags that do not conflict with tags already present.
          if (!(await GitRefManager.exists({ fs, gitdir, ref: serverRef }))) {
            // Always use the object id of the tag itself, and not the peeled object id.
            const oid = refs.get(serverRef);
            actualRefsToWrite.set(serverRef, oid);
          }
        }
      }
    }
    // Combine refs and symrefs giving symrefs priority
    const refTranslations = refspec.translate([...refs.keys()]);
    for (const [serverRef, translatedRef] of refTranslations) {
      const value = refs.get(serverRef);
      actualRefsToWrite.set(translatedRef, value);
    }
    const symrefTranslations = refspec.translate([...symrefs.keys()]);
    for (const [serverRef, translatedRef] of symrefTranslations) {
      const value = symrefs.get(serverRef);
      const symtarget = refspec.translateOne(value);
      if (symtarget) {
        actualRefsToWrite.set(translatedRef, `ref: ${symtarget}`);
      }
    }
    // If `prune` argument is true, clear out the existing local refspec roots
    const pruned = [];
    if (prune) {
      for (const filepath of refspec.localNamespaces()) {
        const refs = (
          await GitRefManager.listRefs({
            fs,
            gitdir,
            filepath,
          })
        ).map(file => `${filepath}/${file}`);
        for (const ref of refs) {
          if (!actualRefsToWrite.has(ref)) {
            pruned.push(ref);
          }
        }
      }
      if (pruned.length > 0) {
        await GitRefManager.deleteRefs({ fs, gitdir, refs: pruned });
      }
    }
    // Update files
    // TODO: For large repos with a history of thousands of pull requests
    // (i.e. gitlab-ce) it would be vastly more efficient to write them
    // to .git/packed-refs.
    // The trick is to make sure we a) don't write a packed ref that is
    // already shadowed by a loose ref and b) don't lose any refs already
    // in packed-refs. Doing this efficiently may be difficult. A
    // solution that might work is
    // a) load the current packed-refs file
    // b) add actualRefsToWrite, overriding the existing values if present
    // c) enumerate all the loose refs currently in .git/refs/remotes/${remote}
    // d) overwrite their value with the new value.
    // Examples of refs we need to avoid writing in loose format for efficiency's sake
    // are .git/refs/remotes/origin/refs/remotes/remote_mirror_3059
    // and .git/refs/remotes/origin/refs/merge-requests
    for (const [key, value] of actualRefsToWrite) {
      await fs.write(join(gitdir, key), `${value.trim()}\n`, 'utf8');
    }
    return { pruned }
  }

  // TODO: make this less crude?
  static async writeRef({ fs, gitdir, ref, value }) {
    // Validate input
    if (!value.match(/[0-9a-f]{40}/)) {
      throw new InvalidOidError(value)
    }
    await fs.write(join(gitdir, ref), `${value.trim()}\n`, 'utf8');
  }

  // Write a symbolic ref file, e.g. HEAD -> 'ref: refs/heads/main'.
  static async writeSymbolicRef({ fs, gitdir, ref, value }) {
    await fs.write(join(gitdir, ref), 'ref: ' + `${value.trim()}\n`, 'utf8');
  }

  static async deleteRef({ fs, gitdir, ref }) {
    return GitRefManager.deleteRefs({ fs, gitdir, refs: [ref] })
  }

  // Delete refs from both the loose files and (if present) packed-refs;
  // packed-refs is only rewritten when something was actually removed.
  static async deleteRefs({ fs, gitdir, refs }) {
    // Delete regular ref
    await Promise.all(refs.map(ref => fs.rm(join(gitdir, ref))));
    // Delete any packed ref
    let text = await fs.read(`${gitdir}/packed-refs`, { encoding: 'utf8' });
    const packed = GitPackedRefs.from(text);
    const beforeSize = packed.refs.size;
    for (const ref of refs) {
      if (packed.refs.has(ref)) {
        packed.delete(ref);
      }
    }
    if (packed.refs.size < beforeSize) {
      text = packed.toString();
      await fs.write(`${gitdir}/packed-refs`, text, { encoding: 'utf8' });
    }
  }

  /**
   * @param {object} args
   * @param {import('../models/FileSystem.js').FileSystem} args.fs
   * @param {string} args.gitdir
   * @param {string} args.ref
   * @param {number} [args.depth] - max number of symref hops to follow
   * @returns {Promise<string>} the resolved SHA-1 object id
   */
  static async resolve({ fs, gitdir, ref, depth = undefined }) {
    if (depth !== undefined) {
      depth--;
      if (depth === -1) {
        return ref
      }
    }
    let sha;
    // Is it a ref pointer?
    if (ref.startsWith('ref: ')) {
      ref = ref.slice('ref: '.length);
      return GitRefManager.resolve({ fs, gitdir, ref, depth })
    }
    // Is it a complete and valid SHA?
    if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) {
      return ref
    }
    // We need to alternate between the file system and the packed-refs
    const packedMap = await GitRefManager.packedRefs({ fs, gitdir });
    // Look in all the proper paths, in this order
    const allpaths = refpaths(ref).filter(p => !GIT_FILES.includes(p)); // exclude git system files (#709)

    for (const ref of allpaths) {
      // Loose refs take precedence over packed-refs entries.
      sha =
        (await fs.read(`${gitdir}/${ref}`, { encoding: 'utf8' })) ||
        packedMap.get(ref);
      if (sha) {
        return GitRefManager.resolve({ fs, gitdir, ref: sha.trim(), depth })
      }
    }
    // Do we give up?
    throw new NotFoundError(ref)
  }

  static async exists({ fs, gitdir, ref }) {
    try {
      await GitRefManager.expand({ fs, gitdir, ref });
      return true
    } catch (err) {
      return false
    }
  }

  // Expand a short ref name (e.g. 'main') to its full name
  // (e.g. 'refs/heads/main') without resolving it to an oid.
  static async expand({ fs, gitdir, ref }) {
    // Is it a complete and valid SHA?
    if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) {
      return ref
    }
    // We need to alternate between the file system and the packed-refs
    const packedMap = await GitRefManager.packedRefs({ fs, gitdir });
    // Look in all the proper paths, in this order
    const allpaths = refpaths(ref);
    for (const ref of allpaths) {
      if (await fs.exists(`${gitdir}/${ref}`)) return ref
      if (packedMap.has(ref)) return ref
    }
    // Do we give up?
    throw new NotFoundError(ref)
  }

  // Like expand(), but against an arbitrary ref map (e.g. remote refs).
  static async expandAgainstMap({ ref, map }) {
    // Look in all the proper paths, in this order
    const allpaths = refpaths(ref);
    for (const ref of allpaths) {
      if (await map.has(ref)) return ref
    }
    // Do we give up?
    throw new NotFoundError(ref)
  }

  // Like resolve(), but against an arbitrary ref map; returns both the
  // full ref name that matched and the resolved oid.
  static resolveAgainstMap({ ref, fullref = ref, depth = undefined, map }) {
    if (depth !== undefined) {
      depth--;
      if (depth === -1) {
        return { fullref, oid: ref }
      }
    }
    // Is it a ref pointer?
    if (ref.startsWith('ref: ')) {
      ref = ref.slice('ref: '.length);
      return GitRefManager.resolveAgainstMap({ ref, fullref, depth, map })
    }
    // Is it a complete and valid SHA?
    if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) {
      return { fullref, oid: ref }
    }
    // Look in all the proper paths, in this order
    const allpaths = refpaths(ref);
    for (const ref of allpaths) {
      const sha = map.get(ref);
      if (sha) {
        return GitRefManager.resolveAgainstMap({
          ref: sha.trim(),
          fullref: ref,
          depth,
          map,
        })
      }
    }
    // Do we give up?
    throw new NotFoundError(ref)
  }

  static async packedRefs({ fs, gitdir }) {
    const text = await fs.read(`${gitdir}/packed-refs`, { encoding: 'utf8' });
    const packed = GitPackedRefs.from(text);
    return packed.refs
  }

  // List all the refs that match the `filepath` prefix
  static async listRefs({ fs, gitdir, filepath }) {
    // Kicked off first, awaited later (runs concurrently with readdirDeep).
    const packedMap = GitRefManager.packedRefs({ fs, gitdir });
    let files = null;
    try {
      files = await fs.readdirDeep(`${gitdir}/${filepath}`);
      files = files.map(x => x.replace(`${gitdir}/${filepath}/`, ''));
    } catch (err) {
      // Directory may simply not exist yet; treat as no loose refs.
      files = [];
    }

    for (let key of (await packedMap).keys()) {
      // filter by prefix
      if (key.startsWith(filepath)) {
        // remove prefix
        key = key.replace(filepath + '/', '');
        // Don't include duplicates; the loose files have precedence anyway
        if (!files.includes(key)) {
          files.push(key);
        }
      }
    }
    // since we just appended things onto an array, we need to sort them now
    files.sort(compareRefNames);
    return files
  }

  static async listBranches({ fs, gitdir, remote }) {
    if (remote) {
      return GitRefManager.listRefs({
        fs,
        gitdir,
        filepath: `refs/remotes/${remote}`,
      })
    } else {
      return GitRefManager.listRefs({ fs, gitdir, filepath: `refs/heads` })
    }
  }

  // List tag names, excluding the peeled '^{}' companion entries.
  static async listTags({ fs, gitdir }) {
    const tags = await GitRefManager.listRefs({
      fs,
      gitdir,
      filepath: `refs/tags`,
    });
    return tags.filter(x => !x.endsWith('^{}'))
  }
}
2088
// Git sorts tree entries as if there is a trailing slash on directory names.
function compareTreeEntryPath(a, b) {
  const pathA = appendSlashIfDir(a);
  const pathB = appendSlashIfDir(b);
  return compareStrings(pathA, pathB)
}

/** Append '/' to a tree entry's path when the entry is a directory. */
function appendSlashIfDir(entry) {
  if (entry.mode === '040000') {
    return `${entry.path}/`
  }
  return entry.path
}
2097
2098/**
2099 *
2100 * @typedef {Object} TreeEntry
2101 * @property {string} mode - the 6 digit hexadecimal mode
2102 * @property {string} path - the name of the file or directory
2103 * @property {string} oid - the SHA-1 object id of the blob or tree
2104 * @property {'commit'|'blob'|'tree'} type - the type of object
2105 */
2106
// Map a (normalized, 6-digit) git tree entry mode to its object type.
function mode2type$1(mode) {
  const types = {
    '040000': 'tree',
    '100644': 'blob',
    '100755': 'blob',
    '120000': 'blob',
    '160000': 'commit',
  };
  const type = types[mode];
  if (type === undefined) {
    throw new InternalError(`Unexpected GitTree entry mode: ${mode}`)
  }
  return type
}
2118
// Parse the raw bytes of a git tree object into an array of entries.
// Each entry is laid out as: "<mode> <path>\0<20-byte binary oid>".
function parseBuffer(buffer) {
  const entries = [];
  let offset = 0;
  while (offset < buffer.length) {
    const spaceIdx = buffer.indexOf(32, offset);
    if (spaceIdx === -1) {
      throw new InternalError(
        `GitTree: Error parsing buffer at byte location ${offset}: Could not find the next space character.`
      )
    }
    const nullIdx = buffer.indexOf(0, offset);
    if (nullIdx === -1) {
      throw new InternalError(
        `GitTree: Error parsing buffer at byte location ${offset}: Could not find the next null character.`
      )
    }
    let mode = buffer.slice(offset, spaceIdx).toString('utf8');
    if (mode === '40000') mode = '040000'; // makes it line up neater in printed output
    const type = mode2type$1(mode);
    const path = buffer.slice(spaceIdx + 1, nullIdx).toString('utf8');

    // Prevent malicious git repos from writing to "..\foo" on clone etc
    if (path.includes('\\') || path.includes('/')) {
      throw new UnsafeFilepathError(path)
    }

    const oid = buffer.slice(nullIdx + 1, nullIdx + 21).toString('hex');
    offset = nullIdx + 21;
    entries.push({ mode, path, oid, type });
  }
  return entries
}
2151
// Normalize a file mode (number or string) to one of the five modes git allows
// in tree entries.
function limitModeToAllowed(mode) {
  const text = typeof mode === 'number' ? mode.toString(8) : mode;
  const allowed = [
    [/^0?4.*/, '040000'], // Directory
    [/^1006.*/, '100644'], // Regular non-executable file
    [/^1007.*/, '100755'], // Regular executable file
    [/^120.*/, '120000'], // Symbolic link
    [/^160.*/, '160000'], // Commit (git submodule reference)
  ];
  for (const [pattern, canonical] of allowed) {
    if (pattern.test(text)) return canonical
  }
  throw new InternalError(`Could not understand file mode: ${text}`)
}
2164
// Normalize an entry-like object into the canonical TreeEntry shape.
function nudgeIntoShape(entry) {
  // GitHub's REST API calls the object id `sha` rather than `oid`.
  if (entry.sha && !entry.oid) {
    entry.oid = entry.sha;
  }
  // Normalize the mode (the index stores modes in several shapes)
  // and fill in a missing type from it.
  entry.mode = limitModeToAllowed(entry.mode);
  entry.type = entry.type || mode2type$1(entry.mode);
  return entry
}
2175
class GitTree {
  /**
   * @param {Buffer|TreeEntry[]} entries - the raw bytes of a git tree object,
   *   or an array of entry-like objects to normalize.
   */
  constructor(entries) {
    if (Buffer.isBuffer(entries)) {
      this._entries = parseBuffer(entries);
    } else if (Array.isArray(entries)) {
      this._entries = entries.map(nudgeIntoShape);
    } else {
      throw new InternalError('invalid type passed to GitTree constructor')
    }
    // Tree entries are not sorted alphabetically in the usual sense (see `compareTreeEntryPath`)
    // but it is important later on that these be sorted in the same order as they would be returned from readdir.
    this._entries.sort(comparePath);
  }

  static from(tree) {
    return new GitTree(tree)
  }

  /** Render the tree as "mode type oid path" lines (like `git cat-file -p`). */
  render() {
    const lines = this._entries.map(
      entry => `${entry.mode} ${entry.type} ${entry.oid} ${entry.path}`
    );
    return lines.join('\n')
  }

  /** Serialize back to the canonical binary git tree object format. */
  toObject() {
    // Adjust the sort order to match git's (directories as if "name/").
    const sorted = [...this._entries].sort(compareTreeEntryPath);
    const parts = sorted.map(entry =>
      Buffer.concat([
        Buffer.from(entry.mode.replace(/^0/, '')), // git stores '40000', not '040000'
        Buffer.from(' '),
        Buffer.from(entry.path, 'utf8'),
        Buffer.from([0]),
        Buffer.from(entry.oid, 'hex'),
      ])
    );
    return Buffer.concat(parts)
  }

  /**
   * @returns {TreeEntry[]}
   */
  entries() {
    return this._entries
  }

  *[Symbol.iterator]() {
    yield* this._entries;
  }
}
2229
class GitObject {
  /** Prefix the payload with git's `<type> <length>\0` object header. */
  static wrap({ type, object }) {
    const header = Buffer.from(`${type} ${object.byteLength.toString()}\x00`);
    return Buffer.concat([header, Buffer.from(object)])
  }

  /** Split a wrapped object into type and payload, verifying the declared length. */
  static unwrap(buffer) {
    const spaceIdx = buffer.indexOf(32); // first space
    const nullIdx = buffer.indexOf(0); // first null value
    const type = buffer.slice(0, spaceIdx).toString('utf8'); // get type of object
    const length = buffer.slice(spaceIdx + 1, nullIdx).toString('utf8'); // get declared length
    const actualLength = buffer.length - (nullIdx + 1);
    // verify length
    if (parseInt(length) !== actualLength) {
      throw new InternalError(
        `Length mismatch: expected ${length} bytes but got ${actualLength} instead.`
      )
    }
    return {
      type,
      object: Buffer.from(buffer.slice(nullIdx + 1)),
    }
  }
}
2256
// Read a loose object from objects/<first 2 hex chars>/<remaining 38 chars>.
// Returns null when the file does not exist.
async function readObjectLoose({ fs, gitdir, oid }) {
  const source = `objects/${oid.slice(0, 2)}/${oid.slice(2)}`;
  const file = await fs.read(`${gitdir}/${source}`);
  return file ? { object: file, format: 'deflated', source } : null
}
2265
2266/**
2267 * @param {Buffer} delta
2268 * @param {Buffer} source
2269 * @returns {Buffer}
2270 */
2271function applyDelta(delta, source) {
2272 const reader = new BufferCursor(delta);
2273 const sourceSize = readVarIntLE(reader);
2274
2275 if (sourceSize !== source.byteLength) {
2276 throw new InternalError(
2277 `applyDelta expected source buffer to be ${sourceSize} bytes but the provided buffer was ${source.length} bytes`
2278 )
2279 }
2280 const targetSize = readVarIntLE(reader);
2281 let target;
2282
2283 const firstOp = readOp(reader, source);
2284 // Speed optimization - return raw buffer if it's just single simple copy
2285 if (firstOp.byteLength === targetSize) {
2286 target = firstOp;
2287 } else {
2288 // Otherwise, allocate a fresh buffer and slices
2289 target = Buffer.alloc(targetSize);
2290 const writer = new BufferCursor(target);
2291 writer.copy(firstOp);
2292
2293 while (!reader.eof()) {
2294 writer.copy(readOp(reader, source));
2295 }
2296
2297 const tell = writer.tell();
2298 if (targetSize !== tell) {
2299 throw new InternalError(
2300 `applyDelta expected target buffer to be ${targetSize} bytes but the resulting buffer was ${tell} bytes`
2301 )
2302 }
2303 }
2304 return target
2305}
2306
// Little-endian base-128 varint: 7 data bits per byte, bit 7 = continuation.
function readVarIntLE(reader) {
  let value = 0;
  for (let shift = 0; ; shift += 7) {
    const byte = reader.readUInt8();
    value |= (byte & 0b01111111) << shift;
    if ((byte & 0b10000000) === 0) break
  }
  return value
}
2318
// Read up to `size` little-endian bytes, but only those whose presence bit is
// set in `flags`; absent bytes contribute zero at their position.
function readCompactLE(reader, flags, size) {
  let result = 0;
  for (let shift = 0; size > 0; size--, shift += 8) {
    if (flags & 0b00000001) {
      result |= reader.readUInt8() << shift;
    }
    flags >>= 1;
  }
  return result
}
2331
// Read one delta instruction and return the bytes it produces.
function readOp(reader, source) {
  /** @type {number} */
  const byte = reader.readUInt8();
  const COPY = 0b10000000;
  const OFFS = 0b00001111;
  const SIZE = 0b01110000;
  if (byte & COPY) {
    // copy consists of 4 byte offset, 3 byte size (in LE order);
    // which bytes are present is flagged in the low bits of `byte`.
    const offset = readCompactLE(reader, byte & OFFS, 4);
    let size = readCompactLE(reader, (byte & SIZE) >> 4, 3);
    // Yup. They really did this optimization: a zero size means 0x10000.
    if (size === 0) size = 0x10000;
    return source.slice(offset, offset + size)
  }
  // insert: the low bits are the count of literal bytes that follow
  return reader.slice(byte)
}
2350
// Convert a single value to an Async Iterator that yields it once.
// This will be easier with async generator functions.
function fromValue(value) {
  let consumed = false;
  return {
    next() {
      if (consumed) {
        return Promise.resolve({ done: true, value: undefined })
      }
      consumed = true;
      return Promise.resolve({ done: false, value })
    },
    return() {
      consumed = true;
      return {}
    },
    [Symbol.asyncIterator]() {
      return this
    },
  }
}
2368
// Normalize anything iterable-ish to an iterator: prefer async iteration,
// then sync iteration, then assume it's already an iterator; otherwise
// treat the argument as a single value.
function getIterator(iterable) {
  if (iterable[Symbol.asyncIterator]) return iterable[Symbol.asyncIterator]()
  if (iterable[Symbol.iterator]) return iterable[Symbol.iterator]()
  if (iterable.next) return iterable
  return fromValue(iterable)
}
2381
2382// inspired by 'gartal' but lighter-weight and more battle-tested.
class StreamReader {
  /**
   * Buffered byte reader over any (async) iterable of chunks, with a
   * one-step undo. `cursor` is the read position inside `buffer`;
   * `undoCursor` remembers the position before the most recent read.
   * @param {any} stream - anything accepted by getIterator()
   */
  constructor(stream) {
    this.stream = getIterator(stream);
    this.buffer = null;
    this.cursor = 0;
    this.undoCursor = 0;
    this.started = false;
    this._ended = false;
    // Bytes dropped from the front of `buffer` so far; keeps tell() absolute.
    this._discardedBytes = 0;
  }

  // True when the source is exhausted AND the buffer is fully consumed.
  eof() {
    return this._ended && this.cursor === this.buffer.length
  }

  // Absolute stream position (total bytes consumed so far).
  tell() {
    return this._discardedBytes + this.cursor
  }

  // Read one byte; returns undefined at end of stream.
  async byte() {
    if (this.eof()) return
    if (!this.started) await this._init();
    if (this.cursor === this.buffer.length) {
      await this._loadnext();
      if (this._ended) return
    }
    this._moveCursor(1);
    return this.buffer[this.undoCursor]
  }

  // Return the rest of the current buffered chunk; undefined at end of stream.
  async chunk() {
    if (this.eof()) return
    if (!this.started) await this._init();
    if (this.cursor === this.buffer.length) {
      await this._loadnext();
      if (this._ended) return
    }
    this._moveCursor(this.buffer.length);
    return this.buffer.slice(this.undoCursor, this.cursor)
  }

  // Read exactly n bytes (fewer only if the stream ends first).
  async read(n) {
    if (this.eof()) return
    if (!this.started) await this._init();
    if (this.cursor + n > this.buffer.length) {
      this._trim();
      await this._accumulate(n);
    }
    this._moveCursor(n);
    return this.buffer.slice(this.undoCursor, this.cursor)
  }

  // Advance past n bytes without returning them.
  async skip(n) {
    if (this.eof()) return
    if (!this.started) await this._init();
    if (this.cursor + n > this.buffer.length) {
      this._trim();
      await this._accumulate(n);
    }
    this._moveCursor(n);
  }

  // Rewind to the position before the most recent read (one step only).
  async undo() {
    this.cursor = this.undoCursor;
  }

  // Pull the next chunk from the source; sets _ended when the source is done.
  async _next() {
    this.started = true;
    let { done, value } = await this.stream.next();
    if (done) {
      this._ended = true;
    }
    if (value) {
      value = Buffer.from(value);
    }
    return value
  }

  _trim() {
    // Throw away parts of the buffer we don't need anymore
    // assert(this.cursor <= this.buffer.length)
    this.buffer = this.buffer.slice(this.undoCursor);
    this.cursor -= this.undoCursor;
    this._discardedBytes += this.undoCursor;
    this.undoCursor = 0;
  }

  // Advance the cursor by n (clamped to the buffer), saving the undo point.
  _moveCursor(n) {
    this.undoCursor = this.cursor;
    this.cursor += n;
    if (this.cursor > this.buffer.length) {
      this.cursor = this.buffer.length;
    }
  }

  async _accumulate(n) {
    if (this._ended) return
    // Expand the buffer until we have N bytes of data
    // or we've reached the end of the stream
    const buffers = [this.buffer];
    while (this.cursor + n > lengthBuffers(buffers)) {
      const nextbuffer = await this._next();
      if (this._ended) break
      buffers.push(nextbuffer);
    }
    this.buffer = Buffer.concat(buffers);
  }

  // Replace the buffer with the next chunk, resetting both cursors.
  async _loadnext() {
    this._discardedBytes += this.buffer.length;
    this.undoCursor = 0;
    this.cursor = 0;
    this.buffer = await this._next();
  }

  // Lazily load the first chunk on first read.
  async _init() {
    this.buffer = await this._next();
  }
}
2502
// This helper function helps us postpone concatenating buffers, which
// would create intermediate buffer objects,
function lengthBuffers(buffers) {
  let total = 0;
  for (const buffer of buffers) {
    total += buffer.length;
  }
  return total
}
2508
2509// My version of git-list-pack - roughly 15x faster than the original
2510
/**
 * Parse a packfile stream, invoking `onData` once per contained object with
 * its inflated data, numeric type, pack offsets, and (for deltas) the base
 * reference bytes.
 * @param {any} stream - byte chunks of the packfile (anything StreamReader accepts)
 * @param {Function} onData - async callback receiving { data, type, num, offset, end, reference, ofs }
 */
async function listpack(stream, onData) {
  const reader = new StreamReader(stream);
  // A packfile starts with the 4-byte magic 'PACK'...
  let PACK = await reader.read(4);
  PACK = PACK.toString('utf8');
  if (PACK !== 'PACK') {
    throw new InternalError(`Invalid PACK header '${PACK}'`)
  }

  // ...then a 4-byte big-endian version (only v2 is supported here)...
  let version = await reader.read(4);
  version = version.readUInt32BE(0);
  if (version !== 2) {
    throw new InternalError(`Invalid packfile version: ${version}`)
  }

  // ...then the object count.
  let numObjects = await reader.read(4);
  numObjects = numObjects.readUInt32BE(0);
  // If (for some godforsaken reason) this is an empty packfile, abort now.
  if (numObjects < 1) return

  while (!reader.eof() && numObjects--) {
    const offset = reader.tell();
    const { type, length, ofs, reference } = await parseHeader(reader);
    // Each object body is zlib-deflated; feed chunks to pako until it
    // reports a complete result.
    const inflator = new pako.Inflate();
    while (!inflator.result) {
      const chunk = await reader.chunk();
      if (!chunk) break
      inflator.push(chunk, false);
      if (inflator.err) {
        throw new InternalError(`Pako error: ${inflator.msg}`)
      }
      if (inflator.result) {
        if (inflator.result.length !== length) {
          throw new InternalError(
            `Inflated object size is different from that stated in packfile.`
          )
        }

        // Backtrack parser to where deflated data ends
        // (the last chunk likely contained bytes belonging to the next object).
        await reader.undo();
        await reader.read(chunk.length - inflator.strm.avail_in);
        const end = reader.tell();
        await onData({
          data: inflator.result,
          type,
          num: numObjects,
          offset,
          end,
          reference,
          ofs,
        });
      }
    }
  }
}
2565
/**
 * Parse one object header from a packfile stream.
 * Returns the numeric object type, the inflated payload length, and — for
 * deltified objects — `ofs`/`reference` describing the base object.
 * @param {StreamReader} reader
 */
async function parseHeader(reader) {
  // Object type is encoded in bits 654
  let byte = await reader.byte();
  const type = (byte >> 4) & 0b111;
  // The length encoding get complicated.
  // Last four bits of length is encoded in bits 3210
  let length = byte & 0b1111;
  // Whether the next byte is part of the variable-length encoded number
  // is encoded in bit 7
  if (byte & 0b10000000) {
    let shift = 4;
    do {
      byte = await reader.byte();
      length |= (byte & 0b01111111) << shift;
      shift += 7;
    } while (byte & 0b10000000)
  }
  // Handle deltified objects
  let ofs;
  let reference;
  // Type 6 = OFS_DELTA: the base is at a relative offset earlier in the pack.
  // NOTE(review): `ofs` is accumulated LSB-first here while the raw bytes are
  // also kept in `reference`; readSlice re-decodes the offset from the raw
  // bytes via decodeVarInt — verify which value downstream consumers rely on.
  if (type === 6) {
    let shift = 0;
    ofs = 0;
    const bytes = [];
    do {
      byte = await reader.byte();
      ofs |= (byte & 0b01111111) << shift;
      shift += 7;
      bytes.push(byte);
    } while (byte & 0b10000000)
    reference = Buffer.from(bytes);
  }
  // Type 7 = REF_DELTA: the base is identified by a full 20-byte oid.
  if (type === 7) {
    const buf = await reader.read(20);
    reference = buf;
  }
  return { type, length, ofs, reference }
}
2604
/* eslint-env node, browser */

// Lazily-detected capability flag: undefined = not yet tested, then boolean.
// (Previously this was initialized to `false` while the guard below checked
// `=== null`, so the detection never ran and the native DecompressionStream
// path was dead code.)
let supportsDecompressionStream;

/**
 * Inflate zlib-deflated data, preferring the platform's native
 * DecompressionStream and falling back to pako.
 * @param {Uint8Array} buffer
 * @returns {Promise<Uint8Array>}
 */
async function inflate(buffer) {
  if (supportsDecompressionStream === undefined) {
    supportsDecompressionStream = testDecompressionStream();
  }
  return supportsDecompressionStream
    ? browserInflate(buffer)
    : pako.inflate(buffer)
}

// Inflate using the WHATWG Streams API ('deflate' = zlib format).
async function browserInflate(buffer) {
  const ds = new DecompressionStream('deflate');
  const d = new Blob([buffer]).stream().pipeThrough(ds);
  return new Uint8Array(await new Response(d).arrayBuffer())
}

// Feature-detect DecompressionStream; constructing it throws where unsupported.
function testDecompressionStream() {
  try {
    const ds = new DecompressionStream('deflate');
    if (ds) return true
  } catch (_) {
    // no bother
  }
  return false
}
2633
// Decode git's OFS_DELTA offset encoding: big-endian groups of 7 bits with
// bit 7 as the continuation flag, plus an "add 1 before each shift" twist
// that makes multi-byte encodings unambiguous.
function decodeVarInt(reader) {
  const groups = [];
  let byte;
  do {
    byte = reader.readUInt8();
    // We keep bits 6543210
    groups.push(byte & 0b01111111);
    // Bit 7 says whether another byte follows.
  } while (byte & 0b10000000)
  let value = -1;
  for (const group of groups) {
    value = ((value + 1) << 7) | group;
  }
  return value
}
2652
// I'm pretty much copying this one from the git C source code,
// because it makes no sense.
// Continues a varint whose lowest 4 bits (`startWith`) were already read.
function otherVarIntDecode(reader, startWith) {
  let result = startWith;
  for (let shift = 4; ; shift += 7) {
    const byte = reader.readUInt8();
    result |= (byte & 0b01111111) << shift;
    if ((byte & 0b10000000) === 0) break
  }
  return result
}
2666
class GitPackIndex {
  /**
   * In-memory representation of a packfile index: maps object ids to byte
   * offsets within the .pack file and can read/resolve objects from it.
   * @param {object} stuff - properties copied onto the instance
   *   (hashes, offsets, crcs, packfileSha, getExternalRefDelta, [pack])
   */
  constructor(stuff) {
    Object.assign(this, stuff);
    // Cache of { type, object } for deeply-deltified offsets (see readSlice).
    this.offsetCache = {};
  }

  /**
   * Parse a version-2 .idx buffer. Returns undefined if the magic number
   * doesn't match (i.e. not a v2 index).
   * @param {object} args
   * @param {Buffer} args.idx - the raw .idx file contents
   * @param {Function} [args.getExternalRefDelta]
   */
  static async fromIdx({ idx, getExternalRefDelta }) {
    const reader = new BufferCursor(idx);
    const magic = reader.slice(4).toString('hex');
    // Check for IDX v2 magic number
    if (magic !== 'ff744f63') {
      return // undefined
    }
    const version = reader.readUInt32BE();
    if (version !== 2) {
      throw new InternalError(
        `Unable to read version ${version} packfile IDX. (Only version 2 supported)`
      )
    }
    if (idx.byteLength > 2048 * 1024 * 1024) {
      throw new InternalError(
        `To keep implementation simple, I haven't implemented the layer 5 feature needed to support packfiles > 2GB in size.`
      )
    }
    // Skip over fanout table
    reader.seek(reader.tell() + 4 * 255);
    // Get hashes
    const size = reader.readUInt32BE();
    const hashes = [];
    for (let i = 0; i < size; i++) {
      const hash = reader.slice(20).toString('hex');
      hashes[i] = hash;
    }
    reader.seek(reader.tell() + 4 * size);
    // Skip over CRCs
    // Get offsets
    const offsets = new Map();
    for (let i = 0; i < size; i++) {
      offsets.set(hashes[i], reader.readUInt32BE());
    }
    const packfileSha = reader.slice(20).toString('hex');
    return new GitPackIndex({
      hashes,
      crcs: {},
      offsets,
      packfileSha,
      getExternalRefDelta,
    })
  }

  /**
   * Build an index from a raw .pack buffer: scan every object, compute
   * per-object CRCs, then resolve deltas and hash each object to get its oid.
   * @param {object} args
   * @param {Buffer} args.pack - the entire packfile contents
   * @param {Function} [args.getExternalRefDelta] - resolves ref-delta bases missing from this pack
   * @param {Function} [args.onProgress] - receives progress events for both phases
   */
  static async fromPack({ pack, getExternalRefDelta, onProgress }) {
    const listpackTypes = {
      1: 'commit',
      2: 'tree',
      3: 'blob',
      4: 'tag',
      6: 'ofs-delta',
      7: 'ref-delta',
    };
    const offsetToObject = {};

    // Older packfiles do NOT use the shasum of the pack itself,
    // so it is recommended to just use whatever bytes are in the trailer.
    // Source: https://github.com/git/git/commit/1190a1acf800acdcfd7569f87ac1560e2d077414
    const packfileSha = pack.slice(-20).toString('hex');

    const hashes = [];
    const crcs = {};
    const offsets = new Map();
    let totalObjectCount = null;
    let lastPercent = null;

    await listpack([pack], async ({ data, type, reference, offset, num }) => {
      // `num` counts down, so the first callback tells us the total.
      if (totalObjectCount === null) totalObjectCount = num;
      const percent = Math.floor(
        ((totalObjectCount - num) * 100) / totalObjectCount
      );
      if (percent !== lastPercent) {
        if (onProgress) {
          await onProgress({
            phase: 'Receiving objects',
            loaded: totalObjectCount - num,
            total: totalObjectCount,
          });
        }
      }
      lastPercent = percent;
      // Change type from a number to a meaningful string
      type = listpackTypes[type];

      // Record every object's offset; oids are filled in later.
      if (['commit', 'tree', 'blob', 'tag'].includes(type)) {
        offsetToObject[offset] = {
          type,
          offset,
        };
      } else if (type === 'ofs-delta') {
        offsetToObject[offset] = {
          type,
          offset,
        };
      } else if (type === 'ref-delta') {
        offsetToObject[offset] = {
          type,
          offset,
        };
      }
    });

    // We need to know the lengths of the slices to compute the CRCs.
    const offsetArray = Object.keys(offsetToObject).map(Number);
    for (const [i, start] of offsetArray.entries()) {
      const end =
        i + 1 === offsetArray.length ? pack.byteLength - 20 : offsetArray[i + 1];
      const o = offsetToObject[start];
      const crc = crc32.buf(pack.slice(start, end)) >>> 0;
      o.end = end;
      o.crc = crc;
    }

    // We don't have the hashes yet. But we can generate them using the .readSlice function!
    const p = new GitPackIndex({
      pack: Promise.resolve(pack),
      packfileSha,
      crcs,
      hashes,
      offsets,
      getExternalRefDelta,
    });

    // Resolve deltas and compute the oids
    lastPercent = null;
    let count = 0;
    const objectsByDepth = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    for (let offset in offsetToObject) {
      offset = Number(offset);
      const percent = Math.floor((count * 100) / totalObjectCount);
      if (percent !== lastPercent) {
        if (onProgress) {
          await onProgress({
            phase: 'Resolving deltas',
            loaded: count,
            total: totalObjectCount,
          });
        }
      }
      count++;
      lastPercent = percent;

      const o = offsetToObject[offset];
      if (o.oid) continue
      try {
        p.readDepth = 0;
        p.externalReadDepth = 0;
        const { type, object } = await p.readSlice({ start: offset });
        objectsByDepth[p.readDepth] += 1;
        const oid = await shasum(GitObject.wrap({ type, object }));
        o.oid = oid;
        hashes.push(oid);
        offsets.set(oid, offset);
        crcs[oid] = o.crc;
      } catch (err) {
        // Deliberate best-effort: skip objects whose deltas can't be resolved.
        continue
      }
    }

    hashes.sort();
    return p
  }

  /**
   * Serialize this index back into the version-2 .idx on-disk format,
   * including the trailing index checksum.
   */
  async toBuffer() {
    const buffers = [];
    const write = (str, encoding) => {
      buffers.push(Buffer.from(str, encoding));
    };
    // Write out IDX v2 magic number
    write('ff744f63', 'hex');
    // Write out version number 2
    write('00000002', 'hex');
    // Write fanout table
    // (entry i = count of hashes whose first byte is <= i)
    const fanoutBuffer = new BufferCursor(Buffer.alloc(256 * 4));
    for (let i = 0; i < 256; i++) {
      let count = 0;
      for (const hash of this.hashes) {
        if (parseInt(hash.slice(0, 2), 16) <= i) count++;
      }
      fanoutBuffer.writeUInt32BE(count);
    }
    buffers.push(fanoutBuffer.buffer);
    // Write out hashes
    for (const hash of this.hashes) {
      write(hash, 'hex');
    }
    // Write out crcs
    const crcsBuffer = new BufferCursor(Buffer.alloc(this.hashes.length * 4));
    for (const hash of this.hashes) {
      crcsBuffer.writeUInt32BE(this.crcs[hash]);
    }
    buffers.push(crcsBuffer.buffer);
    // Write out offsets
    const offsetsBuffer = new BufferCursor(Buffer.alloc(this.hashes.length * 4));
    for (const hash of this.hashes) {
      offsetsBuffer.writeUInt32BE(this.offsets.get(hash));
    }
    buffers.push(offsetsBuffer.buffer);
    // Write out packfile checksum
    write(this.packfileSha, 'hex');
    // Write out shasum
    const totalBuffer = Buffer.concat(buffers);
    const sha = await shasum(totalBuffer);
    const shaBuffer = Buffer.alloc(20);
    shaBuffer.write(sha, 'hex');
    return Buffer.concat([totalBuffer, shaBuffer])
  }

  // Attach the .pack contents (or a promise for them) to this index.
  async load({ pack }) {
    this.pack = pack;
  }

  // Release the .pack contents.
  async unload() {
    this.pack = null;
  }

  /**
   * Read an object by oid. Falls back to `getExternalRefDelta` when the oid
   * is not present in this pack.
   */
  async read({ oid }) {
    if (!this.offsets.get(oid)) {
      if (this.getExternalRefDelta) {
        this.externalReadDepth++;
        return this.getExternalRefDelta(oid)
      } else {
        throw new InternalError(`Could not read object ${oid} from packfile`)
      }
    }
    const start = this.offsets.get(oid);
    return this.readSlice({ start })
  }

  /**
   * Read and fully resolve the object starting at byte `start` of the pack.
   * Recurses to resolve ofs/ref deltas; `readDepth` tracks recursion depth.
   */
  async readSlice({ start }) {
    if (this.offsetCache[start]) {
      return Object.assign({}, this.offsetCache[start])
    }
    this.readDepth++;
    const types = {
      0b0010000: 'commit',
      0b0100000: 'tree',
      0b0110000: 'blob',
      0b1000000: 'tag',
      0b1100000: 'ofs_delta',
      0b1110000: 'ref_delta',
    };
    if (!this.pack) {
      throw new InternalError(
        'Tried to read from a GitPackIndex with no packfile loaded into memory'
      )
    }
    const raw = (await this.pack).slice(start);
    const reader = new BufferCursor(raw);
    const byte = reader.readUInt8();
    // Object type is encoded in bits 654
    const btype = byte & 0b1110000;
    let type = types[btype];
    if (type === undefined) {
      throw new InternalError('Unrecognized type: 0b' + btype.toString(2))
    }
    // The length encoding get complicated.
    // Last four bits of length is encoded in bits 3210
    const lastFour = byte & 0b1111;
    let length = lastFour;
    // Whether the next byte is part of the variable-length encoded number
    // is encoded in bit 7
    const multibyte = byte & 0b10000000;
    if (multibyte) {
      length = otherVarIntDecode(reader, lastFour);
    }
    let base = null;
    let object = null;
    // Handle deltified objects
    // (for deltas, `length` is the inflated delta size, checked below
    // before the delta is applied to the base)
    if (type === 'ofs_delta') {
      const offset = decodeVarInt(reader);
      const baseOffset = start - offset
      ;({ object: base, type } = await this.readSlice({ start: baseOffset }));
    }
    if (type === 'ref_delta') {
      const oid = reader.slice(20).toString('hex')
      ;({ object: base, type } = await this.read({ oid }));
    }
    // Handle undeltified objects
    const buffer = raw.slice(reader.tell());
    object = Buffer.from(await inflate(buffer));
    // Assert that the object length is as expected.
    if (object.byteLength !== length) {
      throw new InternalError(
        `Packfile told us object would have length ${length} but it had length ${object.byteLength}`
      )
    }
    if (base) {
      object = Buffer.from(applyDelta(object, base));
    }
    // Cache the result based on depth.
    if (this.readDepth > 3) {
      // hand tuned for speed / memory usage tradeoff
      this.offsetCache[start] = { type, object };
    }
    return { type, format: 'content', object }
  }
}
2971
const PackfileCache = Symbol('PackfileCache');

/**
 * Read a .idx file from disk and parse it.
 * (`emitter`/`emitterPrefix` are accepted for interface compatibility but unused here.)
 */
async function loadPackIndex({
  fs,
  filename,
  getExternalRefDelta,
  emitter,
  emitterPrefix,
}) {
  const idxBuffer = await fs.read(filename);
  return GitPackIndex.fromIdx({ idx: idxBuffer, getExternalRefDelta })
}
2984
function readPackIndex({
  fs,
  cache,
  filename,
  getExternalRefDelta,
  emitter,
  emitterPrefix,
}) {
  // Lazily create the in-memory cache of parsed packfile indexes.
  if (!cache[PackfileCache]) cache[PackfileCache] = new Map();
  const packfileCache = cache[PackfileCache];
  let promise = packfileCache.get(filename);
  if (promise === undefined) {
    // Cache the promise (not the result) so concurrent callers share one load.
    promise = loadPackIndex({
      fs,
      filename,
      getExternalRefDelta,
      emitter,
      emitterPrefix,
    });
    packfileCache.set(filename, promise);
  }
  return promise
}
3008
/**
 * Look for a git object in the repository's packfiles.
 * Iterates every .idx under objects/pack and, when one contains the oid,
 * lazily loads the matching .pack and reads the object from it.
 * Returns null if no packfile contains the oid.
 */
async function readObjectPacked({
  fs,
  cache,
  gitdir,
  oid,
  format = 'content',
  getExternalRefDelta,
}) {
  // Check to see if it's in a packfile.
  // Iterate through all the .idx files
  let list = await fs.readdir(join(gitdir, 'objects/pack'));
  list = list.filter(x => x.endsWith('.idx'));
  for (const filename of list) {
    // BUGFIX: this template previously contained a garbled placeholder
    // ("$(unknown)") instead of interpolating the current .idx filename,
    // so the index path never pointed at a real file.
    const indexFile = `${gitdir}/objects/pack/${filename}`;
    const p = await readPackIndex({
      fs,
      cache,
      filename: indexFile,
      getExternalRefDelta,
    });
    if (p.error) throw new InternalError(p.error)
    // If the packfile DOES have the oid we're looking for...
    if (p.offsets.has(oid)) {
      // Get the resolved git object from the packfile
      if (!p.pack) {
        // Lazily read the .pack only when an object is actually needed from it.
        const packFile = indexFile.replace(/idx$/, 'pack');
        p.pack = fs.read(packFile);
      }
      const result = await p.read({ oid, getExternalRefDelta });
      result.format = 'content';
      result.source = `objects/pack/${filename.replace(/idx$/, 'pack')}`;
      return result
    }
  }
  // Failed to find it
  return null
}
3046
3047/**
3048 * @param {object} args
3049 * @param {import('../models/FileSystem.js').FileSystem} args.fs
3050 * @param {any} args.cache
3051 * @param {string} args.gitdir
3052 * @param {string} args.oid
3053 * @param {string} [args.format]
3054 */
3055async function _readObject({
3056 fs,
3057 cache,
3058 gitdir,
3059 oid,
3060 format = 'content',
3061}) {
3062 // Curry the current read method so that the packfile un-deltification
3063 // process can acquire external ref-deltas.
3064 const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid });
3065
3066 let result;
3067 // Empty tree - hard-coded so we can use it as a shorthand.
3068 // Note: I think the canonical git implementation must do this too because
3069 // `git cat-file -t 4b825dc642cb6eb9a060e54bf8d69288fbee4904` prints "tree" even in empty repos.
3070 if (oid === '4b825dc642cb6eb9a060e54bf8d69288fbee4904') {
3071 result = { format: 'wrapped', object: Buffer.from(`tree 0\x00`) };
3072 }
3073 // Look for it in the loose object directory.
3074 if (!result) {
3075 result = await readObjectLoose({ fs, gitdir, oid });
3076 }
3077 // Check to see if it's in a packfile.
3078 if (!result) {
3079 result = await readObjectPacked({
3080 fs,
3081 cache,
3082 gitdir,
3083 oid,
3084 getExternalRefDelta,
3085 });
3086 }
3087 // Finally
3088 if (!result) {
3089 throw new NotFoundError(oid)
3090 }
3091
3092 if (format === 'deflated') {
3093 return result
3094 }
3095
3096 if (result.format === 'deflated') {
3097 result.object = Buffer.from(await inflate(result.object));
3098 result.format = 'wrapped';
3099 }
3100
3101 if (result.format === 'wrapped') {
3102 if (format === 'wrapped' && result.format === 'wrapped') {
3103 return result
3104 }
3105 const sha = await shasum(result.object);
3106 if (sha !== oid) {
3107 throw new InternalError(
3108 `SHA check failed! Expected ${oid}, computed ${sha}`
3109 )
3110 }
3111 const { object, type } = GitObject.unwrap(result.object);
3112 result.type = type;
3113 result.object = object;
3114 result.format = 'content';
3115 }
3116
3117 if (result.format === 'content') {
3118 if (format === 'content') return result
3119 return
3120 }
3121
3122 throw new InternalError(`invalid format "${result.format}"`)
3123}
3124
class AlreadyExistsError extends BaseError {
  /**
   * @param {'note'|'remote'|'tag'|'branch'} noun - the kind of thing that already exists
   * @param {string} where - the ref/path that collided
   * @param {boolean} canForce - whether a 'force' option could overwrite it
   */
  constructor(noun, where, canForce = true) {
    const hint = canForce
      ? ` (Hint: use 'force: true' parameter to overwrite existing ${noun}.)`
      : '';
    super(`Failed to create ${noun} at ${where} because it already exists.${hint}`);
    this.code = this.name = AlreadyExistsError.code;
    this.data = { noun, where, canForce };
  }
}
/** @type {'AlreadyExistsError'} */
AlreadyExistsError.code = 'AlreadyExistsError';
3145
class AmbiguousError extends BaseError {
  /**
   * @param {'oids'|'refs'} nouns
   * @param {string} short - the (too-short) abbreviation the user supplied
   * @param {string[]} matches - everything that abbreviation matched
   */
  constructor(nouns, short, matches) {
    const matchList = matches.join(', ');
    super(
      `Found multiple ${nouns} matching "${short}" (${matchList}). Use a longer abbreviation length to disambiguate them.`
    );
    this.code = this.name = AmbiguousError.code;
    this.data = { nouns, short, matches };
  }
}
/** @type {'AmbiguousError'} */
AmbiguousError.code = 'AmbiguousError';
3164
class CheckoutConflictError extends BaseError {
  /**
   * @param {string[]} filepaths - files with local modifications that block checkout
   */
  constructor(filepaths) {
    const fileList = filepaths.join(', ');
    super(
      `Your local changes to the following files would be overwritten by checkout: ${fileList}`
    );
    this.code = this.name = CheckoutConflictError.code;
    this.data = { filepaths };
  }
}
/** @type {'CheckoutConflictError'} */
CheckoutConflictError.code = 'CheckoutConflictError';
3181
class CommitNotFetchedError extends BaseError {
  /**
   * @param {string} ref - the ref being checked out
   * @param {string} oid - the commit that is missing locally
   */
  constructor(ref, oid) {
    const message = `Failed to checkout "${ref}" because commit ${oid} is not available locally. Do a git fetch to make the branch available locally.`;
    super(message);
    this.code = CommitNotFetchedError.code;
    this.name = CommitNotFetchedError.code;
    this.data = { ref, oid };
  }
}
/** @type {'CommitNotFetchedError'} */
CommitNotFetchedError.code = 'CommitNotFetchedError';
3197
class EmptyServerResponseError extends BaseError {
  /** Thrown when the git server's response contains no data. */
  constructor() {
    const message = `Empty response from git server.`;
    super(message);
    this.code = EmptyServerResponseError.code;
    this.name = EmptyServerResponseError.code;
    this.data = {};
  }
}
/** @type {'EmptyServerResponseError'} */
EmptyServerResponseError.code = 'EmptyServerResponseError';
3207
class FastForwardError extends BaseError {
  /** Thrown when a fast-forward merge cannot be performed. */
  constructor() {
    super('A simple fast-forward merge was not possible.');
    this.name = FastForwardError.code;
    this.code = FastForwardError.code;
    this.data = {};
  }
}
/** @type {'FastForwardError'} */
FastForwardError.code = 'FastForwardError';
3217
class GitPushError extends BaseError {
  /**
   * Thrown when at least one ref update was rejected during a push.
   * @param {string} prettyDetails
   * @param {PushResult} result
   */
  constructor(prettyDetails, result) {
    const message = `One or more branches were not updated: ${prettyDetails}`;
    super(message);
    this.name = GitPushError.code;
    this.code = GitPushError.code;
    this.data = { prettyDetails, result };
  }
}
/** @type {'GitPushError'} */
GitPushError.code = 'GitPushError';
3231
class HttpError extends BaseError {
  /**
   * Thrown when an HTTP request fails; the raw response body is kept in `data`.
   * @param {number} statusCode
   * @param {string} statusMessage
   * @param {string} response
   */
  constructor(statusCode, statusMessage, response) {
    super(`HTTP Error: ${statusCode} ${statusMessage}`);
    this.name = HttpError.code;
    this.code = HttpError.code;
    this.data = { statusCode, statusMessage, response };
  }
}
/** @type {'HttpError'} */
HttpError.code = 'HttpError';
3246
class InvalidFilepathError extends BaseError {
  /**
   * Thrown when a "filepath" argument is malformed; `reason` selects the message.
   * @param {'leading-slash'|'trailing-slash'|'directory'} [reason]
   */
  constructor(reason) {
    let message;
    switch (reason) {
      case 'leading-slash':
      case 'trailing-slash':
        message = `"filepath" parameter should not include leading or trailing directory separators because these can cause problems on some platforms.`;
        break;
      case 'directory':
        message = `"filepath" should not be a directory.`;
        break;
      default:
        message = 'invalid filepath';
    }
    super(message);
    this.name = InvalidFilepathError.code;
    this.code = InvalidFilepathError.code;
    this.data = { reason };
  }
}
/** @type {'InvalidFilepathError'} */
InvalidFilepathError.code = 'InvalidFilepathError';
3265
class InvalidRefNameError extends BaseError {
  /**
   * Thrown when a proposed ref name is not valid; `suggestion` is a cleaned-up alternative.
   * @param {string} ref
   * @param {string} suggestion
   */
  constructor(ref, suggestion) {
    const message = `"${ref}" would be an invalid git reference. (Hint: a valid alternative would be "${suggestion}".)`;
    super(message);
    this.name = InvalidRefNameError.code;
    this.code = InvalidRefNameError.code;
    this.data = { ref, suggestion };
  }
}
/** @type {'InvalidRefNameError'} */
InvalidRefNameError.code = 'InvalidRefNameError';
3282
class MaxDepthError extends BaseError {
  /**
   * Thrown when a bounded graph/tree search exceeds its depth limit.
   * @param {number} depth
   */
  constructor(depth) {
    super(`Maximum search depth of ${depth} exceeded.`);
    this.name = MaxDepthError.code;
    this.code = MaxDepthError.code;
    this.data = { depth };
  }
}
/** @type {'MaxDepthError'} */
MaxDepthError.code = 'MaxDepthError';
3295
class MergeNotSupportedError extends BaseError {
  /** Thrown when a merge requires conflict resolution that is not implemented. */
  constructor() {
    super('Merges with conflicts are not supported yet.');
    this.name = MergeNotSupportedError.code;
    this.code = MergeNotSupportedError.code;
    this.data = {};
  }
}
/** @type {'MergeNotSupportedError'} */
MergeNotSupportedError.code = 'MergeNotSupportedError';
3305
class MergeConflictError extends BaseError {
  /**
   * Thrown when an automatic merge produces conflicts in one or more files.
   * @param {Array<string>} filepaths
   */
  constructor(filepaths) {
    // Note: Array#toString joins with bare commas (no space), matching the
    // historical message format.
    const fileList = filepaths.toString();
    super(
      `Automatic merge failed with one or more merge conflicts in the following files: ${fileList}. Fix conflicts then commit the result.`
    );
    this.name = MergeConflictError.code;
    this.code = MergeConflictError.code;
    this.data = { filepaths };
  }
}
/** @type {'MergeConflictError'} */
MergeConflictError.code = 'MergeConflictError';
3320
class MissingNameError extends BaseError {
  /**
   * Thrown when no author/committer/tagger name could be resolved.
   * @param {'author'|'committer'|'tagger'} role
   */
  constructor(role) {
    const message = `No name was provided for ${role} in the argument or in the .git/config file.`;
    super(message);
    this.name = MissingNameError.code;
    this.code = MissingNameError.code;
    this.data = { role };
  }
}
/** @type {'MissingNameError'} */
MissingNameError.code = 'MissingNameError';
3335
class MissingParameterError extends BaseError {
  /**
   * Thrown when a required argument was omitted by the caller.
   * @param {string} parameter
   */
  constructor(parameter) {
    const message = `The function requires a "${parameter}" parameter but none was provided.`;
    super(message);
    this.name = MissingParameterError.code;
    this.code = MissingParameterError.code;
    this.data = { parameter };
  }
}
/** @type {'MissingParameterError'} */
MissingParameterError.code = 'MissingParameterError';
3350
class MultipleGitError extends BaseError {
  /**
   * Aggregates several errors thrown by one operation; inspect `errors`.
   * @param {Error[]} errors
   */
  constructor(errors) {
    super(
      `There are multiple errors that were thrown by the method. Please refer to the "errors" property to see more`
    );
    this.name = MultipleGitError.code;
    this.code = MultipleGitError.code;
    this.data = { errors };
    this.errors = errors;
  }
}
/** @type {'MultipleGitError'} */
MultipleGitError.code = 'MultipleGitError';
3367
class ParseError extends BaseError {
  /**
   * Thrown when input does not match the grammar being parsed.
   * @param {string} expected
   * @param {string} actual
   */
  constructor(expected, actual) {
    const message = `Expected "${expected}" but received "${actual}".`;
    super(message);
    this.name = ParseError.code;
    this.code = ParseError.code;
    this.data = { expected, actual };
  }
}
/** @type {'ParseError'} */
ParseError.code = 'ParseError';
3381
class PushRejectedError extends BaseError {
  /**
   * Thrown when the remote rejects a ref update; can be overridden with force.
   * @param {'not-fast-forward'|'tag-exists'} reason
   */
  constructor(reason) {
    let detail = '';
    if (reason === 'not-fast-forward') {
      detail = ' because it was not a simple fast-forward';
    } else if (reason === 'tag-exists') {
      detail = ' because tag already exists';
    }
    super(`Push rejected${detail}. Use "force: true" to override.`);
    this.name = PushRejectedError.code;
    this.code = PushRejectedError.code;
    this.data = { reason };
  }
}
/** @type {'PushRejectedError'} */
PushRejectedError.code = 'PushRejectedError';
3400
class RemoteCapabilityError extends BaseError {
  /**
   * Thrown when the remote lacks a protocol capability the caller needs.
   * @param {'shallow'|'deepen-since'|'deepen-not'|'deepen-relative'} capability
   * @param {'depth'|'since'|'exclude'|'relative'} parameter
   */
  constructor(capability, parameter) {
    const message = `Remote does not support the "${capability}" so the "${parameter}" parameter cannot be used.`;
    super(message);
    this.name = RemoteCapabilityError.code;
    this.code = RemoteCapabilityError.code;
    this.data = { capability, parameter };
  }
}
/** @type {'RemoteCapabilityError'} */
RemoteCapabilityError.code = 'RemoteCapabilityError';
3416
class SmartHttpError extends BaseError {
  /**
   * Thrown when the server's reply is not in smart-HTTP format.
   * @param {string} preview
   * @param {string} response
   */
  constructor(preview, response) {
    const message = `Remote did not reply using the "smart" HTTP protocol. Expected "001e# service=git-upload-pack" but received: ${preview}`;
    super(message);
    this.name = SmartHttpError.code;
    this.code = SmartHttpError.code;
    this.data = { preview, response };
  }
}
/** @type {'SmartHttpError'} */
SmartHttpError.code = 'SmartHttpError';
3432
class UnknownTransportError extends BaseError {
  /**
   * Thrown for remote URLs whose scheme is not a supported transport.
   * @param {string} url
   * @param {string} transport
   * @param {string} [suggestion]
   */
  constructor(url, transport, suggestion) {
    const message = `Git remote "${url}" uses an unrecognized transport protocol: "${transport}"`;
    super(message);
    this.name = UnknownTransportError.code;
    this.code = UnknownTransportError.code;
    this.data = { url, transport, suggestion };
  }
}
/** @type {'UnknownTransportError'} */
UnknownTransportError.code = 'UnknownTransportError';
3449
class UrlParseError extends BaseError {
  /**
   * Thrown when a remote URL cannot be parsed at all.
   * @param {string} url
   */
  constructor(url) {
    super(`Cannot parse remote URL: "${url}"`);
    this.name = UrlParseError.code;
    this.code = UrlParseError.code;
    this.data = { url };
  }
}
/** @type {'UrlParseError'} */
UrlParseError.code = 'UrlParseError';
3462
class UserCanceledError extends BaseError {
  /** Thrown when the user aborts an in-progress operation. */
  constructor() {
    super('The operation was canceled.');
    this.name = UserCanceledError.code;
    this.code = UserCanceledError.code;
    this.data = {};
  }
}
/** @type {'UserCanceledError'} */
UserCanceledError.code = 'UserCanceledError';
3472
class IndexResetError extends BaseError {
  /**
   * Thrown when an index entry is stale (not up to date with HEAD or the
   * working tree) and therefore blocks merging the index.
   *
   * Fixes vs. previous revision: the JSDoc declared `Array<string> filepaths`
   * but the constructor receives a single path string, and the message
   * contained the typo "chages".
   *
   * @param {string} filepath - path of the index entry that is not up to date
   */
  constructor(filepath) {
    super(
      `Could not merge index: Entry for '${filepath}' is not up to date. Either reset the index entry to HEAD, or stage your unstaged changes.`
    );
    this.code = this.name = IndexResetError.code;
    this.data = { filepath };
  }
}
/** @type {'IndexResetError'} */
IndexResetError.code = 'IndexResetError';
3487
3488
3489
// Frozen namespace collecting every public error class for re-export.
// Note: some entries (InternalError, InvalidOidError, NoRefspecError,
// NotFoundError, ObjectTypeError, UnsafeFilepathError, UnmergedPathsError)
// are defined elsewhere in this file, outside this section.
var Errors = /*#__PURE__*/Object.freeze({
  __proto__: null,
  AlreadyExistsError: AlreadyExistsError,
  AmbiguousError: AmbiguousError,
  CheckoutConflictError: CheckoutConflictError,
  CommitNotFetchedError: CommitNotFetchedError,
  EmptyServerResponseError: EmptyServerResponseError,
  FastForwardError: FastForwardError,
  GitPushError: GitPushError,
  HttpError: HttpError,
  InternalError: InternalError,
  InvalidFilepathError: InvalidFilepathError,
  InvalidOidError: InvalidOidError,
  InvalidRefNameError: InvalidRefNameError,
  MaxDepthError: MaxDepthError,
  MergeNotSupportedError: MergeNotSupportedError,
  MergeConflictError: MergeConflictError,
  MissingNameError: MissingNameError,
  MissingParameterError: MissingParameterError,
  MultipleGitError: MultipleGitError,
  NoRefspecError: NoRefspecError,
  NotFoundError: NotFoundError,
  ObjectTypeError: ObjectTypeError,
  ParseError: ParseError,
  PushRejectedError: PushRejectedError,
  RemoteCapabilityError: RemoteCapabilityError,
  SmartHttpError: SmartHttpError,
  UnknownTransportError: UnknownTransportError,
  UnsafeFilepathError: UnsafeFilepathError,
  UrlParseError: UrlParseError,
  UserCanceledError: UserCanceledError,
  UnmergedPathsError: UnmergedPathsError,
  IndexResetError: IndexResetError
});
3524
// Serializes an identity as a git header value: "Name <email> timestamp offset".
function formatAuthor({ name, email, timestamp, timezoneOffset }) {
  const offset = formatTimezoneOffset(timezoneOffset);
  return `${name} <${email}> ${timestamp} ${offset}`
}

// Renders a timezone offset (in minutes) as "+HHMM"/"-HHMM". The sign handling
// deliberately distinguishes -0 from +0 so that a parsed "-0000" offset
// round-trips back to "-0000" instead of "+0000".
function formatTimezoneOffset(minutes) {
  const sign = simpleSign(negateExceptForZero(minutes));
  const total = Math.abs(minutes);
  const hh = String(Math.floor(total / 60)).padStart(2, '0');
  const mm = String(total % 60).padStart(2, '0');
  return (sign === -1 ? '-' : '+') + hh + mm
}

// Like Math.sign, but maps -0 to -1 and everything else non-negative
// (including +0 and NaN) to 1.
function simpleSign(n) {
  const s = Math.sign(n);
  return s === 1 || s === -1 ? s : Object.is(n, -0) ? -1 : 1
}

// Negates n, except +0/-0 which are passed through unchanged.
function negateExceptForZero(n) {
  if (n === 0) return n
  return -n
}
3553
// Canonicalizes line endings: strips all CRs, removes leading blank lines,
// and forces exactly one trailing newline.
function normalizeNewlines(str) {
  const noCarriageReturns = str.replace(/\r/g, '');
  const noLeadingBlanks = noCarriageReturns.replace(/^\n+/, '');
  return noLeadingBlanks.replace(/\n+$/, '') + '\n'
}
3563
// Parses a git identity header value ("Name <email> timestamp offset")
// into its component fields.
function parseAuthor(author) {
  const match = author.match(/^(.*) <(.*)> (.*) (.*)$/);
  const [, name, email, timestamp, offset] = match;
  return {
    name,
    email,
    timestamp: Number(timestamp),
    timezoneOffset: parseTimezoneOffset(offset),
  }
}

// Parses "+HHMM"/"-HHMM" into signed minutes. The result is negated (git
// stores offsets with the opposite sign convention), with ±0 preserved so
// "-0000" survives a parse/format round trip.
function parseTimezoneOffset(offset) {
  const [, sign, hours, minutes] = offset.match(/(\+|-)(\d\d)(\d\d)/);
  const magnitude = Number(hours) * 60 + Number(minutes);
  const value = sign === '+' ? magnitude : -magnitude;
  return negateExceptForZero$1(value)
}

// Negates n, except +0/-0 which are passed through unchanged.
function negateExceptForZero$1(n) {
  if (n === 0) return n
  return -n
}
3589
/**
 * Wrapper around a git annotated-tag object. Accepts the raw tag text as a
 * string or Buffer, or a tag-shaped object ({ object, type, tag, tagger,
 * message, gpgsig? }) which is rendered to text. Provides header parsing,
 * rendering, and PGP-signature helpers.
 */
class GitAnnotatedTag {
  constructor(tag) {
    if (typeof tag === 'string') {
      this._tag = tag;
    } else if (Buffer.isBuffer(tag)) {
      this._tag = tag.toString('utf8');
    } else if (typeof tag === 'object') {
      this._tag = GitAnnotatedTag.render(tag);
    } else {
      throw new InternalError(
        'invalid type passed to GitAnnotatedTag constructor'
      )
    }
  }

  static from(tag) {
    return new GitAnnotatedTag(tag)
  }

  // Serialize a tag-shaped object into the canonical text format. The
  // template's embedded newlines are significant: headers, blank line,
  // message, then the (possibly empty) signature.
  static render(obj) {
    return `object ${obj.object}
type ${obj.type}
tag ${obj.tag}
tagger ${formatAuthor(obj.tagger)}

${obj.message}
${obj.gpgsig ? obj.gpgsig : ''}`
  }

  // Everything before the first blank line (the header section).
  justHeaders() {
    return this._tag.slice(0, this._tag.indexOf('\n\n'))
  }

  // The message body, with any trailing PGP signature removed.
  message() {
    const tag = this.withoutSignature();
    return tag.slice(tag.indexOf('\n\n') + 2)
  }

  // Full parse: header fields plus message and gpgsig (if present).
  parse() {
    return Object.assign(this.headers(), {
      message: this.message(),
      gpgsig: this.gpgsig(),
    })
  }

  render() {
    return this._tag
  }

  // Parse the header section into an object. Continuation lines (leading
  // space) are folded into the previous header; tagger/committer values are
  // further parsed into identity objects.
  headers() {
    const headers = this.justHeaders().split('\n');
    const hs = [];
    for (const h of headers) {
      if (h[0] === ' ') {
        // combine with previous header (without space indent)
        hs[hs.length - 1] += '\n' + h.slice(1);
      } else {
        hs.push(h);
      }
    }
    const obj = {};
    for (const h of hs) {
      const key = h.slice(0, h.indexOf(' '));
      const value = h.slice(h.indexOf(' ') + 1);
      if (Array.isArray(obj[key])) {
        obj[key].push(value);
      } else {
        obj[key] = value;
      }
    }
    if (obj.tagger) {
      obj.tagger = parseAuthor(obj.tagger);
    }
    if (obj.committer) {
      obj.committer = parseAuthor(obj.committer);
    }
    return obj
  }

  // The normalized tag text with any trailing PGP signature block sliced off.
  withoutSignature() {
    const tag = normalizeNewlines(this._tag);
    if (tag.indexOf('\n-----BEGIN PGP SIGNATURE-----') === -1) return tag
    return tag.slice(0, tag.lastIndexOf('\n-----BEGIN PGP SIGNATURE-----'))
  }

  // The PGP signature block (normalized), or undefined when unsigned.
  gpgsig() {
    if (this._tag.indexOf('\n-----BEGIN PGP SIGNATURE-----') === -1) return
    const signature = this._tag.slice(
      this._tag.indexOf('-----BEGIN PGP SIGNATURE-----'),
      this._tag.indexOf('-----END PGP SIGNATURE-----') +
        '-----END PGP SIGNATURE-----'.length
    );
    return normalizeNewlines(signature)
  }

  // The exact byte payload that gets signed: unsigned text + trailing newline.
  payload() {
    return this.withoutSignature() + '\n'
  }

  toObject() {
    return Buffer.from(this._tag, 'utf8')
  }

  // Sign the tag's payload with the caller-provided `sign` callback and
  // return a new, signed GitAnnotatedTag.
  static async sign(tag, sign, secretKey) {
    const payload = tag.payload();
    let { signature } = await sign({ payload, secretKey });
    // renormalize the line endings to the one true line-ending
    signature = normalizeNewlines(signature);
    const signedTag = payload + signature;
    // return a new tag object
    return GitAnnotatedTag.from(signedTag)
  }
}
3703
// Trims the string, prefixes every line with a single space, and appends a
// trailing newline (the format git uses for multi-line header values).
function indent(str) {
  const lines = str.trim().split('\n');
  const indented = lines.map(line => ' ' + line);
  return indented.join('\n') + '\n'
}
3713
// Inverse of indent(): removes at most one leading space from every line.
function outdent(str) {
  const result = [];
  for (const line of str.split('\n')) {
    result.push(line.replace(/^ /, ''));
  }
  return result.join('\n')
}
3720
/**
 * Wrapper around a git commit object. Accepts the raw commit text as a
 * string or Buffer, or a commit-shaped object ({ tree, parent, author,
 * committer, message, gpgsig? }) which is rendered to text. Provides header
 * parsing, rendering, and PGP-signature helpers.
 */
class GitCommit {
  constructor(commit) {
    if (typeof commit === 'string') {
      this._commit = commit;
    } else if (Buffer.isBuffer(commit)) {
      this._commit = commit.toString('utf8');
    } else if (typeof commit === 'object') {
      this._commit = GitCommit.render(commit);
    } else {
      throw new InternalError('invalid type passed to GitCommit constructor')
    }
  }

  // Reassemble a signed commit from the payload that was signed plus the
  // detached signature (inserted as an indented "gpgsig" header).
  static fromPayloadSignature({ payload, signature }) {
    const headers = GitCommit.justHeaders(payload);
    const message = GitCommit.justMessage(payload);
    const commit = normalizeNewlines(
      headers + '\ngpgsig' + indent(signature) + '\n' + message
    );
    return new GitCommit(commit)
  }

  static from(commit) {
    return new GitCommit(commit)
  }

  toObject() {
    return Buffer.from(this._commit, 'utf8')
  }

  // Todo: allow setting the headers and message
  headers() {
    return this.parseHeaders()
  }

  // Todo: allow setting the headers and message
  message() {
    return GitCommit.justMessage(this._commit)
  }

  parse() {
    return Object.assign({ message: this.message() }, this.headers())
  }

  // Everything after the first blank line, newline-normalized.
  static justMessage(commit) {
    return normalizeNewlines(commit.slice(commit.indexOf('\n\n') + 2))
  }

  // Everything before the first blank line (the header section).
  static justHeaders(commit) {
    return commit.slice(0, commit.indexOf('\n\n'))
  }

  // Parse the header section into an object. Continuation lines (leading
  // space) are folded into the previous header. `parent` starts as an array
  // so multiple parent headers accumulate; author/committer are parsed into
  // identity objects.
  parseHeaders() {
    const headers = GitCommit.justHeaders(this._commit).split('\n');
    const hs = [];
    for (const h of headers) {
      if (h[0] === ' ') {
        // combine with previous header (without space indent)
        hs[hs.length - 1] += '\n' + h.slice(1);
      } else {
        hs.push(h);
      }
    }
    const obj = {
      parent: [],
    };
    for (const h of hs) {
      const key = h.slice(0, h.indexOf(' '));
      const value = h.slice(h.indexOf(' ') + 1);
      if (Array.isArray(obj[key])) {
        obj[key].push(value);
      } else {
        obj[key] = value;
      }
    }
    if (obj.author) {
      obj.author = parseAuthor(obj.author);
    }
    if (obj.committer) {
      obj.committer = parseAuthor(obj.committer);
    }
    return obj
  }

  // Serialize the header fields. Falls back to the empty-tree oid when no
  // tree is given, and to the author identity when no committer is given.
  static renderHeaders(obj) {
    let headers = '';
    if (obj.tree) {
      headers += `tree ${obj.tree}\n`;
    } else {
      headers += `tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904\n`; // the null tree
    }
    if (obj.parent) {
      if (obj.parent.length === undefined) {
        throw new InternalError(`commit 'parent' property should be an array`)
      }
      for (const p of obj.parent) {
        headers += `parent ${p}\n`;
      }
    }
    const author = obj.author;
    headers += `author ${formatAuthor(author)}\n`;
    const committer = obj.committer || obj.author;
    headers += `committer ${formatAuthor(committer)}\n`;
    if (obj.gpgsig) {
      headers += 'gpgsig' + indent(obj.gpgsig);
    }
    return headers
  }

  static render(obj) {
    return GitCommit.renderHeaders(obj) + '\n' + normalizeNewlines(obj.message)
  }

  render() {
    return this._commit
  }

  // The commit text with the entire gpgsig header (through the signature's
  // END marker) removed — i.e. the payload that was originally signed.
  withoutSignature() {
    const commit = normalizeNewlines(this._commit);
    if (commit.indexOf('\ngpgsig') === -1) return commit
    const headers = commit.slice(0, commit.indexOf('\ngpgsig'));
    const message = commit.slice(
      commit.indexOf('-----END PGP SIGNATURE-----\n') +
        '-----END PGP SIGNATURE-----\n'.length
    );
    return normalizeNewlines(headers + '\n' + message)
  }

  // Extract just the (outdented) PGP signature block.
  isolateSignature() {
    const signature = this._commit.slice(
      this._commit.indexOf('-----BEGIN PGP SIGNATURE-----'),
      this._commit.indexOf('-----END PGP SIGNATURE-----') +
        '-----END PGP SIGNATURE-----'.length
    );
    return outdent(signature)
  }

  // Sign the commit's unsigned payload with the caller-provided `sign`
  // callback and return a new, signed GitCommit.
  static async sign(commit, sign, secretKey) {
    const payload = commit.withoutSignature();
    const message = GitCommit.justMessage(commit._commit);
    let { signature } = await sign({ payload, secretKey });
    // renormalize the line endings to the one true line-ending
    signature = normalizeNewlines(signature);
    const headers = GitCommit.justHeaders(commit._commit);
    const signedCommit =
      headers + '\n' + 'gpgsig' + indent(signature) + '\n' + message;
    // return a new commit object
    return GitCommit.from(signedCommit)
  }
}
3871
/**
 * Resolves an oid to the tree it (transitively) refers to: annotated tags are
 * peeled to their target, commits to their root tree. Returns { tree, oid }.
 * Throws ObjectTypeError if the chain does not end at a tree.
 */
async function resolveTree({ fs, cache, gitdir, oid }) {
  // The well-known empty-tree oid is never stored on disk — bypass `_readObject`.
  if (oid === '4b825dc642cb6eb9a060e54bf8d69288fbee4904') {
    return { tree: GitTree.from([]), oid }
  }
  const { type, object } = await _readObject({ fs, cache, gitdir, oid });
  switch (type) {
    case 'tag': {
      // Peel the annotated tag and recurse on its target.
      const target = GitAnnotatedTag.from(object).parse().object;
      return resolveTree({ fs, cache, gitdir, oid: target })
    }
    case 'commit': {
      // Recurse on the commit's root tree.
      const treeOid = GitCommit.from(object).parse().tree;
      return resolveTree({ fs, cache, gitdir, oid: treeOid })
    }
    case 'tree':
      return { tree: GitTree.from(object), oid }
    default:
      throw new ObjectTypeError(oid, type, 'tree')
  }
}
3893
/**
 * Walker backend that traverses the tree reachable from a ref in the git
 * object database. Lazily loads tree objects and caches entries (keyed by
 * full path) in a shared Map built by `mapPromise`.
 */
class GitWalkerRepo {
  constructor({ fs, gitdir, ref, cache }) {
    this.fs = fs;
    this.cache = cache;
    this.gitdir = gitdir;
    // Eagerly kick off resolution of the ref's root tree; methods await this.
    this.mapPromise = (async () => {
      const map = new Map();
      let oid;
      try {
        oid = await GitRefManager.resolve({ fs, gitdir, ref });
      } catch (e) {
        if (e instanceof NotFoundError) {
          // Handle fresh branches with no commits
          oid = '4b825dc642cb6eb9a060e54bf8d69288fbee4904';
        }
        // NOTE(review): other error types are swallowed here, leaving `oid`
        // undefined for resolveTree below — confirm this is intentional.
      }
      const tree = await resolveTree({ fs, cache: this.cache, gitdir, oid });
      // Decorate the { tree, oid } result so the root looks like a tree entry.
      tree.type = 'tree';
      tree.mode = '40000';
      map.set('.', tree);
      return map
    })();
    const walker = this;
    // Per-walk entry class; each accessor lazily delegates to the walker and
    // caches its result on the entry (false = "not computed yet").
    this.ConstructEntry = class TreeEntry {
      constructor(fullpath) {
        this._fullpath = fullpath;
        this._type = false;
        this._mode = false;
        this._stat = false;
        this._content = false;
        this._oid = false;
      }

      async type() {
        return walker.type(this)
      }

      async mode() {
        return walker.mode(this)
      }

      async stat() {
        return walker.stat(this)
      }

      async content() {
        return walker.content(this)
      }

      async oid() {
        return walker.oid(this)
      }
    };
  }

  // Lists the child paths of a tree entry (or null for non-trees), caching
  // each child entry in the shared map along the way.
  async readdir(entry) {
    const filepath = entry._fullpath;
    const { fs, cache, gitdir } = this;
    const map = await this.mapPromise;
    const obj = map.get(filepath);
    if (!obj) throw new Error(`No obj for ${filepath}`)
    const oid = obj.oid;
    if (!oid) throw new Error(`No oid for obj ${JSON.stringify(obj)}`)
    if (obj.type !== 'tree') {
      // TODO: support submodules (type === 'commit')
      return null
    }
    const { type, object } = await _readObject({ fs, cache, gitdir, oid });
    if (type !== obj.type) {
      throw new ObjectTypeError(oid, type, obj.type)
    }
    const tree = GitTree.from(object);
    // cache all entries
    for (const entry of tree) {
      map.set(join(filepath, entry.path), entry);
    }
    return tree.entries().map(entry => join(filepath, entry.path))
  }

  async type(entry) {
    if (entry._type === false) {
      const map = await this.mapPromise;
      const { type } = map.get(entry._fullpath);
      entry._type = type;
    }
    return entry._type
  }

  async mode(entry) {
    if (entry._mode === false) {
      const map = await this.mapPromise;
      const { mode } = map.get(entry._fullpath);
      // Tree entries store mode as an octal string; normalize to a number.
      entry._mode = normalizeMode(parseInt(mode, 8));
    }
    return entry._mode
  }

  // Repo objects have no filesystem stats; always resolves to undefined.
  async stat(_entry) {}

  // Blob contents as a Uint8Array; undefined for non-blobs.
  async content(entry) {
    if (entry._content === false) {
      const map = await this.mapPromise;
      const { fs, cache, gitdir } = this;
      const obj = map.get(entry._fullpath);
      const oid = obj.oid;
      const { type, object } = await _readObject({ fs, cache, gitdir, oid });
      if (type !== 'blob') {
        entry._content = undefined;
      } else {
        entry._content = new Uint8Array(object);
      }
    }
    return entry._content
  }

  async oid(entry) {
    if (entry._oid === false) {
      const map = await this.mapPromise;
      const obj = map.get(entry._fullpath);
      entry._oid = obj.oid;
    }
    return entry._oid
  }
}
4018
4019// @ts-check
4020
4021/**
4022 * @param {object} args
4023 * @param {string} [args.ref='HEAD']
4024 * @returns {Walker}
4025 */
4026function TREE({ ref = 'HEAD' } = {}) {
4027 const o = Object.create(null);
4028 Object.defineProperty(o, GitWalkSymbol, {
4029 value: function({ fs, gitdir, cache }) {
4030 return new GitWalkerRepo({ fs, gitdir, ref, cache })
4031 },
4032 });
4033 Object.freeze(o);
4034 return o
4035}
4036
4037// @ts-check
4038
/**
 * Walker backend that traverses the working directory on disk. Stats, file
 * contents, and oids are computed lazily and cached on each entry; oids reuse
 * the SHA1 recorded in the index when the file's stats match.
 */
class GitWalkerFs {
  constructor({ fs, dir, gitdir, cache }) {
    this.fs = fs;
    this.cache = cache;
    this.dir = dir;
    this.gitdir = gitdir;
    const walker = this;
    // Per-walk entry class; each accessor lazily delegates to the walker and
    // caches its result on the entry (false = "not computed yet").
    this.ConstructEntry = class WorkdirEntry {
      constructor(fullpath) {
        this._fullpath = fullpath;
        this._type = false;
        this._mode = false;
        this._stat = false;
        this._content = false;
        this._oid = false;
      }

      async type() {
        return walker.type(this)
      }

      async mode() {
        return walker.mode(this)
      }

      async stat() {
        return walker.stat(this)
      }

      async content() {
        return walker.content(this)
      }

      async oid() {
        return walker.oid(this)
      }
    };
  }

  // Lists child paths of a directory entry, or null when not a directory.
  async readdir(entry) {
    const filepath = entry._fullpath;
    const { fs, dir } = this;
    const names = await fs.readdir(join(dir, filepath));
    if (names === null) return null
    return names.map(name => join(filepath, name))
  }

  // type/mode are by-products of stat(); compute it on demand.
  async type(entry) {
    if (entry._type === false) {
      await entry.stat();
    }
    return entry._type
  }

  async mode(entry) {
    if (entry._mode === false) {
      await entry.stat();
    }
    return entry._mode
  }

  // lstat the file, classify it (tree/blob/special), and cache normalized stats.
  async stat(entry) {
    if (entry._stat === false) {
      const { fs, dir } = this;
      let stat = await fs.lstat(`${dir}/${entry._fullpath}`);
      if (!stat) {
        throw new Error(
          `ENOENT: no such file or directory, lstat '${entry._fullpath}'`
        )
      }
      let type = stat.isDirectory() ? 'tree' : 'blob';
      if (type === 'blob' && !stat.isFile() && !stat.isSymbolicLink()) {
        type = 'special';
      }
      entry._type = type;
      stat = normalizeStats(stat);
      entry._mode = stat.mode;
      // workaround for a BrowserFS edge case
      if (stat.size === -1 && entry._actualSize) {
        stat.size = entry._actualSize;
      }
      entry._stat = stat;
    }
    return entry._stat
  }

  // File contents as a Uint8Array; undefined for directories.
  async content(entry) {
    if (entry._content === false) {
      const { fs, dir } = this;
      if ((await entry.type()) === 'tree') {
        entry._content = undefined;
      } else {
        const content = await fs.read(`${dir}/${entry._fullpath}`);
        // workaround for a BrowserFS edge case
        entry._actualSize = content.length;
        if (entry._stat && entry._stat.size === -1) {
          entry._stat.size = entry._actualSize;
        }
        entry._content = new Uint8Array(content);
      }
    }
    return entry._content
  }

  // Blob oid: reuse the index's SHA1 when stats match; otherwise hash the
  // file contents, and opportunistically refresh the index entry's stats.
  async oid(entry) {
    if (entry._oid === false) {
      const { fs, gitdir, cache } = this;
      let oid;
      // See if we can use the SHA1 hash in the index.
      await GitIndexManager.acquire({ fs, gitdir, cache }, async function(
        index
      ) {
        const stage = index.entriesMap.get(entry._fullpath);
        const stats = await entry.stat();
        if (!stage || compareStats(stats, stage)) {
          const content = await entry.content();
          if (content === undefined) {
            oid = undefined;
          } else {
            oid = await shasum(
              GitObject.wrap({ type: 'blob', object: await entry.content() })
            );
            // Update the stats in the index so we will get a "cache hit" next time
            // 1) if we can (because the oid and mode are the same)
            // 2) and only if we need to (because other stats differ)
            if (
              stage &&
              oid === stage.oid &&
              stats.mode === stage.mode &&
              compareStats(stats, stage)
            ) {
              index.insert({
                filepath: entry._fullpath,
                stats,
                oid: oid,
              });
            }
          }
        } else {
          // Use the index SHA1 rather than compute it
          oid = stage.oid;
        }
      });
      entry._oid = oid;
    }
    return entry._oid
  }
}
4187
4188// @ts-check
4189
4190/**
4191 * @returns {Walker}
4192 */
4193function WORKDIR() {
4194 const o = Object.create(null);
4195 Object.defineProperty(o, GitWalkSymbol, {
4196 value: function({ fs, dir, gitdir, cache }) {
4197 return new GitWalkerFs({ fs, dir, gitdir, cache })
4198 },
4199 });
4200 Object.freeze(o);
4201 return o
4202}
4203
4204// @ts-check
4205
// Returns the half-open integer range [start, end) as an array.
// (originally via https://dev.to/namirsab/comment/2050)
function arrayRange(start, end) {
  const result = [];
  for (let i = start; i < end; i++) {
    result.push(i);
  }
  return result
}
4211
// Single-level array flatten. Uses native Array#flat when available and a
// reduce/concat fallback for hosts that predate it.
const flat =
  typeof Array.prototype.flat === 'undefined'
    ? function(entries) {
        let accumulated = [];
        for (const chunk of entries) {
          accumulated = accumulated.concat(chunk);
        }
        return accumulated
      }
    : function(entries) {
        return entries.flat()
      };
4217
// Convenient for computing unions/joins of sorted lists.
class RunningMinimum {
  // Tracks the smallest value seen so far. `value` is read (not set) directly
  // by callers; null means "nothing considered yet".
  constructor() {
    this.value = null;
  }

  consider(candidate) {
    if (candidate === null || candidate === undefined) return
    if (this.value === null || candidate < this.value) {
      this.value = candidate;
    }
  }

  reset() {
    this.value = null;
  }
}

// Merge N sorted string iterators into a single iterator of N-tuples: each
// yielded array holds, per input, either the current common-minimum string or
// null when that input lacks it. Assumes every input is sorted ascending
// (index entries, tree entries, and readdir output all are).
function* unionOfIterators(sets) {
  const min = new RunningMinimum();
  const heads = [];
  const count = sets.length;
  // Prime one lookahead value per iterator; exhausted iterators keep
  // returning undefined, which we use as the sentinel.
  for (let i = 0; i < count; i++) {
    heads[i] = sets[i].next().value;
    if (heads[i] !== undefined) {
      min.consider(heads[i]);
    }
  }
  if (min.value === null) return
  for (;;) {
    const smallest = min.value;
    min.reset();
    const row = [];
    for (let i = 0; i < count; i++) {
      if (heads[i] !== undefined && heads[i] === smallest) {
        // This input has the minimum: emit it and advance the iterator.
        row[i] = heads[i];
        heads[i] = sets[i].next().value;
      } else {
        row[i] = null;
      }
      if (heads[i] !== undefined) {
        min.consider(heads[i]);
      }
    }
    yield row;
    if (min.value === null) return
  }
}
4298
4299// @ts-check
4300
4301/**
4302 * @param {object} args
4303 * @param {import('../models/FileSystem.js').FileSystem} args.fs
4304 * @param {object} args.cache
4305 * @param {string} [args.dir]
4306 * @param {string} [args.gitdir=join(dir,'.git')]
4307 * @param {Walker[]} args.trees
4308 * @param {WalkerMap} [args.map]
4309 * @param {WalkerReduce} [args.reduce]
4310 * @param {WalkerIterate} [args.iterate]
4311 *
4312 * @returns {Promise<any>} The finished tree-walking result
4313 *
4314 * @see {WalkerMap}
4315 *
4316 */
async function _walk({
  fs,
  cache,
  dir,
  gitdir,
  trees,
  // @ts-ignore
  map = async (_, entry) => entry,
  // The default reducer is a flatmap that filters out undefineds.
  reduce = async (parent, children) => {
    const flatten = flat(children);
    if (parent !== undefined) flatten.unshift(parent);
    return flatten
  },
  // The default iterate function walks all children concurrently
  iterate = (walk, children) => Promise.all([...children].map(walk)),
}) {
  // Instantiate one concrete walker per tree proxy via its GitWalkSymbol factory.
  const walkers = trees.map(proxy =>
    proxy[GitWalkSymbol]({ fs, dir, gitdir, cache })
  );

  // Every walker starts at its own root, represented by the path '.'.
  const root = new Array(walkers.length).fill('.');
  const range = arrayRange(0, walkers.length);
  // Wrap each raw entry (or undefined) in its walker's entry class, read the
  // children of each, and merge the child lists into one lockstep iterator.
  const unionWalkerFromReaddir = async entries => {
    range.map(i => {
      entries[i] = entries[i] && new walkers[i].ConstructEntry(entries[i]);
    });
    const subdirs = await Promise.all(
      range.map(i => (entries[i] ? walkers[i].readdir(entries[i]) : []))
    );
    // Now process child directories
    const iterators = subdirs
      .map(array => (array === null ? [] : array))
      .map(array => array[Symbol.iterator]());
    return {
      entries,
      // Yields one aligned tuple of entries per distinct child name.
      children: unionOfIterators(iterators),
    }
  };

  const walk = async root => {
    const { entries, children } = await unionWalkerFromReaddir(root);
    // At least one walker must have an entry here; borrow its path for all.
    const fullpath = entries.find(entry => entry && entry._fullpath)._fullpath;
    const parent = await map(fullpath, entries);
    // A `null` return from `map` prunes this entire subtree from the walk;
    // `undefined` results from children are dropped before reducing.
    if (parent !== null) {
      let walkedChildren = await iterate(walk, children);
      walkedChildren = walkedChildren.filter(x => x !== undefined);
      return reduce(parent, walkedChildren)
    }
  };
  return walk(root)
}
4369
4370/**
4371 * Removes the directory at the specified filepath recursively. Used internally to replicate the behavior of
4372 * fs.promises.rm({ recursive: true, force: true }) from Node.js 14 and above when not available. If the provided
4373 * filepath resolves to a file, it will be removed.
4374 *
4375 * @param {import('../models/FileSystem.js').FileSystem} fs
4376 * @param {string} filepath - The file or directory to remove.
4377 */
/**
 * Recursively delete `filepath` using only the basic wrapped fs primitives.
 * Replicates fs.promises.rm({ recursive: true, force: true }) for backends
 * that lack it; if `filepath` is a plain file it is simply unlinked.
 *
 * @param {import('../models/FileSystem.js').FileSystem} fs
 * @param {string} filepath - The file or directory to remove.
 */
async function rmRecursive(fs, filepath) {
  const children = await fs.readdir(filepath);
  // `readdir` reports null for non-directories: just unlink the file.
  if (children == null) {
    await fs.rm(filepath);
    return
  }
  // An empty directory can be removed directly.
  if (children.length === 0) {
    await fs.rmdir(filepath);
    return
  }
  // Delete every child concurrently, then the (now empty) directory itself.
  await Promise.all(
    children.map(async name => {
      const childPath = join(filepath, name);
      const stat = await fs.lstat(childPath);
      if (!stat) return
      return stat.isDirectory() ? rmRecursive(fs, childPath) : fs.rm(childPath)
    })
  );
  await fs.rmdir(filepath);
}
4396
/**
 * Heuristically determine whether `fs` is promise-based.
 * Probes `readFile` with no arguments: a promise-based API returns a
 * Promise (whose eventual rejection we absorb), while a callback-based API
 * throws synchronously because the callback argument is missing.
 */
function isPromiseFs(fs) {
  let probe;
  try {
    // Attach a no-op rejection handler so the deliberately-failing probe
    // never surfaces as an unhandled rejection.
    probe = fs.readFile().catch(e => e);
  } catch (e) {
    probe = e;
  }
  return probe.constructor.name === 'Promise'
}
4409
// List of commands all filesystems are expected to provide. `rm` is not
// included since it may not exist and must be handled as a special case.
// Each command is bound onto the wrapper object as `_<name>` by `bindFs`.
const commands = [
  'readFile',
  'writeFile',
  'mkdir',
  'rmdir',
  'unlink',
  'stat',
  'lstat',
  'readdir',
  'readlink',
  'symlink',
];
4424
/**
 * Copy the standard fs commands onto `target` as promise-returning
 * `_<command>` methods (promisifying callback-style backends with pify),
 * and wire up `target._rm` from the best available primitive.
 *
 * @param {object} target - The wrapper object (a FileSystem instance).
 * @param {object} fs - The client-provided fs implementation.
 */
function bindFs(target, fs) {
  // Probe only once: `isPromiseFs` actually invokes `fs.readFile()`, so the
  // previous pair of calls performed the (failing) probe read twice.
  const promisified = isPromiseFs(fs);

  for (const command of commands) {
    target[`_${command}`] = promisified
      ? fs[command].bind(fs)
      : pify(fs[command].bind(fs));
  }

  // Handle the special case of `rm`:
  // 1. use a native `rm` if the backend has one;
  // 2. else use `rmdir` when its arity suggests it accepts an options
  //    argument (promise API: (path, opts); callback API: (path, opts, cb));
  // 3. else fall back to the manual recursive implementation.
  if (promisified) {
    if (fs.rm) target._rm = fs.rm.bind(fs);
    else if (fs.rmdir.length > 1) target._rm = fs.rmdir.bind(fs);
    else target._rm = rmRecursive.bind(null, target);
  } else {
    if (fs.rm) target._rm = pify(fs.rm.bind(fs));
    else if (fs.rmdir.length > 2) target._rm = pify(fs.rmdir.bind(fs));
    else target._rm = rmRecursive.bind(null, target);
  }
}
4447
4448/**
4449 * This is just a collection of helper functions really. At least that's how it started.
4450 */
4451class FileSystem {
4452 constructor(fs) {
4453 if (typeof fs._original_unwrapped_fs !== 'undefined') return fs
4454
4455 const promises = Object.getOwnPropertyDescriptor(fs, 'promises');
4456 if (promises && promises.enumerable) {
4457 bindFs(this, fs.promises);
4458 } else {
4459 bindFs(this, fs);
4460 }
4461 this._original_unwrapped_fs = fs;
4462 }
4463
4464 /**
4465 * Return true if a file exists, false if it doesn't exist.
4466 * Rethrows errors that aren't related to file existance.
4467 */
4468 async exists(filepath, options = {}) {
4469 try {
4470 await this._stat(filepath);
4471 return true
4472 } catch (err) {
4473 if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
4474 return false
4475 } else {
4476 console.log('Unhandled error in "FileSystem.exists()" function', err);
4477 throw err
4478 }
4479 }
4480 }
4481
4482 /**
4483 * Return the contents of a file if it exists, otherwise returns null.
4484 *
4485 * @param {string} filepath
4486 * @param {object} [options]
4487 *
4488 * @returns {Promise<Buffer|string|null>}
4489 */
4490 async read(filepath, options = {}) {
4491 try {
4492 let buffer = await this._readFile(filepath, options);
4493 // Convert plain ArrayBuffers to Buffers
4494 if (typeof buffer !== 'string') {
4495 buffer = Buffer.from(buffer);
4496 }
4497 return buffer
4498 } catch (err) {
4499 return null
4500 }
4501 }
4502
4503 /**
4504 * Write a file (creating missing directories if need be) without throwing errors.
4505 *
4506 * @param {string} filepath
4507 * @param {Buffer|Uint8Array|string} contents
4508 * @param {object|string} [options]
4509 */
4510 async write(filepath, contents, options = {}) {
4511 try {
4512 await this._writeFile(filepath, contents, options);
4513 return
4514 } catch (err) {
4515 // Hmm. Let's try mkdirp and try again.
4516 await this.mkdir(dirname(filepath));
4517 await this._writeFile(filepath, contents, options);
4518 }
4519 }
4520
4521 /**
4522 * Make a directory (or series of nested directories) without throwing an error if it already exists.
4523 */
4524 async mkdir(filepath, _selfCall = false) {
4525 try {
4526 await this._mkdir(filepath);
4527 return
4528 } catch (err) {
4529 // If err is null then operation succeeded!
4530 if (err === null) return
4531 // If the directory already exists, that's OK!
4532 if (err.code === 'EEXIST') return
4533 // Avoid infinite loops of failure
4534 if (_selfCall) throw err
4535 // If we got a "no such file or directory error" backup and try again.
4536 if (err.code === 'ENOENT') {
4537 const parent = dirname(filepath);
4538 // Check to see if we've gone too far
4539 if (parent === '.' || parent === '/' || parent === filepath) throw err
4540 // Infinite recursion, what could go wrong?
4541 await this.mkdir(parent);
4542 await this.mkdir(filepath, true);
4543 }
4544 }
4545 }
4546
4547 /**
4548 * Delete a file without throwing an error if it is already deleted.
4549 */
4550 async rm(filepath) {
4551 try {
4552 await this._unlink(filepath);
4553 } catch (err) {
4554 if (err.code !== 'ENOENT') throw err
4555 }
4556 }
4557
4558 /**
4559 * Delete a directory without throwing an error if it is already deleted.
4560 */
4561 async rmdir(filepath, opts) {
4562 try {
4563 if (opts && opts.recursive) {
4564 await this._rm(filepath, opts);
4565 } else {
4566 await this._rmdir(filepath);
4567 }
4568 } catch (err) {
4569 if (err.code !== 'ENOENT') throw err
4570 }
4571 }
4572
4573 /**
4574 * Read a directory without throwing an error is the directory doesn't exist
4575 */
4576 async readdir(filepath) {
4577 try {
4578 const names = await this._readdir(filepath);
4579 // Ordering is not guaranteed, and system specific (Windows vs Unix)
4580 // so we must sort them ourselves.
4581 names.sort(compareStrings);
4582 return names
4583 } catch (err) {
4584 if (err.code === 'ENOTDIR') return null
4585 return []
4586 }
4587 }
4588
4589 /**
4590 * Return a flast list of all the files nested inside a directory
4591 *
4592 * Based on an elegant concurrent recursive solution from SO
4593 * https://stackoverflow.com/a/45130990/2168416
4594 */
4595 async readdirDeep(dir) {
4596 const subdirs = await this._readdir(dir);
4597 const files = await Promise.all(
4598 subdirs.map(async subdir => {
4599 const res = dir + '/' + subdir;
4600 return (await this._stat(res)).isDirectory()
4601 ? this.readdirDeep(res)
4602 : res
4603 })
4604 );
4605 return files.reduce((a, f) => a.concat(f), [])
4606 }
4607
4608 /**
4609 * Return the Stats of a file/symlink if it exists, otherwise returns null.
4610 * Rethrows errors that aren't related to file existance.
4611 */
4612 async lstat(filename) {
4613 try {
4614 const stats = await this._lstat(filename);
4615 return stats
4616 } catch (err) {
4617 if (err.code === 'ENOENT') {
4618 return null
4619 }
4620 throw err
4621 }
4622 }
4623
4624 /**
4625 * Reads the contents of a symlink if it exists, otherwise returns null.
4626 * Rethrows errors that aren't related to file existance.
4627 */
4628 async readlink(filename, opts = { encoding: 'buffer' }) {
4629 // Note: FileSystem.readlink returns a buffer by default
4630 // so we can dump it into GitObject.write just like any other file.
4631 try {
4632 const link = await this._readlink(filename, opts);
4633 return Buffer.isBuffer(link) ? link : Buffer.from(link)
4634 } catch (err) {
4635 if (err.code === 'ENOENT') {
4636 return null
4637 }
4638 throw err
4639 }
4640 }
4641
4642 /**
4643 * Write the contents of buffer to a symlink.
4644 */
4645 async writelink(filename, buffer) {
4646 return this._symlink(buffer.toString('utf8'), filename)
4647 }
4648}
4649
/**
 * Throw a MissingParameterError when a required argument was not supplied.
 *
 * @param {string} name - The parameter name, used in the error.
 * @param {*} value - The value the caller passed.
 */
function assertParameter(name, value) {
  if (typeof value === 'undefined') {
    throw new MissingParameterError(name)
  }
}
4655
4656// @ts-check
4657/**
4658 *
4659 * @param {WalkerEntry} entry
4660 * @param {WalkerEntry} base
4661 *
4662 */
async function modified(entry, base) {
  // Missing on both sides: nothing exists, so nothing changed.
  if (!entry && !base) return false
  // Present on exactly one side: that is a change by definition.
  if (!entry || !base) return true
  const entryType = await entry.type();
  const baseType = await base.type();
  // Two trees are treated as equal here; their contents are compared
  // entry-by-entry elsewhere in the walk.
  if (entryType === 'tree' && baseType === 'tree') {
    return false
  }
  // Matching type, mode, and oid means identical content.
  if (
    entryType === baseType &&
    (await entry.mode()) === (await base.mode()) &&
    (await entry.oid()) === (await base.oid())
  ) {
    return false
  }
  return true
}
4679
4680// @ts-check
4681
4682/**
4683 * Abort a merge in progress.
4684 *
4685 * Based on the behavior of git reset --merge, i.e. "Resets the index and updates the files in the working tree that are different between <commit> and HEAD, but keeps those which are different between the index and working tree (i.e. which have changes which have not been added). If a file that is different between <commit> and the index has unstaged changes, reset is aborted."
4686 *
4687 * Essentially, abortMerge will reset any files affected by merge conflicts to their last known good version at HEAD.
4688 * Any unstaged changes are saved and any staged changes are reset as well.
4689 *
4690 * NOTE: The behavior of this command differs slightly from canonical git in that an error will be thrown if a file exists in the index and nowhere else.
4691 * Canonical git will reset the file and continue aborting the merge in this case.
4692 *
4693 * **WARNING:** Running git merge with non-trivial uncommitted changes is discouraged: while possible, it may leave you in a state that is hard to back out of in the case of a conflict.
4694 * If there were uncommitted changes when the merge started (and especially if those changes were further modified after the merge was started), `git.abortMerge` will in some cases be unable to reconstruct the original (pre-merge) changes.
4695 *
4696 * @param {object} args
4697 * @param {FsClient} args.fs - a file system implementation
4698 * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
4699 * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
4700 * @param {string} [args.commit='HEAD'] - commit to reset the index and worktree to, defaults to HEAD
4701 * @param {object} [args.cache] - a [cache](cache.md) object
4702 *
4703 * @returns {Promise<void>} Resolves successfully once the git index has been updated
4704 *
4705 */
async function abortMerge({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  commit = 'HEAD',
  cache = {},
}) {
  try {
    assertParameter('fs', _fs);
    assertParameter('dir', dir);
    assertParameter('gitdir', gitdir);

    const fs = new FileSystem(_fs);
    // Walk the target commit, the working directory, and the index in lockstep.
    const trees = [TREE({ ref: commit }), WORKDIR(), STAGE()];
    let unmergedPaths = [];

    // Snapshot the conflicted paths before the walk; the STAGE walker below
    // takes its own index lock.
    await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
      unmergedPaths = index.unmergedPaths;
    });

    const results = await _walk({
      fs,
      cache,
      dir,
      gitdir,
      trees,
      map: async function(path, [head, workdir, index]) {
        // "staged" = no unstaged edits (workdir matches the index).
        const staged = !(await modified(workdir, index));
        const unmerged = unmergedPaths.includes(path);
        const unmodified = !(await modified(index, head));

        if (staged || unmerged) {
          // Safe to reset: capture the HEAD version. If the path does not
          // exist at HEAD, return undefined (filtered out by _walk).
          return head
            ? {
                path,
                mode: await head.mode(),
                oid: await head.oid(),
                type: await head.type(),
                content: await head.content(),
              }
            : undefined
        }

        // Unstaged changes on a path that also differs from HEAD would be
        // lost by a reset, so refuse to proceed.
        if (unmodified) return false
        else throw new IndexResetError(path)
      },
    });

    await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
      // Reset paths in index and worktree, this can't be done in _walk because the
      // STAGE walker acquires its own index lock.

      for (const entry of results) {
        // false = unmodified relative to HEAD; nothing to do.
        if (entry === false) continue

        // entry is not false, so from here we can assume index = workdir
        if (!entry) {
          // NOTE(review): `entry` is falsy in this branch, so `entry.path`
          // would throw if it were ever reached — presumably _walk's
          // undefined-filtering makes this dead code; confirm before relying
          // on the deletion behavior described here.
          await fs.rmdir(`${dir}/${entry.path}`, { recursive: true });
          index.delete({ filepath: entry.path });
          continue
        }

        if (entry.type === 'blob') {
          // Restore the HEAD content on disk and restage it at stage 0.
          const content = new TextDecoder().decode(entry.content);
          await fs.write(`${dir}/${entry.path}`, content, { mode: entry.mode });
          index.insert({
            filepath: entry.path,
            oid: entry.oid,
            stage: 0,
          });
        }
      }
    });
  } catch (err) {
    // Tag the error with its public entry point before rethrowing.
    err.caller = 'git.abortMerge';
    throw err
  }
}
4784
// I'm putting this in a Manager because I reckon it could benefit
// from a LOT of caching.
class GitIgnoreManager {
  /**
   * Decide whether `filepath` is ignored, honoring .git/info/exclude and
   * every .gitignore file between `dir` and the file itself.
   */
  static async isIgnored({ fs, dir, gitdir = join(dir, '.git'), filepath }) {
    // ALWAYS ignore ".git" folders.
    if (basename(filepath) === '.git') return true
    // '.' is not a valid gitignore entry, so '.' is never ignored
    if (filepath === '.') return false
    // Check and load exclusion rules from project exclude file (.git/info/exclude)
    let excludes = '';
    const excludesFile = join(gitdir, 'info', 'exclude');
    if (await fs.exists(excludesFile)) {
      excludes = await fs.read(excludesFile, 'utf8');
    }
    // Find all the .gitignore files that could affect this file
    const pairs = [
      {
        gitignore: join(dir, '.gitignore'),
        filepath,
      },
    ];
    const pieces = filepath.split('/').filter(Boolean);
    for (let i = 1; i < pieces.length; i++) {
      const folder = pieces.slice(0, i).join('/');
      const file = pieces.slice(i).join('/');
      // Patterns in a nested .gitignore are matched against paths relative
      // to that folder.
      pairs.push({
        gitignore: join(dir, folder, '.gitignore'),
        filepath: file,
      });
    }
    let ignoredStatus = false;
    for (const p of pairs) {
      let file;
      try {
        file = await fs.read(p.gitignore, 'utf8');
      } catch (err) {
        // NOTE(review): 'NOENT' looks like a typo for 'ENOENT'; in practice
        // FileSystem.read returns null instead of throwing, so this catch
        // appears to be dead code — confirm before changing.
        if (err.code === 'NOENT') continue
      }
      const ign = ignore().add(excludes);
      ign.add(file);
      // If the parent directory is excluded, we are done.
      // "It is not possible to re-include a file if a parent directory of that file is excluded. Git doesn’t list excluded directories for performance reasons, so any patterns on contained files have no effect, no matter where they are defined."
      // source: https://git-scm.com/docs/gitignore
      const parentdir = dirname(p.filepath);
      if (parentdir !== '.' && ign.ignores(parentdir)) return true
      // If the file is currently ignored, test for UNignoring.
      if (ignoredStatus) {
        ignoredStatus = !ign.test(p.filepath).unignored;
      } else {
        ignoredStatus = ign.test(p.filepath).ignored;
      }
    }
    return ignoredStatus
  }
}
4840
/**
 * Store an already-deflated object at its loose-object path
 * (objects/<first 2 oid chars>/<remaining chars>).
 * Existing objects are never overwritten — this helps avoid EPERM errors,
 * and content-addressed files should be immutable anyway.
 */
async function writeObjectLoose({ fs, gitdir, object, format, oid }) {
  if (format !== 'deflated') {
    throw new InternalError(
      'GitObjectStoreLoose expects objects to write to be in deflated format'
    )
  }
  const objectPath = `${gitdir}/objects/${oid.slice(0, 2)}/${oid.slice(2)}`;
  if (await fs.exists(objectPath)) return
  await fs.write(objectPath, object);
}
4854
/* eslint-env node, browser */

// Memoized result of the CompressionStream feature probe; null means the
// probe (run lazily by `deflate`) has not executed yet.
let supportsCompressionStream = null;
4858
/**
 * Deflate a buffer, preferring the native CompressionStream API and falling
 * back to pako where it is unavailable. The feature probe runs only once and
 * is cached in the module-level `supportsCompressionStream`.
 */
async function deflate(buffer) {
  if (supportsCompressionStream === null) {
    supportsCompressionStream = testCompressionStream();
  }
  if (supportsCompressionStream) {
    return browserDeflate(buffer)
  }
  return pako.deflate(buffer)
}
4867
/**
 * Compress bytes with the native CompressionStream('deflate') by piping a
 * Blob stream through it and collecting the output via a Response body.
 *
 * @returns {Promise<Uint8Array>} the zlib-deflated bytes
 */
async function browserDeflate(buffer) {
  const compressed = new Blob([buffer])
    .stream()
    .pipeThrough(new CompressionStream('deflate'));
  const bytes = await new Response(compressed).arrayBuffer();
  return new Uint8Array(bytes)
}
4873
/**
 * Feature-detect a usable CompressionStream: both the constructor and
 * `Blob.prototype.stream` must work (React Native has Blob but lacks the
 * `stream` method, so constructing alone is not enough).
 */
function testCompressionStream() {
  try {
    const stream = new CompressionStream('deflate');
    new Blob([]).stream();
    if (stream) return true
  } catch (_) {
    // Fall through: any failure means "unsupported".
  }
  return false
}
4885
/**
 * Hash (if needed), deflate, and store a git object loosely.
 * `deflated` input must arrive with its `oid` already computed; otherwise
 * the content is wrapped (unless already `wrapped`) so the hash covers the
 * object envelope, then compressed.
 *
 * @returns {Promise<string>} the SHA-1 oid of the object
 */
async function _writeObject({
  fs,
  gitdir,
  type,
  object,
  format = 'content',
  oid = undefined,
  dryRun = false,
}) {
  if (format !== 'deflated') {
    if (format !== 'wrapped') {
      object = GitObject.wrap({ type, object });
    }
    oid = await shasum(object);
    object = Buffer.from(await deflate(object));
  }
  if (dryRun) return oid
  await writeObjectLoose({ fs, gitdir, object, format: 'deflated', oid });
  return oid
}
4907
/**
 * Convert Windows path separators to POSIX ones, in place.
 * Every byte 92 ('\') becomes byte 47 ('/'); the same (mutated) buffer is
 * returned for chaining.
 */
function posixifyPathBuffer(buffer) {
  let at = buffer.indexOf(92);
  while (at !== -1) {
    buffer[at] = 47;
    at = buffer.indexOf(92, at + 1);
  }
  return buffer
}
4913
4914// @ts-check
4915
4916/**
4917 * Add a file to the git index (aka staging area)
4918 *
4919 * @param {object} args
4920 * @param {FsClient} args.fs - a file system implementation
4921 * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
4922 * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
4923 * @param {string|string[]} args.filepath - The path to the file to add to the index
4924 * @param {object} [args.cache] - a [cache](cache.md) object
4925 * @param {boolean} [args.force=false] - add to index even if matches gitignore. Think `git add --force`
 * @param {boolean} [args.parallel=true] - process each input file in parallel. Parallel processing will result in more memory consumption but less process time
4927 *
4928 * @returns {Promise<void>} Resolves successfully once the git index has been updated
4929 *
4930 * @example
4931 * await fs.promises.writeFile('/tutorial/README.md', `# TEST`)
4932 * await git.add({ fs, dir: '/tutorial', filepath: 'README.md' })
4933 * console.log('done')
4934 *
4935 */
async function add({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  filepath,
  cache = {},
  force = false,
  parallel = true,
}) {
  try {
    // Validate the required arguments up front.
    assertParameter('fs', _fs);
    assertParameter('dir', dir);
    assertParameter('gitdir', gitdir);
    assertParameter('filepath', filepath);

    const fs = new FileSystem(_fs);
    // All index mutations happen while holding the index lock.
    await GitIndexManager.acquire({ fs, gitdir, cache }, index =>
      addToIndex({ dir, gitdir, fs, filepath, index, force, parallel })
    );
  } catch (err) {
    // Tag the error with its public entry point before rethrowing.
    err.caller = 'git.add';
    throw err
  }
}
4968
/**
 * Stage one or more paths (files or, recursively, directories) into `index`.
 * Ignored paths are skipped unless `force`; each file's content is written
 * as a blob and the entry inserted. Failures are collected: one rejection is
 * rethrown as-is, several are wrapped in a MultipleGitError.
 */
async function addToIndex({
  dir,
  gitdir,
  fs,
  filepath,
  index,
  force,
  parallel,
}) {
  // TODO: Should ignore UNLESS it's already in the index.
  filepath = Array.isArray(filepath) ? filepath : [filepath];
  const promises = filepath.map(async currentFilepath => {
    if (!force) {
      const ignored = await GitIgnoreManager.isIgnored({
        fs,
        dir,
        gitdir,
        filepath: currentFilepath,
      });
      if (ignored) return
    }
    const stats = await fs.lstat(join(dir, currentFilepath));
    if (!stats) throw new NotFoundError(currentFilepath)

    if (stats.isDirectory()) {
      // Recurse into each child, either all at once or one at a time
      // (parallel trades memory for speed).
      const children = await fs.readdir(join(dir, currentFilepath));
      if (parallel) {
        const promises = children.map(child =>
          addToIndex({
            dir,
            gitdir,
            fs,
            filepath: [join(currentFilepath, child)],
            index,
            force,
            parallel,
          })
        );
        await Promise.all(promises);
      } else {
        for (const child of children) {
          await addToIndex({
            dir,
            gitdir,
            fs,
            filepath: [join(currentFilepath, child)],
            index,
            force,
            parallel,
          });
        }
      }
    } else {
      // Symlinks are stored as blobs containing the (POSIX-separated) target
      // path; regular files are stored verbatim.
      const object = stats.isSymbolicLink()
        ? await fs.readlink(join(dir, currentFilepath)).then(posixifyPathBuffer)
        : await fs.read(join(dir, currentFilepath));
      if (object === null) throw new NotFoundError(currentFilepath)
      const oid = await _writeObject({ fs, gitdir, type: 'blob', object });
      index.insert({ filepath: currentFilepath, stats, oid });
    }
  });

  // Wait for every path, then surface failures without losing successes.
  const settledPromises = await Promise.allSettled(promises);
  const rejectedPromises = settledPromises
    .filter(settle => settle.status === 'rejected')
    .map(settle => settle.reason);
  if (rejectedPromises.length > 1) {
    throw new MultipleGitError(rejectedPromises)
  }
  if (rejectedPromises.length === 1) {
    throw rejectedPromises[0]
  }

  const fulfilledPromises = settledPromises
    .filter(settle => settle.status === 'fulfilled' && settle.value)
    .map(settle => settle.value);

  return fulfilledPromises
}
5048
5049// @ts-check
5050
5051/**
5052 *
5053 * @param {Object} args
5054 * @param {import('../models/FileSystem.js').FileSystem} args.fs
5055 * @param {object} args.cache
5056 * @param {SignCallback} [args.onSign]
5057 * @param {string} args.gitdir
5058 * @param {string} args.message
5059 * @param {Object} args.author
5060 * @param {string} args.author.name
5061 * @param {string} args.author.email
5062 * @param {number} args.author.timestamp
5063 * @param {number} args.author.timezoneOffset
5064 * @param {Object} args.committer
5065 * @param {string} args.committer.name
5066 * @param {string} args.committer.email
5067 * @param {number} args.committer.timestamp
5068 * @param {number} args.committer.timezoneOffset
5069 * @param {string} [args.signingKey]
5070 * @param {boolean} [args.dryRun = false]
5071 * @param {boolean} [args.noUpdateBranch = false]
5072 * @param {string} [args.ref]
5073 * @param {string[]} [args.parent]
5074 * @param {string} [args.tree]
5075 *
5076 * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly created commit.
5077 */
async function _commit({
  fs,
  cache,
  onSign,
  gitdir,
  message,
  author,
  committer,
  signingKey,
  dryRun = false,
  noUpdateBranch = false,
  ref,
  parent,
  tree,
}) {
  if (!ref) {
    // No explicit ref: commit onto whatever branch HEAD points at.
    // (depth: 2 — presumably stops resolution at the branch ref rather than
    // the oid; see GitRefManager.resolve.)
    ref = await GitRefManager.resolve({
      fs,
      gitdir,
      ref: 'HEAD',
      depth: 2,
    });
  }

  return GitIndexManager.acquire(
    { fs, gitdir, cache, allowUnmerged: false },
    async function(index) {
      // Build the tree from the index unless the caller supplied one.
      const inodes = flatFileListToDirectoryStructure(index.entries);
      const inode = inodes.get('.');
      if (!tree) {
        tree = await constructTree({ fs, gitdir, inode, dryRun });
      }
      if (!parent) {
        // Default parent is the current tip of `ref`, if it exists.
        try {
          parent = [
            await GitRefManager.resolve({
              fs,
              gitdir,
              ref,
            }),
          ];
        } catch (err) {
          // Probably an initial commit
          parent = [];
        }
      } else {
        // ensure that the parents are oids, not refs
        parent = await Promise.all(
          parent.map(p => {
            return GitRefManager.resolve({ fs, gitdir, ref: p })
          })
        );
      }

      let comm = GitCommit.from({
        tree,
        parent,
        author,
        committer,
        message,
      });
      if (signingKey) {
        comm = await GitCommit.sign(comm, onSign, signingKey);
      }
      const oid = await _writeObject({
        fs,
        gitdir,
        type: 'commit',
        object: comm.toObject(),
        dryRun,
      });
      if (!noUpdateBranch && !dryRun) {
        // Update branch pointer
        await GitRefManager.writeRef({
          fs,
          gitdir,
          ref,
          value: oid,
        });
      }
      return oid
    }
  )
}
5162
/**
 * Recursively serialize an inode hierarchy into git tree objects.
 * Depth-first: each subtree is written first so its oid is available when
 * the parent tree that references it is serialized.
 *
 * @returns {Promise<string>} the oid of the written tree
 */
async function constructTree({ fs, gitdir, inode, dryRun }) {
  for (const child of inode.children) {
    if (child.type === 'tree') {
      // Directories always get mode 040000 and a freshly written subtree oid.
      child.metadata.mode = '040000';
      child.metadata.oid = await constructTree({
        fs,
        gitdir,
        inode: child,
        dryRun,
      });
    }
  }
  const entries = inode.children.map(child => ({
    mode: child.metadata.mode,
    path: child.basename,
    oid: child.metadata.oid,
    type: child.type,
  }));
  const tree = GitTree.from(entries);
  return _writeObject({
    fs,
    gitdir,
    type: 'tree',
    object: tree.toObject(),
    dryRun,
  })
}
5188
5189// @ts-check
5190
/**
 * Resolve `oid:filepath` to the oid of the entry at that path.
 * Leading slashes are rejected (not auto-stripped) because shells like the
 * Git Terminal for Windows expand leading-slash paths to absolute local
 * paths; trailing slashes are rejected as invalid too.
 */
async function resolveFilepath({ fs, cache, gitdir, oid, filepath }) {
  if (filepath.startsWith('/')) {
    throw new InvalidFilepathError('leading-slash')
  }
  if (filepath.endsWith('/')) {
    throw new InvalidFilepathError('trailing-slash')
  }
  const startingOid = oid;
  const { tree, oid: rootTreeOid } = await resolveTree({
    fs,
    cache,
    gitdir,
    oid,
  });
  // An empty filepath names the root tree itself.
  if (filepath === '') {
    return rootTreeOid
  }
  return _resolveFilepath({
    fs,
    cache,
    gitdir,
    tree,
    pathArray: filepath.split('/'),
    oid: startingOid,
    filepath,
  })
}
5220
/**
 * Walk `pathArray` down the given tree, one segment per recursion level,
 * returning the oid of the final entry. `oid` and `filepath` are the
 * original root oid and full path, carried along only for error reporting.
 */
async function _resolveFilepath({
  fs,
  cache,
  gitdir,
  tree,
  pathArray,
  oid,
  filepath,
}) {
  // Consume the next path segment.
  const name = pathArray.shift();
  for (const entry of tree) {
    if (entry.path !== name) continue
    if (pathArray.length === 0) {
      // Last segment: this entry is the answer.
      return entry.oid
    }
    // More segments remain, so this entry must itself be a tree.
    const { type, object } = await _readObject({
      fs,
      cache,
      gitdir,
      oid: entry.oid,
    });
    if (type !== 'tree') {
      throw new ObjectTypeError(oid, type, 'tree', filepath)
    }
    return _resolveFilepath({
      fs,
      cache,
      gitdir,
      tree: GitTree.from(object),
      pathArray,
      oid,
      filepath,
    })
  }
  throw new NotFoundError(`file or directory found at "${oid}:${filepath}"`)
}
5260
5261// @ts-check
5262
5263/**
5264 *
5265 * @typedef {Object} ReadTreeResult - The object returned has the following schema:
5266 * @property {string} oid - SHA-1 object id of this tree
5267 * @property {TreeObject} tree - the parsed tree object
5268 */
5269
5270/**
5271 * @param {object} args
5272 * @param {import('../models/FileSystem.js').FileSystem} args.fs
5273 * @param {any} args.cache
5274 * @param {string} args.gitdir
5275 * @param {string} args.oid
5276 * @param {string} [args.filepath]
5277 *
5278 * @returns {Promise<ReadTreeResult>}
5279 */
/**
 * Read a tree, optionally descending to `filepath` within it first.
 * The oid is peeled (commit/tag → tree) by resolveTree.
 *
 * @returns {Promise<ReadTreeResult>} the resolved tree oid and its entries
 */
async function _readTree({
  fs,
  cache,
  gitdir,
  oid,
  filepath = undefined,
}) {
  // When a filepath is given, first locate the oid of that sub-entry.
  if (filepath !== undefined) {
    oid = await resolveFilepath({ fs, cache, gitdir, oid, filepath });
  }
  const resolved = await resolveTree({ fs, cache, gitdir, oid });
  return {
    oid: resolved.oid,
    tree: resolved.tree.entries(),
  }
}
5297
5298// @ts-check
5299
5300/**
5301 * @param {object} args
5302 * @param {import('../models/FileSystem.js').FileSystem} args.fs
5303 * @param {string} args.gitdir
5304 * @param {TreeObject} args.tree
5305 *
5306 * @returns {Promise<string>}
5307 */
/**
 * Serialize a TreeObject and store it as a loose tree object.
 *
 * @returns {Promise<string>} the oid of the written tree
 */
async function _writeTree({ fs, gitdir, tree }) {
  const object = GitTree.from(tree).toObject();
  return _writeObject({
    fs,
    gitdir,
    type: 'tree',
    object,
    format: 'content',
  })
}
5320
5321// @ts-check
5322
5323/**
5324 * @param {object} args
5325 * @param {import('../models/FileSystem.js').FileSystem} args.fs
5326 * @param {object} args.cache
5327 * @param {SignCallback} [args.onSign]
5328 * @param {string} args.gitdir
5329 * @param {string} args.ref
5330 * @param {string} args.oid
5331 * @param {string|Uint8Array} args.note
5332 * @param {boolean} [args.force]
5333 * @param {Object} args.author
5334 * @param {string} args.author.name
5335 * @param {string} args.author.email
5336 * @param {number} args.author.timestamp
5337 * @param {number} args.author.timezoneOffset
5338 * @param {Object} args.committer
5339 * @param {string} args.committer.name
5340 * @param {string} args.committer.email
5341 * @param {number} args.committer.timestamp
5342 * @param {number} args.committer.timezoneOffset
5343 * @param {string} [args.signingKey]
5344 *
5345 * @returns {Promise<string>}
5346 */
5347
async function _addNote({
  fs,
  cache,
  onSign,
  gitdir,
  ref,
  oid,
  note,
  force,
  author,
  committer,
  signingKey,
}) {
  // Get the current note commit
  let parent;
  try {
    parent = await GitRefManager.resolve({ gitdir, fs, ref });
  } catch (err) {
    // A missing notes ref just means this is the first note on this ref;
    // anything else is a real error.
    if (!(err instanceof NotFoundError)) {
      throw err
    }
  }

  // I'm using the "empty tree" magic number here for brevity
  const result = await _readTree({
    fs,
    cache,
    gitdir,
    oid: parent || '4b825dc642cb6eb9a060e54bf8d69288fbee4904',
  });
  let tree = result.tree;

  // Handle the case where a note already exists
  if (force) {
    // Drop any existing note entry; it is replaced below.
    tree = tree.filter(entry => entry.path !== oid);
  } else {
    for (const entry of tree) {
      if (entry.path === oid) {
        throw new AlreadyExistsError('note', oid)
      }
    }
  }

  // Create the note blob
  if (typeof note === 'string') {
    note = Buffer.from(note, 'utf8');
  }
  const noteOid = await _writeObject({
    fs,
    gitdir,
    type: 'blob',
    object: note,
    format: 'content',
  });

  // Create the new note tree
  // Note entries are blobs named after the oid of the annotated object.
  tree.push({ mode: '100644', path: oid, oid: noteOid, type: 'blob' });
  const treeOid = await _writeTree({
    fs,
    gitdir,
    tree,
  });

  // Create the new note commit
  const commitOid = await _commit({
    fs,
    cache,
    onSign,
    gitdir,
    ref,
    tree: treeOid,
    parent: parent && [parent],
    message: `Note added by 'isomorphic-git addNote'\n`,
    author,
    committer,
    signingKey,
  });

  return commitOid
}
5428
5429// @ts-check
5430
5431/**
5432 * @param {Object} args
5433 * @param {import('../models/FileSystem.js').FileSystem} args.fs
5434 * @param {string} args.gitdir
5435 * @param {string} args.path
5436 *
5437 * @returns {Promise<any>} Resolves with the config value
5438 *
5439 * @example
5440 * // Read config value
5441 * let value = await git.getConfig({
5442 * dir: '$input((/))',
5443 * path: '$input((user.name))'
5444 * })
5445 * console.log(value)
5446 *
5447 */
async function _getConfig({ fs, gitdir, path }) {
  // Load the parsed git config for this repo and look up the dotted path.
  const cfg = await GitConfigManager.get({ fs, gitdir });
  return cfg.get(path)
}
5452
5453/**
5454 *
5455 * @returns {Promise<void | {name: string, email: string, date: Date, timestamp: number, timezoneOffset: number }>}
5456 */
5457async function normalizeAuthorObject({ fs, gitdir, author = {} }) {
5458 let { name, email, timestamp, timezoneOffset } = author;
5459 name = name || (await _getConfig({ fs, gitdir, path: 'user.name' }));
5460 email = email || (await _getConfig({ fs, gitdir, path: 'user.email' })) || '';
5461
5462 if (name === undefined) {
5463 return undefined
5464 }
5465
5466 timestamp = timestamp != null ? timestamp : Math.floor(Date.now() / 1000);
5467 timezoneOffset =
5468 timezoneOffset != null
5469 ? timezoneOffset
5470 : new Date(timestamp * 1000).getTimezoneOffset();
5471
5472 return { name, email, timestamp, timezoneOffset }
5473}
5474
5475/**
5476 *
5477 * @returns {Promise<void | {name: string, email: string, timestamp: number, timezoneOffset: number }>}
5478 */
5479async function normalizeCommitterObject({
5480 fs,
5481 gitdir,
5482 author,
5483 committer,
5484}) {
5485 committer = Object.assign({}, committer || author);
5486 // Match committer's date to author's one, if omitted
5487 if (author) {
5488 committer.timestamp = committer.timestamp || author.timestamp;
5489 committer.timezoneOffset = committer.timezoneOffset || author.timezoneOffset;
5490 }
5491 committer = await normalizeAuthorObject({ fs, gitdir, author: committer });
5492 return committer
5493}
5494
5495// @ts-check
5496
5497/**
5498 * Add or update an object note
5499 *
5500 * @param {object} args
5501 * @param {FsClient} args.fs - a file system implementation
5502 * @param {SignCallback} [args.onSign] - a PGP signing implementation
5503 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
5504 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
5505 * @param {string} [args.ref] - The notes ref to look under
5506 * @param {string} args.oid - The SHA-1 object id of the object to add the note to.
5507 * @param {string|Uint8Array} args.note - The note to add
 * @param {boolean} [args.force] - Overwrite the note if it already exists.
5509 * @param {Object} [args.author] - The details about the author.
5510 * @param {string} [args.author.name] - Default is `user.name` config.
5511 * @param {string} [args.author.email] - Default is `user.email` config.
5512 * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
5513 * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`.
5514 * @param {Object} [args.committer = author] - The details about the note committer, in the same format as the author parameter. If not specified, the author details are used.
5515 * @param {string} [args.committer.name] - Default is `user.name` config.
5516 * @param {string} [args.committer.email] - Default is `user.email` config.
5517 * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
5518 * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`.
5519 * @param {string} [args.signingKey] - Sign the note commit using this private PGP key.
5520 * @param {object} [args.cache] - a [cache](cache.md) object
5521 *
5522 * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the commit object for the added note.
5523 */
5524
async function addNote({
  fs: _fs,
  onSign,
  dir,
  gitdir = join(dir, '.git'),
  ref = 'refs/notes/commits',
  oid,
  note,
  force,
  author: _author,
  committer: _committer,
  signingKey,
  cache = {},
}) {
  try {
    // Validate required parameters up front for uniform error messages.
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('oid', oid);
    assertParameter('note', note);
    if (signingKey) {
      assertParameter('onSign', onSign);
    }
    const fs = new FileSystem(_fs);

    // Fill in missing author/committer details from the repo config.
    const author = await normalizeAuthorObject({ fs, gitdir, author: _author });
    if (!author) throw new MissingNameError('author')

    const committer = await normalizeCommitterObject({
      fs,
      gitdir,
      author,
      committer: _committer,
    });
    if (!committer) throw new MissingNameError('committer')

    return await _addNote({
      // `fs` is already a FileSystem instance (wrapped above) — the previous
      // `new FileSystem(fs)` double-wrap was redundant.
      fs,
      cache,
      onSign,
      gitdir,
      ref,
      oid,
      note,
      force,
      author,
      committer,
      signingKey,
    })
  } catch (err) {
    // Tag the error so callers know which public API raised it.
    err.caller = 'git.addNote';
    throw err
  }
}
5578
5579// @ts-check
5580
5581/**
5582 * @param {object} args
5583 * @param {import('../models/FileSystem.js').FileSystem} args.fs
5584 * @param {string} args.gitdir
5585 * @param {string} args.remote
5586 * @param {string} args.url
5587 * @param {boolean} args.force
5588 *
5589 * @returns {Promise<void>}
5590 *
5591 */
async function _addRemote({ fs, gitdir, remote, url, force }) {
  // Reject remote names that git would not accept as a ref component.
  const cleaned = cleanGitRef.clean(remote);
  if (remote !== cleaned) {
    throw new InvalidRefNameError(remote, cleaned)
  }

  const config = await GitConfigManager.get({ fs, gitdir });
  if (!force) {
    // Refuse to overwrite an existing remote — unless the URL is unchanged,
    // in which case re-setting the same value is harmless.
    const existingRemotes = await config.getSubsections('remote');
    if (
      existingRemotes.includes(remote) &&
      url !== (await config.get(`remote.${remote}.url`))
    ) {
      throw new AlreadyExistsError('remote', remote)
    }
  }

  // Write the url plus the standard fetch refspec, then persist the config.
  await config.set(`remote.${remote}.url`, url);
  await config.set(
    `remote.${remote}.fetch`,
    `+refs/heads/*:refs/remotes/${remote}/*`
  );
  await GitConfigManager.save({ fs, gitdir, config });
}
5615
5616// @ts-check
5617
5618/**
5619 * Add or update a remote
5620 *
5621 * @param {object} args
5622 * @param {FsClient} args.fs - a file system implementation
5623 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
5624 * @param {string} [args.gitdir] - [required] The [git directory](dir-vs-gitdir.md) path
5625 * @param {string} args.remote - The name of the remote
5626 * @param {string} args.url - The URL of the remote
5627 * @param {boolean} [args.force = false] - Instead of throwing an error if a remote named `remote` already exists, overwrite the existing remote.
5628 *
5629 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
5630 *
5631 * @example
5632 * await git.addRemote({
5633 * fs,
5634 * dir: '/tutorial',
5635 * remote: 'upstream',
5636 * url: 'https://github.com/isomorphic-git/isomorphic-git'
5637 * })
5638 * console.log('done')
5639 *
5640 */
async function addRemote({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  remote,
  url,
  force = false,
}) {
  try {
    // Validate required parameters up front for uniform error messages.
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('remote', remote);
    assertParameter('url', url);

    const wrappedFs = new FileSystem(fs);
    return await _addRemote({ fs: wrappedFs, gitdir, remote, url, force })
  } catch (err) {
    // Tag the error so callers know which public API raised it.
    err.caller = 'git.addRemote';
    throw err
  }
}
5666
5667// @ts-check
5668
5669/**
5670 * Create an annotated tag.
5671 *
5672 * @param {object} args
5673 * @param {import('../models/FileSystem.js').FileSystem} args.fs
5674 * @param {any} args.cache
5675 * @param {SignCallback} [args.onSign]
5676 * @param {string} args.gitdir
5677 * @param {string} args.ref
5678 * @param {string} [args.message = ref]
5679 * @param {string} [args.object = 'HEAD']
5680 * @param {object} [args.tagger]
5681 * @param {string} args.tagger.name
5682 * @param {string} args.tagger.email
5683 * @param {number} args.tagger.timestamp
5684 * @param {number} args.tagger.timezoneOffset
5685 * @param {string} [args.gpgsig]
5686 * @param {string} [args.signingKey]
5687 * @param {boolean} [args.force = false]
5688 *
5689 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
5690 *
5691 * @example
5692 * await git.annotatedTag({
5693 * dir: '$input((/))',
5694 * ref: '$input((test-tag))',
5695 * message: '$input((This commit is awesome))',
5696 * tagger: {
5697 * name: '$input((Mr. Test))',
5698 * email: '$input((mrtest@example.com))'
5699 * }
5700 * })
5701 * console.log('done')
5702 *
5703 */
async function _annotatedTag({
  fs,
  cache,
  onSign,
  gitdir,
  ref,
  tagger,
  message = ref,
  gpgsig,
  object,
  signingKey,
  force = false,
}) {
  // Normalize to a fully-qualified tag ref.
  if (!ref.startsWith('refs/tags/')) {
    ref = `refs/tags/${ref}`;
  }

  // Refuse to clobber an existing tag unless `force` is set.
  if (!force && (await GitRefManager.exists({ fs, gitdir, ref }))) {
    throw new AlreadyExistsError('tag', ref)
  }

  // Resolve the target (a ref name or oid); defaults to HEAD.
  const oid = await GitRefManager.resolve({
    fs,
    gitdir,
    ref: object || 'HEAD',
  });

  // The tag object must record the type of the object it points at.
  const { type } = await _readObject({ fs, cache, gitdir, oid });

  let tagObject = GitAnnotatedTag.from({
    object: oid,
    type,
    tag: ref.replace('refs/tags/', ''),
    tagger,
    message,
    gpgsig,
  });
  if (signingKey) {
    tagObject = await GitAnnotatedTag.sign(tagObject, onSign, signingKey);
  }

  // Persist the tag object, then point the ref at it.
  const value = await _writeObject({
    fs,
    gitdir,
    type: 'tag',
    object: tagObject.toObject(),
  });
  await GitRefManager.writeRef({ fs, gitdir, ref, value });
}
5751
5752// @ts-check
5753
5754/**
5755 * Create an annotated tag.
5756 *
5757 * @param {object} args
5758 * @param {FsClient} args.fs - a file system implementation
5759 * @param {SignCallback} [args.onSign] - a PGP signing implementation
5760 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
5761 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
5762 * @param {string} args.ref - What to name the tag
5763 * @param {string} [args.message = ref] - The tag message to use.
5764 * @param {string} [args.object = 'HEAD'] - The SHA-1 object id the tag points to. (Will resolve to a SHA-1 object id if value is a ref.) By default, the commit object which is referred by the current `HEAD` is used.
5765 * @param {object} [args.tagger] - The details about the tagger.
5766 * @param {string} [args.tagger.name] - Default is `user.name` config.
5767 * @param {string} [args.tagger.email] - Default is `user.email` config.
5768 * @param {number} [args.tagger.timestamp=Math.floor(Date.now()/1000)] - Set the tagger timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
5769 * @param {number} [args.tagger.timezoneOffset] - Set the tagger timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`.
 * @param {string} [args.gpgsig] - The gpgsig attached to the tag object. (Mutually exclusive with the `signingKey` option.)
5771 * @param {string} [args.signingKey] - Sign the tag object using this private PGP key. (Mutually exclusive with the `gpgsig` option.)
5772 * @param {boolean} [args.force = false] - Instead of throwing an error if a tag named `ref` already exists, overwrite the existing tag. Note that this option does not modify the original tag object itself.
5773 * @param {object} [args.cache] - a [cache](cache.md) object
5774 *
5775 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
5776 *
5777 * @example
5778 * await git.annotatedTag({
5779 * fs,
5780 * dir: '/tutorial',
5781 * ref: 'test-tag',
5782 * message: 'This commit is awesome',
5783 * tagger: {
5784 * name: 'Mr. Test',
5785 * email: 'mrtest@example.com'
5786 * }
5787 * })
5788 * console.log('done')
5789 *
5790 */
async function annotatedTag({
  fs: _fs,
  onSign,
  dir,
  gitdir = join(dir, '.git'),
  ref,
  tagger: _tagger,
  message = ref,
  gpgsig,
  object,
  signingKey,
  force = false,
  cache = {},
}) {
  try {
    // Validate required parameters up front for uniform error messages.
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('ref', ref);
    if (signingKey) {
      assertParameter('onSign', onSign);
    }
    const fs = new FileSystem(_fs);

    // Fill in missing tagger details from the repo config.
    const tagger = await normalizeAuthorObject({ fs, gitdir, author: _tagger });
    if (!tagger) throw new MissingNameError('tagger')

    const options = {
      fs,
      cache,
      onSign,
      gitdir,
      ref,
      tagger,
      message,
      gpgsig,
      object,
      signingKey,
      force,
    };
    return await _annotatedTag(options)
  } catch (err) {
    // Tag the error so callers know which public API raised it.
    err.caller = 'git.annotatedTag';
    throw err
  }
}
5836
5837// @ts-check
5838
5839/**
5840 * Create a branch
5841 *
5842 * @param {object} args
5843 * @param {import('../models/FileSystem.js').FileSystem} args.fs
5844 * @param {string} args.gitdir
5845 * @param {string} args.ref
5846 * @param {string} [args.object = 'HEAD']
5847 * @param {boolean} [args.checkout = false]
5848 * @param {boolean} [args.force = false]
5849 *
5850 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
5851 *
5852 * @example
5853 * await git.branch({ dir: '$input((/))', ref: '$input((develop))' })
5854 * console.log('done')
5855 *
5856 */
async function _branch({
  fs,
  gitdir,
  ref,
  object,
  checkout = false,
  force = false,
}) {
  // Reject branch names that git would not accept as a ref component.
  const cleaned = cleanGitRef.clean(ref);
  if (ref !== cleaned) {
    throw new InvalidRefNameError(ref, cleaned)
  }

  const fullref = `refs/heads/${ref}`;

  // Refuse to clobber an existing branch unless `force` is set.
  if (!force && (await GitRefManager.exists({ fs, gitdir, ref: fullref }))) {
    throw new AlreadyExistsError('branch', ref, false)
  }

  // Resolve the start point. In a freshly-initialized repo HEAD resolves to
  // nothing, so we deliberately swallow the failure and leave `oid` unset.
  let oid;
  try {
    oid = await GitRefManager.resolve({ fs, gitdir, ref: object || 'HEAD' });
  } catch (e) {
    // Probably an empty repo
  }

  // Point the new branch ref at the resolved commit (when there is one).
  if (oid) {
    await GitRefManager.writeRef({ fs, gitdir, ref: fullref, value: oid });
  }

  // Optionally make HEAD point at the new branch.
  if (checkout) {
    await GitRefManager.writeSymbolicRef({
      fs,
      gitdir,
      ref: 'HEAD',
      value: fullref,
    });
  }
}
5901
5902// @ts-check
5903
5904/**
5905 * Create a branch
5906 *
5907 * @param {object} args
5908 * @param {FsClient} args.fs - a file system implementation
5909 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
5910 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
5911 * @param {string} args.ref - What to name the branch
5912 * @param {string} [args.object = 'HEAD'] - What oid to use as the start point. Accepts a symbolic ref.
5913 * @param {boolean} [args.checkout = false] - Update `HEAD` to point at the newly created branch
 * @param {boolean} [args.force = false] - Instead of throwing an error if a branch named `ref` already exists, overwrite the existing branch.
5915 *
5916 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
5917 *
5918 * @example
5919 * await git.branch({ fs, dir: '/tutorial', ref: 'develop' })
5920 * console.log('done')
5921 *
5922 */
async function branch({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  ref,
  object,
  checkout = false,
  force = false,
}) {
  try {
    // Validate required parameters up front for uniform error messages.
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('ref', ref);

    const options = {
      fs: new FileSystem(fs),
      gitdir,
      ref,
      object,
      checkout,
      force,
    };
    return await _branch(options)
  } catch (err) {
    // Tag the error so callers know which public API raised it.
    err.caller = 'git.branch';
    throw err
  }
}
5949
/**
 * Decide whether `filepath` is worth visiting when a walk is restricted to
 * the base path `root`: true when `filepath` equals `root`, is an ancestor
 * directory of `root` (we must descend through it to reach `root`), or lies
 * inside `root`.
 *
 * The comparison is path-segment aware: previously a bare prefix test meant
 * `foo` matched `foobar`, so unrelated sibling paths were walked.
 *
 * @param {string} filepath
 * @param {string} [root]
 * @returns {boolean}
 */
const worthWalking = (filepath, root) => {
  // No restriction (or the repo root itself) — everything is worth walking.
  if (filepath === '.' || root == null || root.length === 0 || root === '.') {
    return true
  }
  if (root.length >= filepath.length) {
    // `filepath` is worth walking only if it is `root` itself or one of
    // `root`'s ancestor directories.
    return root === filepath || root.startsWith(`${filepath}/`)
  } else {
    // `filepath` is worth walking only if it lives inside `root`.
    return filepath.startsWith(`${root}/`)
  }
};
5960
5961// @ts-check
5962
5963/**
5964 * @param {object} args
5965 * @param {import('../models/FileSystem.js').FileSystem} args.fs
5966 * @param {any} args.cache
5967 * @param {ProgressCallback} [args.onProgress]
5968 * @param {string} args.dir
5969 * @param {string} args.gitdir
5970 * @param {string} args.ref
5971 * @param {string[]} [args.filepaths]
5972 * @param {string} args.remote
5973 * @param {boolean} args.noCheckout
5974 * @param {boolean} [args.noUpdateHead]
5975 * @param {boolean} [args.dryRun]
5976 * @param {boolean} [args.force]
5977 * @param {boolean} [args.track]
5978 *
5979 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
5980 *
5981 */
// Core checkout: resolves `ref` (creating a remote-tracking branch when it
// does not exist locally), plans workdir changes, applies them in four
// order-dependent phases, and finally updates HEAD.
async function _checkout({
  fs,
  cache,
  onProgress,
  dir,
  gitdir,
  remote,
  ref,
  filepaths,
  noCheckout,
  noUpdateHead,
  dryRun,
  force,
  track = true,
}) {
  // Get tree oid
  let oid;
  try {
    oid = await GitRefManager.resolve({ fs, gitdir, ref });
    // TODO: Figure out what to do if both 'ref' and 'remote' are specified, ref already exists,
    // and is configured to track a different remote.
  } catch (err) {
    // HEAD must always resolve; anything else may be a remote branch name.
    if (ref === 'HEAD') throw err
    // If `ref` doesn't exist, create a new remote tracking branch
    // Figure out the commit to checkout
    const remoteRef = `${remote}/${ref}`;
    oid = await GitRefManager.resolve({
      fs,
      gitdir,
      ref: remoteRef,
    });
    if (track) {
      // Set up remote tracking branch
      const config = await GitConfigManager.get({ fs, gitdir });
      await config.set(`branch.${ref}.remote`, remote);
      await config.set(`branch.${ref}.merge`, `refs/heads/${ref}`);
      await GitConfigManager.save({ fs, gitdir, config });
    }
    // Create a new branch that points at that same commit
    await GitRefManager.writeRef({
      fs,
      gitdir,
      ref: `refs/heads/${ref}`,
      value: oid,
    });
  }

  // Update working dir
  if (!noCheckout) {
    let ops;
    // First pass - just analyze files (not directories) and figure out what needs to be done
    try {
      ops = await analyze({
        fs,
        cache,
        onProgress,
        dir,
        gitdir,
        ref,
        force,
        filepaths,
      });
    } catch (err) {
      // Throw a more helpful error message for this common mistake.
      if (err instanceof NotFoundError && err.data.what === oid) {
        throw new CommitNotFetchedError(ref, oid)
      } else {
        throw err
      }
    }

    // Report conflicts
    const conflicts = ops
      .filter(([method]) => method === 'conflict')
      .map(([method, fullpath]) => fullpath);
    if (conflicts.length > 0) {
      throw new CheckoutConflictError(conflicts)
    }

    // Collect errors
    const errors = ops
      .filter(([method]) => method === 'error')
      .map(([method, fullpath]) => fullpath);
    if (errors.length > 0) {
      throw new InternalError(errors.join(', '))
    }

    if (dryRun) {
      // Since the format of 'ops' is in flux, I really would rather folk besides myself not start relying on it
      // return ops
      return
    }

    // Second pass - execute planned changes
    // The cheapest semi-parallel solution without computing a full dependency graph will be
    // to just do ops in 4 dumb phases: delete files, delete dirs, create dirs, write files

    let count = 0;
    const total = ops.length;
    // Phase 1: delete files (and their index entries), in parallel.
    await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
      await Promise.all(
        ops
          .filter(
            ([method]) => method === 'delete' || method === 'delete-index'
          )
          .map(async function([method, fullpath]) {
            const filepath = `${dir}/${fullpath}`;
            if (method === 'delete') {
              await fs.rm(filepath);
            }
            index.delete({ filepath: fullpath });
            if (onProgress) {
              await onProgress({
                phase: 'Updating workdir',
                loaded: ++count,
                total,
              });
            }
          })
      );
    });

    // Phase 2: delete directories.
    // Note: this cannot be done naively in parallel
    // (nested directories must empty out bottom-up, so we iterate in order).
    await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
      for (const [method, fullpath] of ops) {
        if (method === 'rmdir' || method === 'rmdir-index') {
          const filepath = `${dir}/${fullpath}`;
          try {
            if (method === 'rmdir-index') {
              index.delete({ filepath: fullpath });
            }
            await fs.rmdir(filepath);
            if (onProgress) {
              await onProgress({
                phase: 'Updating workdir',
                loaded: ++count,
                total,
              });
            }
          } catch (e) {
            // A non-empty directory is expected (e.g. untracked files) and
            // deliberately left in place; anything else is a real failure.
            if (e.code === 'ENOTEMPTY') {
              console.log(
                `Did not delete ${fullpath} because directory is not empty`
              );
            } else {
              throw e
            }
          }
        }
      }
    });

    // Phase 3: create directories, in parallel.
    await Promise.all(
      ops
        .filter(([method]) => method === 'mkdir' || method === 'mkdir-index')
        .map(async function([_, fullpath]) {
          const filepath = `${dir}/${fullpath}`;
          await fs.mkdir(filepath);
          if (onProgress) {
            await onProgress({
              phase: 'Updating workdir',
              loaded: ++count,
              total,
            });
          }
        })
    );

    // Phase 4: write file contents and update index entries, in parallel.
    // NOTE: `oid` here shadows the outer commit oid — it is the blob oid
    // for each individual op.
    await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
      await Promise.all(
        ops
          .filter(
            ([method]) =>
              method === 'create' ||
              method === 'create-index' ||
              method === 'update' ||
              method === 'mkdir-index'
          )
          .map(async function([method, fullpath, oid, mode, chmod]) {
            const filepath = `${dir}/${fullpath}`;
            try {
              if (method !== 'create-index' && method !== 'mkdir-index') {
                const { object } = await _readObject({ fs, cache, gitdir, oid });
                if (chmod) {
                  // Note: the mode option of fs.write only works when creating files,
                  // not updating them. Since the `fs` plugin doesn't expose `chmod` this
                  // is our only option.
                  await fs.rm(filepath);
                }
                if (mode === 0o100644) {
                  // regular file
                  await fs.write(filepath, object);
                } else if (mode === 0o100755) {
                  // executable file
                  await fs.write(filepath, object, { mode: 0o777 });
                } else if (mode === 0o120000) {
                  // symlink
                  await fs.writelink(filepath, object);
                } else {
                  throw new InternalError(
                    `Invalid mode 0o${mode.toString(8)} detected in blob ${oid}`
                  )
                }
              }

              const stats = await fs.lstat(filepath);
              // We can't trust the executable bit returned by lstat on Windows,
              // so we need to preserve this value from the TREE.
              // TODO: Figure out how git handles this internally.
              if (mode === 0o100755) {
                stats.mode = 0o755;
              }
              // Submodules are present in the git index but use a unique mode different from trees
              if (method === 'mkdir-index') {
                stats.mode = 0o160000;
              }
              index.insert({
                filepath: fullpath,
                stats,
                oid,
              });
              if (onProgress) {
                await onProgress({
                  phase: 'Updating workdir',
                  loaded: ++count,
                  total,
                });
              }
            } catch (e) {
              // NOTE(review): this logs and swallows any per-file failure, so a
              // partially-written checkout can succeed silently — confirm this
              // is intentional before relying on it.
              console.log(e);
            }
          })
      );
    });
  }

  // Update HEAD
  if (!noUpdateHead) {
    const fullRef = await GitRefManager.expand({ fs, gitdir, ref });
    if (fullRef.startsWith('refs/heads')) {
      await GitRefManager.writeSymbolicRef({
        fs,
        gitdir,
        ref: 'HEAD',
        value: fullRef,
      });
    } else {
      // detached head
      await GitRefManager.writeRef({ fs, gitdir, ref: 'HEAD', value: oid });
    }
  }
}
6234
// Plan a checkout: walk the target commit tree, the working directory, and
// the stage simultaneously and emit a flat list of ops
// (e.g. ['create', path, oid, mode], ['delete', path], ['conflict', path])
// for `_checkout` to execute. No filesystem mutation happens here.
async function analyze({
  fs,
  cache,
  onProgress,
  dir,
  gitdir,
  ref,
  force,
  filepaths,
}) {
  let count = 0;
  return _walk({
    fs,
    cache,
    dir,
    gitdir,
    trees: [TREE({ ref }), WORKDIR(), STAGE()],
    map: async function(fullpath, [commit, workdir, stage]) {
      if (fullpath === '.') return
      // match against base paths
      if (filepaths && !filepaths.some(base => worthWalking(fullpath, base))) {
        return null
      }
      // Emit progress event
      if (onProgress) {
        await onProgress({ phase: 'Analyzing workdir', loaded: ++count });
      }

      // This is a kind of silly pattern but it worked so well for me in the past
      // and it makes intuitively demonstrating exhaustiveness so *easy*.
      // This checks for the presence and/or absence of each of the 3 entries,
      // converts that to a 3-bit binary representation, and then handles
      // every possible combination (2^3 or 8 cases) with a lookup table.
      // Key digit order: [stage, commit, workdir].
      const key = [!!stage, !!commit, !!workdir].map(Number).join('');
      switch (key) {
        // Impossible case.
        case '000':
          return
        // Ignore workdir files that are not tracked and not part of the new commit.
        case '001':
          // OK, make an exception for explicitly named files.
          if (force && filepaths && filepaths.includes(fullpath)) {
            return ['delete', fullpath]
          }
          return
        // New entries
        case '010': {
          switch (await commit.type()) {
            case 'tree': {
              return ['mkdir', fullpath]
            }
            case 'blob': {
              return [
                'create',
                fullpath,
                await commit.oid(),
                await commit.mode(),
              ]
            }
            case 'commit': {
              // A gitlink (submodule) entry — recorded in the index only.
              return [
                'mkdir-index',
                fullpath,
                await commit.oid(),
                await commit.mode(),
              ]
            }
            default: {
              return [
                'error',
                `new entry Unhandled type ${await commit.type()}`,
              ]
            }
          }
        }
        // New entries but there is already something in the workdir there.
        case '011': {
          switch (`${await commit.type()}-${await workdir.type()}`) {
            case 'tree-tree': {
              return // noop
            }
            case 'tree-blob':
            case 'blob-tree': {
              return ['conflict', fullpath]
            }
            case 'blob-blob': {
              // Is the incoming file different?
              if ((await commit.oid()) !== (await workdir.oid())) {
                if (force) {
                  // Last element of 'update' ops: whether chmod is needed.
                  return [
                    'update',
                    fullpath,
                    await commit.oid(),
                    await commit.mode(),
                    (await commit.mode()) !== (await workdir.mode()),
                  ]
                } else {
                  return ['conflict', fullpath]
                }
              } else {
                // Is the incoming file a different mode?
                if ((await commit.mode()) !== (await workdir.mode())) {
                  if (force) {
                    return [
                      'update',
                      fullpath,
                      await commit.oid(),
                      await commit.mode(),
                      true,
                    ]
                  } else {
                    return ['conflict', fullpath]
                  }
                } else {
                  // Content and mode already match — only the index needs it.
                  return [
                    'create-index',
                    fullpath,
                    await commit.oid(),
                    await commit.mode(),
                  ]
                }
              }
            }
            case 'commit-tree': {
              // TODO: submodule
              // We'll ignore submodule directories for now.
              // Users prefer we not throw an error for lack of submodule support.
              // gitlinks
              return
            }
            case 'commit-blob': {
              // TODO: submodule
              // But... we'll complain if there is a *file* where we would
              // put a submodule if we had submodule support.
              return ['conflict', fullpath]
            }
            default: {
              // NOTE(review): `commit.type` here is the method reference, not
              // its awaited result — probably meant `await commit.type()` as
              // in the other branches; confirm before changing.
              return ['error', `new entry Unhandled type ${commit.type}`]
            }
          }
        }
        // Something in stage but not in the commit OR the workdir.
        // Note: I verified this behavior against canonical git.
        case '100': {
          return ['delete-index', fullpath]
        }
        // Deleted entries
        // TODO: How to handle if stage type and workdir type mismatch?
        case '101': {
          switch (await stage.type()) {
            case 'tree': {
              return ['rmdir', fullpath]
            }
            case 'blob': {
              // Git checks that the workdir.oid === stage.oid before deleting file
              if ((await stage.oid()) !== (await workdir.oid())) {
                if (force) {
                  return ['delete', fullpath]
                } else {
                  return ['conflict', fullpath]
                }
              } else {
                return ['delete', fullpath]
              }
            }
            case 'commit': {
              return ['rmdir-index', fullpath]
            }
            default: {
              return [
                'error',
                `delete entry Unhandled type ${await stage.type()}`,
              ]
            }
          }
        }
        /* eslint-disable no-fallthrough */
        // File missing from workdir
        case '110':
        // Possibly modified entries
        case '111': {
          /* eslint-enable no-fallthrough */
          switch (`${await stage.type()}-${await commit.type()}`) {
            case 'tree-tree': {
              return
            }
            case 'blob-blob': {
              // If the file hasn't changed, there is no need to do anything.
              // Existing file modifications in the workdir can be left as is.
              if (
                (await stage.oid()) === (await commit.oid()) &&
                (await stage.mode()) === (await commit.mode()) &&
                !force
              ) {
                return
              }

              // Check for local changes that would be lost
              if (workdir) {
                // Note: canonical git only compares with the stage. But we're smart enough
                // to compare to the stage AND the incoming commit.
                if (
                  (await workdir.oid()) !== (await stage.oid()) &&
                  (await workdir.oid()) !== (await commit.oid())
                ) {
                  if (force) {
                    return [
                      'update',
                      fullpath,
                      await commit.oid(),
                      await commit.mode(),
                      (await commit.mode()) !== (await workdir.mode()),
                    ]
                  } else {
                    return ['conflict', fullpath]
                  }
                }
              } else if (force) {
                // Case '110': file missing from workdir — force restores it.
                return [
                  'update',
                  fullpath,
                  await commit.oid(),
                  await commit.mode(),
                  (await commit.mode()) !== (await stage.mode()),
                ]
              }
              // Has file mode changed?
              if ((await commit.mode()) !== (await stage.mode())) {
                return [
                  'update',
                  fullpath,
                  await commit.oid(),
                  await commit.mode(),
                  true,
                ]
              }
              // TODO: HANDLE SYMLINKS
              // Has the file content changed?
              if ((await commit.oid()) !== (await stage.oid())) {
                return [
                  'update',
                  fullpath,
                  await commit.oid(),
                  await commit.mode(),
                  false,
                ]
              } else {
                return
              }
            }
            case 'tree-blob': {
              return ['update-dir-to-blob', fullpath, await commit.oid()]
            }
            case 'blob-tree': {
              return ['update-blob-to-tree', fullpath]
            }
            case 'commit-commit': {
              return [
                'mkdir-index',
                fullpath,
                await commit.oid(),
                await commit.mode(),
              ]
            }
            default: {
              return [
                'error',
                `update entry Unhandled type ${await stage.type()}-${await commit.type()}`,
              ]
            }
          }
        }
      }
    },
    // Modify the default flat mapping
    // (directory removals must come AFTER their children's ops; everything
    // else keeps parent-before-children order).
    reduce: async function(parent, children) {
      children = flat(children);
      if (!parent) {
        return children
      } else if (parent && parent[0] === 'rmdir') {
        children.push(parent);
        return children
      } else {
        children.unshift(parent);
        return children
      }
    },
  })
}
6524
6525// @ts-check
6526
6527/**
6528 * Checkout a branch
6529 *
6530 * If the branch already exists it will check out that branch. Otherwise, it will create a new remote tracking branch set to track the remote branch of that name.
6531 *
6532 * @param {object} args
6533 * @param {FsClient} args.fs - a file system implementation
6534 * @param {ProgressCallback} [args.onProgress] - optional progress event callback
6535 * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
6536 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
6537 * @param {string} [args.ref = 'HEAD'] - Source to checkout files from
6538 * @param {string[]} [args.filepaths] - Limit the checkout to the given files and directories
6539 * @param {string} [args.remote = 'origin'] - Which remote repository to use
6540 * @param {boolean} [args.noCheckout = false] - If true, will update HEAD but won't update the working directory
6541 * @param {boolean} [args.noUpdateHead] - If true, will update the working directory but won't update HEAD. Defaults to `false` when `ref` is provided, and `true` if `ref` is not provided.
6542 * @param {boolean} [args.dryRun = false] - If true, simulates a checkout so you can test whether it would succeed.
6543 * @param {boolean} [args.force = false] - If true, conflicts will be ignored and files will be overwritten regardless of local changes.
6544 * @param {boolean} [args.track = true] - If false, will not set the remote branch tracking information. Defaults to true.
6545 * @param {object} [args.cache] - a [cache](cache.md) object
6546 *
6547 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
6548 *
6549 * @example
6550 * // switch to the main branch
6551 * await git.checkout({
6552 * fs,
6553 * dir: '/tutorial',
6554 * ref: 'main'
6555 * })
6556 * console.log('done')
6557 *
6558 * @example
6559 * // restore the 'docs' and 'src/docs' folders to the way they were, overwriting any changes
6560 * await git.checkout({
6561 * fs,
6562 * dir: '/tutorial',
6563 * force: true,
6564 * filepaths: ['docs', 'src/docs']
6565 * })
6566 * console.log('done')
6567 *
6568 * @example
6569 * // restore the 'docs' and 'src/docs' folders to the way they are in the 'develop' branch, overwriting any changes
6570 * await git.checkout({
6571 * fs,
6572 * dir: '/tutorial',
6573 * ref: 'develop',
6574 * noUpdateHead: true,
6575 * force: true,
6576 * filepaths: ['docs', 'src/docs']
6577 * })
6578 * console.log('done')
6579 */
async function checkout({
  fs,
  onProgress,
  dir,
  gitdir = join(dir, '.git'),
  remote = 'origin',
  ref: _ref,
  filepaths,
  noCheckout = false,
  noUpdateHead = _ref === undefined,
  dryRun = false,
  force = false,
  track = true,
  cache = {},
}) {
  try {
    // Required parameters must be present before touching the filesystem.
    assertParameter('fs', fs);
    assertParameter('dir', dir);
    assertParameter('gitdir', gitdir);

    // When no ref was given, check out whatever HEAD currently points at.
    return await _checkout({
      fs: new FileSystem(fs),
      cache,
      onProgress,
      dir,
      gitdir,
      remote,
      ref: _ref || 'HEAD',
      filepaths,
      noCheckout,
      noUpdateHead,
      dryRun,
      force,
      track,
    })
  } catch (err) {
    // Tag the error so callers can tell which public API call failed.
    err.caller = 'git.checkout';
    throw err
  }
}
6621
// @see https://git-scm.com/docs/git-rev-parse.html#_specifying_revisions
const abbreviateRx = /^refs\/(heads\/|tags\/|remotes\/)?(.*)/;

/**
 * Shorten a fully-qualified ref ("refs/heads/main") to its familiar form ("main").
 * Non-ref strings (e.g. a detached-HEAD oid) are returned unchanged.
 */
function abbreviateRef(ref) {
  const match = abbreviateRx.exec(ref);
  if (!match) return ref
  const [, kind, short] = match;
  // "refs/remotes/<remote>/HEAD" abbreviates to just "<remote>".
  return kind === 'remotes/' && ref.endsWith('/HEAD')
    ? short.slice(0, -5)
    : short
}
6636
6637// @ts-check
6638
6639/**
6640 * @param {Object} args
6641 * @param {import('../models/FileSystem.js').FileSystem} args.fs
6642 * @param {string} args.gitdir
6643 * @param {boolean} [args.fullname = false] - Return the full path (e.g. "refs/heads/main") instead of the abbreviated form.
6644 * @param {boolean} [args.test = false] - If the current branch doesn't actually exist (such as right after git init) then return `undefined`.
6645 *
6646 * @returns {Promise<string|void>} The name of the current branch or undefined if the HEAD is detached.
6647 *
6648 */
async function _currentBranch({
  fs,
  gitdir,
  fullname = false,
  test = false,
}) {
  // HEAD resolved one level deep yields either a ref name or a bare oid.
  const ref = await GitRefManager.resolve({ fs, gitdir, ref: 'HEAD', depth: 2 });

  if (test) {
    // Right after `git init` HEAD names a branch with no commits yet;
    // in that case report "no branch" instead of throwing.
    try {
      await GitRefManager.resolve({ fs, gitdir, ref });
    } catch (_) {
      return
    }
  }

  // A detached HEAD resolves straight to an oid rather than a ref name.
  if (!ref.startsWith('refs/')) return

  if (fullname) return ref
  return abbreviateRef(ref)
}
6672
/**
 * Rewrite an SSH remote URL as its HTTPS equivalent.
 * Handles both the scp-like shorthand ("git@host:path")
 * and proper "ssh://" URLs.
 */
function translateSSHtoHTTP(url) {
  return url
    .replace(/^git@([^:]+):/, 'https://$1/')
    .replace(/^ssh:\/\//, 'https://')
}
6680
/** Build an HTTP Basic `Authorization` header value from credentials. */
function calculateBasicAuthHeader({ username = '', password = '' }) {
  const token = Buffer.from(`${username}:${password}`).toString('base64');
  return `Basic ${token}`
}
6684
6685// Currently 'for await' upsets my linters.
// Drive any iterable (sync or async) through a callback, one value at a time.
// ('for await' upsets some linters, hence the manual loop.)
async function forAwait(iterable, cb) {
  const iter = getIterator(iterable);
  let step;
  do {
    step = await iter.next();
    // Some iterators report a final value together with done=true.
    if (step.value) await cb(step.value);
  } while (!step.done)
  if (iter.return) iter.return();
}
6695
// Concatenate every chunk of an (async) iterable into a single Uint8Array.
async function collect(iterable) {
  const chunks = [];
  let total = 0;
  // This will be easier once `for await ... of` loops are available.
  await forAwait(iterable, chunk => {
    chunks.push(chunk);
    total += chunk.byteLength;
  });
  const joined = new Uint8Array(total);
  let offset = 0;
  for (const chunk of chunks) {
    joined.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return joined
}
6712
/**
 * Strip embedded `user:password@` credentials out of a URL.
 *
 * The `fetch` API refuses URLs with embedded credentials instead of turning
 * them into a Basic Authentication header, so we must parse them out manually.
 * (Note: `new URL(url)` throws a security exception in Edge, hence the regex.)
 *
 * @param {string} url
 * @returns {{ url: string, auth: { username?: string, password?: string } }}
 *   the credential-free URL plus parsed credentials (empty object if none)
 */
function extractAuthFromUrl(url) {
  const userpass = url.match(/^https?:\/\/([^/]+)@/);
  // No credentials, return the url unmodified and an empty auth object
  if (userpass == null) return { url, auth: {} }
  const credentials = userpass[1];
  // Split on the FIRST colon only, so passwords containing ':' survive.
  const i = credentials.indexOf(':');
  const username = i === -1 ? credentials : credentials.slice(0, i);
  const password = i === -1 ? undefined : credentials.slice(i + 1);
  // Remove credentials from URL
  url = url.replace(`${credentials}@`, '');
  // Has credentials, return the fetch-safe URL and the parsed credentials
  return { url, auth: { username, password } }
}
6729
/**
 * Left-pad the hexadecimal representation of `n` to `b` characters.
 *
 * @param {number} b - desired width in characters
 * @param {number} n - non-negative value to format
 * @returns {string} zero-padded lower-case hex
 */
function padHex(b, n) {
  // padStart never throws when the hex digits already exceed the width
  // (the previous `'0'.repeat(b - s.length)` threw a RangeError then).
  return n.toString(16).padStart(b, '0')
}
6734
6735/**
6736pkt-line Format
6737---------------
6738
6739Much (but not all) of the payload is described around pkt-lines.
6740
6741A pkt-line is a variable length binary string. The first four bytes
6742of the line, the pkt-len, indicates the total length of the line,
6743in hexadecimal. The pkt-len includes the 4 bytes used to contain
6744the length's hexadecimal representation.
6745
6746A pkt-line MAY contain binary data, so implementors MUST ensure
6747pkt-line parsing/formatting routines are 8-bit clean.
6748
6749A non-binary line SHOULD BE terminated by an LF, which if present
6750MUST be included in the total length. Receivers MUST treat pkt-lines
6751with non-binary data the same whether or not they contain the trailing
6752LF (stripping the LF if present, and not complaining when it is
6753missing).
6754
6755The maximum length of a pkt-line's data component is 65516 bytes.
6756Implementations MUST NOT send pkt-line whose length exceeds 65520
6757(65516 bytes of payload + 4 bytes of length data).
6758
6759Implementations SHOULD NOT send an empty pkt-line ("0004").
6760
6761A pkt-line with a length field of 0 ("0000"), called a flush-pkt,
6762is a special case and MUST be handled differently than an empty
6763pkt-line ("0004").
6764
6765----
6766 pkt-line = data-pkt / flush-pkt
6767
6768 data-pkt = pkt-len pkt-payload
6769 pkt-len = 4*(HEXDIG)
6770 pkt-payload = (pkt-len - 4)*(OCTET)
6771
6772 flush-pkt = "0000"
6773----
6774
6775Examples (as C-style strings):
6776
6777----
6778 pkt-line actual value
6779 ---------------------------------
6780 "0006a\n" "a\n"
6781 "0005a" "a"
6782 "000bfoobar\n" "foobar\n"
6783 "0004" ""
6784----
6785*/
6786
6787// I'm really using this more as a namespace.
6788// There's not a lot of "state" in a pkt-line
6789
class GitPktLine {
  /** The special '0000' flush packet. */
  static flush() {
    return Buffer.from('0000', 'utf8')
  }

  /** The special '0001' delimiter packet (protocol v2). */
  static delim() {
    return Buffer.from('0001', 'utf8')
  }

  /** Prefix `line` with its 4-hex-digit total length (payload + 4). */
  static encode(line) {
    const payload = typeof line === 'string' ? Buffer.from(line) : line;
    const hexlength = padHex(4, payload.length + 4);
    return Buffer.concat([Buffer.from(hexlength, 'utf8'), payload])
  }

  /**
   * Wrap a stream in a reader that yields one pkt-line payload per call.
   * Yields `null` for flush/delim packets and `true` once the stream ends.
   */
  static streamReader(stream) {
    const reader = new StreamReader(stream);
    return async function read() {
      try {
        const lengthField = await reader.read(4);
        if (lengthField == null) return true
        const length = parseInt(lengthField.toString('utf8'), 16);
        // 0 = flush-pkt, 1 = delim-pkt: neither carries a payload.
        if (length === 0 || length === 1) return null
        const payload = await reader.read(length - 4);
        if (payload == null) return true
        return payload
      } catch (err) {
        console.log('error', err);
        return true
      }
    }
  }
}
6827
6828// @ts-check
6829
6830/**
6831 * @param {function} read
6832 */
/**
 * Parse a protocol v2 capability advertisement.
 *
 * @param {function} read - pkt-line reader; yields Buffer lines, `null` for
 *   flush packets, and `true` at end of stream.
 */
async function parseCapabilitiesV2(read) {
  /** @type {Object<string, string | true>} */
  const capabilities2 = {};

  for (let line = await read(); line !== true; line = await read()) {
    if (line === null) continue // skip flush packets
    const text = line.toString('utf8').replace(/\n$/, '');
    const eq = text.indexOf('=');
    // "key=value" capabilities carry a value; bare ones are just flags.
    if (eq === -1) {
      capabilities2[text] = true;
    } else {
      capabilities2[text.slice(0, eq)] = text.slice(eq + 1);
    }
  }
  return { protocolVersion: 2, capabilities2 }
}
6854
/**
 * Parse a smart-HTTP ref advertisement (the response to
 * `GET /info/refs?service=...`) into capabilities, refs and symrefs.
 *
 * Returns a protocol-v2 shape (via parseCapabilitiesV2) when the server
 * advertises "version 2"; otherwise a v1 shape:
 * `{ protocolVersion: 1, capabilities: Set, refs: Map<name, oid>, symrefs: Map }`.
 *
 * @param {AsyncIterableIterator<Uint8Array>} stream - raw response body
 * @param {{ service: string }} args - e.g. 'git-upload-pack'
 * @throws {EmptyServerResponseError} when the stream ends before any data
 * @throws {ParseError} when the advertisement is malformed
 */
async function parseRefsAdResponse(stream, { service }) {
  const capabilities = new Set();
  const refs = new Map();
  const symrefs = new Map();

  // There is probably a better way to do this, but for now
  // let's just throw the result parser inline here.
  const read = GitPktLine.streamReader(stream);
  let lineOne = await read();
  // skip past any flushes
  while (lineOne === null) lineOne = await read();

  // `true` from the reader means end-of-stream: nothing at all was sent.
  if (lineOne === true) throw new EmptyServerResponseError()

  // Handle protocol v2 responses (Bitbucket Server doesn't include a `# service=` line)
  if (lineOne.includes('version 2')) {
    return parseCapabilitiesV2(read)
  }

  // Clients MUST ignore an LF at the end of the line.
  if (lineOne.toString('utf8').replace(/\n$/, '') !== `# service=${service}`) {
    throw new ParseError(`# service=${service}\\n`, lineOne.toString('utf8'))
  }
  let lineTwo = await read();
  // skip past any flushes
  while (lineTwo === null) lineTwo = await read();
  // In the edge case of a brand new repo, zero refs (and zero capabilities)
  // are returned.
  if (lineTwo === true) return { capabilities, refs, symrefs }
  lineTwo = lineTwo.toString('utf8');

  // Handle protocol v2 responses
  if (lineTwo.includes('version 2')) {
    return parseCapabilitiesV2(read)
  }

  // The first ref line also carries the capability list after a NUL byte.
  const [firstRef, capabilitiesLine] = splitAndAssert(lineTwo, '\x00', '\\x00');
  capabilitiesLine.split(' ').map(x => capabilities.add(x));
  const [ref, name] = splitAndAssert(firstRef, ' ', ' ');
  refs.set(name, ref);
  // Remaining lines are plain "<oid> <refname>" pairs until end of stream.
  while (true) {
    const line = await read();
    if (line === true) break
    if (line !== null) {
      const [ref, name] = splitAndAssert(line.toString('utf8'), ' ', ' ');
      refs.set(name, ref);
    }
  }
  // Symrefs are thrown into the "capabilities" unfortunately.
  for (const cap of capabilities) {
    if (cap.startsWith('symref=')) {
      const m = cap.match(/symref=([^:]+):(.*)/);
      if (m.length === 3) {
        symrefs.set(m[1], m[2]);
      }
    }
  }
  return { protocolVersion: 1, capabilities, refs, symrefs }
}
6914
/**
 * Split `line` on `sep`, asserting exactly two fields result.
 * @throws {ParseError} when the separator appears zero or multiple times
 */
function splitAndAssert(line, sep, expected) {
  const parts = line.trim().split(sep);
  if (parts.length === 2) return parts
  throw new ParseError(
    `Two strings separated by '${expected}'`,
    line.toString('utf8')
  )
}
6925
// Try to accommodate known CORS proxy implementations:
// - https://jcubic.pl/proxy.php? <-- uses query string
// - https://cors.isomorphic-git.org <-- uses path
const corsProxify = (corsProxy, url) => {
  // Query-string style proxies get the full URL appended verbatim;
  // path-style proxies get the URL minus its scheme, after a slash.
  if (corsProxy.endsWith('?')) return `${corsProxy}${url}`
  return `${corsProxy}/${url.replace(/^https?:\/\//, '')}`
};
6933
/**
 * Merge auth-derived headers into `headers` (mutated in place).
 * Manually provided `auth.headers` always win over the computed Basic header.
 */
const updateHeaders = (headers, auth) => {
  const { username, password } = auth;
  // Compute a Basic auth header when we have any credentials at all...
  if (username || password) {
    headers.Authorization = calculateBasicAuthHeader(auth);
  }
  // ...but any manually provided headers take precedence.
  if (auth.headers) Object.assign(headers, auth.headers);
};
6944
6945/**
6946 * @param {GitHttpResponse} res
6947 *
6948 * @returns {{ preview: string, response: string, data: Buffer }}
6949 */
6950const stringifyBody = async res => {
6951 try {
6952 // Some services provide a meaningful error message in the body of 403s like "token lacks the scopes necessary to perform this action"
6953 const data = Buffer.from(await collect(res.body));
6954 const response = data.toString('utf8');
6955 const preview =
6956 response.length < 256 ? response : response.slice(0, 256) + '...';
6957 return { preview, response, data }
6958 } catch (e) {
6959 return {}
6960 }
6961};
6962
/** Smart-HTTP transport: ref discovery plus service request/response. */
class GitRemoteHTTP {
  /** Remote-helper capabilities implemented by this transport. */
  static async capabilities() {
    return ['discover', 'connect']
  }

  /**
   * Fetch and parse the ref advertisement (`GET /info/refs?service=...`),
   * negotiating authentication via the onAuth* callbacks as needed.
   *
   * @param {Object} args
   * @param {HttpClient} args.http
   * @param {ProgressCallback} [args.onProgress]
   * @param {AuthCallback} [args.onAuth]
   * @param {AuthFailureCallback} [args.onAuthFailure]
   * @param {AuthSuccessCallback} [args.onAuthSuccess]
   * @param {string} [args.corsProxy]
   * @param {string} args.service
   * @param {string} args.url
   * @param {Object<string, string>} args.headers
   * @param {1 | 2} args.protocolVersion - Git Protocol Version
   */
  static async discover({
    http,
    onProgress,
    onAuth,
    onAuthSuccess,
    onAuthFailure,
    corsProxy,
    service,
    url: _origUrl,
    headers,
    protocolVersion,
  }) {
    let { url, auth } = extractAuthFromUrl(_origUrl);
    const proxifiedURL = corsProxy ? corsProxify(corsProxy, url) : url;
    if (auth.username || auth.password) {
      headers.Authorization = calculateBasicAuthHeader(auth);
    }
    if (protocolVersion === 2) {
      headers['Git-Protocol'] = 'version=2';
    }

    let res;
    let tryAgain;
    let providedAuthBefore = false;
    do {
      res = await http.request({
        onProgress,
        method: 'GET',
        url: `${proxifiedURL}/info/refs?service=${service}`,
        headers,
      });

      // the default loop behavior
      tryAgain = false;

      // 401 is the "correct" response for access denied. 203 is Non-Authoritative Information and comes from Azure DevOps, which
      // apparently doesn't realize this is a git request and is returning the HTML for the "Azure DevOps Services | Sign In" page.
      if (res.statusCode === 401 || res.statusCode === 203) {
        // On subsequent 401s, call `onAuthFailure` instead of `onAuth`.
        // This is so that naive `onAuth` callbacks that return a fixed value don't create an infinite loop of retrying.
        const getAuth = providedAuthBefore ? onAuthFailure : onAuth;
        if (getAuth) {
          // Acquire credentials and try again
          // TODO: read `useHttpPath` value from git config and pass along?
          auth = await getAuth(url, {
            ...auth,
            headers: { ...headers },
          });
          if (auth && auth.cancel) {
            throw new UserCanceledError()
          } else if (auth) {
            updateHeaders(headers, auth);
            providedAuthBefore = true;
            tryAgain = true;
          }
        }
      } else if (
        res.statusCode === 200 &&
        providedAuthBefore &&
        onAuthSuccess
      ) {
        await onAuthSuccess(url, auth);
      }
    } while (tryAgain)

    if (res.statusCode !== 200) {
      const { response } = await stringifyBody(res);
      throw new HttpError(res.statusCode, res.statusMessage, response)
    }
    // Git "smart" HTTP servers should respond with the correct Content-Type header.
    if (
      res.headers['content-type'] === `application/x-${service}-advertisement`
    ) {
      const remoteHTTP = await parseRefsAdResponse(res.body, { service });
      remoteHTTP.auth = auth;
      return remoteHTTP
    } else {
      // If they don't send the correct content-type header, that's a good indicator it is either a "dumb" HTTP
      // server, or the user specified an incorrect remote URL and the response is actually an HTML page.
      // In this case, we save the response as plain text so we can generate a better error message if needed.
      const { preview, response, data } = await stringifyBody(res);
      // For backwards compatibility, try to parse it anyway.
      // TODO: maybe just throw instead of trying?
      try {
        const remoteHTTP = await parseRefsAdResponse([data], { service });
        remoteHTTP.auth = auth;
        return remoteHTTP
      } catch (e) {
        throw new SmartHttpError(preview, response)
      }
    }
  }

  /**
   * POST the request `body` to the `service` endpoint.
   *
   * @param {Object} args
   * @param {HttpClient} args.http
   * @param {ProgressCallback} [args.onProgress]
   * @param {string} [args.corsProxy]
   * @param {string} args.service
   * @param {string} args.url
   * @param {Object<string, string>} [args.headers]
   * @param {any} args.body
   * @param {any} args.auth
   *
   * @throws {HttpError} when the server responds with a non-200 status
   */
  static async connect({
    http,
    onProgress,
    corsProxy,
    service,
    url,
    auth,
    body,
    headers,
  }) {
    // We already have the "correct" auth value at this point, but
    // we need to strip out the username/password from the URL yet again.
    const urlAuth = extractAuthFromUrl(url);
    if (urlAuth) url = urlAuth.url;

    if (corsProxy) url = corsProxify(corsProxy, url);

    headers['content-type'] = `application/x-${service}-request`;
    headers.accept = `application/x-${service}-result`;
    updateHeaders(headers, auth);

    const res = await http.request({
      onProgress,
      method: 'POST',
      url: `${url}/${service}`,
      body,
      headers,
    });
    if (res.statusCode !== 200) {
      // BUGFIX: stringifyBody is async — without `await`, destructuring the
      // Promise made `response` always undefined (cf. the call in discover()).
      const { response } = await stringifyBody(res);
      throw new HttpError(res.statusCode, res.statusMessage, response)
    }
    return res
  }
}
7120
/**
 * Classify a remote URL into `{ transport, address }`, or `undefined`
 * when the URL matches no recognized shape.
 */
function parseRemoteUrl({ url }) {
  // the stupid "shorter scp-like syntax"
  if (url.startsWith('git@')) {
    return { transport: 'ssh', address: url }
  }
  const matches = url.match(/(\w+)(:\/\/|::)(.*)/);
  if (matches === null) return
  /*
   * When git encounters a URL of the form <transport>://<address>, where <transport> is
   * a protocol that it cannot handle natively, it automatically invokes git remote-<transport>
   * with the full URL as the second argument.
   *
   * A URL of the form <transport>::<address> explicitly instructs git to invoke
   * git remote-<transport> with <address> as the second argument.
   *
   * @see https://git-scm.com/docs/git-remote-helpers
   */
  switch (matches[2]) {
    case '://':
      return { transport: matches[1], address: matches[0] }
    case '::':
      return { transport: matches[1], address: matches[3] }
  }
}
7157
/** Maps a remote URL's transport to the remote-helper class that handles it. */
class GitRemoteManager {
  static getRemoteHelperFor({ url }) {
    // TODO: clean up the remoteHelper API and move into PluginCore
    const remoteHelpers = new Map([
      ['http', GitRemoteHTTP],
      ['https', GitRemoteHTTP],
    ]);

    const parts = parseRemoteUrl({ url });
    if (!parts) {
      throw new UrlParseError(url)
    }
    const helper = remoteHelpers.get(parts.transport);
    if (helper) return helper
    // For SSH URLs, suggest the HTTPS translation in the error.
    throw new UnknownTransportError(
      url,
      parts.transport,
      parts.transport === 'ssh' ? translateSSHtoHTTP(url) : undefined
    )
  }
}
7179
let lock$1 = null;

/** Reads and writes the `.git/shallow` oid list, serialized via a per-path lock. */
class GitShallowManager {
  static async read({ fs, gitdir }) {
    if (lock$1 === null) lock$1 = new AsyncLock();
    const filepath = join(gitdir, 'shallow');
    const oids = new Set();
    await lock$1.acquire(filepath, async function() {
      const text = await fs.read(filepath, { encoding: 'utf8' });
      if (text === null) return oids // no file
      if (text.trim() === '') return oids // empty file
      for (const oid of text.trim().split('\n')) oids.add(oid);
    });
    return oids
  }

  static async write({ fs, gitdir, oids }) {
    if (lock$1 === null) lock$1 = new AsyncLock();
    const filepath = join(gitdir, 'shallow');
    if (oids.size === 0) {
      // No shallows: remove the file entirely.
      await lock$1.acquire(filepath, async function() {
        await fs.rm(filepath);
      });
      return
    }
    const text = [...oids].join('\n') + '\n';
    await lock$1.acquire(filepath, async function() {
      await fs.write(filepath, text, { encoding: 'utf8' });
    });
  }
}
7217
/** True when `oid` exists as a loose object under `<gitdir>/objects`. */
async function hasObjectLoose({ fs, gitdir, oid }) {
  // Loose objects live at objects/<first 2 hex chars>/<remaining 38>.
  return fs.exists(`${gitdir}/objects/${oid.slice(0, 2)}/${oid.slice(2)}`)
}
7222
/**
 * True when `oid` is present in any packfile index under `objects/pack`.
 *
 * @param {Object} args
 * @param {import('../models/FileSystem.js').FileSystem} args.fs
 * @param {object} args.cache
 * @param {string} args.gitdir
 * @param {string} args.oid
 * @param {Function} args.getExternalRefDelta - resolver for external ref-deltas
 * @throws {InternalError} when a pack index fails to parse
 */
async function hasObjectPacked({
  fs,
  cache,
  gitdir,
  oid,
  getExternalRefDelta,
}) {
  // Check to see if it's in a packfile.
  // Iterate through all the .idx files
  let list = await fs.readdir(join(gitdir, 'objects/pack'));
  list = list.filter(x => x.endsWith('.idx'));
  for (const filename of list) {
    // BUGFIX: interpolate the actual index filename (the template literal
    // was corrupted to a `$(unknown)` placeholder).
    const indexFile = `${gitdir}/objects/pack/${filename}`;
    const p = await readPackIndex({
      fs,
      cache,
      filename: indexFile,
      getExternalRefDelta,
    });
    if (p.error) throw new InternalError(p.error)
    // If the packfile DOES have the oid we're looking for...
    if (p.offsets.has(oid)) {
      return true
    }
  }
  // Failed to find it
  return false
}
7251
/**
 * Whether `oid` exists in the repository, loose or packed.
 * (`format` is accepted for interface parity but unused here.)
 */
async function hasObject({
  fs,
  cache,
  gitdir,
  oid,
  format = 'content',
}) {
  // Curry the current read method so that the packfile un-deltification
  // process can acquire external ref-deltas.
  const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid });

  // Cheap check first: the loose object directory.
  if (await hasObjectLoose({ fs, gitdir, oid })) return true

  // Otherwise fall back to scanning the packfile indexes.
  return hasObjectPacked({ fs, cache, gitdir, oid, getExternalRefDelta })
}
7278
// TODO: make a function that just returns obCount. then emptyPackfile = () => sizePack(pack) === 0
/** True when `pack` begins with a v2 packfile header declaring zero objects. */
function emptyPackfile(pack) {
  // 'PACK' magic + version 2 + object count 0, as a hex string.
  const emptyHeader = '5041434b' + '00000002' + '00000000';
  return pack.slice(0, 12).toString('hex') === emptyHeader
}
7287
/**
 * Keep only the client capabilities whose names (the part before '=')
 * the server also advertised.
 */
function filterCapabilities(server, client) {
  const serverNames = new Set(server.map(cap => cap.split('=', 1)[0]));
  return client.filter(cap => serverNames.has(cap.split('=', 1)[0]))
}
7295
// Library identity; `agent` is the value advertised to git servers.
const pkg = {
  name: 'isomorphic-git',
  version: '1.24.3',
  agent: 'git/isomorphic-git@1.24.3',
};
7301
/**
 * A minimal async FIFO queue exposing the async-iterator `next()` shape.
 *
 * Producers call `write()` then `end()`; a single consumer awaits `next()`,
 * which resolves `{ value }` per chunk and `{ done: true }` after `end()`.
 * `destroy(err)` marks the stream broken; consumers inspect `.error`.
 */
class FIFO {
  constructor() {
    this._queue = [];
  }

  write(chunk) {
    if (this._ended) {
      throw Error('You cannot write to a FIFO that has already been ended!')
    }
    // If a reader is parked in next(), hand the chunk straight to it.
    const pending = this._waiting;
    if (pending) {
      this._waiting = null;
      pending({ value: chunk });
      return
    }
    this._queue.push(chunk);
  }

  end() {
    this._ended = true;
    // Release a parked reader with the end-of-stream signal.
    const pending = this._waiting;
    if (pending) {
      this._waiting = null;
      pending({ done: true });
    }
  }

  destroy(err) {
    this._ended = true;
    this.error = err;
  }

  async next() {
    if (this._queue.length > 0) {
      return { value: this._queue.shift() }
    }
    if (this._ended) {
      return { done: true }
    }
    if (this._waiting) {
      throw Error(
        'You cannot call read until the previous call to read has returned!'
      )
    }
    // Park until the next write() or end().
    return new Promise(resolve => {
      this._waiting = resolve;
    })
  }
}
7351
// Note: progress messages are designed to be written directly to the terminal,
// so they are often sent with just a carriage return to overwrite the last line of output.
// But there are also messages delimited with newlines.
// I also include CRLF just in case.
/** Index one past the first line terminator (\r, \n or \r\n), or -1 if none. */
function findSplit(str) {
  const cr = str.indexOf('\r');
  const lf = str.indexOf('\n');
  if (cr === -1 && lf === -1) return -1
  if (cr === -1) return lf + 1 // \n only
  if (lf === -1) return cr + 1 // \r only
  if (lf === cr + 1) return lf + 1 // \r\n pair counts as one terminator
  return Math.min(cr, lf) + 1 // whichever terminator comes first
}
7365
/**
 * Re-chunk a byte/string stream into terminal "lines" (split after \r, \n
 * or \r\n), emitting them through a FIFO. A trailing partial line is
 * flushed when the input ends.
 */
function splitLines(input) {
  const output = new FIFO();
  let pending = ''
  ;(async () => {
    await forAwait(input, chunk => {
      pending += chunk.toString('utf8');
      // Emit every complete line currently buffered.
      let cut = findSplit(pending);
      while (cut !== -1) {
        output.write(pending.slice(0, cut));
        pending = pending.slice(cut);
        cut = findSplit(pending);
      }
    });
    // Flush any trailing partial line before signaling end-of-stream.
    if (pending.length > 0) {
      output.write(pending);
    }
    output.end();
  })();
  return output
}
7387
7388/*
7389If 'side-band' or 'side-band-64k' capabilities have been specified by
7390the client, the server will send the packfile data multiplexed.
7391
7392Each packet starting with the packet-line length of the amount of data
7393that follows, followed by a single byte specifying the sideband the
7394following data is coming in on.
7395
7396In 'side-band' mode, it will send up to 999 data bytes plus 1 control
7397code, for a total of up to 1000 bytes in a pkt-line. In 'side-band-64k'
7398mode it will send up to 65519 data bytes plus 1 control code, for a
7399total of up to 65520 bytes in a pkt-line.
7400
7401The sideband byte will be a '1', '2' or a '3'. Sideband '1' will contain
7402packfile data, sideband '2' will be used for progress information that the
7403client will generally print to stderr and sideband '3' is used for error
7404information.
7405
7406If no 'side-band' capability was specified, the server will stream the
7407entire packfile without multiplexing.
7408*/
7409
/**
 * Demultiplexer for git's side-band / side-band-64k wire protocol.
 *
 * `demux(input)` splits a multiplexed pkt-line stream into three FIFOs:
 * `packfile` (sideband 1), `progress` (sideband 2) and `packetlines`
 * (anything not part of the sideband protocol). A sideband-3 packet is a
 * fatal error: its text is forwarded to `progress` and `packfile` is
 * destroyed with an Error carrying that text.
 */
class GitSideBand {
  static demux(input) {
    const read = GitPktLine.streamReader(input);
    // And now for the ridiculous side-band or side-band-64k protocol
    const packetlines = new FIFO();
    const packfile = new FIFO();
    const progress = new FIFO();
    // TODO: Use a proper through stream?
    const nextBit = async function() {
      const line = await read();
      // Skip over flush packets
      if (line === null) return nextBit()
      // A made up convention to signal there's no more to read.
      if (line === true) {
        packetlines.end();
        progress.end();
        packfile.end();
        return
      }
      // Examine first byte to determine which output "stream" to use
      switch (line[0]) {
        case 1: {
          // pack data
          packfile.write(line.slice(1));
          break
        }
        case 2: {
          // progress message
          progress.write(line.slice(1));
          break
        }
        case 3: {
          // fatal error message just before stream aborts
          const error = line.slice(1);
          progress.write(error);
          packfile.destroy(new Error(error.toString('utf8')));
          return
        }
        default: {
          // Not part of the side-band-64k protocol
          packetlines.write(line.slice(0));
        }
      }
      // Careful not to blow up the stack.
      // I think Promises in a tail-call position should be OK.
      // NOTE(review): this recursive call is deliberately not awaited — the
      // pump runs in the background while demux() returns immediately.
      nextBit();
    };
    nextBit();
    return {
      packetlines,
      packfile,
      progress,
    }
  }
  // static mux ({
  //   protocol, // 'side-band' or 'side-band-64k'
  //   packetlines,
  //   packfile,
  //   progress,
  //   error
  // }) {
  //   const MAX_PACKET_LENGTH = protocol === 'side-band-64k' ? 999 : 65519
  //   let output = new PassThrough()
  //   packetlines.on('data', data => {
  //     if (data === null) {
  //       output.write(GitPktLine.flush())
  //     } else {
  //       output.write(GitPktLine.encode(data))
  //     }
  //   })
  //   let packfileWasEmpty = true
  //   let packfileEnded = false
  //   let progressEnded = false
  //   let errorEnded = false
  //   let goodbye = Buffer.concat([
  //     GitPktLine.encode(Buffer.from('010A', 'hex')),
  //     GitPktLine.flush()
  //   ])
  //   packfile
  //     .on('data', data => {
  //       packfileWasEmpty = false
  //       const buffers = splitBuffer(data, MAX_PACKET_LENGTH)
  //       for (const buffer of buffers) {
  //         output.write(
  //           GitPktLine.encode(Buffer.concat([Buffer.from('01', 'hex'), buffer]))
  //         )
  //       }
  //     })
  //     .on('end', () => {
  //       packfileEnded = true
  //       if (!packfileWasEmpty) output.write(goodbye)
  //       if (progressEnded && errorEnded) output.end()
  //     })
  //   progress
  //     .on('data', data => {
  //       const buffers = splitBuffer(data, MAX_PACKET_LENGTH)
  //       for (const buffer of buffers) {
  //         output.write(
  //           GitPktLine.encode(Buffer.concat([Buffer.from('02', 'hex'), buffer]))
  //         )
  //       }
  //     })
  //     .on('end', () => {
  //       progressEnded = true
  //       if (packfileEnded && errorEnded) output.end()
  //     })
  //   error
  //     .on('data', data => {
  //       const buffers = splitBuffer(data, MAX_PACKET_LENGTH)
  //       for (const buffer of buffers) {
  //         output.write(
  //           GitPktLine.encode(Buffer.concat([Buffer.from('03', 'hex'), buffer]))
  //         )
  //       }
  //     })
  //     .on('end', () => {
  //       errorEnded = true
  //       if (progressEnded && packfileEnded) output.end()
  //     })
  //   return output
  // }
}
7532
/**
 * Parse the negotiation portion of a git-upload-pack response.
 *
 * Demuxes the sideband stream, then reads packet lines until the
 * negotiation concludes (a final ACK with no status, or a NAK), at which
 * point the promise resolves with the collected shallow/unshallow oids,
 * the ACK list, and the still-streaming `packfile`/`progress` FIFOs.
 *
 * @returns {Promise<{shallows: string[], unshallows: string[], acks: Array<{oid: string, status: string}>, nak: boolean, packfile: FIFO, progress: FIFO}>}
 *
 * NOTE(review): if the packetline stream ends before an ACK/NAK arrives,
 * this promise never settles — confirm whether callers guard against that.
 */
async function parseUploadPackResponse(stream) {
  const { packetlines, packfile, progress } = GitSideBand.demux(stream);
  const shallows = [];
  const unshallows = [];
  const acks = [];
  let nak = false;
  let done = false;
  return new Promise((resolve, reject) => {
    // Parse the response
    forAwait(packetlines, data => {
      const line = data.toString('utf8').trim();
      if (line.startsWith('shallow')) {
        // The oid is the last 40 characters of the line.
        const oid = line.slice(-41).trim();
        if (oid.length !== 40) {
          reject(new InvalidOidError(oid));
        }
        shallows.push(oid);
      } else if (line.startsWith('unshallow')) {
        const oid = line.slice(-41).trim();
        if (oid.length !== 40) {
          reject(new InvalidOidError(oid));
        }
        unshallows.push(oid);
      } else if (line.startsWith('ACK')) {
        const [, oid, status] = line.split(' ');
        acks.push({ oid, status });
        // A bare "ACK <oid>" (no status) ends the negotiation.
        if (!status) done = true;
      } else if (line.startsWith('NAK')) {
        nak = true;
        done = true;
      }
      if (done) {
        resolve({ shallows, unshallows, acks, nak, packfile, progress });
      }
    });
  })
}
7570
/**
 * Assemble the pkt-line payload for a git-upload-pack request.
 *
 * Capabilities are advertised on the first `want` line only. Shallow/deepen
 * commands follow the wants, then a flush packet separates them from the
 * `have` lines, and the request ends with `done`.
 *
 * @param {object} args
 * @param {string[]} [args.capabilities] - protocol capabilities to request
 * @param {Iterable<string>} [args.wants] - oids to request (duplicates removed)
 * @param {string[]} [args.haves] - oids we already have
 * @param {string[]} [args.shallows] - oids of our current shallow commits
 * @param {number|null} [args.depth] - `deepen` value
 * @param {Date|null} [args.since] - `deepen-since` cutoff
 * @param {string[]} [args.exclude] - refs for `deepen-not`
 * @returns {Buffer[]} encoded pkt-lines ready to send
 */
function writeUploadPackRequest({
  capabilities = [],
  wants = [],
  haves = [],
  shallows = [],
  depth = null,
  since = null,
  exclude = [],
}) {
  const lines = [];
  const push = text => lines.push(GitPktLine.encode(text));
  // Dedupe wants; capabilities ride along on the first want line only.
  const uniqueWants = [...new Set(wants)];
  uniqueWants.forEach((oid, index) => {
    const caps = index === 0 ? ` ${capabilities.join(' ')}` : '';
    push(`want ${oid}${caps}\n`);
  });
  shallows.forEach(oid => push(`shallow ${oid}\n`));
  if (depth !== null) {
    push(`deepen ${depth}\n`);
  }
  if (since !== null) {
    // The wire format wants whole seconds since the epoch.
    push(`deepen-since ${Math.floor(since.valueOf() / 1000)}\n`);
  }
  exclude.forEach(oid => push(`deepen-not ${oid}\n`));
  lines.push(GitPktLine.flush());
  haves.forEach(oid => push(`have ${oid}\n`));
  push(`done\n`);
  return lines
}
7608
7609// @ts-check
7610
7611/**
7612 *
7613 * @typedef {object} FetchResult - The object returned has the following schema:
7614 * @property {string | null} defaultBranch - The branch that is cloned if no branch is specified
7615 * @property {string | null} fetchHead - The SHA-1 object id of the fetched head commit
7616 * @property {string | null} fetchHeadDescription - a textual description of the branch that was fetched
7617 * @property {Object<string, string>} [headers] - The HTTP response headers returned by the git server
7618 * @property {string[]} [pruned] - A list of branches that were pruned, if you provided the `prune` parameter
7619 *
7620 */
7621
7622/**
7623 * @param {object} args
7624 * @param {import('../models/FileSystem.js').FileSystem} args.fs
7625 * @param {any} args.cache
7626 * @param {HttpClient} args.http
7627 * @param {ProgressCallback} [args.onProgress]
7628 * @param {MessageCallback} [args.onMessage]
7629 * @param {AuthCallback} [args.onAuth]
7630 * @param {AuthFailureCallback} [args.onAuthFailure]
7631 * @param {AuthSuccessCallback} [args.onAuthSuccess]
7632 * @param {string} args.gitdir
7633 * @param {string|void} [args.url]
7634 * @param {string} [args.corsProxy]
7635 * @param {string} [args.ref]
7636 * @param {string} [args.remoteRef]
7637 * @param {string} [args.remote]
7638 * @param {boolean} [args.singleBranch = false]
7639 * @param {boolean} [args.tags = false]
7640 * @param {number} [args.depth]
7641 * @param {Date} [args.since]
7642 * @param {string[]} [args.exclude = []]
7643 * @param {boolean} [args.relative = false]
7644 * @param {Object<string, string>} [args.headers]
7645 * @param {boolean} [args.prune]
7646 * @param {boolean} [args.pruneTags]
7647 *
7648 * @returns {Promise<FetchResult>}
7649 * @see FetchResult
7650 */
async function _fetch({
  fs,
  cache,
  http,
  onProgress,
  onMessage,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  gitdir,
  ref: _ref,
  remoteRef: _remoteRef,
  remote: _remote,
  url: _url,
  corsProxy,
  depth = null,
  since = null,
  exclude = [],
  relative = false,
  tags = false,
  singleBranch = false,
  headers = {},
  prune = false,
  pruneTags = false,
}) {
  // If no ref was given, fall back to the currently checked-out branch.
  // (`test: true` makes _currentBranch return undefined instead of throwing
  // when HEAD doesn't point at an existing branch yet.)
  const ref = _ref || (await _currentBranch({ fs, gitdir, test: true }));
  const config = await GitConfigManager.get({ fs, gitdir });
  // Figure out what remote to use.
  const remote =
    _remote || (ref && (await config.get(`branch.${ref}.remote`))) || 'origin';
  // Lookup the URL for the given remote.
  const url = _url || (await config.get(`remote.${remote}.url`));
  if (typeof url === 'undefined') {
    throw new MissingParameterError('remote OR url')
  }
  // Figure out what remote ref to use.
  const remoteRef =
    _remoteRef ||
    (ref && (await config.get(`branch.${ref}.merge`))) ||
    _ref ||
    'HEAD';

  // An unset corsProxy falls back to the value stored in the repo config.
  if (corsProxy === undefined) {
    corsProxy = await config.get('http.corsProxy');
  }

  // Perform ref discovery against the remote (protocol v1).
  const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url });
  const remoteHTTP = await GitRemoteHTTP.discover({
    http,
    onAuth,
    onAuthSuccess,
    onAuthFailure,
    corsProxy,
    service: 'git-upload-pack',
    url,
    headers,
    protocolVersion: 1,
  });
  const auth = remoteHTTP.auth; // hack to get new credentials from CredentialManager API
  const remoteRefs = remoteHTTP.refs;
  // For the special case of an empty repository with no refs, return null.
  if (remoteRefs.size === 0) {
    return {
      defaultBranch: null,
      fetchHead: null,
      fetchHeadDescription: null,
    }
  }
  // Check that the remote supports the requested features
  if (depth !== null && !remoteHTTP.capabilities.has('shallow')) {
    throw new RemoteCapabilityError('shallow', 'depth')
  }
  if (since !== null && !remoteHTTP.capabilities.has('deepen-since')) {
    throw new RemoteCapabilityError('deepen-since', 'since')
  }
  if (exclude.length > 0 && !remoteHTTP.capabilities.has('deepen-not')) {
    throw new RemoteCapabilityError('deepen-not', 'exclude')
  }
  if (relative === true && !remoteHTTP.capabilities.has('deepen-relative')) {
    throw new RemoteCapabilityError('deepen-relative', 'relative')
  }
  // Figure out the SHA for the requested ref
  const { oid, fullref } = GitRefManager.resolveAgainstMap({
    ref: remoteRef,
    map: remoteRefs,
  });
  // Filter out refs we want to ignore: only keep ref we're cloning, HEAD, branches, and tags (if we're keeping them)
  for (const remoteRef of remoteRefs.keys()) {
    if (
      remoteRef === fullref ||
      remoteRef === 'HEAD' ||
      remoteRef.startsWith('refs/heads/') ||
      (tags && remoteRef.startsWith('refs/tags/'))
    ) {
      continue
    }
    remoteRefs.delete(remoteRef);
  }
  // Assemble the application/x-git-upload-pack-request
  const capabilities = filterCapabilities(
    [...remoteHTTP.capabilities],
    [
      'multi_ack_detailed',
      'no-done',
      'side-band-64k',
      // Note: I removed 'thin-pack' option since our code doesn't "fatten" packfiles,
      // which is necessary for compatibility with git. It was the cause of mysterious
      // 'fatal: pack has [x] unresolved deltas' errors that plagued us for some time.
      // isomorphic-git is perfectly happy with thin packfiles in .git/objects/pack but
      // canonical git it turns out is NOT.
      'ofs-delta',
      `agent=${pkg.agent}`,
    ]
  );
  if (relative) capabilities.push('deepen-relative');
  // Start figuring out which oids from the remote we want to request
  const wants = singleBranch ? [oid] : remoteRefs.values();
  // Come up with a reasonable list of oids to tell the remote we already have
  // (preferably oids that are close ancestors of the branch heads we're fetching)
  const haveRefs = singleBranch
    ? [ref]
    : await GitRefManager.listRefs({
        fs,
        gitdir,
        filepath: `refs`,
      });
  let haves = [];
  for (let ref of haveRefs) {
    try {
      ref = await GitRefManager.expand({ fs, gitdir, ref });
      const oid = await GitRefManager.resolve({ fs, gitdir, ref });
      if (await hasObject({ fs, cache, gitdir, oid })) {
        haves.push(oid);
      }
    } catch (err) {
      // Ignore refs that can't be expanded/resolved locally; they simply
      // don't contribute to the 'have' list.
    }
  }
  haves = [...new Set(haves)];
  const oids = await GitShallowManager.read({ fs, gitdir });
  // Only advertise our shallow boundary if the server understands it.
  const shallows = remoteHTTP.capabilities.has('shallow') ? [...oids] : [];
  const packstream = writeUploadPackRequest({
    capabilities,
    wants,
    haves,
    shallows,
    depth,
    since,
    exclude,
  });
  // CodeCommit will hang up if we don't send a Content-Length header
  // so we can't stream the body.
  const packbuffer = Buffer.from(await collect(packstream));
  const raw = await GitRemoteHTTP.connect({
    http,
    onProgress,
    corsProxy,
    service: 'git-upload-pack',
    url,
    auth,
    body: [packbuffer],
    headers,
  });
  const response = await parseUploadPackResponse(raw.body);
  if (raw.headers) {
    response.headers = raw.headers;
  }
  // Apply all the 'shallow' and 'unshallow' commands
  for (const oid of response.shallows) {
    if (!oids.has(oid)) {
      // this is in a try/catch mostly because my old test fixtures are missing objects
      try {
        // server says it's shallow, but do we have the parents?
        const { object } = await _readObject({ fs, cache, gitdir, oid });
        const commit = new GitCommit(object);
        const hasParents = await Promise.all(
          commit
            .headers()
            .parent.map(oid => hasObject({ fs, cache, gitdir, oid }))
        );
        const haveAllParents =
          hasParents.length === 0 || hasParents.every(has => has);
        if (!haveAllParents) {
          oids.add(oid);
        }
      } catch (err) {
        oids.add(oid);
      }
    }
  }
  for (const oid of response.unshallows) {
    oids.delete(oid);
  }
  await GitShallowManager.write({ fs, gitdir, oids });
  // Update local remote refs
  if (singleBranch) {
    const refs = new Map([[fullref, oid]]);
    // But wait, maybe it was a symref, like 'HEAD'!
    // We need to save all the refs in the symref chain (sigh).
    const symrefs = new Map();
    // `bail` caps the chain walk so a symref cycle can't loop forever.
    let bail = 10;
    let key = fullref;
    while (bail--) {
      const value = remoteHTTP.symrefs.get(key);
      if (value === undefined) break
      symrefs.set(key, value);
      key = value;
    }
    // final value must not be a symref but a real ref
    const realRef = remoteRefs.get(key);
    // There may be no ref at all if we've fetched a specific commit hash
    if (realRef) {
      refs.set(key, realRef);
    }
    const { pruned } = await GitRefManager.updateRemoteRefs({
      fs,
      gitdir,
      remote,
      refs,
      symrefs,
      tags,
      prune,
    });
    if (prune) {
      response.pruned = pruned;
    }
  } else {
    const { pruned } = await GitRefManager.updateRemoteRefs({
      fs,
      gitdir,
      remote,
      refs: remoteRefs,
      symrefs: remoteHTTP.symrefs,
      tags,
      prune,
      pruneTags,
    });
    if (prune) {
      response.pruned = pruned;
    }
  }
  // We need this value later for the `clone` command.
  response.HEAD = remoteHTTP.symrefs.get('HEAD');
  // AWS CodeCommit doesn't list HEAD as a symref, but we can reverse engineer it
  // Find the SHA of the branch called HEAD
  if (response.HEAD === undefined) {
    const { oid } = GitRefManager.resolveAgainstMap({
      ref: 'HEAD',
      map: remoteRefs,
    });
    // Use the name of the first branch that's not called HEAD that has
    // the same SHA as the branch called HEAD.
    for (const [key, value] of remoteRefs.entries()) {
      if (key !== 'HEAD' && value === oid) {
        response.HEAD = key;
        break
      }
    }
  }
  const noun = fullref.startsWith('refs/tags') ? 'tag' : 'branch';
  response.FETCH_HEAD = {
    oid,
    description: `${noun} '${abbreviateRef(fullref)}' of ${url}`,
  };

  // Relay the side-band progress/message stream to the caller's callbacks.
  // NOTE(review): this forAwait is intentionally not awaited; progress is
  // consumed concurrently while the packfile is collected below.
  if (onProgress || onMessage) {
    const lines = splitLines(response.progress);
    forAwait(lines, async line => {
      if (onMessage) await onMessage(line);
      if (onProgress) {
        const matches = line.match(/([^:]*).*\((\d+?)\/(\d+?)\)/);
        if (matches) {
          await onProgress({
            phase: matches[1].trim(),
            loaded: parseInt(matches[2], 10),
            total: parseInt(matches[3], 10),
          });
        }
      }
    });
  }
  const packfile = Buffer.from(await collect(response.packfile));
  // The last 20 bytes of a packfile are its SHA-1 trailer.
  const packfileSha = packfile.slice(-20).toString('hex');
  const res = {
    defaultBranch: response.HEAD,
    fetchHead: response.FETCH_HEAD.oid,
    fetchHeadDescription: response.FETCH_HEAD.description,
  };
  if (response.headers) {
    res.headers = response.headers;
  }
  if (prune) {
    res.pruned = response.pruned;
  }
  // This is a quick fix for the empty .git/objects/pack/pack-.pack file error,
  // which due to the way `git-list-pack` works causes the program to hang when it tries to read it.
  // TODO: Longer term, we should actually:
  // a) NOT concatenate the entire packfile into memory (the `collect(response.packfile)` call above),
  // b) compute the SHA of the stream except for the last 20 bytes, using the same library used in push.js, and
  // c) compare the computed SHA with the last 20 bytes of the stream before saving to disk, and throwing a "packfile got corrupted during download" error if the SHA doesn't match.
  if (packfileSha !== '' && !emptyPackfile(packfile)) {
    res.packfile = `objects/pack/pack-${packfileSha}.pack`;
    const fullpath = join(gitdir, res.packfile);
    await fs.write(fullpath, packfile);
    const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid });
    // Build the .idx file locally since the server only sends the .pack.
    const idx = await GitPackIndex.fromPack({
      pack: packfile,
      getExternalRefDelta,
      onProgress,
    });
    await fs.write(fullpath.replace(/\.pack$/, '.idx'), await idx.toBuffer());
  }
  return res
}
7963
7964// @ts-check
7965
7966/**
7967 * Initialize a new repository
7968 *
7969 * @param {object} args
7970 * @param {import('../models/FileSystem.js').FileSystem} args.fs
7971 * @param {string} [args.dir]
7972 * @param {string} [args.gitdir]
7973 * @param {boolean} [args.bare = false]
7974 * @param {string} [args.defaultBranch = 'master']
7975 * @returns {Promise<void>}
7976 */
/**
 * Create the skeleton of a new git repository: standard folders, a minimal
 * `config`, and a HEAD pointing at the (possibly unborn) default branch.
 *
 * Does nothing if a `config` file already exists at the gitdir.
 *
 * @param {object} args
 * @param {import('../models/FileSystem.js').FileSystem} args.fs
 * @param {boolean} [args.bare] - create a bare repository (no working tree)
 * @param {string} [args.dir] - working tree path (ignored when bare)
 * @param {string} [args.gitdir] - git directory path
 * @param {string} [args.defaultBranch] - branch name HEAD will reference
 * @returns {Promise<void>}
 */
async function _init({
  fs,
  bare = false,
  dir,
  gitdir = bare ? dir : join(dir, '.git'),
  defaultBranch = 'master',
}) {
  // Don't overwrite an existing config
  if (await fs.exists(gitdir + '/config')) return

  // Standard directory layout expected by the rest of the library.
  const folderNames = [
    'hooks',
    'info',
    'objects/info',
    'objects/pack',
    'refs/heads',
    'refs/tags',
  ];
  for (const name of folderNames) {
    await fs.mkdir(gitdir + '/' + name);
  }

  await fs.write(
    gitdir + '/config',
    '[core]\n' +
      '\trepositoryformatversion = 0\n' +
      '\tfilemode = false\n' +
      `\tbare = ${bare}\n` +
      (bare ? '' : '\tlogallrefupdates = true\n') +
      '\tsymlinks = false\n' +
      '\tignorecase = true\n'
  );
  await fs.write(gitdir + '/HEAD', `ref: refs/heads/${defaultBranch}\n`);
}
8012
8013// @ts-check
8014
8015/**
8016 * @param {object} args
8017 * @param {import('../models/FileSystem.js').FileSystem} args.fs
8018 * @param {object} args.cache
8019 * @param {HttpClient} args.http
8020 * @param {ProgressCallback} [args.onProgress]
8021 * @param {MessageCallback} [args.onMessage]
8022 * @param {AuthCallback} [args.onAuth]
8023 * @param {AuthFailureCallback} [args.onAuthFailure]
8024 * @param {AuthSuccessCallback} [args.onAuthSuccess]
8025 * @param {string} [args.dir]
8026 * @param {string} args.gitdir
8027 * @param {string} args.url
8028 * @param {string} args.corsProxy
8029 * @param {string} args.ref
8030 * @param {boolean} args.singleBranch
8031 * @param {boolean} args.noCheckout
8032 * @param {boolean} args.noTags
8033 * @param {string} args.remote
8034 * @param {number} args.depth
8035 * @param {Date} args.since
8036 * @param {string[]} args.exclude
8037 * @param {boolean} args.relative
8038 * @param {Object<string, string>} args.headers
8039 *
8040 * @returns {Promise<void>} Resolves successfully when clone completes
8041 *
8042 */
async function _clone({
  fs,
  cache,
  http,
  onProgress,
  onMessage,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  dir,
  gitdir,
  url,
  corsProxy,
  ref,
  remote,
  depth,
  since,
  exclude,
  relative,
  singleBranch,
  noCheckout,
  noTags,
  headers,
}) {
  // Sequence: init -> add remote -> fetch -> checkout. On any failure the
  // partially-created repository is removed (see the catch block below).
  try {
    await _init({ fs, gitdir });
    await _addRemote({ fs, gitdir, remote, url, force: false });
    if (corsProxy) {
      // Persist the CORS proxy in the repo config so later commands use it.
      const config = await GitConfigManager.get({ fs, gitdir });
      await config.set(`http.corsProxy`, corsProxy);
      await GitConfigManager.save({ fs, gitdir, config });
    }
    const { defaultBranch, fetchHead } = await _fetch({
      fs,
      cache,
      http,
      onProgress,
      onMessage,
      onAuth,
      onAuthSuccess,
      onAuthFailure,
      gitdir,
      ref,
      remote,
      corsProxy,
      depth,
      since,
      exclude,
      relative,
      singleBranch,
      headers,
      tags: !noTags,
    });
    // A null fetchHead means the remote repository is empty: nothing to
    // check out.
    if (fetchHead === null) return
    // Fall back to the remote's default branch, stripped to its short name.
    ref = ref || defaultBranch;
    ref = ref.replace('refs/heads/', '');
    // Checkout that branch
    await _checkout({
      fs,
      cache,
      onProgress,
      dir,
      gitdir,
      ref,
      remote,
      noCheckout,
    });
  } catch (err) {
    // Remove partial local repository, see #1283
    // Ignore any error as we are already failing.
    // The catch is necessary so the original error is not masked.
    await fs
      .rmdir(gitdir, { recursive: true, maxRetries: 10 })
      .catch(() => undefined);
    throw err
  }
}
8120
8121// @ts-check
8122
8123/**
8124 * Clone a repository
8125 *
8126 * @param {object} args
8127 * @param {FsClient} args.fs - a file system implementation
8128 * @param {HttpClient} args.http - an HTTP client
8129 * @param {ProgressCallback} [args.onProgress] - optional progress event callback
8130 * @param {MessageCallback} [args.onMessage] - optional message event callback
8131 * @param {AuthCallback} [args.onAuth] - optional auth fill callback
8132 * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
8133 * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
8134 * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
8135 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
8136 * @param {string} args.url - The URL of the remote repository
8137 * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Value is stored in the git config file for that repo.
8138 * @param {string} [args.ref] - Which branch to checkout. By default this is the designated "main branch" of the repository.
8139 * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch.
8140 * @param {boolean} [args.noCheckout = false] - If true, clone will only fetch the repo, not check out a branch. Skipping checkout can save a lot of time normally spent writing files to disk.
8141 * @param {boolean} [args.noTags = false] - By default clone will fetch all tags. `noTags` disables that behavior.
8142 * @param {string} [args.remote = 'origin'] - What to name the remote that is created.
8143 * @param {number} [args.depth] - Integer. Determines how much of the git repository's history to retrieve
8144 * @param {Date} [args.since] - Only fetch commits created after the given date. Mutually exclusive with `depth`.
8145 * @param {string[]} [args.exclude = []] - A list of branches or tags. Instructs the remote server not to send us any commits reachable from these refs.
8146 * @param {boolean} [args.relative = false] - Changes the meaning of `depth` to be measured from the current shallow depth rather than from the branch tip.
8147 * @param {Object<string, string>} [args.headers = {}] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
8148 * @param {object} [args.cache] - a [cache](cache.md) object
8149 *
8150 * @returns {Promise<void>} Resolves successfully when clone completes
8151 *
8152 * @example
8153 * await git.clone({
8154 * fs,
8155 * http,
8156 * dir: '/tutorial',
8157 * corsProxy: 'https://cors.isomorphic-git.org',
8158 * url: 'https://github.com/isomorphic-git/isomorphic-git',
8159 * singleBranch: true,
8160 * depth: 1
8161 * })
8162 * console.log('done')
8163 *
8164 */
async function clone({
  fs,
  http,
  onProgress,
  onMessage,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  dir,
  gitdir = join(dir, '.git'),
  url,
  corsProxy = undefined,
  ref = undefined,
  remote = 'origin',
  depth = undefined,
  since = undefined,
  exclude = [],
  relative = false,
  singleBranch = false,
  noCheckout = false,
  noTags = false,
  headers = {},
  cache = {},
}) {
  try {
    // Validate required parameters; `dir` is only required when a working
    // tree will actually be checked out.
    assertParameter('fs', fs);
    assertParameter('http', http);
    assertParameter('gitdir', gitdir);
    if (!noCheckout) {
      assertParameter('dir', dir);
    }
    assertParameter('url', url);

    // Wrap the caller-supplied fs in the internal FileSystem adapter before
    // delegating to the internal implementation.
    const fileSystem = new FileSystem(fs);
    return await _clone({
      fs: fileSystem,
      cache,
      http,
      onProgress,
      onMessage,
      onAuth,
      onAuthSuccess,
      onAuthFailure,
      dir,
      gitdir,
      url,
      corsProxy,
      ref,
      remote,
      depth,
      since,
      exclude,
      relative,
      singleBranch,
      noCheckout,
      noTags,
      headers,
    })
  } catch (err) {
    // Tag the error with the public API entry point before rethrowing.
    err.caller = 'git.clone';
    throw err
  }
}
8227
8228// @ts-check
8229
8230/**
8231 * Create a new commit
8232 *
8233 * @param {Object} args
8234 * @param {FsClient} args.fs - a file system implementation
8235 * @param {SignCallback} [args.onSign] - a PGP signing implementation
8236 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
8237 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
8238 * @param {string} args.message - The commit message to use.
8239 * @param {Object} [args.author] - The details about the author.
8240 * @param {string} [args.author.name] - Default is `user.name` config.
8241 * @param {string} [args.author.email] - Default is `user.email` config.
8242 * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
8243 * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`.
8244 * @param {Object} [args.committer = author] - The details about the commit committer, in the same format as the author parameter. If not specified, the author details are used.
8245 * @param {string} [args.committer.name] - Default is `user.name` config.
8246 * @param {string} [args.committer.email] - Default is `user.email` config.
8247 * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
8248 * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`.
8249 * @param {string} [args.signingKey] - Sign the tag object using this private PGP key.
8250 * @param {boolean} [args.dryRun = false] - If true, simulates making a commit so you can test whether it would succeed. Implies `noUpdateBranch`.
8251 * @param {boolean} [args.noUpdateBranch = false] - If true, does not update the branch pointer after creating the commit.
8252 * @param {string} [args.ref] - The fully expanded name of the branch to commit to. Default is the current branch pointed to by HEAD. (TODO: fix it so it can expand branch names without throwing if the branch doesn't exist yet.)
8253 * @param {string[]} [args.parent] - The SHA-1 object ids of the commits to use as parents. If not specified, the commit pointed to by `ref` is used.
8254 * @param {string} [args.tree] - The SHA-1 object id of the tree to use. If not specified, a new tree object is created from the current git index.
8255 * @param {object} [args.cache] - a [cache](cache.md) object
8256 *
8257 * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly created commit.
8258 *
8259 * @example
8260 * let sha = await git.commit({
8261 * fs,
8262 * dir: '/tutorial',
8263 * author: {
8264 * name: 'Mr. Test',
8265 * email: 'mrtest@example.com',
8266 * },
8267 * message: 'Added the a.txt file'
8268 * })
8269 * console.log(sha)
8270 *
8271 */
async function commit({
  fs: _fs,
  onSign,
  dir,
  gitdir = join(dir, '.git'),
  message,
  author: _author,
  committer: _committer,
  signingKey,
  dryRun = false,
  noUpdateBranch = false,
  ref,
  parent,
  tree,
  cache = {},
}) {
  try {
    assertParameter('fs', _fs);
    assertParameter('message', message);
    // A signing key is useless without a signing implementation.
    if (signingKey) {
      assertParameter('onSign', onSign);
    }
    const fileSystem = new FileSystem(_fs);

    // Resolve the author from explicit args plus git config; it must at
    // least have a name.
    const author = await normalizeAuthorObject({
      fs: fileSystem,
      gitdir,
      author: _author,
    });
    if (!author) throw new MissingNameError('author')

    // The committer defaults to the author when not provided.
    const committer = await normalizeCommitterObject({
      fs: fileSystem,
      gitdir,
      author,
      committer: _committer,
    });
    if (!committer) throw new MissingNameError('committer')

    return await _commit({
      fs: fileSystem,
      cache,
      onSign,
      gitdir,
      message,
      author,
      committer,
      signingKey,
      dryRun,
      noUpdateBranch,
      ref,
      parent,
      tree,
    })
  } catch (err) {
    // Tag the error with the public API entry point before rethrowing.
    err.caller = 'git.commit';
    throw err
  }
}
8327
8328// @ts-check
8329
8330/**
8331 * Get the name of the branch currently pointed to by .git/HEAD
8332 *
8333 * @param {Object} args
8334 * @param {FsClient} args.fs - a file system implementation
8335 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
8336 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
8337 * @param {boolean} [args.fullname = false] - Return the full path (e.g. "refs/heads/main") instead of the abbreviated form.
8338 * @param {boolean} [args.test = false] - If the current branch doesn't actually exist (such as right after git init) then return `undefined`.
8339 *
8340 * @returns {Promise<string|void>} The name of the current branch or undefined if the HEAD is detached.
8341 *
8342 * @example
8343 * // Get the current branch name
8344 * let branch = await git.currentBranch({
8345 * fs,
8346 * dir: '/tutorial',
8347 * fullname: false
8348 * })
8349 * console.log(branch)
8350 *
8351 */
async function currentBranch({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  fullname = false,
  test = false,
}) {
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    // Delegate to the internal implementation with the wrapped filesystem.
    const options = {
      fs: new FileSystem(fs),
      gitdir,
      fullname,
      test,
    };
    return await _currentBranch(options)
  } catch (err) {
    // Tag the error with the public API entry point before rethrowing.
    err.caller = 'git.currentBranch';
    throw err
  }
}
8373
8374// @ts-check
8375
8376/**
8377 * @param {Object} args
8378 * @param {import('../models/FileSystem.js').FileSystem} args.fs
8379 * @param {string} args.gitdir
8380 * @param {string} args.ref
8381 *
8382 * @returns {Promise<void>}
8383 */
/**
 * Delete a local branch ref, detaching HEAD first if the branch is the one
 * currently checked out.
 *
 * @param {Object} args
 * @param {import('../models/FileSystem.js').FileSystem} args.fs
 * @param {string} args.gitdir
 * @param {string} args.ref - short or fully-qualified branch name
 *
 * @returns {Promise<void>}
 */
async function _deleteBranch({ fs, gitdir, ref }) {
  // Normalize to a fully-qualified refs/heads/ name.
  const branchRef = ref.startsWith('refs/heads/') ? ref : `refs/heads/${ref}`;
  const exists = await GitRefManager.exists({ fs, gitdir, ref: branchRef });
  if (!exists) {
    throw new NotFoundError(branchRef)
  }

  const fullRef = await GitRefManager.expand({ fs, gitdir, ref: branchRef });
  const currentRef = await _currentBranch({ fs, gitdir, fullname: true });
  if (fullRef === currentRef) {
    // detach HEAD: pin it to the commit the branch pointed at so HEAD
    // doesn't dangle after the branch is gone
    const value = await GitRefManager.resolve({ fs, gitdir, ref: fullRef });
    await GitRefManager.writeRef({ fs, gitdir, ref: 'HEAD', value });
  }

  // Delete a specified branch
  await GitRefManager.deleteRef({ fs, gitdir, ref: fullRef });
}
8402
8403// @ts-check
8404
8405/**
8406 * Delete a local branch
8407 *
8408 * > Note: This only deletes loose branches - it should be fixed in the future to delete packed branches as well.
8409 *
8410 * @param {Object} args
8411 * @param {FsClient} args.fs - a file system implementation
8412 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
8413 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
8414 * @param {string} args.ref - The branch to delete
8415 *
8416 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
8417 *
8418 * @example
8419 * await git.deleteBranch({ fs, dir: '/tutorial', ref: 'local-branch' })
8420 * console.log('done')
8421 *
8422 */
async function deleteBranch({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  ref,
}) {
  try {
    assertParameter('fs', fs);
    assertParameter('ref', ref);
    // Delegate to the internal implementation with the wrapped filesystem.
    const fileSystem = new FileSystem(fs);
    return await _deleteBranch({ fs: fileSystem, gitdir, ref })
  } catch (err) {
    // Tag the error with the public API entry point before rethrowing.
    err.caller = 'git.deleteBranch';
    throw err
  }
}
8442
8443// @ts-check
8444
8445/**
8446 * Delete a local ref
8447 *
8448 * @param {Object} args
8449 * @param {FsClient} args.fs - a file system implementation
8450 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
8451 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
8452 * @param {string} args.ref - The ref to delete
8453 *
8454 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
8455 *
8456 * @example
8457 * await git.deleteRef({ fs, dir: '/tutorial', ref: 'refs/tags/test-tag' })
8458 * console.log('done')
8459 *
8460 */
async function deleteRef({ fs, dir, gitdir = join(dir, '.git'), ref }) {
  try {
    assertParameter('fs', fs);
    assertParameter('ref', ref);
    // No internal _deleteRef helper exists; delegate straight to the ref
    // manager with the wrapped filesystem.
    const fileSystem = new FileSystem(fs);
    await GitRefManager.deleteRef({ fs: fileSystem, gitdir, ref });
  } catch (err) {
    // Tag the error with the public API entry point before rethrowing.
    err.caller = 'git.deleteRef';
    throw err
  }
}
8471
8472// @ts-check
8473
8474/**
8475 * @param {Object} args
8476 * @param {import('../models/FileSystem.js').FileSystem} args.fs
8477 * @param {string} args.gitdir
8478 * @param {string} args.remote
8479 *
8480 * @returns {Promise<void>}
8481 */
/**
 * Remove the `[remote "<name>"]` section from the repository config,
 * dropping the remote's url and fetch refspec in one go.
 *
 * @param {Object} args
 * @param {import('../models/FileSystem.js').FileSystem} args.fs
 * @param {string} args.gitdir
 * @param {string} args.remote - name of the remote to remove
 *
 * @returns {Promise<void>}
 */
async function _deleteRemote({ fs, gitdir, remote }) {
  const cfg = await GitConfigManager.get({ fs, gitdir });
  await cfg.deleteSection('remote', remote);
  await GitConfigManager.save({ fs, gitdir, config: cfg });
}
8487
8488// @ts-check
8489
8490/**
8491 * Removes the local config entry for a given remote
8492 *
8493 * @param {Object} args
8494 * @param {FsClient} args.fs - a file system implementation
8495 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
8496 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
8497 * @param {string} args.remote - The name of the remote to delete
8498 *
8499 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
8500 *
8501 * @example
8502 * await git.deleteRemote({ fs, dir: '/tutorial', remote: 'upstream' })
8503 * console.log('done')
8504 *
8505 */
async function deleteRemote({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  remote,
}) {
  try {
    assertParameter('fs', fs);
    assertParameter('remote', remote);
    // Delegate to the internal implementation with the wrapped filesystem.
    const fileSystem = new FileSystem(fs);
    return await _deleteRemote({ fs: fileSystem, gitdir, remote })
  } catch (err) {
    // Tag the error with the public API entry point before rethrowing.
    err.caller = 'git.deleteRemote';
    throw err
  }
}
8525
8526// @ts-check
8527
8528/**
8529 * Delete a local tag ref
8530 *
8531 * @param {Object} args
8532 * @param {import('../models/FileSystem.js').FileSystem} args.fs
8533 * @param {string} args.gitdir
8534 * @param {string} args.ref - The tag to delete
8535 *
8536 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
8537 *
8538 * @example
 * await git.deleteTag({ dir: '/', ref: 'test-tag' })
8540 * console.log('done')
8541 *
8542 */
// Delete a local tag ref, accepting either a short name ("v1.0") or a
// fully-qualified ref ("refs/tags/v1.0").
async function _deleteTag({ fs, gitdir, ref }) {
  // Normalize short tag names into the refs/tags/ namespace.
  const fullRef = ref.startsWith('refs/tags/') ? ref : `refs/tags/${ref}`;
  await GitRefManager.deleteRef({ fs, gitdir, ref: fullRef });
}
8547
8548// @ts-check
8549
8550/**
8551 * Delete a local tag ref
8552 *
8553 * @param {Object} args
8554 * @param {FsClient} args.fs - a file system implementation
8555 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
8556 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
8557 * @param {string} args.ref - The tag to delete
8558 *
8559 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
8560 *
8561 * @example
8562 * await git.deleteTag({ fs, dir: '/tutorial', ref: 'test-tag' })
8563 * console.log('done')
8564 *
8565 */
// Public API wrapper for _deleteTag: validates parameters, wraps the raw fs
// client, and tags errors with the caller name.
async function deleteTag({ fs, dir, gitdir = join(dir, '.git'), ref }) {
  try {
    assertParameter('fs', fs);
    assertParameter('ref', ref);
    const _fs = new FileSystem(fs);
    return await _deleteTag({ fs: _fs, gitdir, ref })
  } catch (err) {
    err.caller = 'git.deleteTag';
    throw err
  }
}
8580
// Find all loose objects whose oid starts with the abbreviated oid `short`.
// Loose objects are stored as objects/<first 2 hex chars>/<remaining chars>.
async function expandOidLoose({ fs, gitdir, oid: short }) {
  const prefix = short.slice(0, 2);
  const suffixes = await fs.readdir(`${gitdir}/objects/${prefix}`);
  const matches = [];
  for (const suffix of suffixes) {
    // Reassemble the full oid before comparing against the abbreviation.
    const candidate = `${prefix}${suffix}`;
    if (candidate.startsWith(short)) matches.push(candidate);
  }
  return matches
}
8588
// Find all packed objects whose oid starts with the abbreviated oid `short`
// by scanning every pack index (.idx) file in objects/pack.
async function expandOidPacked({
  fs,
  cache,
  gitdir,
  oid: short,
  getExternalRefDelta,
}) {
  // Iterate through all the .pack files
  const results = [];
  let list = await fs.readdir(join(gitdir, 'objects/pack'));
  list = list.filter(x => x.endsWith('.idx'));
  for (const filename of list) {
    // FIX: the path must interpolate the current .idx filename; previously the
    // template literal was broken (`$(unknown)`), so the real index file was
    // never read.
    const indexFile = `${gitdir}/objects/pack/${filename}`;
    const p = await readPackIndex({
      fs,
      cache,
      filename: indexFile,
      getExternalRefDelta,
    });
    if (p.error) throw new InternalError(p.error)
    // Search through the list of oids in the packfile
    for (const oid of p.offsets.keys()) {
      if (oid.startsWith(short)) results.push(oid);
    }
  }
  return results
}
8616
// Expand an abbreviated oid to the single full oid it identifies.
// Throws AmbiguousError if more than one object matches, NotFoundError if none.
async function _expandOid({ fs, cache, gitdir, oid: short }) {
  // Curry the current read method so that the packfile un-deltification
  // process can acquire external ref-deltas.
  const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid });

  // The loose and packed lookups are independent, so run them in parallel
  // instead of awaiting them sequentially.
  const [results1, results2] = await Promise.all([
    expandOidLoose({ fs, gitdir, oid: short }),
    expandOidPacked({ fs, cache, gitdir, oid: short, getExternalRefDelta }),
  ]);
  const results = results1.concat(results2);

  if (results.length === 1) {
    return results[0]
  }
  if (results.length > 1) {
    throw new AmbiguousError('oids', short, results)
  }
  throw new NotFoundError(`an object matching "${short}"`)
}
8640
8641// @ts-check
8642
8643/**
8644 * Expand and resolve a short oid into a full oid
8645 *
8646 * @param {Object} args
8647 * @param {FsClient} args.fs - a file system implementation
8648 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
8649 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
8650 * @param {string} args.oid - The shortened oid prefix to expand (like "0414d2a")
8651 * @param {object} [args.cache] - a [cache](cache.md) object
8652 *
8653 * @returns {Promise<string>} Resolves successfully with the full oid (like "0414d2a286d7bbc7a4a326a61c1f9f888a8ab87f")
8654 *
8655 * @example
8656 * let oid = await git.expandOid({ fs, dir: '/tutorial', oid: '0414d2a'})
8657 * console.log(oid)
8658 *
8659 */
// Public API wrapper for _expandOid: validates parameters, wraps the raw fs
// client, and tags errors with the caller name.
async function expandOid({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  oid,
  cache = {},
}) {
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('oid', oid);
    const _fs = new FileSystem(fs);
    return await _expandOid({ fs: _fs, cache, gitdir, oid })
  } catch (err) {
    err.caller = 'git.expandOid';
    throw err
  }
}
8682
8683// @ts-check
8684
8685/**
8686 * Expand an abbreviated ref to its full name
8687 *
8688 * @param {Object} args
8689 * @param {FsClient} args.fs - a file system implementation
8690 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
8691 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
8692 * @param {string} args.ref - The ref to expand (like "v1.0.0")
8693 *
8694 * @returns {Promise<string>} Resolves successfully with a full ref name ("refs/tags/v1.0.0")
8695 *
8696 * @example
8697 * let fullRef = await git.expandRef({ fs, dir: '/tutorial', ref: 'main'})
8698 * console.log(fullRef)
8699 *
8700 */
// Public API wrapper around GitRefManager.expand: validates parameters and
// tags errors with the caller name.
async function expandRef({ fs, dir, gitdir = join(dir, '.git'), ref }) {
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('ref', ref);
    const _fs = new FileSystem(fs);
    return await GitRefManager.expand({ fs: _fs, gitdir, ref })
  } catch (err) {
    err.caller = 'git.expandRef';
    throw err
  }
}
8716
8717// @ts-check
8718
8719/**
8720 * @param {object} args
8721 * @param {import('../models/FileSystem.js').FileSystem} args.fs
8722 * @param {any} args.cache
8723 * @param {string} args.gitdir
8724 * @param {string[]} args.oids
8725 *
8726 */
// Find the merge base(s) of a set of commits by walking ancestors from each
// commit simultaneously and returning the first commits visited by all walkers.
async function _findMergeBase({ fs, cache, gitdir, oids }) {
  // Note: right now, the tests are geared so that the output should match that of
  // `git merge-base --all --octopus`
  // because without the --octopus flag, git's output seems to depend on the ORDER of the oids,
  // and computing virtual merge bases is just too much for me to fathom right now.

  // If we start N independent walkers, one at each of the given `oids`, and walk backwards
  // through ancestors, eventually we'll discover a commit where each one of these N walkers
  // has passed through. So we just need to keep track of which walkers have visited each commit
  // until we find a commit that N distinct walkers has visited.
  // `visits` maps commit oid -> Set of walker indices that have reached it.
  const visits = {};
  const passes = oids.length;
  // Each head carries the walker index it belongs to, so visits can be
  // attributed per-walker even after heads merge paths.
  let heads = oids.map((oid, index) => ({ index, oid }));
  while (heads.length) {
    // Count how many times we've passed each commit
    const result = new Set();
    for (const { oid, index } of heads) {
      if (!visits[oid]) visits[oid] = new Set();
      visits[oid].add(index);
      if (visits[oid].size === passes) {
        result.add(oid);
      }
    }
    if (result.size > 0) {
      return [...result]
    }
    // We haven't found a common ancestor yet
    // Keyed by "oid:index" so the same parent can be queued once per walker
    // while duplicates within a single walker are collapsed.
    const newheads = new Map();
    for (const { oid, index } of heads) {
      try {
        const { object } = await _readObject({ fs, cache, gitdir, oid });
        const commit = GitCommit.from(object);
        const { parent } = commit.parseHeaders();
        for (const oid of parent) {
          // Only advance to a parent this walker has not already visited.
          if (!visits[oid] || !visits[oid].has(index)) {
            newheads.set(oid + ':' + index, { oid, index });
          }
        }
      } catch (err) {
        // do nothing
        // NOTE(review): unreadable objects silently end that walker's path —
        // presumably to tolerate missing objects (e.g. shallow clones); confirm.
      }
    }
    heads = Array.from(newheads.values());
  }
  // No common ancestor reachable from all of the given oids.
  return []
}
8773
// Matches each line of a string, keeping its trailing line terminator (or the
// final empty match at end-of-string), so lines can be re-joined losslessly.
const LINEBREAKS = /^.*(\r?\n|$)/gm;

// Default merge driver: performs a 3-way text merge with diff3, producing
// git-style conflict markers when hunks collide.
// `branches` and `contents` are both ordered [base, ours, theirs].
function mergeFile({ branches, contents }) {
  const [, ourName, theirName] = branches;
  const [baseContent, ourContent, theirContent] = contents;

  // Split each version into terminator-preserving lines.
  const ours = ourContent.match(LINEBREAKS);
  const base = baseContent.match(LINEBREAKS);
  const theirs = theirContent.match(LINEBREAKS);

  // Here we let the diff3 library do the heavy lifting.
  const result = diff3Merge(ours, base, theirs);

  // Width of the <<<<<<< / ======= / >>>>>>> conflict markers.
  const markerSize = 7;

  // Note whether there are conflicts while formatting the merged output.
  let mergedText = '';
  let cleanMerge = true;

  for (const { ok, conflict } of result) {
    if (ok) {
      mergedText += ok.join('');
    }
    if (conflict) {
      cleanMerge = false;
      mergedText += `${'<'.repeat(markerSize)} ${ourName}\n`;
      mergedText += conflict.a.join('');

      mergedText += `${'='.repeat(markerSize)}\n`;
      mergedText += conflict.b.join('');
      mergedText += `${'>'.repeat(markerSize)} ${theirName}\n`;
    }
  }
  return { cleanMerge, mergedText }
}
8813
8814// @ts-check
8815
8816/**
8817 * Create a merged tree
8818 *
8819 * @param {Object} args
8820 * @param {import('../models/FileSystem.js').FileSystem} args.fs
8821 * @param {object} args.cache
8822 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
8823 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
8824 * @param {string} args.ourOid - The SHA-1 object id of our tree
8825 * @param {string} args.baseOid - The SHA-1 object id of the base tree
8826 * @param {string} args.theirOid - The SHA-1 object id of their tree
8827 * @param {string} [args.ourName='ours'] - The name to use in conflicted files for our hunks
8828 * @param {string} [args.baseName='base'] - The name to use in conflicted files (in diff3 format) for the base hunks
8829 * @param {string} [args.theirName='theirs'] - The name to use in conflicted files for their hunks
8830 * @param {boolean} [args.dryRun=false]
8831 * @param {boolean} [args.abortOnConflict=false]
8832 * @param {MergeDriverCallback} [args.mergeDriver]
8833 *
8834 * @returns {Promise<string>} - The SHA-1 object id of the merged tree
8835 *
8836 */
// Perform a 3-way tree merge of ourOid/baseOid/theirOid, writing merged blobs
// and trees to the object store. Returns the merged tree's oid, or a
// MergeConflictError *value* (not thrown) listing the conflicted filepaths.
// `index` is the open GitIndex used to stage conflict entries (stages 1-3).
async function mergeTree({
  fs,
  cache,
  dir,
  gitdir = join(dir, '.git'),
  index,
  ourOid,
  baseOid,
  theirOid,
  ourName = 'ours',
  baseName = 'base',
  theirName = 'theirs',
  dryRun = false,
  abortOnConflict = true,
  mergeDriver,
}) {
  const ourTree = TREE({ ref: ourOid });
  const baseTree = TREE({ ref: baseOid });
  const theirTree = TREE({ ref: theirOid });

  // Filepaths whose blob merge produced conflicts.
  const unmergedFiles = [];

  const results = await _walk({
    fs,
    cache,
    dir,
    gitdir,
    trees: [ourTree, baseTree, theirTree],
    // Decide, per path, which side's entry survives the merge.
    map: async function(filepath, [ours, base, theirs]) {
      const path = basename(filepath);
      // What we did, what they did
      const ourChange = await modified(ours, base);
      const theirChange = await modified(theirs, base);
      switch (`${ourChange}-${theirChange}`) {
        // Neither side changed: keep the base entry.
        case 'false-false': {
          return {
            mode: await base.mode(),
            path,
            oid: await base.oid(),
            type: await base.type(),
          }
        }
        // Only they changed: take their entry (undefined if they deleted it).
        case 'false-true': {
          return theirs
            ? {
                mode: await theirs.mode(),
                path,
                oid: await theirs.oid(),
                type: await theirs.type(),
              }
            : undefined
        }
        // Only we changed: take our entry (undefined if we deleted it).
        case 'true-false': {
          return ours
            ? {
                mode: await ours.mode(),
                path,
                oid: await ours.oid(),
                type: await ours.type(),
              }
            : undefined
        }
        // Both sides changed: attempt a content-level blob merge.
        case 'true-true': {
          // Modifications
          if (
            ours &&
            base &&
            theirs &&
            (await ours.type()) === 'blob' &&
            (await base.type()) === 'blob' &&
            (await theirs.type()) === 'blob'
          ) {
            return mergeBlobs({
              fs,
              gitdir,
              path,
              ours,
              base,
              theirs,
              ourName,
              baseName,
              theirName,
              mergeDriver,
            }).then(async r => {
              if (!r.cleanMerge) {
                unmergedFiles.push(filepath);
                if (!abortOnConflict) {
                  // Stage the three versions (base/ours/theirs) as the
                  // conventional conflict stages 1/2/3 in the index.
                  const baseOid = await base.oid();
                  const ourOid = await ours.oid();
                  const theirOid = await theirs.oid();

                  index.delete({ filepath });

                  index.insert({ filepath, oid: baseOid, stage: 1 });
                  index.insert({ filepath, oid: ourOid, stage: 2 });
                  index.insert({ filepath, oid: theirOid, stage: 3 });
                }
              } else if (!abortOnConflict) {
                index.insert({ filepath, oid: r.mergeResult.oid, stage: 0 });
              }
              return r.mergeResult
            })
          }
          // all other types of conflicts fail
          // TODO: Merge conflicts involving deletions/additions
          throw new MergeNotSupportedError()
        }
      }
    },
    /**
     * @param {TreeEntry} [parent]
     * @param {Array<TreeEntry>} children
     */
    // NOTE(review): this ternary is evaluated when _walk's arguments are
    // built, i.e. before any map callback runs — unmergedFiles is always
    // empty here, so the reducer branch is always chosen. Confirm intent.
    reduce:
      unmergedFiles.length !== 0 && (!dir || abortOnConflict)
        ? undefined
        : async (parent, children) => {
            const entries = children.filter(Boolean); // remove undefineds

            // if the parent was deleted, the children have to go
            if (!parent) return

            // automatically delete directories if they have been emptied
            if (parent && parent.type === 'tree' && entries.length === 0) return

            if (entries.length > 0) {
              // Write the merged subtree and point the parent entry at it.
              const tree = new GitTree(entries);
              const object = tree.toObject();
              const oid = await _writeObject({
                fs,
                gitdir,
                type: 'tree',
                object,
                dryRun,
              });
              parent.oid = oid;
            }
            return parent
          },
  });

  if (unmergedFiles.length !== 0) {
    // With a working tree and abortOnConflict disabled, materialize the
    // merged result (including conflict markers) into the working directory.
    if (dir && !abortOnConflict) {
      await _walk({
        fs,
        cache,
        dir,
        gitdir,
        trees: [TREE({ ref: results.oid })],
        map: async function(filepath, [entry]) {
          const path = `${dir}/${filepath}`;
          if ((await entry.type()) === 'blob') {
            const mode = await entry.mode();
            const content = new TextDecoder().decode(await entry.content());
            await fs.write(path, content, { mode });
          }
          return true
        },
      });
    }
    // Returned (not thrown) so the caller can release the index lock first.
    return new MergeConflictError(unmergedFiles)
  }

  return results.oid
}
9002
9003/**
9004 *
9005 * @param {Object} args
9006 * @param {import('../models/FileSystem').FileSystem} args.fs
9007 * @param {string} args.gitdir
9008 * @param {string} args.path
9009 * @param {WalkerEntry} args.ours
9010 * @param {WalkerEntry} args.base
9011 * @param {WalkerEntry} args.theirs
9012 * @param {string} [args.ourName]
9013 * @param {string} [args.baseName]
9014 * @param {string} [args.theirName]
9015 * @param {boolean} [args.dryRun = false]
9016 * @param {MergeDriverCallback} [args.mergeDriver]
9017 *
9018 */
// Merge two changed versions of a blob against their common base.
// Returns { cleanMerge, mergeResult: { mode, path, oid, type } }; when the
// contents diverge, delegates to `mergeDriver` (diff3-style by default) and
// writes the merged blob to the object store.
async function mergeBlobs({
  fs,
  gitdir,
  path,
  ours,
  base,
  theirs,
  ourName,
  theirName,
  baseName,
  dryRun,
  mergeDriver = mergeFile,
}) {
  const type = 'blob';
  // Compute the new mode.
  // Since there are ONLY two valid blob modes ('100755' and '100644') it boils down to this
  const mode =
    (await base.mode()) === (await ours.mode())
      ? await theirs.mode()
      : await ours.mode();
  // The trivial case: nothing to merge except maybe mode
  if ((await ours.oid()) === (await theirs.oid())) {
    return {
      cleanMerge: true,
      mergeResult: { mode, path, oid: await ours.oid(), type },
    }
  }
  // if only one side made oid changes, return that side's oid
  if ((await ours.oid()) === (await base.oid())) {
    return {
      cleanMerge: true,
      mergeResult: { mode, path, oid: await theirs.oid(), type },
    }
  }
  if ((await theirs.oid()) === (await base.oid())) {
    return {
      cleanMerge: true,
      mergeResult: { mode, path, oid: await ours.oid(), type },
    }
  }
  // if both sides made changes do a merge
  // Contents are decoded as UTF-8 text; the driver sees [base, ours, theirs].
  const ourContent = Buffer.from(await ours.content()).toString('utf8');
  const baseContent = Buffer.from(await base.content()).toString('utf8');
  const theirContent = Buffer.from(await theirs.content()).toString('utf8');
  const { mergedText, cleanMerge } = await mergeDriver({
    branches: [baseName, ourName, theirName],
    contents: [baseContent, ourContent, theirContent],
    path,
  });
  // Persist the merged text (possibly containing conflict markers) as a blob.
  const oid = await _writeObject({
    fs,
    gitdir,
    type: 'blob',
    object: Buffer.from(mergedText, 'utf8'),
    dryRun,
  });

  return { cleanMerge, mergeResult: { mode, path, oid, type } }
}
9078
9079// @ts-check
9080
9081// import diff3 from 'node-diff3'
9082/**
9083 *
9084 * @typedef {Object} MergeResult - Returns an object with a schema like this:
9085 * @property {string} [oid] - The SHA-1 object id that is now at the head of the branch. Absent only if `dryRun` was specified and `mergeCommit` is true.
9086 * @property {boolean} [alreadyMerged] - True if the branch was already merged so no changes were made
9087 * @property {boolean} [fastForward] - True if it was a fast-forward merge
9088 * @property {boolean} [mergeCommit] - True if merge resulted in a merge commit
9089 * @property {string} [tree] - The SHA-1 object id of the tree resulting from a merge commit
9090 *
9091 */
9092
9093/**
9094 * @param {object} args
9095 * @param {import('../models/FileSystem.js').FileSystem} args.fs
9096 * @param {object} args.cache
9097 * @param {string} args.gitdir
9098 * @param {string} [args.ours]
9099 * @param {string} args.theirs
9100 * @param {boolean} args.fastForward
9101 * @param {boolean} args.fastForwardOnly
9102 * @param {boolean} args.dryRun
9103 * @param {boolean} args.noUpdateBranch
9104 * @param {boolean} args.abortOnConflict
9105 * @param {string} [args.message]
9106 * @param {Object} args.author
9107 * @param {string} args.author.name
9108 * @param {string} args.author.email
9109 * @param {number} args.author.timestamp
9110 * @param {number} args.author.timezoneOffset
9111 * @param {Object} args.committer
9112 * @param {string} args.committer.name
9113 * @param {string} args.committer.email
9114 * @param {number} args.committer.timestamp
9115 * @param {number} args.committer.timezoneOffset
9116 * @param {string} [args.signingKey]
9117 * @param {SignCallback} [args.onSign] - a PGP signing implementation
9118 * @param {MergeDriverCallback} [args.mergeDriver]
9119 *
9120 * @returns {Promise<MergeResult>} Resolves to a description of the merge operation
9121 *
9122 */
// Merge `theirs` into `ours`. Handles the already-merged and fast-forward
// cases directly; otherwise performs a tree merge and creates a merge commit.
async function _merge({
  fs,
  cache,
  dir,
  gitdir,
  ours,
  theirs,
  fastForward = true,
  fastForwardOnly = false,
  dryRun = false,
  noUpdateBranch = false,
  abortOnConflict = true,
  message,
  author,
  committer,
  signingKey,
  onSign,
  mergeDriver,
}) {
  // Default to merging into the currently checked-out branch.
  if (ours === undefined) {
    ours = await _currentBranch({ fs, gitdir, fullname: true });
  }
  // Expand abbreviated refs to full names, then resolve both to commit oids.
  ours = await GitRefManager.expand({
    fs,
    gitdir,
    ref: ours,
  });
  theirs = await GitRefManager.expand({
    fs,
    gitdir,
    ref: theirs,
  });
  const ourOid = await GitRefManager.resolve({
    fs,
    gitdir,
    ref: ours,
  });
  const theirOid = await GitRefManager.resolve({
    fs,
    gitdir,
    ref: theirs,
  });
  // find most recent common ancestor of ref a and ref b
  const baseOids = await _findMergeBase({
    fs,
    cache,
    gitdir,
    oids: [ourOid, theirOid],
  });
  // Zero or multiple merge bases would require a recursive merge strategy.
  if (baseOids.length !== 1) {
    // TODO: Recursive Merge strategy
    throw new MergeNotSupportedError()
  }
  const baseOid = baseOids[0];
  // handle fast-forward case
  if (baseOid === theirOid) {
    // `theirs` is an ancestor of `ours`: nothing to do.
    return {
      oid: ourOid,
      alreadyMerged: true,
    }
  }
  if (fastForward && baseOid === ourOid) {
    // `ours` is an ancestor of `theirs`: just move the branch pointer.
    if (!dryRun && !noUpdateBranch) {
      await GitRefManager.writeRef({ fs, gitdir, ref: ours, value: theirOid });
    }
    return {
      oid: theirOid,
      fastForward: true,
    }
  } else {
    // not a simple fast-forward
    if (fastForwardOnly) {
      throw new FastForwardError()
    }
    // try a fancier merge
    // The index lock is held for the duration of the tree merge so conflict
    // stages can be written atomically.
    const tree = await GitIndexManager.acquire(
      { fs, gitdir, cache, allowUnmerged: false },
      async index => {
        return mergeTree({
          fs,
          cache,
          dir,
          gitdir,
          index,
          ourOid,
          theirOid,
          baseOid,
          ourName: abbreviateRef(ours),
          baseName: 'base',
          theirName: abbreviateRef(theirs),
          dryRun,
          abortOnConflict,
          mergeDriver,
        })
      }
    );

    // Defer throwing error until the index lock is relinquished and index is
    // written to filesystem
    if (tree instanceof MergeConflictError) throw tree

    if (!message) {
      message = `Merge branch '${abbreviateRef(theirs)}' into ${abbreviateRef(
        ours
      )}`;
    }
    // Record the merge commit with both parents, updating `ours` unless
    // dryRun/noUpdateBranch suppress it.
    const oid = await _commit({
      fs,
      cache,
      gitdir,
      message,
      ref: ours,
      tree,
      parent: [ourOid, theirOid],
      author,
      committer,
      signingKey,
      onSign,
      dryRun,
      noUpdateBranch,
    });
    return {
      oid,
      tree,
      mergeCommit: true,
    }
  }
}
9251
9252// @ts-check
9253
9254/**
9255 * @param {object} args
9256 * @param {import('../models/FileSystem.js').FileSystem} args.fs
9257 * @param {object} args.cache
9258 * @param {HttpClient} args.http
9259 * @param {ProgressCallback} [args.onProgress]
9260 * @param {MessageCallback} [args.onMessage]
9261 * @param {AuthCallback} [args.onAuth]
9262 * @param {AuthFailureCallback} [args.onAuthFailure]
9263 * @param {AuthSuccessCallback} [args.onAuthSuccess]
9264 * @param {string} args.dir
9265 * @param {string} args.gitdir
9266 * @param {string} args.ref
9267 * @param {string} [args.url]
9268 * @param {string} [args.remote]
9269 * @param {string} [args.remoteRef]
9270 * @param {boolean} [args.prune]
9271 * @param {boolean} [args.pruneTags]
9272 * @param {string} [args.corsProxy]
9273 * @param {boolean} args.singleBranch
9274 * @param {boolean} args.fastForward
9275 * @param {boolean} args.fastForwardOnly
9276 * @param {Object<string, string>} [args.headers]
9277 * @param {Object} args.author
9278 * @param {string} args.author.name
9279 * @param {string} args.author.email
9280 * @param {number} args.author.timestamp
9281 * @param {number} args.author.timezoneOffset
9282 * @param {Object} args.committer
9283 * @param {string} args.committer.name
9284 * @param {string} args.committer.email
9285 * @param {number} args.committer.timestamp
9286 * @param {number} args.committer.timezoneOffset
9287 * @param {string} [args.signingKey]
9288 *
9289 * @returns {Promise<void>} Resolves successfully when pull operation completes
9290 *
9291 */
// Pull = fetch + merge + checkout: fetch the remote ref, merge it into the
// local branch, then update the working tree.
async function _pull({
  fs,
  cache,
  http,
  onProgress,
  onMessage,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  dir,
  gitdir,
  ref,
  url,
  remote,
  remoteRef,
  prune,
  pruneTags,
  fastForward,
  fastForwardOnly,
  corsProxy,
  singleBranch,
  headers,
  author,
  committer,
  signingKey,
}) {
  try {
    // If ref is undefined, use 'HEAD'
    if (!ref) {
      const head = await _currentBranch({ fs, gitdir });
      // TODO: use a better error.
      if (!head) {
        throw new MissingParameterError('ref')
      }
      ref = head;
    }

    // Download the remote objects and learn which commit was fetched.
    const { fetchHead, fetchHeadDescription } = await _fetch({
      fs,
      cache,
      http,
      onProgress,
      onMessage,
      onAuth,
      onAuthSuccess,
      onAuthFailure,
      gitdir,
      corsProxy,
      ref,
      url,
      remote,
      remoteRef,
      singleBranch,
      headers,
      prune,
      pruneTags,
    });
    // Merge the remote tracking branch into the local one.
    await _merge({
      fs,
      cache,
      gitdir,
      ours: ref,
      theirs: fetchHead,
      fastForward,
      fastForwardOnly,
      message: `Merge ${fetchHeadDescription}`,
      author,
      committer,
      signingKey,
      dryRun: false,
      noUpdateBranch: false,
    });
    // Update the working tree to reflect the merge result.
    await _checkout({
      fs,
      cache,
      onProgress,
      dir,
      gitdir,
      ref,
      remote,
      noCheckout: false,
    });
  } catch (err) {
    err.caller = 'git.pull';
    throw err
  }
}
9380
9381// @ts-check
9382
9383/**
9384 * Like `pull`, but hard-coded with `fastForward: true` so there is no need for an `author` parameter.
9385 *
9386 * @param {object} args
9387 * @param {FsClient} args.fs - a file system client
9388 * @param {HttpClient} args.http - an HTTP client
9389 * @param {ProgressCallback} [args.onProgress] - optional progress event callback
9390 * @param {MessageCallback} [args.onMessage] - optional message event callback
9391 * @param {AuthCallback} [args.onAuth] - optional auth fill callback
9392 * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
9393 * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
 * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
9395 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
9396 * @param {string} [args.ref] - Which branch to merge into. By default this is the currently checked out branch.
9397 * @param {string} [args.url] - (Added in 1.1.0) The URL of the remote repository. The default is the value set in the git config for that remote.
9398 * @param {string} [args.remote] - (Added in 1.1.0) If URL is not specified, determines which remote to use.
9399 * @param {string} [args.remoteRef] - (Added in 1.1.0) The name of the branch on the remote to fetch. By default this is the configured remote tracking branch.
9400 * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
9401 * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch.
9402 * @param {Object<string, string>} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
9403 * @param {object} [args.cache] - a [cache](cache.md) object
9404 *
9405 * @returns {Promise<void>} Resolves successfully when pull operation completes
9406 *
9407 * @example
9408 * await git.fastForward({
9409 * fs,
9410 * http,
9411 * dir: '/tutorial',
9412 * ref: 'main',
9413 * singleBranch: true
9414 * })
9415 * console.log('done')
9416 *
9417 */
// Public API: a pull restricted to fast-forward merges. Because a
// fast-forward never creates a commit, no author/committer is required.
async function fastForward({
  fs,
  http,
  onProgress,
  onMessage,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  dir,
  gitdir = join(dir, '.git'),
  ref,
  url,
  remote,
  remoteRef,
  corsProxy,
  singleBranch,
  headers = {},
  cache = {},
}) {
  try {
    assertParameter('fs', fs);
    assertParameter('http', http);
    assertParameter('gitdir', gitdir);

    // _pull's signature requires identities, but with fastForwardOnly no
    // commit is ever created, so this placeholder is never used.
    const unusedIdentity = {
      name: '',
      email: '',
      timestamp: Date.now(),
      timezoneOffset: 0,
    };

    const _fs = new FileSystem(fs);
    return await _pull({
      fs: _fs,
      cache,
      http,
      onProgress,
      onMessage,
      onAuth,
      onAuthSuccess,
      onAuthFailure,
      dir,
      gitdir,
      ref,
      url,
      remote,
      remoteRef,
      fastForwardOnly: true,
      corsProxy,
      singleBranch,
      headers,
      author: unusedIdentity,
      committer: unusedIdentity,
    })
  } catch (err) {
    err.caller = 'git.fastForward';
    throw err
  }
}
9476
9477// @ts-check
9478
9479/**
9480 *
9481 * @typedef {object} FetchResult - The object returned has the following schema:
9482 * @property {string | null} defaultBranch - The branch that is cloned if no branch is specified
9483 * @property {string | null} fetchHead - The SHA-1 object id of the fetched head commit
9484 * @property {string | null} fetchHeadDescription - a textual description of the branch that was fetched
9485 * @property {Object<string, string>} [headers] - The HTTP response headers returned by the git server
9486 * @property {string[]} [pruned] - A list of branches that were pruned, if you provided the `prune` parameter
9487 *
9488 */
9489
9490/**
9491 * Fetch commits from a remote repository
9492 *
9493 * @param {object} args
9494 * @param {FsClient} args.fs - a file system client
9495 * @param {HttpClient} args.http - an HTTP client
9496 * @param {ProgressCallback} [args.onProgress] - optional progress event callback
9497 * @param {MessageCallback} [args.onMessage] - optional message event callback
9498 * @param {AuthCallback} [args.onAuth] - optional auth fill callback
9499 * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
9500 * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
9501 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
9502 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
9503 * @param {string} [args.url] - The URL of the remote repository. The default is the value set in the git config for that remote.
9504 * @param {string} [args.remote] - If URL is not specified, determines which remote to use.
9505 * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch.
9506 * @param {string} [args.ref] - Which branch to fetch if `singleBranch` is true. By default this is the current branch or the remote's default branch.
9507 * @param {string} [args.remoteRef] - The name of the branch on the remote to fetch if `singleBranch` is true. By default this is the configured remote tracking branch.
9508 * @param {boolean} [args.tags = false] - Also fetch tags
9509 * @param {number} [args.depth] - Integer. Determines how much of the git repository's history to retrieve
9510 * @param {boolean} [args.relative = false] - Changes the meaning of `depth` to be measured from the current shallow depth rather than from the branch tip.
9511 * @param {Date} [args.since] - Only fetch commits created after the given date. Mutually exclusive with `depth`.
9512 * @param {string[]} [args.exclude = []] - A list of branches or tags. Instructs the remote server not to send us any commits reachable from these refs.
9513 * @param {boolean} [args.prune = false] - Delete local remote-tracking branches that are not present on the remote
9514 * @param {boolean} [args.pruneTags = false] - Prune local tags that don’t exist on the remote, and force-update those tags that differ
9515 * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
9516 * @param {Object<string, string>} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
9517 * @param {object} [args.cache] - a [cache](cache.md) object
9518 *
9519 * @returns {Promise<FetchResult>} Resolves successfully when fetch completes
9520 * @see FetchResult
9521 *
9522 * @example
9523 * let result = await git.fetch({
9524 * fs,
9525 * http,
9526 * dir: '/tutorial',
9527 * corsProxy: 'https://cors.isomorphic-git.org',
9528 * url: 'https://github.com/isomorphic-git/isomorphic-git',
9529 * ref: 'main',
9530 * depth: 1,
9531 * singleBranch: true,
9532 * tags: false
9533 * })
9534 * console.log(result)
9535 *
9536 */
// Public API wrapper for _fetch: validates parameters, wraps the raw fs
// client, and tags errors with the caller name.
async function fetch({
  fs,
  http,
  onProgress,
  onMessage,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  dir,
  gitdir = join(dir, '.git'),
  ref,
  remote,
  remoteRef,
  url,
  corsProxy,
  depth = null,
  since = null,
  exclude = [],
  relative = false,
  tags = false,
  singleBranch = false,
  headers = {},
  prune = false,
  pruneTags = false,
  cache = {},
}) {
  try {
    assertParameter('fs', fs);
    assertParameter('http', http);
    assertParameter('gitdir', gitdir);

    const _fs = new FileSystem(fs);
    return await _fetch({
      fs: _fs,
      cache,
      http,
      onProgress,
      onMessage,
      onAuth,
      onAuthSuccess,
      onAuthFailure,
      gitdir,
      ref,
      remote,
      remoteRef,
      url,
      corsProxy,
      depth,
      since,
      exclude,
      relative,
      tags,
      singleBranch,
      headers,
      prune,
      pruneTags,
    })
  } catch (err) {
    err.caller = 'git.fetch';
    throw err
  }
}
9598
9599// @ts-check
9600
9601/**
9602 * Find the merge base for a set of commits
9603 *
9604 * @param {object} args
9605 * @param {FsClient} args.fs - a file system client
9606 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
9607 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
9608 * @param {string[]} args.oids - Which commits
9609 * @param {object} [args.cache] - a [cache](cache.md) object
9610 *
9611 */
async function findMergeBase({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  oids,
  cache = {},
}) {
  // Validate the required arguments, then delegate to the internal command.
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('oids', oids);

    const fileSystem = new FileSystem(fs);
    return await _findMergeBase({ fs: fileSystem, cache, gitdir, oids })
  } catch (err) {
    err.caller = 'git.findMergeBase';
    throw err
  }
}
9635
9636// @ts-check
9637
9638/**
9639 * Find the root git directory
9640 *
9641 * Starting at `filepath`, walks upward until it finds a directory that contains a subdirectory called '.git'.
9642 *
9643 * @param {Object} args
9644 * @param {import('../models/FileSystem.js').FileSystem} args.fs
9645 * @param {string} args.filepath
9646 *
9647 * @returns {Promise<string>} Resolves successfully with a root git directory path
9648 */
async function _findRoot({ fs, filepath }) {
  // Iteratively walk up the directory tree until a `.git` entry is found.
  let current = filepath;
  for (;;) {
    if (await fs.exists(join(current, '.git'))) {
      return current
    }
    const parent = dirname(current);
    // At the filesystem root, dirname is a fixed point; there is no repo.
    if (parent === current) {
      throw new NotFoundError(`git root for ${current}`)
    }
    current = parent;
  }
}
9660
9661// @ts-check
9662
9663/**
9664 * Find the root git directory
9665 *
9666 * Starting at `filepath`, walks upward until it finds a directory that contains a subdirectory called '.git'.
9667 *
9668 * @param {Object} args
9669 * @param {FsClient} args.fs - a file system client
9670 * @param {string} args.filepath - The file directory to start searching in.
9671 *
9672 * @returns {Promise<string>} Resolves successfully with a root git directory path
9673 * @throws {NotFoundError}
9674 *
9675 * @example
9676 * let gitroot = await git.findRoot({
9677 * fs,
9678 * filepath: '/tutorial/src/utils'
9679 * })
9680 * console.log(gitroot)
9681 *
9682 */
async function findRoot({ fs, filepath }) {
  // Public wrapper around `_findRoot` with argument validation and
  // caller tagging on errors.
  try {
    assertParameter('fs', fs);
    assertParameter('filepath', filepath);

    const fileSystem = new FileSystem(fs);
    return await _findRoot({ fs: fileSystem, filepath })
  } catch (err) {
    err.caller = 'git.findRoot';
    throw err
  }
}
9694
9695// @ts-check
9696
9697/**
9698 * Read an entry from the git config files.
9699 *
9700 * *Caveats:*
9701 * - Currently only the local `$GIT_DIR/config` file can be read or written. However support for the global `~/.gitconfig` and system `$(prefix)/etc/gitconfig` will be added in the future.
9702 * - The current parser does not support the more exotic features of the git-config file format such as `[include]` and `[includeIf]`.
9703 *
9704 * @param {Object} args
9705 * @param {FsClient} args.fs - a file system implementation
9706 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
9707 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
9708 * @param {string} args.path - The key of the git config entry
9709 *
9710 * @returns {Promise<any>} Resolves with the config value
9711 *
9712 * @example
9713 * // Read config value
9714 * let value = await git.getConfig({
9715 * fs,
9716 * dir: '/tutorial',
9717 * path: 'remote.origin.url'
9718 * })
9719 * console.log(value)
9720 *
9721 */
async function getConfig({ fs, dir, gitdir = join(dir, '.git'), path }) {
  // Read a single config value for `path` from `$GIT_DIR/config`.
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('path', path);

    const fileSystem = new FileSystem(fs);
    return await _getConfig({ fs: fileSystem, gitdir, path })
  } catch (err) {
    err.caller = 'git.getConfig';
    throw err
  }
}
9738
9739// @ts-check
9740
9741/**
9742 * @param {Object} args
9743 * @param {import('../models/FileSystem.js').FileSystem} args.fs
9744 * @param {string} args.gitdir
9745 * @param {string} args.path
9746 *
9747 * @returns {Promise<Array<any>>} Resolves with an array of the config value
9748 *
9749 */
async function _getConfigAll({ fs, gitdir, path }) {
  // A config key may legitimately appear multiple times; return every value.
  const parsedConfig = await GitConfigManager.get({ fs, gitdir });
  return parsedConfig.getall(path)
}
9754
9755// @ts-check
9756
9757/**
9758 * Read a multi-valued entry from the git config files.
9759 *
9760 * *Caveats:*
9761 * - Currently only the local `$GIT_DIR/config` file can be read or written. However support for the global `~/.gitconfig` and system `$(prefix)/etc/gitconfig` will be added in the future.
9762 * - The current parser does not support the more exotic features of the git-config file format such as `[include]` and `[includeIf]`.
9763 *
9764 * @param {Object} args
9765 * @param {FsClient} args.fs - a file system implementation
9766 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
9767 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
9768 * @param {string} args.path - The key of the git config entry
9769 *
9770 * @returns {Promise<Array<any>>} Resolves with the config value
9771 *
9772 */
async function getConfigAll({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  path,
}) {
  // Read every value stored under `path` from `$GIT_DIR/config`.
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('path', path);

    const fileSystem = new FileSystem(fs);
    return await _getConfigAll({ fs: fileSystem, gitdir, path })
  } catch (err) {
    err.caller = 'git.getConfigAll';
    throw err
  }
}
9794
9795// @ts-check
9796
9797/**
9798 *
9799 * @typedef {Object} GetRemoteInfoResult - The object returned has the following schema:
9800 * @property {string[]} capabilities - The list of capabilities returned by the server (part of the Git protocol)
9801 * @property {Object} [refs]
9802 * @property {string} [HEAD] - The default branch of the remote
9803 * @property {Object<string, string>} [refs.heads] - The branches on the remote
9804 * @property {Object<string, string>} [refs.pull] - The special branches representing pull requests (non-standard)
9805 * @property {Object<string, string>} [refs.tags] - The tags on the remote
9806 *
9807 */
9808
9809/**
 * List a remote server's branches, tags, and capabilities.
9811 *
9812 * This is a rare command that doesn't require an `fs`, `dir`, or even `gitdir` argument.
9813 * It just communicates to a remote git server, using the first step of the `git-upload-pack` handshake, but stopping short of fetching the packfile.
9814 *
9815 * @param {object} args
9816 * @param {HttpClient} args.http - an HTTP client
9817 * @param {AuthCallback} [args.onAuth] - optional auth fill callback
9818 * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
9819 * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
9820 * @param {string} args.url - The URL of the remote repository. Will be gotten from gitconfig if absent.
9821 * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
9822 * @param {boolean} [args.forPush = false] - By default, the command queries the 'fetch' capabilities. If true, it will ask for the 'push' capabilities.
9823 * @param {Object<string, string>} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
9824 *
9825 * @returns {Promise<GetRemoteInfoResult>} Resolves successfully with an object listing the branches, tags, and capabilities of the remote.
9826 * @see GetRemoteInfoResult
9827 *
9828 * @example
9829 * let info = await git.getRemoteInfo({
9830 * http,
9831 * url:
9832 * "https://cors.isomorphic-git.org/github.com/isomorphic-git/isomorphic-git.git"
9833 * });
9834 * console.log(info);
9835 *
9836 */
async function getRemoteInfo({
  http,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  corsProxy,
  url,
  headers = {},
  forPush = false,
}) {
  try {
    assertParameter('http', http);
    assertParameter('url', url);

    const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url });
    const remote = await GitRemoteHTTP.discover({
      http,
      onAuth,
      onAuthSuccess,
      onAuthFailure,
      corsProxy,
      service: forPush ? 'git-receive-pack' : 'git-upload-pack',
      url,
      headers,
      protocolVersion: 1,
    });

    // Note: remote.capabilities, remote.refs, and remote.symrefs are Set and
    // Map objects, but one of the objectives of the public API is to always
    // return JSON-compatible objects, so we must JSONify them.
    const result = {
      capabilities: [...remote.capabilities],
    };

    // Insert a slash-delimited ref name into the nested object tree,
    // creating intermediate objects as needed. (Converting the flat list
    // into a tree because 99% of the time that will be easier to use.)
    const insert = (name, value) => {
      const segments = name.split('/');
      const leaf = segments.pop();
      let node = result;
      for (const segment of segments) {
        node[segment] = node[segment] || {};
        node = node[segment];
      }
      node[leaf] = value;
    };

    // Refs first, then symrefs merged on top of them, to more closely match
    // actual git repo layouts.
    for (const [ref, oid] of remote.refs) insert(ref, oid);
    for (const [symref, ref] of remote.symrefs) insert(symref, ref);

    return result
  } catch (err) {
    err.caller = 'git.getRemoteInfo';
    throw err
  }
}
9899
9900// @ts-check
9901
9902/**
9903 * @param {any} remote
9904 * @param {string} prefix
9905 * @param {boolean} symrefs
9906 * @param {boolean} peelTags
9907 * @returns {ServerRef[]}
9908 */
/**
 * Convert a protocol-v1 ref advertisement into an array of `ServerRef`s,
 * folding `^{}` peeled-tag entries and symref targets into their base refs.
 *
 * @param {any} remote - discovery result with `refs` and `symrefs` Maps
 * @param {string} prefix - if truthy, only include refs starting with this prefix
 * @param {boolean} symrefs - if true, attach symref targets as `.target`
 * @param {boolean} peelTags - if true, attach peeled tag oids as `.peeled`
 * @returns {ServerRef[]}
 */
function formatInfoRefs(remote, prefix, symrefs, peelTags) {
  const refs = [];
  for (const [key, value] of remote.refs) {
    if (prefix && !key.startsWith(prefix)) continue

    if (key.endsWith('^{}')) {
      if (peelTags) {
        const _key = key.replace('^{}', '');
        // Peeled tags are almost always listed immediately after the original tag
        const last = refs[refs.length - 1];
        // Guard `last`: if the peeled entry has no preceding base ref, fall
        // through to the linear search and then fail with the intended
        // diagnostic instead of a TypeError on `last.ref`.
        const r =
          last && last.ref === _key ? last : refs.find(x => x.ref === _key);
        if (r === undefined) {
          throw new Error('I did not expect this to happen')
        }
        r.peeled = value;
      }
      continue
    }
    /** @type ServerRef */
    const ref = { ref: key, oid: value };
    if (symrefs) {
      if (remote.symrefs.has(key)) {
        ref.target = remote.symrefs.get(key);
      }
    }
    refs.push(ref);
  }
  return refs
}
9938
9939// @ts-check
9940
9941/**
9942 * @typedef {Object} GetRemoteInfo2Result - This object has the following schema:
9943 * @property {1 | 2} protocolVersion - Git protocol version the server supports
9944 * @property {Object<string, string | true>} capabilities - An object of capabilities represented as keys and values
9945 * @property {ServerRef[]} [refs] - Server refs (they get returned by protocol version 1 whether you want them or not)
9946 */
9947
9948/**
9949 * List a remote server's capabilities.
9950 *
9951 * This is a rare command that doesn't require an `fs`, `dir`, or even `gitdir` argument.
9952 * It just communicates to a remote git server, determining what protocol version, commands, and features it supports.
9953 *
9954 * > The successor to [`getRemoteInfo`](./getRemoteInfo.md), this command supports Git Wire Protocol Version 2.
9955 * > Therefore its return type is more complicated as either:
9956 * >
9957 * > - v1 capabilities (and refs) or
9958 * > - v2 capabilities (and no refs)
9959 * >
9960 * > are returned.
9961 * > If you just care about refs, use [`listServerRefs`](./listServerRefs.md)
9962 *
9963 * @param {object} args
9964 * @param {HttpClient} args.http - an HTTP client
9965 * @param {AuthCallback} [args.onAuth] - optional auth fill callback
9966 * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
9967 * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
9968 * @param {string} args.url - The URL of the remote repository. Will be gotten from gitconfig if absent.
9969 * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
9970 * @param {boolean} [args.forPush = false] - By default, the command queries the 'fetch' capabilities. If true, it will ask for the 'push' capabilities.
9971 * @param {Object<string, string>} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
9972 * @param {1 | 2} [args.protocolVersion = 2] - Which version of the Git Protocol to use.
9973 *
9974 * @returns {Promise<GetRemoteInfo2Result>} Resolves successfully with an object listing the capabilities of the remote.
9975 * @see GetRemoteInfo2Result
9976 * @see ServerRef
9977 *
9978 * @example
9979 * let info = await git.getRemoteInfo2({
9980 * http,
9981 * corsProxy: "https://cors.isomorphic-git.org",
9982 * url: "https://github.com/isomorphic-git/isomorphic-git.git"
9983 * });
9984 * console.log(info);
9985 *
9986 */
async function getRemoteInfo2({
  http,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  corsProxy,
  url,
  headers = {},
  forPush = false,
  protocolVersion = 2,
}) {
  try {
    assertParameter('http', http);
    assertParameter('url', url);

    const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url });
    const remote = await GitRemoteHTTP.discover({
      http,
      onAuth,
      onAuthSuccess,
      onAuthFailure,
      corsProxy,
      service: forPush ? 'git-receive-pack' : 'git-upload-pack',
      url,
      headers,
      protocolVersion,
    });

    // Protocol v2 servers report capabilities as a plain object and
    // advertise no refs during discovery.
    if (remote.protocolVersion === 2) {
      /** @type GetRemoteInfo2Result */
      return {
        protocolVersion: remote.protocolVersion,
        capabilities: remote.capabilities2,
      }
    }

    // Protocol v1: capabilities arrive as a Set of "key" or "key=value"
    // strings; flatten them into a JSON-compatible object since the public
    // API always returns JSON-compatible values.
    /** @type Object<string, true> */
    const capabilities = {};
    for (const cap of remote.capabilities) {
      const [key, value] = cap.split('=');
      capabilities[key] = value || true;
    }
    /** @type GetRemoteInfo2Result */
    return {
      protocolVersion: 1,
      capabilities,
      refs: formatInfoRefs(remote, undefined, true, true),
    }
  } catch (err) {
    err.caller = 'git.getRemoteInfo2';
    throw err
  }
}
10047
async function hashObject({
  type,
  object,
  format = 'content',
  oid = undefined,
}) {
  // Already-deflated input is returned untouched with its supplied oid.
  if (format === 'deflated') {
    return { oid, object }
  }
  // Bare content must first be wrapped with the "<type> <length>\0" header;
  // 'wrapped' input already carries it. The oid is the SHA-1 of the wrapper.
  const wrapped =
    format === 'wrapped' ? object : GitObject.wrap({ type, object });
  return { oid: await shasum(wrapped), object: wrapped }
}
10062
10063// @ts-check
10064
10065/**
10066 *
10067 * @typedef {object} HashBlobResult - The object returned has the following schema:
10068 * @property {string} oid - The SHA-1 object id
10069 * @property {'blob'} type - The type of the object
10070 * @property {Uint8Array} object - The wrapped git object (the thing that is hashed)
10071 * @property {'wrapped'} format - The format of the object
10072 *
10073 */
10074
10075/**
10076 * Compute what the SHA-1 object id of a file would be
10077 *
10078 * @param {object} args
10079 * @param {Uint8Array|string} args.object - The object to write. If `object` is a String then it will be converted to a Uint8Array using UTF-8 encoding.
10080 *
10081 * @returns {Promise<HashBlobResult>} Resolves successfully with the SHA-1 object id and the wrapped object Uint8Array.
10082 * @see HashBlobResult
10083 *
10084 * @example
10085 * let { oid, type, object, format } = await git.hashBlob({
10086 * object: 'Hello world!',
10087 * })
10088 *
10089 * console.log('oid', oid)
10090 * console.log('type', type)
10091 * console.log('object', object)
10092 * console.log('format', format)
10093 *
10094 */
async function hashBlob({ object }) {
  try {
    assertParameter('object', object);

    // Normalize the input to a Buffer (UTF-8 for strings, a copy otherwise).
    const buffer =
      typeof object === 'string'
        ? Buffer.from(object, 'utf8')
        : Buffer.from(object);

    const { oid, object: wrapped } = await hashObject({
      type: 'blob',
      format: 'content',
      object: buffer,
    });
    // The returned object is the wrapped form (the bytes that were hashed).
    return { oid, type: 'blob', object: new Uint8Array(wrapped), format: 'wrapped' }
  } catch (err) {
    err.caller = 'git.hashBlob';
    throw err
  }
}
10118
10119// @ts-check
10120
10121/**
10122 * @param {object} args
10123 * @param {import('../models/FileSystem.js').FileSystem} args.fs
10124 * @param {any} args.cache
10125 * @param {ProgressCallback} [args.onProgress]
10126 * @param {string} args.dir
10127 * @param {string} args.gitdir
10128 * @param {string} args.filepath
10129 *
10130 * @returns {Promise<{oids: string[]}>}
10131 */
async function _indexPack({
  fs,
  cache,
  onProgress,
  dir,
  gitdir,
  filepath,
}) {
  try {
    // `filepath` is given relative to the working tree directory.
    const packPath = join(dir, filepath);
    const pack = await fs.read(packPath);
    // Ref-delta bases may live outside this pack; resolve them from the
    // object database.
    const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid });
    const idx = await GitPackIndex.fromPack({
      pack,
      getExternalRefDelta,
      onProgress,
    });
    // Write the index next to the pack, swapping .pack for .idx.
    await fs.write(packPath.replace(/\.pack$/, '.idx'), await idx.toBuffer());
    return {
      oids: [...idx.hashes],
    }
  } catch (err) {
    err.caller = 'git.indexPack';
    throw err
  }
}
10158
10159// @ts-check
10160
10161/**
10162 * Create the .idx file for a given .pack file
10163 *
10164 * @param {object} args
10165 * @param {FsClient} args.fs - a file system client
10166 * @param {ProgressCallback} [args.onProgress] - optional progress event callback
10167 * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
10168 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
10169 * @param {string} args.filepath - The path to the .pack file to index
10170 * @param {object} [args.cache] - a [cache](cache.md) object
10171 *
10172 * @returns {Promise<{oids: string[]}>} Resolves with a list of the SHA-1 object ids contained in the packfile
10173 *
10174 * @example
10175 * let packfiles = await fs.promises.readdir('/tutorial/.git/objects/pack')
10176 * packfiles = packfiles.filter(name => name.endsWith('.pack'))
10177 * console.log('packfiles', packfiles)
10178 *
10179 * const { oids } = await git.indexPack({
10180 * fs,
10181 * dir: '/tutorial',
10182 * filepath: `.git/objects/pack/${packfiles[0]}`,
10183 * async onProgress (evt) {
10184 * console.log(`${evt.phase}: ${evt.loaded} / ${evt.total}`)
10185 * }
10186 * })
10187 * console.log(oids)
10188 *
10189 */
async function indexPack({
  fs,
  onProgress,
  dir,
  gitdir = join(dir, '.git'),
  filepath,
  cache = {},
}) {
  // Public wrapper: validate arguments and delegate to `_indexPack`.
  try {
    assertParameter('fs', fs);
    assertParameter('dir', dir);
    // BUG FIX: previously this line asserted `dir` under the name 'gitdir',
    // so an explicitly-undefined `gitdir` argument was never caught.
    assertParameter('gitdir', gitdir);
    assertParameter('filepath', filepath);

    return await _indexPack({
      fs: new FileSystem(fs),
      cache,
      onProgress,
      dir,
      gitdir,
      filepath,
    })
  } catch (err) {
    err.caller = 'git.indexPack';
    throw err
  }
}
10217
10218// @ts-check
10219
10220/**
10221 * Initialize a new repository
10222 *
10223 * @param {object} args
10224 * @param {FsClient} args.fs - a file system client
10225 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
10226 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
10227 * @param {boolean} [args.bare = false] - Initialize a bare repository
10228 * @param {string} [args.defaultBranch = 'master'] - The name of the default branch (might be changed to a required argument in 2.0.0)
10229 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
10230 *
10231 * @example
10232 * await git.init({ fs, dir: '/tutorial' })
10233 * console.log('done')
10234 *
10235 */
async function init({
  fs,
  bare = false,
  dir,
  gitdir = bare ? dir : join(dir, '.git'),
  defaultBranch = 'master',
}) {
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    // A non-bare repository also requires a working tree directory.
    if (!bare) assertParameter('dir', dir);

    const fileSystem = new FileSystem(fs);
    return await _init({ fs: fileSystem, bare, dir, gitdir, defaultBranch })
  } catch (err) {
    err.caller = 'git.init';
    throw err
  }
}
10262
10263// @ts-check
10264
10265/**
10266 * @param {object} args
10267 * @param {import('../models/FileSystem.js').FileSystem} args.fs
10268 * @param {any} args.cache
10269 * @param {string} args.gitdir
10270 * @param {string} args.oid
10271 * @param {string} args.ancestor
10272 * @param {number} args.depth - Maximum depth to search before giving up. -1 means no maximum depth.
10273 *
10274 * @returns {Promise<boolean>}
10275 */
async function _isDescendent({
  fs,
  cache,
  gitdir,
  oid,
  ancestor,
  depth,
}) {
  // Shallow-clone boundary commits: traversal must not continue past these,
  // since their parents are not present in the local object store.
  const shallows = await GitShallowManager.read({ fs, gitdir });
  if (!oid) {
    throw new MissingParameterError('oid')
  }
  if (!ancestor) {
    throw new MissingParameterError('ancestor')
  }
  // A commit is not considered its own ancestor.
  // If you don't like this behavior, add your own check.
  // Edge cases are hard to define a perfect solution.
  if (oid === ancestor) return false
  // We do not use recursion here, because that would lead to depth-first traversal,
  // and we want to maintain a breadth-first traversal to avoid hitting shallow clone depth cutoffs.
  const queue = [oid];
  const visited = new Set();
  // NOTE(review): `searchdepth` is incremented once per commit dequeued, not
  // once per BFS generation, so `depth` effectively caps the number of
  // commits examined — confirm this is the intended meaning of "depth".
  let searchdepth = 0;
  while (queue.length) {
    if (searchdepth++ === depth) {
      throw new MaxDepthError(depth)
    }
    // Shadows the outer `oid` parameter: this is the commit being examined.
    const oid = queue.shift();
    const { type, object } = await _readObject({
      fs,
      cache,
      gitdir,
      oid,
    });
    // Only commit objects have parents; anything else is a caller error.
    if (type !== 'commit') {
      throw new ObjectTypeError(oid, type, 'commit')
    }
    const commit = GitCommit.from(object).parse();
    // Are any of the parents the sought-after ancestor?
    for (const parent of commit.parent) {
      if (parent === ancestor) return true
    }
    // If not, add them to heads (unless we know this is a shallow commit)
    if (!shallows.has(oid)) {
      for (const parent of commit.parent) {
        if (!visited.has(parent)) {
          queue.push(parent);
          visited.add(parent);
        }
      }
    }
    // Eventually, we'll travel entire tree to the roots where all the parents are empty arrays,
    // or hit the shallow depth and throw an error. Excluding the possibility of grafts, or
    // different branches cloned to different depths, you would hit this error at the same time
    // for all parents, so trying to continue is futile.
  }
  return false
}
10334
10335// @ts-check
10336
10337/**
10338 * Check whether a git commit is descended from another
10339 *
10340 * @param {object} args
10341 * @param {FsClient} args.fs - a file system client
10342 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
10343 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
10344 * @param {string} args.oid - The descendent commit
10345 * @param {string} args.ancestor - The (proposed) ancestor commit
10346 * @param {number} [args.depth = -1] - Maximum depth to search before giving up. -1 means no maximum depth.
10347 * @param {object} [args.cache] - a [cache](cache.md) object
10348 *
10349 * @returns {Promise<boolean>} Resolves to true if `oid` is a descendent of `ancestor`
10350 *
10351 * @example
10352 * let oid = await git.resolveRef({ fs, dir: '/tutorial', ref: 'main' })
10353 * let ancestor = await git.resolveRef({ fs, dir: '/tutorial', ref: 'v0.20.0' })
10354 * console.log(oid, ancestor)
10355 * await git.isDescendent({ fs, dir: '/tutorial', oid, ancestor, depth: -1 })
10356 *
10357 */
async function isDescendent({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  oid,
  ancestor,
  depth = -1,
  cache = {},
}) {
  // Public wrapper: validate each required argument, then delegate.
  try {
    for (const [name, value] of [
      ['fs', fs],
      ['gitdir', gitdir],
      ['oid', oid],
      ['ancestor', ancestor],
    ]) {
      assertParameter(name, value);
    }

    return await _isDescendent({
      fs: new FileSystem(fs),
      cache,
      gitdir,
      oid,
      ancestor,
      depth,
    })
  } catch (err) {
    err.caller = 'git.isDescendent';
    throw err
  }
}
10386
10387// @ts-check
10388
10389/**
10390 * Test whether a filepath should be ignored (because of .gitignore or .git/exclude)
10391 *
10392 * @param {object} args
10393 * @param {FsClient} args.fs - a file system client
10394 * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
10395 * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
10396 * @param {string} args.filepath - The filepath to test
10397 *
10398 * @returns {Promise<boolean>} Resolves to true if the file should be ignored
10399 *
10400 * @example
10401 * await git.isIgnored({ fs, dir: '/tutorial', filepath: 'docs/add.md' })
10402 *
10403 */
async function isIgnored({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  filepath,
}) {
  // Public wrapper: validate arguments and delegate to GitIgnoreManager.
  try {
    assertParameter('fs', fs);
    assertParameter('dir', dir);
    assertParameter('gitdir', gitdir);
    assertParameter('filepath', filepath);

    // FIX: `await` so async rejections are also tagged with `err.caller`,
    // consistent with the other public API wrappers (previously rejections
    // bypassed this catch block entirely).
    return await GitIgnoreManager.isIgnored({
      fs: new FileSystem(fs),
      dir,
      gitdir,
      filepath,
    })
  } catch (err) {
    err.caller = 'git.isIgnored';
    throw err
  }
}
10427
10428// @ts-check
10429
10430/**
10431 * List branches
10432 *
10433 * By default it lists local branches. If a 'remote' is specified, it lists the remote's branches. When listing remote branches, the HEAD branch is not filtered out, so it may be included in the list of results.
10434 *
10435 * Note that specifying a remote does not actually contact the server and update the list of branches.
10436 * If you want an up-to-date list, first do a `fetch` to that remote.
10437 * (Which branch you fetch doesn't matter - the list of branches available on the remote is updated during the fetch handshake.)
10438 *
10439 * Also note, that a branch is a reference to a commit. If you initialize a new repository it has no commits, so the
10440 * `listBranches` function will return an empty list, until you create the first commit.
10441 *
10442 * @param {object} args
10443 * @param {FsClient} args.fs - a file system client
10444 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
10445 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
10446 * @param {string} [args.remote] - Instead of the branches in `refs/heads`, list the branches in `refs/remotes/${remote}`.
10447 *
10448 * @returns {Promise<Array<string>>} Resolves successfully with an array of branch names
10449 *
10450 * @example
10451 * let branches = await git.listBranches({ fs, dir: '/tutorial' })
10452 * console.log(branches)
10453 * let remoteBranches = await git.listBranches({ fs, dir: '/tutorial', remote: 'origin' })
10454 * console.log(remoteBranches)
10455 *
10456 */
async function listBranches({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  remote,
}) {
  // Public wrapper: validate arguments and delegate to GitRefManager.
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);

    // FIX: `await` so async rejections are also tagged with `err.caller`,
    // consistent with the other public API wrappers (previously rejections
    // bypassed this catch block entirely).
    return await GitRefManager.listBranches({
      fs: new FileSystem(fs),
      gitdir,
      remote,
    })
  } catch (err) {
    err.caller = 'git.listBranches';
    throw err
  }
}
10477
10478// @ts-check
10479
10480/**
10481 * @param {object} args
10482 * @param {import('../models/FileSystem.js').FileSystem} args.fs
10483 * @param {object} args.cache
10484 * @param {string} args.gitdir
10485 * @param {string} [args.ref]
10486 *
10487 * @returns {Promise<Array<string>>}
10488 */
async function _listFiles({ fs, gitdir, ref, cache }) {
  // Without a ref, list the paths currently in the index (staging area).
  if (!ref) {
    return GitIndexManager.acquire({ fs, gitdir, cache }, async index =>
      index.entries.map(entry => entry.path)
    )
  }
  // With a ref, recursively walk the commit's tree and collect file paths.
  const oid = await GitRefManager.resolve({ gitdir, fs, ref });
  const filenames = [];
  await accumulateFilesFromOid({
    fs,
    cache,
    gitdir,
    oid,
    filenames,
    prefix: '',
  });
  return filenames
}
10510
async function accumulateFilesFromOid({
  fs,
  cache,
  gitdir,
  oid,
  filenames,
  prefix,
}) {
  // Read the tree object, pushing blob paths and recursing into subtrees.
  // TODO: Use `walk` to do this. Should be faster.
  const { tree } = await _readTree({ fs, cache, gitdir, oid });
  for (const entry of tree) {
    const entryPath = join(prefix, entry.path);
    if (entry.type === 'tree') {
      await accumulateFilesFromOid({
        fs,
        cache,
        gitdir,
        oid: entry.oid,
        filenames,
        prefix: entryPath,
      });
    } else {
      filenames.push(entryPath);
    }
  }
}
10536
10537// @ts-check
10538
10539/**
10540 * List all the files in the git index or a commit
10541 *
10542 * > Note: This function is efficient for listing the files in the staging area, but listing all the files in a commit requires recursively walking through the git object store.
10543 * > If you do not require a complete list of every file, better performance can be achieved by using [walk](./walk) and ignoring subdirectories you don't care about.
10544 *
10545 * @param {object} args
10546 * @param {FsClient} args.fs - a file system client
10547 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
10548 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
10549 * @param {string} [args.ref] - Return a list of all the files in the commit at `ref` instead of the files currently in the git index (aka staging area)
10550 * @param {object} [args.cache] - a [cache](cache.md) object
10551 *
10552 * @returns {Promise<Array<string>>} Resolves successfully with an array of filepaths
10553 *
10554 * @example
10555 * // All the files in the previous commit
10556 * let files = await git.listFiles({ fs, dir: '/tutorial', ref: 'HEAD' })
10557 * console.log(files)
10558 * // All the files in the current staging area
10559 * files = await git.listFiles({ fs, dir: '/tutorial' })
10560 * console.log(files)
10561 *
10562 */
async function listFiles({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  ref,
  cache = {},
}) {
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);

    const filepaths = await _listFiles({
      fs: new FileSystem(fs),
      cache,
      gitdir,
      ref,
    });
    return filepaths
  } catch (err) {
    // Tag the error with the public API entry point for nicer stack reports.
    err.caller = 'git.listFiles';
    throw err
  }
}
10585
10586// @ts-check
10587
10588/**
10589 * List all the object notes
10590 *
10591 * @param {object} args
10592 * @param {import('../models/FileSystem.js').FileSystem} args.fs
10593 * @param {any} args.cache
10594 * @param {string} args.gitdir
10595 * @param {string} args.ref
10596 *
10597 * @returns {Promise<Array<{target: string, note: string}>>}
10598 */
10599
async function _listNotes({ fs, cache, gitdir, ref }) {
  // Get the current note commit
  let parent;
  try {
    parent = await GitRefManager.resolve({ gitdir, fs, ref });
  } catch (err) {
    // A missing notes ref simply means there are no notes yet.
    if (err instanceof NotFoundError) {
      return []
    }
    // Previously any other error was silently swallowed, which left `parent`
    // undefined and caused a confusing failure inside `_readTree` below.
    // Surface the real error instead.
    throw err
  }

  // Create the current note tree
  const result = await _readTree({
    fs,
    cache,
    gitdir,
    oid: parent,
  });

  // Format the tree entries: each entry's path is the target object's oid
  // and each entry's oid is the note blob.
  const notes = result.tree.map(entry => ({
    target: entry.path,
    note: entry.oid,
  }));
  return notes
}
10626
10627// @ts-check
10628
10629/**
10630 * List all the object notes
10631 *
10632 * @param {object} args
10633 * @param {FsClient} args.fs - a file system client
10634 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
10635 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
10636 * @param {string} [args.ref] - The notes ref to look under
10637 * @param {object} [args.cache] - a [cache](cache.md) object
10638 *
10639 * @returns {Promise<Array<{target: string, note: string}>>} Resolves successfully with an array of entries containing SHA-1 object ids of the note and the object the note targets
10640 */
10641
async function listNotes({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  ref = 'refs/notes/commits',
  cache = {},
}) {
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('ref', ref);

    const notes = await _listNotes({
      fs: new FileSystem(fs),
      cache,
      gitdir,
      ref,
    });
    return notes
  } catch (err) {
    // Tag the error with the public API entry point for nicer stack reports.
    err.caller = 'git.listNotes';
    throw err
  }
}
10665
10666// @ts-check
10667
10668/**
10669 * @param {object} args
10670 * @param {import('../models/FileSystem.js').FileSystem} args.fs
10671 * @param {string} args.gitdir
10672 *
10673 * @returns {Promise<Array<{remote: string, url: string}>>}
10674 */
async function _listRemotes({ fs, gitdir }) {
  // Every `[remote "<name>"]` section in the config contributes one entry.
  const config = await GitConfigManager.get({ fs, gitdir });
  const names = await config.getSubsections('remote');
  return Promise.all(
    names.map(async remote => ({
      remote,
      url: await config.get(`remote.${remote}.url`),
    }))
  )
}
10686
10687// @ts-check
10688
10689/**
10690 * List remotes
10691 *
10692 * @param {object} args
10693 * @param {FsClient} args.fs - a file system client
10694 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
10695 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
10696 *
10697 * @returns {Promise<Array<{remote: string, url: string}>>} Resolves successfully with an array of `{remote, url}` objects
10698 *
10699 * @example
10700 * let remotes = await git.listRemotes({ fs, dir: '/tutorial' })
10701 * console.log(remotes)
10702 *
10703 */
async function listRemotes({ fs, dir, gitdir = join(dir, '.git') }) {
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);

    const remotes = await _listRemotes({ fs: new FileSystem(fs), gitdir });
    return remotes
  } catch (err) {
    // Tag the error with the public API entry point for nicer stack reports.
    err.caller = 'git.listRemotes';
    throw err
  }
}
10718
10719/**
10720 * @typedef {Object} ServerRef - This object has the following schema:
10721 * @property {string} ref - The name of the ref
10722 * @property {string} oid - The SHA-1 object id the ref points to
10723 * @property {string} [target] - The target ref pointed to by a symbolic ref
10724 * @property {string} [peeled] - If the oid is the SHA-1 object id of an annotated tag, this is the SHA-1 object id that the annotated tag points to
10725 */
10726
async function parseListRefsResponse(stream) {
  const read = GitPktLine.streamReader(stream);

  // TODO: when we re-write everything to minimize memory usage,
  // we could make this a generator
  const refs = [];

  for (;;) {
    const line = await read();
    if (line === true) break
    if (line === null) continue
    // Each pkt-line looks like: "<oid> <ref>[ attr:value ...]\n"
    const text = line.toString('utf8').replace(/\n$/, '');
    const [oid, ref, ...attrs] = text.split(' ');
    const entry = { ref, oid };
    for (const attr of attrs) {
      const [name, value] = attr.split(':');
      if (name === 'symref-target') {
        entry.target = value;
      } else if (name === 'peeled') {
        entry.peeled = value;
      }
    }
    refs.push(entry);
  }

  return refs
}
10755
10756/**
10757 * @param {object} args
10758 * @param {string} [args.prefix] - Only list refs that start with this prefix
10759 * @param {boolean} [args.symrefs = false] - Include symbolic ref targets
10760 * @param {boolean} [args.peelTags = false] - Include peeled tags values
10761 * @returns {Uint8Array[]}
10762 */
async function writeListRefsRequest({ prefix, symrefs, peelTags }) {
  // command, then capability-list
  const packstream = [
    GitPktLine.encode('command=ls-refs\n'),
    GitPktLine.encode(`agent=${pkg.agent}\n`),
  ];
  // [command-args] — preceded by a delim pkt only when any arg is present
  const hasArgs = Boolean(peelTags || symrefs || prefix);
  if (hasArgs) {
    packstream.push(GitPktLine.delim());
  }
  if (peelTags) packstream.push(GitPktLine.encode('peel'));
  if (symrefs) packstream.push(GitPktLine.encode('symrefs'));
  if (prefix) packstream.push(GitPktLine.encode(`ref-prefix ${prefix}`));
  packstream.push(GitPktLine.flush());
  return packstream
}
10779
10780// @ts-check
10781
10782/**
10783 * Fetch a list of refs (branches, tags, etc) from a server.
10784 *
10785 * This is a rare command that doesn't require an `fs`, `dir`, or even `gitdir` argument.
10786 * It just requires an `http` argument.
10787 *
10788 * ### About `protocolVersion`
10789 *
10790 * There's a rather fun trade-off between Git Protocol Version 1 and Git Protocol Version 2.
10791 * Version 2 actually requires 2 HTTP requests instead of 1, making it similar to fetch or push in that regard.
10792 * However, version 2 supports server-side filtering by prefix, whereas that filtering is done client-side in version 1.
10793 * Which protocol is most efficient therefore depends on the number of refs on the remote, the latency of the server, and speed of the network connection.
 * For small repos (or fast Internet connections), the requirement to make two trips to the server makes protocol 2 slower.
10795 * But for large repos (or slow Internet connections), the decreased payload size of the second request makes up for the additional request.
10796 *
10797 * Hard numbers vary by situation, but here's some numbers from my machine:
10798 *
10799 * Using isomorphic-git in a browser, with a CORS proxy, listing only the branches (refs/heads) of https://github.com/isomorphic-git/isomorphic-git
 * - Protocol Version 1 took ~300ms and transferred 84 KB.
 * - Protocol Version 2 took ~500ms and transferred 4.1 KB.
 *
 * Using isomorphic-git in a browser, with a CORS proxy, listing only the branches (refs/heads) of https://gitlab.com/gitlab-org/gitlab
 * - Protocol Version 1 took ~4900ms and transferred 9.41 MB.
 * - Protocol Version 2 took ~1280ms and transferred 433 KB.
10806 *
10807 * Finally, there is a fun quirk regarding the `symrefs` parameter.
10808 * Protocol Version 1 will generally only return the `HEAD` symref and not others.
10809 * Historically, this meant that servers don't use symbolic refs except for `HEAD`, which is used to point at the "default branch".
10810 * However Protocol Version 2 can return *all* the symbolic refs on the server.
10811 * So if you are running your own git server, you could take advantage of that I guess.
10812 *
10813 * #### TL;DR
10814 * If you are _not_ taking advantage of `prefix` I would recommend `protocolVersion: 1`.
10815 * Otherwise, I recommend to use the default which is `protocolVersion: 2`.
10816 *
10817 * @param {object} args
10818 * @param {HttpClient} args.http - an HTTP client
10819 * @param {AuthCallback} [args.onAuth] - optional auth fill callback
10820 * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
10821 * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
10822 * @param {string} args.url - The URL of the remote repository. Will be gotten from gitconfig if absent.
10823 * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
10824 * @param {boolean} [args.forPush = false] - By default, the command queries the 'fetch' capabilities. If true, it will ask for the 'push' capabilities.
10825 * @param {Object<string, string>} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
10826 * @param {1 | 2} [args.protocolVersion = 2] - Which version of the Git Protocol to use.
10827 * @param {string} [args.prefix] - Only list refs that start with this prefix
10828 * @param {boolean} [args.symrefs = false] - Include symbolic ref targets
10829 * @param {boolean} [args.peelTags = false] - Include annotated tag peeled targets
10830 *
10831 * @returns {Promise<ServerRef[]>} Resolves successfully with an array of ServerRef objects
10832 * @see ServerRef
10833 *
10834 * @example
10835 * // List all the branches on a repo
10836 * let refs = await git.listServerRefs({
10837 * http,
10838 * corsProxy: "https://cors.isomorphic-git.org",
10839 * url: "https://github.com/isomorphic-git/isomorphic-git.git",
10840 * prefix: "refs/heads/",
10841 * });
10842 * console.log(refs);
10843 *
10844 * @example
10845 * // Get the default branch on a repo
10846 * let refs = await git.listServerRefs({
10847 * http,
10848 * corsProxy: "https://cors.isomorphic-git.org",
10849 * url: "https://github.com/isomorphic-git/isomorphic-git.git",
10850 * prefix: "HEAD",
10851 * symrefs: true,
10852 * });
10853 * console.log(refs);
10854 *
10855 * @example
10856 * // List all the tags on a repo
10857 * let refs = await git.listServerRefs({
10858 * http,
10859 * corsProxy: "https://cors.isomorphic-git.org",
10860 * url: "https://github.com/isomorphic-git/isomorphic-git.git",
10861 * prefix: "refs/tags/",
10862 * peelTags: true,
10863 * });
10864 * console.log(refs);
10865 *
10866 * @example
10867 * // List all the pull requests on a repo
10868 * let refs = await git.listServerRefs({
10869 * http,
10870 * corsProxy: "https://cors.isomorphic-git.org",
10871 * url: "https://github.com/isomorphic-git/isomorphic-git.git",
10872 * prefix: "refs/pull/",
10873 * });
10874 * console.log(refs);
10875 *
10876 */
async function listServerRefs({
  http,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  corsProxy,
  url,
  headers = {},
  forPush = false,
  protocolVersion = 2,
  prefix,
  symrefs,
  peelTags,
}) {
  try {
    assertParameter('http', http);
    assertParameter('url', url);

    // Which capabilities we query depends on fetch vs. push intent.
    const service = forPush ? 'git-receive-pack' : 'git-upload-pack';

    const remote = await GitRemoteHTTP.discover({
      http,
      onAuth,
      onAuthSuccess,
      onAuthFailure,
      corsProxy,
      service,
      url,
      headers,
      protocolVersion,
    });

    // Protocol Version 1: the discovery response already lists the refs.
    if (remote.protocolVersion === 1) {
      return formatInfoRefs(remote, prefix, symrefs, peelTags)
    }

    // Protocol Version 2: issue an explicit `ls-refs` command.
    const body = await writeListRefsRequest({ prefix, symrefs, peelTags });

    const res = await GitRemoteHTTP.connect({
      http,
      auth: remote.auth,
      headers,
      corsProxy,
      service,
      url,
      body,
    });

    return parseListRefsResponse(res.body)
  } catch (err) {
    // Tag the error with the public API entry point for nicer stack reports.
    err.caller = 'git.listServerRefs';
    throw err
  }
}
10930
10931// @ts-check
10932
10933/**
10934 * List tags
10935 *
10936 * @param {object} args
10937 * @param {FsClient} args.fs - a file system client
10938 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
10939 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
10940 *
10941 * @returns {Promise<Array<string>>} Resolves successfully with an array of tag names
10942 *
10943 * @example
10944 * let tags = await git.listTags({ fs, dir: '/tutorial' })
10945 * console.log(tags)
10946 *
10947 */
async function listTags({ fs, dir, gitdir = join(dir, '.git') }) {
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    const fileSystem = new FileSystem(fs);
    return GitRefManager.listTags({ fs: fileSystem, gitdir })
  } catch (err) {
    // Tag the error with the public API entry point for nicer stack reports.
    err.caller = 'git.listTags';
    throw err
  }
}
10958
async function resolveCommit({ fs, cache, gitdir, oid }) {
  // Iteratively peel annotated tags (they can be nested) until we reach
  // something that is not a tag.
  let { type, object } = await _readObject({ fs, cache, gitdir, oid });
  while (type === 'tag') {
    oid = GitAnnotatedTag.from(object).parse().object;
    ({ type, object } = await _readObject({ fs, cache, gitdir, oid }));
  }
  if (type !== 'commit') {
    throw new ObjectTypeError(oid, type, 'commit')
  }
  return { commit: GitCommit.from(object), oid }
}
10971
10972// @ts-check
10973
10974/**
10975 * @param {object} args
10976 * @param {import('../models/FileSystem.js').FileSystem} args.fs
10977 * @param {any} args.cache
10978 * @param {string} args.gitdir
10979 * @param {string} args.oid
10980 *
10981 * @returns {Promise<ReadCommitResult>} Resolves successfully with a git commit object
10982 * @see ReadCommitResult
10983 * @see CommitObject
10984 *
10985 */
async function _readCommit({ fs, cache, gitdir, oid }) {
  // Peel annotated tags down to the underlying commit first.
  const resolved = await resolveCommit({ fs, cache, gitdir, oid });
  const result = {
    oid: resolved.oid,
    commit: resolved.commit.parse(),
    payload: resolved.commit.withoutSignature(),
  };
  // @ts-ignore
  return result
}
11001
// Comparator ordering commits by committer timestamp, oldest first.
function compareAge(a, b) {
  const { timestamp: ageA } = a.committer;
  const { timestamp: ageB } = b.committer;
  return ageA - ageB
}
11005
11006// @ts-check
11007
// the empty file content object id
const EMPTY_OID = 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391';

/**
 * Find the filepath(s) at which a blob with oid `fileId` appears within the
 * tree reachable from `oid`. Returns a single path, an array of paths, or
 * undefined if not found (or if `fileId` is the empty blob).
 */
async function resolveFileIdInTree({ fs, cache, gitdir, oid, fileId }) {
  // Searching for the empty blob is meaningless — it appears everywhere.
  if (fileId === EMPTY_OID) return
  const rootOid = oid;
  const result = await resolveTree({ fs, cache, gitdir, oid });
  if (fileId === result.oid) {
    return result.path
  }
  let filepath = await _resolveFileId({
    fs,
    cache,
    gitdir,
    tree: result.tree,
    fileId,
    oid: rootOid,
  });
  // Normalize: no match -> undefined, single match -> a plain string.
  if (Array.isArray(filepath)) {
    if (filepath.length === 0) filepath = undefined;
    else if (filepath.length === 1) filepath = filepath[0];
  }
  return filepath
}
11035
/**
 * Recursive worker for `resolveFileIdInTree`: scans `tree` for entries whose
 * oid matches `fileId`, accumulating matches into the shared `filepaths`
 * array. Subtrees are searched concurrently.
 */
async function _resolveFileId({
  fs,
  cache,
  gitdir,
  tree,
  fileId,
  oid,
  filepaths = [],
  parentPath = '',
}) {
  const walks = tree.entries().map(entry => {
    if (entry.oid === fileId) {
      // Direct hit: record the full path synchronously.
      const match = join(parentPath, entry.path);
      filepaths.push(match);
      return match
    }
    if (entry.type === 'tree') {
      // Descend into the subtree; the resulting promise is awaited below.
      return _readObject({
        fs,
        cache,
        gitdir,
        oid: entry.oid,
      }).then(({ object }) =>
        _resolveFileId({
          fs,
          cache,
          gitdir,
          tree: GitTree.from(object),
          fileId,
          oid,
          filepaths,
          parentPath: join(parentPath, entry.path),
        })
      )
    }
    return undefined
  });

  await Promise.all(walks);
  return filepaths
}
11076
11077// @ts-check
11078
11079/**
11080 * Get commit descriptions from the git history
11081 *
11082 * @param {object} args
11083 * @param {import('../models/FileSystem.js').FileSystem} args.fs
11084 * @param {any} args.cache
11085 * @param {string} args.gitdir
11086 * @param {string=} args.filepath optional get the commit for the filepath only
11087 * @param {string} args.ref
11088 * @param {number|void} args.depth
 * @param {boolean=} [args.force=false] do not throw error if filepath does not exist (works only for a single file). defaults to false
 * @param {boolean=} [args.follow=false] Continue listing the history of a file beyond renames (works only for a single file). defaults to false
11092 *
11093 * @returns {Promise<Array<ReadCommitResult>>} Resolves to an array of ReadCommitResult objects
11094 * @see ReadCommitResult
11095 * @see CommitObject
11096 *
11097 * @example
11098 * let commits = await git.log({ dir: '$input((/))', depth: $input((5)), ref: '$input((master))' })
11099 * console.log(commits)
11100 *
11101 */
async function _log({
  fs,
  cache,
  gitdir,
  filepath,
  ref,
  depth,
  since,
  force,
  follow,
}) {
  // Convert `since` (a Date) to whole seconds, to compare against git
  // committer timestamps (seconds since the epoch).
  const sinceTimestamp =
    typeof since === 'undefined'
      ? undefined
      : Math.floor(since.valueOf() / 1000);
  // TODO: In the future, we may want to have an API where we return a
  // async iterator that emits commits.
  const commits = [];
  const shallowCommits = await GitShallowManager.read({ fs, gitdir });
  const oid = await GitRefManager.resolve({ fs, gitdir, ref });
  const tips = [await _readCommit({ fs, cache, gitdir, oid })];
  // State used only when filtering history by `filepath`:
  let lastFileOid; // blob oid of `filepath` in the previously visited commit
  let lastCommit; // previously visited commit (pending candidate for output)
  let isOk; // whether `filepath` resolved in the previously visited commit

  // When filtering by filepath, flush the final pending candidate commit.
  function endCommit(commit) {
    if (isOk && filepath) commits.push(commit);
  }

  while (tips.length > 0) {
    const commit = tips.pop();

    // Stop the log if we've hit the age limit
    if (
      sinceTimestamp !== undefined &&
      commit.commit.committer.timestamp <= sinceTimestamp
    ) {
      break
    }

    if (filepath) {
      let vFileOid;
      try {
        vFileOid = await resolveFilepath({
          fs,
          cache,
          gitdir,
          oid: commit.commit.tree,
          filepath,
        });
        // The file's blob changed between this commit and the previous one,
        // so the previous commit touched the file — report it.
        if (lastCommit && lastFileOid !== vFileOid) {
          commits.push(lastCommit);
        }
        lastFileOid = vFileOid;
        lastCommit = commit;
        isOk = true;
      } catch (e) {
        if (e instanceof NotFoundError) {
          // The file doesn't exist at this commit. With `follow`, try to
          // find where the same blob lives in this tree (a rename).
          let found = follow && lastFileOid;
          if (found) {
            found = await resolveFileIdInTree({
              fs,
              cache,
              gitdir,
              oid: commit.commit.tree,
              fileId: lastFileOid,
            });
            if (found) {
              if (Array.isArray(found)) {
                // The blob appears at multiple paths; rule out the paths it
                // already occupied in the previous commit.
                if (lastCommit) {
                  const lastFound = await resolveFileIdInTree({
                    fs,
                    cache,
                    gitdir,
                    oid: lastCommit.commit.tree,
                    fileId: lastFileOid,
                  });
                  if (Array.isArray(lastFound)) {
                    found = found.filter(p => lastFound.indexOf(p) === -1);
                    if (found.length === 1) {
                      // Unambiguous rename: keep following under the old name.
                      found = found[0];
                      filepath = found;
                      if (lastCommit) commits.push(lastCommit);
                    } else {
                      // Ambiguous rename: report what we have and stop.
                      found = false;
                      if (lastCommit) commits.push(lastCommit);
                      break
                    }
                  }
                }
              } else {
                // Single rename candidate: keep following under the new path.
                filepath = found;
                if (lastCommit) commits.push(lastCommit);
              }
            }
          }
          if (!found) {
            // No rename found: the previous commit introduced the file.
            if (isOk && lastFileOid) {
              commits.push(lastCommit);
              if (!force) break
            }
            if (!force && !follow) throw e
          }
          lastCommit = commit;
          isOk = false;
        } else throw e
      }
    } else {
      commits.push(commit);
    }

    // Stop the loop if we have enough commits now.
    if (depth !== undefined && commits.length === depth) {
      endCommit(commit);
      break
    }

    // If this is not a shallow commit...
    if (!shallowCommits.has(commit.oid)) {
      // Add the parents of this commit to the queue
      // Note: for the case of a commit with no parents, it will concat an empty array, having no net effect.
      for (const oid of commit.commit.parent) {
        const commit = await _readCommit({ fs, cache, gitdir, oid });
        if (!tips.map(commit => commit.oid).includes(commit.oid)) {
          tips.push(commit);
        }
      }
    }

    // Stop the loop if there are no more commit parents
    if (tips.length === 0) {
      endCommit(commit);
    }

    // Process tips in order by age
    tips.sort((a, b) => compareAge(a.commit, b.commit));
  }
  return commits
}
11241
11242// @ts-check
11243
11244/**
11245 * Get commit descriptions from the git history
11246 *
11247 * @param {object} args
11248 * @param {FsClient} args.fs - a file system client
11249 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
11250 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
11251 * @param {string=} args.filepath optional get the commit for the filepath only
11252 * @param {string} [args.ref = 'HEAD'] - The commit to begin walking backwards through the history from
11253 * @param {number=} [args.depth] - Limit the number of commits returned. No limit by default.
11254 * @param {Date} [args.since] - Return history newer than the given date. Can be combined with `depth` to get whichever is shorter.
 * @param {boolean=} [args.force=false] do not throw error if filepath does not exist (works only for a single file). defaults to false
11256 * @param {boolean=} [args.follow=false] Continue listing the history of a file beyond renames (works only for a single file). defaults to false
11257 * @param {object} [args.cache] - a [cache](cache.md) object
11258 *
11259 * @returns {Promise<Array<ReadCommitResult>>} Resolves to an array of ReadCommitResult objects
11260 * @see ReadCommitResult
11261 * @see CommitObject
11262 *
11263 * @example
11264 * let commits = await git.log({
11265 * fs,
11266 * dir: '/tutorial',
11267 * depth: 5,
11268 * ref: 'main'
11269 * })
11270 * console.log(commits)
11271 *
11272 */
async function log({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  filepath,
  ref = 'HEAD',
  depth,
  since, // Date
  force,
  follow,
  cache = {},
}) {
  try {
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('ref', ref);

    const commits = await _log({
      fs: new FileSystem(fs),
      cache,
      gitdir,
      filepath,
      ref,
      depth,
      since,
      force,
      follow,
    });
    return commits
  } catch (err) {
    // Tag the error with the public API entry point for nicer stack reports.
    err.caller = 'git.log';
    throw err
  }
}
11306
11307// @ts-check
11308
11309/**
11310 *
11311 * @typedef {Object} MergeResult - Returns an object with a schema like this:
11312 * @property {string} [oid] - The SHA-1 object id that is now at the head of the branch. Absent only if `dryRun` was specified and `mergeCommit` is true.
11313 * @property {boolean} [alreadyMerged] - True if the branch was already merged so no changes were made
11314 * @property {boolean} [fastForward] - True if it was a fast-forward merge
11315 * @property {boolean} [mergeCommit] - True if merge resulted in a merge commit
11316 * @property {string} [tree] - The SHA-1 object id of the tree resulting from a merge commit
11317 *
11318 */
11319
11320/**
11321 * Merge two branches
11322 *
11323 * Currently it will fail if multiple candidate merge bases are found. (It doesn't yet implement the recursive merge strategy.)
11324 *
11325 * Currently it does not support selecting alternative merge strategies.
11326 *
11327 * Currently it is not possible to abort an incomplete merge. To restore the worktree to a clean state, you will need to checkout an earlier commit.
11328 *
11329 * Currently it does not directly support the behavior of `git merge --continue`. To complete a merge after manual conflict resolution, you will need to add and commit the files manually, and specify the appropriate parent commits.
11330 *
11331 * ## Manually resolving merge conflicts
11332 * By default, if isomorphic-git encounters a merge conflict it cannot resolve using the builtin diff3 algorithm or provided merge driver, it will abort and throw a `MergeNotSupportedError`.
11333 * This leaves the index and working tree untouched.
11334 *
11335 * When `abortOnConflict` is set to `false`, and a merge conflict cannot be automatically resolved, a `MergeConflictError` is thrown and the results of the incomplete merge will be written to the working directory.
11336 * This includes conflict markers in files with unresolved merge conflicts.
11337 *
11338 * To complete the merge, edit the conflicting files as you see fit, and then add and commit the resolved merge.
11339 *
11340 * For a proper merge commit, be sure to specify the branches or commits you are merging in the `parent` argument to `git.commit`.
11341 * For example, say we are merging the branch `feature` into the branch `main` and there is a conflict we want to resolve manually.
11342 * The flow would look like this:
11343 *
11344 * ```
11345 * await git.merge({
11346 * fs,
11347 * dir,
11348 * ours: 'main',
11349 * theirs: 'feature',
11350 * abortOnConflict: false,
11351 * }).catch(e => {
11352 * if (e instanceof Errors.MergeConflictError) {
11353 * console.log(
11354 * 'Automatic merge failed for the following files: '
11355 * + `${e.data}. `
11356 * + 'Resolve these conflicts and then commit your changes.'
11357 * )
11358 * } else throw e
11359 * })
11360 *
 * // This is where we manually edit the files that have been written to the working directory
11362 * // ...
11363 * // Files have been edited and we are ready to commit
11364 *
11365 * await git.add({
11366 * fs,
11367 * dir,
11368 * filepath: '.',
11369 * })
11370 *
11371 * await git.commit({
11372 * fs,
11373 * dir,
11374 * ref: 'main',
11375 * message: "Merge branch 'feature' into main",
11376 * parent: ['main', 'feature'], // Be sure to specify the parents when creating a merge commit
11377 * })
11378 * ```
11379 *
11380 * @param {object} args
11381 * @param {FsClient} args.fs - a file system client
11382 * @param {SignCallback} [args.onSign] - a PGP signing implementation
11383 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
11384 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
11385 * @param {string} [args.ours] - The branch receiving the merge. If undefined, defaults to the current branch.
11386 * @param {string} args.theirs - The branch to be merged
11387 * @param {boolean} [args.fastForward = true] - If false, create a merge commit in all cases.
11388 * @param {boolean} [args.fastForwardOnly = false] - If true, then non-fast-forward merges will throw an Error instead of performing a merge.
11389 * @param {boolean} [args.dryRun = false] - If true, simulates a merge so you can test whether it would succeed.
11390 * @param {boolean} [args.noUpdateBranch = false] - If true, does not update the branch pointer after creating the commit.
11391 * @param {boolean} [args.abortOnConflict = true] - If true, merges with conflicts will not update the worktree or index.
11392 * @param {string} [args.message] - Overrides the default auto-generated merge commit message
11393 * @param {Object} [args.author] - passed to [commit](commit.md) when creating a merge commit
11394 * @param {string} [args.author.name] - Default is `user.name` config.
11395 * @param {string} [args.author.email] - Default is `user.email` config.
11396 * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
11397 * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`.
11398 * @param {Object} [args.committer] - passed to [commit](commit.md) when creating a merge commit
11399 * @param {string} [args.committer.name] - Default is `user.name` config.
11400 * @param {string} [args.committer.email] - Default is `user.email` config.
11401 * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
11402 * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`.
11403 * @param {string} [args.signingKey] - passed to [commit](commit.md) when creating a merge commit
11404 * @param {object} [args.cache] - a [cache](cache.md) object
11405 * @param {MergeDriverCallback} [args.mergeDriver] - a [merge driver](mergeDriver.md) implementation
11406 *
11407 * @returns {Promise<MergeResult>} Resolves to a description of the merge operation
11408 * @see MergeResult
11409 *
11410 * @example
11411 * let m = await git.merge({
11412 * fs,
11413 * dir: '/tutorial',
11414 * ours: 'main',
11415 * theirs: 'remotes/origin/main'
11416 * })
11417 * console.log(m)
11418 *
11419 */
async function merge({
  fs: _fs,
  onSign,
  dir,
  gitdir = join(dir, '.git'),
  ours,
  theirs,
  fastForward = true,
  fastForwardOnly = false,
  dryRun = false,
  noUpdateBranch = false,
  abortOnConflict = true,
  message,
  author: _author,
  committer: _committer,
  signingKey,
  cache = {},
  mergeDriver,
}) {
  try {
    assertParameter('fs', _fs);
    if (signingKey) {
      assertParameter('onSign', onSign);
    }
    const fs = new FileSystem(_fs);

    // Author/committer identities are only strictly required when a merge
    // commit may be created (anything other than a guaranteed fast-forward).
    const needsIdentity = !fastForwardOnly || !fastForward;

    const author = await normalizeAuthorObject({ fs, gitdir, author: _author });
    if (!author && needsIdentity) {
      throw new MissingNameError('author')
    }

    const committer = await normalizeCommitterObject({
      fs,
      gitdir,
      author,
      committer: _committer,
    });
    if (!committer && needsIdentity) {
      throw new MissingNameError('committer')
    }

    return await _merge({
      fs,
      cache,
      dir,
      gitdir,
      ours,
      theirs,
      fastForward,
      fastForwardOnly,
      dryRun,
      noUpdateBranch,
      abortOnConflict,
      message,
      author,
      committer,
      signingKey,
      onSign,
      mergeDriver,
    })
  } catch (err) {
    // Tag the error with the public API entry point for nicer stack reports.
    err.caller = 'git.merge';
    throw err
  }
}
11485
11486/**
11487 * @enum {number}
11488 */
11489const types = {
11490 commit: 0b0010000,
11491 tree: 0b0100000,
11492 blob: 0b0110000,
11493 tag: 0b1000000,
11494 ofs_delta: 0b1100000,
11495 ref_delta: 0b1110000,
11496};
11497
11498/**
11499 * @param {object} args
11500 * @param {import('../models/FileSystem.js').FileSystem} args.fs
11501 * @param {any} args.cache
11502 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
11503 * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
11504 * @param {string[]} args.oids
11505 */
async function _pack({
  fs,
  cache,
  dir,
  gitdir = join(dir, '.git'),
  oids,
}) {
  const hash = new Hash();
  const chunks = [];

  // Append a chunk to the pack body while folding it into the running SHA-1.
  const emit = (data, enc) => {
    const buf = Buffer.from(data, enc);
    chunks.push(buf);
    hash.update(buf);
  };

  // Write one object: a header (type bits + variable-length size) followed
  // by the zlib-deflated object body.
  const emitObject = async ({ stype, object }) => {
    // Object type occupies bits 6-4 of the first header byte.
    const typeBits = types[stype];
    let remaining = object.length;
    // Bit 7 flags whether another length byte follows; the low 4 bits of
    // the length share the first byte with the type bits.
    let continuation = remaining > 0b1111 ? 0b10000000 : 0b0;
    const firstByte = continuation | typeBits | (remaining & 0b1111);
    remaining = remaining >>> 4;
    emit(firstByte.toString(16), 'hex');
    // Subsequent length bytes carry 7 bits each in little-endian order,
    // with bit 7 again marking continuation.
    while (continuation) {
      continuation = remaining > 0b01111111 ? 0b10000000 : 0b0;
      emit(padHex(2, continuation | (remaining & 0b01111111)), 'hex');
      remaining = remaining >>> 7;
    }
    emit(Buffer.from(await deflate(object)));
  };

  emit('PACK');
  emit('00000002', 'hex'); // pack format version 2
  emit(padHex(8, oids.length), 'hex'); // 32-bit object count
  for (const oid of oids) {
    const { type, object } = await _readObject({ fs, cache, gitdir, oid });
    await emitObject({ object, stype: type });
  }
  // Trailer: SHA-1 of everything written above.
  chunks.push(hash.digest());
  return chunks
}
11559
11560// @ts-check
11561
11562/**
11563 *
11564 * @typedef {Object} PackObjectsResult The packObjects command returns an object with two properties:
11565 * @property {string} filename - The suggested filename for the packfile if you want to save it to disk somewhere. It includes the packfile SHA.
11566 * @property {Uint8Array} [packfile] - The packfile contents. Not present if `write` parameter was true, in which case the packfile was written straight to disk.
11567 */
11568
11569/**
11570 * @param {object} args
11571 * @param {import('../models/FileSystem.js').FileSystem} args.fs
11572 * @param {any} args.cache
11573 * @param {string} args.gitdir
11574 * @param {string[]} args.oids
11575 * @param {boolean} args.write
11576 *
11577 * @returns {Promise<PackObjectsResult>}
11578 * @see PackObjectsResult
11579 */
async function _packObjects({ fs, cache, gitdir, oids, write }) {
  const buffers = await _pack({ fs, cache, gitdir, oids });
  const packfile = Buffer.from(await collect(buffers));
  // The pack stream ends with a 20-byte SHA-1 trailer; git names packfiles
  // after that checksum.
  const packfileSha = packfile.slice(-20).toString('hex');
  const filename = `pack-${packfileSha}.pack`;
  if (write) {
    // Fixed: the destination path must interpolate the computed filename
    // (was a garbled `$(unknown)` placeholder, which wrote every pack to
    // the same bogus literal path).
    await fs.write(join(gitdir, `objects/pack/${filename}`), packfile);
    return { filename }
  }
  return {
    filename,
    packfile: new Uint8Array(packfile),
  }
}
11594
11595// @ts-check
11596
11597/**
11598 *
11599 * @typedef {Object} PackObjectsResult The packObjects command returns an object with two properties:
11600 * @property {string} filename - The suggested filename for the packfile if you want to save it to disk somewhere. It includes the packfile SHA.
11601 * @property {Uint8Array} [packfile] - The packfile contents. Not present if `write` parameter was true, in which case the packfile was written straight to disk.
11602 */
11603
11604/**
11605 * Create a packfile from an array of SHA-1 object ids
11606 *
11607 * @param {object} args
11608 * @param {FsClient} args.fs - a file system client
11609 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
11610 * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
11611 * @param {string[]} args.oids - An array of SHA-1 object ids to be included in the packfile
11612 * @param {boolean} [args.write = false] - Whether to save the packfile to disk or not
11613 * @param {object} [args.cache] - a [cache](cache.md) object
11614 *
11615 * @returns {Promise<PackObjectsResult>} Resolves successfully when the packfile is ready with the filename and buffer
11616 * @see PackObjectsResult
11617 *
11618 * @example
11619 * // Create a packfile containing only an empty tree
11620 * let { packfile } = await git.packObjects({
11621 * fs,
11622 * dir: '/tutorial',
11623 * oids: ['4b825dc642cb6eb9a060e54bf8d69288fbee4904']
11624 * })
11625 * console.log(packfile)
11626 *
11627 */
async function packObjects({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  oids,
  write = false,
  cache = {},
}) {
  try {
    // Validate the required arguments before doing any work.
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('oids', oids);

    const wrappedFs = new FileSystem(fs);
    return await _packObjects({ fs: wrappedFs, cache, gitdir, oids, write })
  } catch (err) {
    // Tag the error with the public API entry point for nicer stack reports.
    err.caller = 'git.packObjects';
    throw err
  }
}
11653
11654// @ts-check
11655
11656/**
11657 * Fetch and merge commits from a remote repository
11658 *
11659 * @param {object} args
11660 * @param {FsClient} args.fs - a file system client
11661 * @param {HttpClient} args.http - an HTTP client
11662 * @param {ProgressCallback} [args.onProgress] - optional progress event callback
11663 * @param {MessageCallback} [args.onMessage] - optional message event callback
11664 * @param {AuthCallback} [args.onAuth] - optional auth fill callback
11665 * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
11666 * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
11668 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
11669 * @param {string} [args.ref] - Which branch to merge into. By default this is the currently checked out branch.
11670 * @param {string} [args.url] - (Added in 1.1.0) The URL of the remote repository. The default is the value set in the git config for that remote.
11671 * @param {string} [args.remote] - (Added in 1.1.0) If URL is not specified, determines which remote to use.
11672 * @param {string} [args.remoteRef] - (Added in 1.1.0) The name of the branch on the remote to fetch. By default this is the configured remote tracking branch.
11673 * @param {boolean} [args.prune = false] - Delete local remote-tracking branches that are not present on the remote
11674 * @param {boolean} [args.pruneTags = false] - Prune local tags that don’t exist on the remote, and force-update those tags that differ
11675 * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
11676 * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch.
11677 * @param {boolean} [args.fastForward = true] - If false, only create merge commits.
11678 * @param {boolean} [args.fastForwardOnly = false] - Only perform simple fast-forward merges. (Don't create merge commits.)
11679 * @param {Object<string, string>} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
11680 * @param {Object} [args.author] - The details about the author.
11681 * @param {string} [args.author.name] - Default is `user.name` config.
11682 * @param {string} [args.author.email] - Default is `user.email` config.
11683 * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
11684 * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`.
11685 * @param {Object} [args.committer = author] - The details about the commit committer, in the same format as the author parameter. If not specified, the author details are used.
11686 * @param {string} [args.committer.name] - Default is `user.name` config.
11687 * @param {string} [args.committer.email] - Default is `user.email` config.
11688 * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
11689 * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`.
11690 * @param {string} [args.signingKey] - passed to [commit](commit.md) when creating a merge commit
11691 * @param {object} [args.cache] - a [cache](cache.md) object
11692 *
11693 * @returns {Promise<void>} Resolves successfully when pull operation completes
11694 *
11695 * @example
11696 * await git.pull({
11697 * fs,
11698 * http,
11699 * dir: '/tutorial',
11700 * ref: 'main',
11701 * singleBranch: true
11702 * })
11703 * console.log('done')
11704 *
11705 */
async function pull({
  fs: _fs,
  http,
  onProgress,
  onMessage,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  dir,
  gitdir = join(dir, '.git'),
  ref,
  url,
  remote,
  remoteRef,
  prune = false,
  pruneTags = false,
  fastForward = true,
  fastForwardOnly = false,
  corsProxy,
  singleBranch,
  headers = {},
  author: _author,
  committer: _committer,
  signingKey,
  cache = {},
}) {
  try {
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);

    const fs = new FileSystem(_fs);

    // A pull may need to create a merge commit, so a usable author identity
    // must be resolvable from the arguments or the git config.
    const author = await normalizeAuthorObject({ fs, gitdir, author: _author });
    if (!author) {
      throw new MissingNameError('author')
    }

    // Same requirement for the committer (defaults to the author details).
    const committer = await normalizeCommitterObject({
      fs,
      gitdir,
      author,
      committer: _committer,
    });
    if (!committer) {
      throw new MissingNameError('committer')
    }

    return await _pull({
      fs,
      cache,
      http,
      onProgress,
      onMessage,
      onAuth,
      onAuthSuccess,
      onAuthFailure,
      dir,
      gitdir,
      ref,
      url,
      remote,
      remoteRef,
      fastForward,
      fastForwardOnly,
      corsProxy,
      singleBranch,
      headers,
      author,
      committer,
      signingKey,
      prune,
      pruneTags,
    })
  } catch (err) {
    // Tag the error with the public API entry point for nicer stack reports.
    err.caller = 'git.pull';
    throw err
  }
}
11780
11781/**
11782 * @param {object} args
11783 * @param {import('../models/FileSystem.js').FileSystem} args.fs
11784 * @param {any} args.cache
11785 * @param {string} [args.dir]
11786 * @param {string} args.gitdir
11787 * @param {Iterable<string>} args.start
11788 * @param {Iterable<string>} args.finish
11789 * @returns {Promise<Set<string>>}
11790 */
async function listCommitsAndTags({
  fs,
  cache,
  dir,
  gitdir = join(dir, '.git'),
  start,
  finish,
}) {
  const shallows = await GitShallowManager.read({ fs, gitdir });

  // Resolve the starting refs; these must exist locally.
  const startOids = new Set();
  for (const ref of start) {
    startOids.add(await GitRefManager.resolve({ fs, gitdir, ref }));
  }

  // Resolve the stopping refs; ones we don't have locally are ignored.
  const stopOids = new Set();
  for (const ref of finish) {
    try {
      stopOids.add(await GitRefManager.resolve({ fs, gitdir, ref }));
    } catch (err) {
      // Deliberately best-effort: a missing finish ref just isn't a boundary.
    }
  }

  const visited = new Set();
  // Commits are content-addressed, so the history graph cannot contain a
  // cycle — no recursion limit is needed.
  async function walk(oid) {
    visited.add(oid);
    const { type, object } = await _readObject({ fs, cache, gitdir, oid });
    // Peel annotated tags down to the commit they reference.
    if (type === 'tag') {
      return walk(GitAnnotatedTag.from(object).headers().object)
    }
    if (type !== 'commit') {
      throw new ObjectTypeError(oid, type, 'commit')
    }
    // Do not walk past a shallow-clone boundary.
    if (shallows.has(oid)) return
    const parents = GitCommit.from(object).headers().parent;
    for (const parent of parents) {
      if (!stopOids.has(parent) && !visited.has(parent)) {
        await walk(parent);
      }
    }
  }

  // Walk history from every starting point.
  for (const oid of startOids) {
    await walk(oid);
  }
  return visited
}
11844
11845/**
11846 * @param {object} args
11847 * @param {import('../models/FileSystem.js').FileSystem} args.fs
11848 * @param {any} args.cache
11849 * @param {string} [args.dir]
11850 * @param {string} args.gitdir
11851 * @param {Iterable<string>} args.oids
11852 * @returns {Promise<Set<string>>}
11853 */
async function listObjects({
  fs,
  cache,
  dir,
  gitdir = join(dir, '.git'),
  oids,
}) {
  const seen = new Set();
  // Blobs are never read from disk: tree entries already identify which
  // child oids are blobs, so those are recorded without loading them.
  async function traverse(oid) {
    if (seen.has(oid)) return
    seen.add(oid);
    const { type, object } = await _readObject({ fs, cache, gitdir, oid });
    switch (type) {
      case 'tag':
        // Follow the tag to whatever object it annotates.
        await traverse(GitAnnotatedTag.from(object).headers().object);
        break
      case 'commit':
        // Only the commit's tree matters here, not its parents.
        await traverse(GitCommit.from(object).headers().tree);
        break
      case 'tree':
        for (const entry of GitTree.from(object)) {
          // Record blobs directly; recurse into subtrees. Submodule
          // entries (type 'commit') are intentionally skipped.
          if (entry.type === 'blob') {
            seen.add(entry.oid);
          } else if (entry.type === 'tree') {
            await traverse(entry.oid);
          }
        }
        break
    }
  }
  for (const oid of oids) {
    await traverse(oid);
  }
  return seen
}
11898
async function parseReceivePackResponse(packfile) {
  /** @type PushResult */
  const result = {};

  // Drain the pkt-line stream into one newline-delimited string.
  let response = '';
  const read = GitPktLine.streamReader(packfile);
  for (let line = await read(); line !== true; line = await read()) {
    if (line !== null) response += line.toString('utf8') + '\n';
  }

  const lines = response.toString('utf8').split('\n');
  // The first line must be "unpack ok" or "unpack <error message>".
  const head = lines.shift();
  if (!head.startsWith('unpack ')) {
    throw new ParseError('unpack ok" or "unpack [error message]', head)
  }
  result.ok = head === 'unpack ok';
  if (!result.ok) {
    result.error = head.slice('unpack '.length);
  }

  // The remaining lines report per-ref status: "ok <ref>" or "ng <ref> <msg>".
  result.refs = {};
  for (const line of lines) {
    if (line.trim() === '') continue
    const status = line.slice(0, 2);
    const refAndMessage = line.slice(3);
    const space = refAndMessage.indexOf(' ');
    const cut = space === -1 ? refAndMessage.length : space;
    result.refs[refAndMessage.slice(0, cut)] = {
      ok: status === 'ok',
      error: refAndMessage.slice(cut + 1),
    };
  }
  return result
}
11936
async function writeReceivePackRequest({
  capabilities = [],
  triplets = [],
}) {
  const packstream = [];
  // Capabilities are advertised exactly once, appended to the first
  // command after a NUL separator; later commands omit them.
  let caps = `\x00 ${capabilities.join(' ')}`;
  for (const { oldoid, oid, fullRef } of triplets) {
    packstream.push(GitPktLine.encode(`${oldoid} ${oid} ${fullRef}${caps}\n`));
    caps = '';
  }
  packstream.push(GitPktLine.flush());
  return packstream
}
11954
11955// @ts-check
11956
11957/**
11958 * @param {object} args
11959 * @param {import('../models/FileSystem.js').FileSystem} args.fs
11960 * @param {any} args.cache
11961 * @param {HttpClient} args.http
11962 * @param {ProgressCallback} [args.onProgress]
11963 * @param {MessageCallback} [args.onMessage]
11964 * @param {AuthCallback} [args.onAuth]
11965 * @param {AuthFailureCallback} [args.onAuthFailure]
11966 * @param {AuthSuccessCallback} [args.onAuthSuccess]
11967 * @param {string} args.gitdir
11968 * @param {string} [args.ref]
11969 * @param {string} [args.remoteRef]
11970 * @param {string} [args.remote]
11971 * @param {boolean} [args.force = false]
11972 * @param {boolean} [args.delete = false]
11973 * @param {string} [args.url]
11974 * @param {string} [args.corsProxy]
11975 * @param {Object<string, string>} [args.headers]
11976 *
11977 * @returns {Promise<PushResult>}
11978 */
async function _push({
  fs,
  cache,
  http,
  onProgress,
  onMessage,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  gitdir,
  ref: _ref,
  remoteRef: _remoteRef,
  remote,
  url: _url,
  force = false,
  delete: _delete = false,
  corsProxy,
  headers = {},
}) {
  // Default to the currently checked-out branch when no ref is given.
  const ref = _ref || (await _currentBranch({ fs, gitdir }));
  if (typeof ref === 'undefined') {
    throw new MissingParameterError('ref')
  }
  const config = await GitConfigManager.get({ fs, gitdir });
  // Figure out what remote to use.
  // Precedence: branch.<ref>.pushRemote, remote.pushDefault,
  // branch.<ref>.remote, then 'origin'.
  remote =
    remote ||
    (await config.get(`branch.${ref}.pushRemote`)) ||
    (await config.get('remote.pushDefault')) ||
    (await config.get(`branch.${ref}.remote`)) ||
    'origin';
  // Lookup the URL for the given remote (pushurl takes precedence over url).
  const url =
    _url ||
    (await config.get(`remote.${remote}.pushurl`)) ||
    (await config.get(`remote.${remote}.url`));
  if (typeof url === 'undefined') {
    throw new MissingParameterError('remote OR url')
  }
  // Figure out what remote ref to use.
  const remoteRef = _remoteRef || (await config.get(`branch.${ref}.merge`));
  // NOTE(review): this re-checks `url`, which was already validated above,
  // so this branch is unreachable. It presumably meant to test `remoteRef`;
  // however a missing remoteRef is handled gracefully further down
  // (fullRemoteRef falls back to fullRef), so confirm the intended
  // behavior before changing the condition.
  if (typeof url === 'undefined') {
    throw new MissingParameterError('remoteRef')
  }

  // Fall back to the repo-configured CORS proxy when none was supplied.
  if (corsProxy === undefined) {
    corsProxy = await config.get('http.corsProxy');
  }

  const fullRef = await GitRefManager.expand({ fs, gitdir, ref });
  // On the wire, deleting a ref is an update to the all-zeros oid.
  const oid = _delete
    ? '0000000000000000000000000000000000000000'
    : await GitRefManager.resolve({ fs, gitdir, ref: fullRef });

  /** @type typeof import("../managers/GitRemoteHTTP").GitRemoteHTTP */
  const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url });
  // Reference discovery: learn the server's refs and capabilities.
  const httpRemote = await GitRemoteHTTP.discover({
    http,
    onAuth,
    onAuthSuccess,
    onAuthFailure,
    corsProxy,
    service: 'git-receive-pack',
    url,
    headers,
    protocolVersion: 1,
  });
  const auth = httpRemote.auth; // hack to get new credentials from CredentialManager API
  let fullRemoteRef;
  if (!remoteRef) {
    // No remote ref configured or given: push to the same fully-qualified ref.
    fullRemoteRef = fullRef;
  } else {
    try {
      fullRemoteRef = await GitRefManager.expandAgainstMap({
        ref: remoteRef,
        map: httpRemote.refs,
      });
    } catch (err) {
      if (err instanceof NotFoundError) {
        // The remote reference doesn't exist yet.
        // If it is fully specified, use that value. Otherwise, treat it as a branch.
        fullRemoteRef = remoteRef.startsWith('refs/')
          ? remoteRef
          : `refs/heads/${remoteRef}`;
      } else {
        throw err
      }
    }
  }
  // All-zeros means the server does not have this ref yet (ref creation).
  const oldoid =
    httpRemote.refs.get(fullRemoteRef) ||
    '0000000000000000000000000000000000000000';

  // Remotes can always accept thin-packs UNLESS they specify the 'no-thin' capability
  const thinPack = !httpRemote.capabilities.has('no-thin');

  let objects = new Set();
  if (!_delete) {
    const finish = [...httpRemote.refs.values()];
    let skipObjects = new Set();

    // If remote branch is present, look for a common merge base.
    if (oldoid !== '0000000000000000000000000000000000000000') {
      // trick to speed up common force push scenarios
      const mergebase = await _findMergeBase({
        fs,
        cache,
        gitdir,
        oids: [oid, oldoid],
      });
      for (const oid of mergebase) finish.push(oid);
      if (thinPack) {
        // Objects reachable from the merge base can be omitted from a thin pack.
        skipObjects = await listObjects({ fs, cache, gitdir, oids: mergebase });
      }
    }

    // If remote does not have the commit, figure out the objects to send
    if (!finish.includes(oid)) {
      const commits = await listCommitsAndTags({
        fs,
        cache,
        gitdir,
        start: [oid],
        finish,
      });
      objects = await listObjects({ fs, cache, gitdir, oids: commits });
    }

    if (thinPack) {
      // If there's a default branch for the remote lets skip those objects too.
      // Since this is an optional optimization, we just catch and continue if there is
      // an error (because we can't find a default branch, or can't find a commit, etc)
      try {
        // Sadly, the discovery phase with 'forPush' doesn't return symrefs, so we have to
        // rely on existing ones.
        const ref = await GitRefManager.resolve({
          fs,
          gitdir,
          ref: `refs/remotes/${remote}/HEAD`,
          depth: 2,
        });
        const { oid } = await GitRefManager.resolveAgainstMap({
          ref: ref.replace(`refs/remotes/${remote}/`, ''),
          fullref: ref,
          map: httpRemote.refs,
        });
        const oids = [oid];
        for (const oid of await listObjects({ fs, cache, gitdir, oids })) {
          skipObjects.add(oid);
        }
      } catch (e) {}

      // Remove objects that we know the remote already has
      for (const oid of skipObjects) {
        objects.delete(oid);
      }
    }

    // Pushing the oid the remote already has is a no-op; treating it as
    // forced lets it bypass the rejection checks below.
    if (oid === oldoid) force = true;
    if (!force) {
      // Is it a tag that already exists?
      if (
        fullRef.startsWith('refs/tags') &&
        oldoid !== '0000000000000000000000000000000000000000'
      ) {
        throw new PushRejectedError('tag-exists')
      }
      // Is it a non-fast-forward commit?
      if (
        oid !== '0000000000000000000000000000000000000000' &&
        oldoid !== '0000000000000000000000000000000000000000' &&
        !(await _isDescendent({
          fs,
          cache,
          gitdir,
          oid,
          ancestor: oldoid,
          depth: -1,
        }))
      ) {
        throw new PushRejectedError('not-fast-forward')
      }
    }
  }
  // We can only safely use capabilities that the server also understands.
  // For instance, AWS CodeCommit aborts a push if you include the `agent`!!!
  const capabilities = filterCapabilities(
    [...httpRemote.capabilities],
    ['report-status', 'side-band-64k', `agent=${pkg.agent}`]
  );
  const packstream1 = await writeReceivePackRequest({
    capabilities,
    triplets: [{ oldoid, oid, fullRef: fullRemoteRef }],
  });
  // A ref deletion sends the command list but no pack data at all.
  const packstream2 = _delete
    ? []
    : await _pack({
        fs,
        cache,
        gitdir,
        oids: [...objects],
      });
  const res = await GitRemoteHTTP.connect({
    http,
    onProgress,
    corsProxy,
    service: 'git-receive-pack',
    url,
    auth,
    headers,
    body: [...packstream1, ...packstream2],
  });
  // Split the multiplexed (side-band) response into report data and
  // progress messages.
  const { packfile, progress } = await GitSideBand.demux(res.body);
  if (onMessage) {
    const lines = splitLines(progress);
    forAwait(lines, async line => {
      await onMessage(line);
    });
  }
  // Parse the response!
  const result = await parseReceivePackResponse(packfile);
  if (res.headers) {
    result.headers = res.headers;
  }

  // Update the local copy of the remote ref
  if (remote && result.ok && result.refs[fullRemoteRef].ok) {
    // TODO: I think this should actually be using a refspec transform rather than assuming 'refs/remotes/{remote}'
    const ref = `refs/remotes/${remote}/${fullRemoteRef.replace(
      'refs/heads',
      ''
    )}`;
    if (_delete) {
      await GitRefManager.deleteRef({ fs, gitdir, ref });
    } else {
      await GitRefManager.writeRef({ fs, gitdir, ref, value: oid });
    }
  }
  if (result.ok && Object.values(result.refs).every(result => result.ok)) {
    return result
  } else {
    // Collect the per-ref failures into a readable error message.
    const prettyDetails = Object.entries(result.refs)
      .filter(([k, v]) => !v.ok)
      .map(([k, v]) => `\n - ${k}: ${v.error}`)
      .join('');
    throw new GitPushError(prettyDetails, result)
  }
}
12227
12228// @ts-check
12229
12230/**
12231 * Push a branch or tag
12232 *
12233 * The push command returns an object that describes the result of the attempted push operation.
12234 * *Notes:* If there were no errors, then there will be no `errors` property. There can be a mix of `ok` messages and `errors` messages.
12235 *
12236 * | param | type [= default] | description |
12237 * | ------ | ---------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
12238 * | ok | Array\<string\> | The first item is "unpack" if the overall operation was successful. The remaining items are the names of refs that were updated successfully. |
12239 * | errors | Array\<string\> | If the overall operation threw and error, the first item will be "unpack {Overall error message}". The remaining items are individual refs that failed to be updated in the format "{ref name} {error message}". |
12240 *
12241 * @param {object} args
12242 * @param {FsClient} args.fs - a file system client
12243 * @param {HttpClient} args.http - an HTTP client
12244 * @param {ProgressCallback} [args.onProgress] - optional progress event callback
12245 * @param {MessageCallback} [args.onMessage] - optional message event callback
12246 * @param {AuthCallback} [args.onAuth] - optional auth fill callback
12247 * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback
12248 * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback
12249 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
12250 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
12251 * @param {string} [args.ref] - Which branch to push. By default this is the currently checked out branch.
12252 * @param {string} [args.url] - The URL of the remote repository. The default is the value set in the git config for that remote.
12253 * @param {string} [args.remote] - If URL is not specified, determines which remote to use.
12254 * @param {string} [args.remoteRef] - The name of the receiving branch on the remote. By default this is the configured remote tracking branch.
12255 * @param {boolean} [args.force = false] - If true, behaves the same as `git push --force`
12256 * @param {boolean} [args.delete = false] - If true, delete the remote ref
12257 * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config.
12258 * @param {Object<string, string>} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
12259 * @param {object} [args.cache] - a [cache](cache.md) object
12260 *
12261 * @returns {Promise<PushResult>} Resolves successfully when push completes with a detailed description of the operation from the server.
12262 * @see PushResult
12263 * @see RefUpdateStatus
12264 *
12265 * @example
12266 * let pushResult = await git.push({
12267 * fs,
12268 * http,
12269 * dir: '/tutorial',
12270 * remote: 'origin',
12271 * ref: 'main',
12272 * onAuth: () => ({ username: process.env.GITHUB_TOKEN }),
12273 * })
12274 * console.log(pushResult)
12275 *
12276 */
async function push({
  fs,
  http,
  onProgress,
  onMessage,
  onAuth,
  onAuthSuccess,
  onAuthFailure,
  dir,
  gitdir = join(dir, '.git'),
  ref,
  remoteRef,
  remote = 'origin',
  url,
  force = false,
  delete: _delete = false,
  corsProxy,
  headers = {},
  cache = {},
}) {
  try {
    // Validate the required clients before doing any work.
    assertParameter('fs', fs);
    assertParameter('http', http);
    assertParameter('gitdir', gitdir);

    const wrappedFs = new FileSystem(fs);
    return await _push({
      fs: wrappedFs,
      cache,
      http,
      onProgress,
      onMessage,
      onAuth,
      onAuthSuccess,
      onAuthFailure,
      gitdir,
      ref,
      remoteRef,
      remote,
      url,
      force,
      delete: _delete,
      corsProxy,
      headers,
    })
  } catch (err) {
    // Tag the error with the public API entry point for nicer stack reports.
    err.caller = 'git.push';
    throw err
  }
}
12326
async function resolveBlob({ fs, cache, gitdir, oid }) {
  const { type, object } = await _readObject({ fs, cache, gitdir, oid });
  // Annotated tag: peel it and retry with whatever object it points at.
  if (type === 'tag') {
    const target = GitAnnotatedTag.from(object).parse().object;
    return resolveBlob({ fs, cache, gitdir, oid: target })
  }
  if (type !== 'blob') {
    throw new ObjectTypeError(oid, type, 'blob')
  }
  return { oid, blob: new Uint8Array(object) }
}
12339
12340// @ts-check
12341
12342/**
12343 *
12344 * @typedef {Object} ReadBlobResult - The object returned has the following schema:
12345 * @property {string} oid
12346 * @property {Uint8Array} blob
12347 *
12348 */
12349
12350/**
12351 * @param {object} args
12352 * @param {import('../models/FileSystem.js').FileSystem} args.fs
12353 * @param {any} args.cache
12354 * @param {string} args.gitdir
12355 * @param {string} args.oid
12356 * @param {string} [args.filepath]
12357 *
12358 * @returns {Promise<ReadBlobResult>} Resolves successfully with a blob object description
12359 * @see ReadBlobResult
12360 */
async function _readBlob({
  fs,
  cache,
  gitdir,
  oid,
  filepath = undefined,
}) {
  // A filepath means "the blob at this path inside the tree that `oid`
  // resolves to", so translate it into a blob oid first.
  const blobOid =
    filepath === undefined
      ? oid
      : await resolveFilepath({ fs, cache, gitdir, oid, filepath });
  return resolveBlob({ fs, cache, gitdir, oid: blobOid })
}
12379
12380// @ts-check
12381
12382/**
12383 *
12384 * @typedef {Object} ReadBlobResult - The object returned has the following schema:
12385 * @property {string} oid
12386 * @property {Uint8Array} blob
12387 *
12388 */
12389
12390/**
12391 * Read a blob object directly
12392 *
12393 * @param {object} args
12394 * @param {FsClient} args.fs - a file system client
12395 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
12396 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
12397 * @param {string} args.oid - The SHA-1 object id to get. Annotated tags, commits, and trees are peeled.
12398 * @param {string} [args.filepath] - Don't return the object with `oid` itself, but resolve `oid` to a tree and then return the blob object at that filepath.
12399 * @param {object} [args.cache] - a [cache](cache.md) object
12400 *
12401 * @returns {Promise<ReadBlobResult>} Resolves successfully with a blob object description
12402 * @see ReadBlobResult
12403 *
12404 * @example
12405 * // Get the contents of 'README.md' in the main branch.
12406 * let commitOid = await git.resolveRef({ fs, dir: '/tutorial', ref: 'main' })
12407 * console.log(commitOid)
12408 * let { blob } = await git.readBlob({
12409 * fs,
12410 * dir: '/tutorial',
12411 * oid: commitOid,
12412 * filepath: 'README.md'
12413 * })
12414 * console.log(Buffer.from(blob).toString('utf8'))
12415 *
12416 */
async function readBlob({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  oid,
  filepath,
  cache = {},
}) {
  try {
    // Validate required arguments up front.
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('oid', oid);

    // Delegate to the internal command with a wrapped file system client.
    const result = await _readBlob({
      fs: new FileSystem(fs),
      cache,
      gitdir,
      oid,
      filepath,
    });
    return result
  } catch (err) {
    err.caller = 'git.readBlob';
    throw err
  }
}
12442
12443// @ts-check
12444
12445/**
12446 * Read a commit object directly
12447 *
12448 * @param {object} args
12449 * @param {FsClient} args.fs - a file system client
12450 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
12451 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
12452 * @param {string} args.oid - The SHA-1 object id to get. Annotated tags are peeled.
12453 * @param {object} [args.cache] - a [cache](cache.md) object
12454 *
12455 * @returns {Promise<ReadCommitResult>} Resolves successfully with a git commit object
12456 * @see ReadCommitResult
12457 * @see CommitObject
12458 *
12459 * @example
12460 * // Read a commit object
12461 * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'main' })
12462 * console.log(sha)
12463 * let commit = await git.readCommit({ fs, dir: '/tutorial', oid: sha })
12464 * console.log(commit)
12465 *
12466 */
async function readCommit({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  oid,
  cache = {},
}) {
  try {
    // Validate required arguments up front.
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('oid', oid);

    // Delegate to the internal command with a wrapped file system client.
    const commit = await _readCommit({
      fs: new FileSystem(fs),
      cache,
      gitdir,
      oid,
    });
    return commit
  } catch (err) {
    err.caller = 'git.readCommit';
    throw err
  }
}
12490
12491// @ts-check
12492
12493/**
12494 * Read the contents of a note
12495 *
12496 * @param {object} args
12497 * @param {import('../models/FileSystem.js').FileSystem} args.fs
12498 * @param {any} args.cache
12499 * @param {string} args.gitdir
12500 * @param {string} [args.ref] - The notes ref to look under
12501 * @param {string} args.oid
12502 *
 * @returns {Promise<Uint8Array>} Resolves successfully with note contents as a Uint8Array.
12504 */
12505
async function _readNote({
  fs,
  cache,
  gitdir,
  ref = 'refs/notes/commits',
  oid,
}) {
  // The notes ref points at a commit whose tree maps object ids (as paths)
  // to note blobs, so reading a note is a blob lookup by filepath.
  const notesCommit = await GitRefManager.resolve({ gitdir, fs, ref });
  const result = await _readBlob({
    fs,
    cache,
    gitdir,
    oid: notesCommit,
    filepath: oid,
  });
  return result.blob
}
12524
12525// @ts-check
12526
12527/**
12528 * Read the contents of a note
12529 *
12530 * @param {object} args
12531 * @param {FsClient} args.fs - a file system client
12532 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
12533 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
12534 * @param {string} [args.ref] - The notes ref to look under
12535 * @param {string} args.oid - The SHA-1 object id of the object to get the note for.
12536 * @param {object} [args.cache] - a [cache](cache.md) object
12537 *
 * @returns {Promise<Uint8Array>} Resolves successfully with note contents as a Uint8Array.
12539 */
12540
async function readNote({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  ref = 'refs/notes/commits',
  oid,
  cache = {},
}) {
  try {
    // Validate required arguments up front.
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('ref', ref);
    assertParameter('oid', oid);

    // Delegate to the internal command with a wrapped file system client.
    const note = await _readNote({
      fs: new FileSystem(fs),
      cache,
      gitdir,
      ref,
      oid,
    });
    return note
  } catch (err) {
    err.caller = 'git.readNote';
    throw err
  }
}
12567
12568// @ts-check
12569
12570/**
12571 *
12572 * @typedef {Object} DeflatedObject
12573 * @property {string} oid
12574 * @property {'deflated'} type
12575 * @property {'deflated'} format
12576 * @property {Uint8Array} object
12577 * @property {string} [source]
12578 *
12579 */
12580
12581/**
12582 *
12583 * @typedef {Object} WrappedObject
12584 * @property {string} oid
12585 * @property {'wrapped'} type
12586 * @property {'wrapped'} format
12587 * @property {Uint8Array} object
12588 * @property {string} [source]
12589 *
12590 */
12591
12592/**
12593 *
12594 * @typedef {Object} RawObject
12595 * @property {string} oid
12596 * @property {'blob'|'commit'|'tree'|'tag'} type
12597 * @property {'content'} format
12598 * @property {Uint8Array} object
12599 * @property {string} [source]
12600 *
12601 */
12602
12603/**
12604 *
12605 * @typedef {Object} ParsedBlobObject
12606 * @property {string} oid
12607 * @property {'blob'} type
12608 * @property {'parsed'} format
12609 * @property {string} object
12610 * @property {string} [source]
12611 *
12612 */
12613
12614/**
12615 *
12616 * @typedef {Object} ParsedCommitObject
12617 * @property {string} oid
12618 * @property {'commit'} type
12619 * @property {'parsed'} format
12620 * @property {CommitObject} object
12621 * @property {string} [source]
12622 *
12623 */
12624
12625/**
12626 *
12627 * @typedef {Object} ParsedTreeObject
12628 * @property {string} oid
12629 * @property {'tree'} type
12630 * @property {'parsed'} format
12631 * @property {TreeObject} object
12632 * @property {string} [source]
12633 *
12634 */
12635
12636/**
12637 *
12638 * @typedef {Object} ParsedTagObject
12639 * @property {string} oid
12640 * @property {'tag'} type
12641 * @property {'parsed'} format
12642 * @property {TagObject} object
12643 * @property {string} [source]
12644 *
12645 */
12646
12647/**
12648 *
12649 * @typedef {ParsedBlobObject | ParsedCommitObject | ParsedTreeObject | ParsedTagObject} ParsedObject
12650 */
12651
12652/**
12653 *
12654 * @typedef {DeflatedObject | WrappedObject | RawObject | ParsedObject } ReadObjectResult
12655 */
12656
12657/**
12658 * Read a git object directly by its SHA-1 object id
12659 *
12660 * Regarding `ReadObjectResult`:
12661 *
12662 * - `oid` will be the same as the `oid` argument unless the `filepath` argument is provided, in which case it will be the oid of the tree or blob being returned.
12663 * - `type` of deflated objects is `'deflated'`, and `type` of wrapped objects is `'wrapped'`
12664 * - `format` is usually, but not always, the format you requested. Packfiles do not store each object individually compressed so if you end up reading the object from a packfile it will be returned in format 'content' even if you requested 'deflated' or 'wrapped'.
12665 * - `object` will be an actual Object if format is 'parsed' and the object is a commit, tree, or annotated tag. Blobs are still formatted as Buffers unless an encoding is provided in which case they'll be strings. If format is anything other than 'parsed', object will be a Buffer.
12666 * - `source` is the name of the packfile or loose object file where the object was found.
12667 *
12668 * The `format` parameter can have the following values:
12669 *
12670 * | param | description |
12671 * | ---------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
12672 * | 'deflated' | Return the raw deflate-compressed buffer for an object if possible. Useful for efficiently shuffling around loose objects when you don't care about the contents and can save time by not inflating them. |
12673 * | 'wrapped' | Return the inflated object buffer wrapped in the git object header if possible. This is the raw data used when calculating the SHA-1 object id of a git object. |
12674 * | 'content' | Return the object buffer without the git header. |
12675 * | 'parsed' | Returns a parsed representation of the object. |
12676 *
12677 * The result will be in one of the following schemas:
12678 *
12679 * ## `'deflated'` format
12680 *
12681 * {@link DeflatedObject typedef}
12682 *
12683 * ## `'wrapped'` format
12684 *
12685 * {@link WrappedObject typedef}
12686 *
12687 * ## `'content'` format
12688 *
12689 * {@link RawObject typedef}
12690 *
12691 * ## `'parsed'` format
12692 *
12693 * ### parsed `'blob'` type
12694 *
12695 * {@link ParsedBlobObject typedef}
12696 *
12697 * ### parsed `'commit'` type
12698 *
12699 * {@link ParsedCommitObject typedef}
12700 * {@link CommitObject typedef}
12701 *
12702 * ### parsed `'tree'` type
12703 *
12704 * {@link ParsedTreeObject typedef}
12705 * {@link TreeObject typedef}
12706 * {@link TreeEntry typedef}
12707 *
12708 * ### parsed `'tag'` type
12709 *
12710 * {@link ParsedTagObject typedef}
12711 * {@link TagObject typedef}
12712 *
12713 * @deprecated
12714 * > This command is overly complicated.
12715 * >
12716 * > If you know the type of object you are reading, use [`readBlob`](./readBlob.md), [`readCommit`](./readCommit.md), [`readTag`](./readTag.md), or [`readTree`](./readTree.md).
12717 *
12718 * @param {object} args
12719 * @param {FsClient} args.fs - a file system client
12720 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
12721 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
12722 * @param {string} args.oid - The SHA-1 object id to get
12723 * @param {'deflated' | 'wrapped' | 'content' | 'parsed'} [args.format = 'parsed'] - What format to return the object in. The choices are described in more detail below.
12724 * @param {string} [args.filepath] - Don't return the object with `oid` itself, but resolve `oid` to a tree and then return the object at that filepath. To return the root directory of a tree set filepath to `''`
12725 * @param {string} [args.encoding] - A convenience argument that only affects blobs. Instead of returning `object` as a buffer, it returns a string parsed using the given encoding.
12726 * @param {object} [args.cache] - a [cache](cache.md) object
12727 *
12728 * @returns {Promise<ReadObjectResult>} Resolves successfully with a git object description
12729 * @see ReadObjectResult
12730 *
12731 * @example
 * // Given a random SHA-1 object id, figure out what it is
12733 * let { type, object } = await git.readObject({
12734 * fs,
12735 * dir: '/tutorial',
12736 * oid: '0698a781a02264a6f37ba3ff41d78067eaf0f075'
12737 * })
12738 * switch (type) {
12739 * case 'commit': {
12740 * console.log(object)
12741 * break
12742 * }
12743 * case 'tree': {
12744 * console.log(object)
12745 * break
12746 * }
12747 * case 'blob': {
12748 * console.log(object)
12749 * break
12750 * }
12751 * case 'tag': {
12752 * console.log(object)
12753 * break
12754 * }
12755 * }
12756 *
12757 */
async function readObject({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  oid,
  format = 'parsed',
  filepath = undefined,
  encoding = undefined,
  cache = {},
}) {
  try {
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('oid', oid);

    const fs = new FileSystem(_fs);
    // If a filepath was given, `oid` names a commit/tree: resolve it to the
    // oid of the entry at that path before reading.
    if (filepath !== undefined) {
      oid = await resolveFilepath({
        fs,
        cache,
        gitdir,
        oid,
        filepath,
      });
    }
    // GitObjectManager does not know how to parse content, so we tweak that parameter before passing it.
    const _format = format === 'parsed' ? 'content' : format;
    const result = await _readObject({
      fs,
      cache,
      gitdir,
      oid,
      format: _format,
    });
    // Report the (possibly filepath-resolved) oid, not the one the caller passed.
    result.oid = oid;
    if (format === 'parsed') {
      result.format = 'parsed';
      // Parse the raw content into a structured object based on its git type.
      switch (result.type) {
        case 'commit':
          result.object = GitCommit.from(result.object).parse();
          break
        case 'tree':
          result.object = GitTree.from(result.object).entries();
          break
        case 'blob':
          // Here we consider returning a raw Buffer as the 'content' format
          // and returning a string as the 'parsed' format
          if (encoding) {
            result.object = result.object.toString(encoding);
          } else {
            result.object = new Uint8Array(result.object);
            result.format = 'content';
          }
          break
        case 'tag':
          result.object = GitAnnotatedTag.from(result.object).parse();
          break
        default:
          // Unknown/corrupt object type: surface it as a type error.
          throw new ObjectTypeError(
            result.oid,
            result.type,
            'blob|commit|tag|tree'
          )
      }
    } else if (result.format === 'deflated' || result.format === 'wrapped') {
      // For these formats the true git type is unknown without inflating,
      // so the format doubles as the reported type.
      result.type = result.format;
    }
    return result
  } catch (err) {
    err.caller = 'git.readObject';
    throw err
  }
}
12831
12832// @ts-check
12833
12834/**
12835 *
12836 * @typedef {Object} ReadTagResult - The object returned has the following schema:
12837 * @property {string} oid - SHA-1 object id of this tag
12838 * @property {TagObject} tag - the parsed tag object
12839 * @property {string} payload - PGP signing payload
12840 */
12841
12842/**
12843 * @param {object} args
12844 * @param {import('../models/FileSystem.js').FileSystem} args.fs
12845 * @param {any} args.cache
12846 * @param {string} args.gitdir
12847 * @param {string} args.oid
12848 *
12849 * @returns {Promise<ReadTagResult>}
12850 */
async function _readTag({ fs, cache, gitdir, oid }) {
  // Fetch the raw object and verify it really is an annotated tag.
  const { type, object } = await _readObject({
    fs,
    cache,
    gitdir,
    oid,
    format: 'content',
  });
  if (type !== 'tag') {
    throw new ObjectTypeError(oid, type, 'tag')
  }
  const annotatedTag = GitAnnotatedTag.from(object);
  // @ts-ignore
  return {
    oid,
    tag: annotatedTag.parse(),
    payload: annotatedTag.payload(),
  }
}
12871
12872/**
12873 *
12874 * @typedef {Object} ReadTagResult - The object returned has the following schema:
12875 * @property {string} oid - SHA-1 object id of this tag
12876 * @property {TagObject} tag - the parsed tag object
12877 * @property {string} payload - PGP signing payload
12878 */
12879
12880/**
12881 * Read an annotated tag object directly
12882 *
12883 * @param {object} args
12884 * @param {FsClient} args.fs - a file system client
12885 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
12886 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
12887 * @param {string} args.oid - The SHA-1 object id to get
12888 * @param {object} [args.cache] - a [cache](cache.md) object
12889 *
12890 * @returns {Promise<ReadTagResult>} Resolves successfully with a git object description
12891 * @see ReadTagResult
12892 * @see TagObject
12893 *
12894 */
async function readTag({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  oid,
  cache = {},
}) {
  try {
    // Validate required arguments up front.
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('oid', oid);

    // Delegate to the internal command with a wrapped file system client.
    const result = await _readTag({
      fs: new FileSystem(fs),
      cache,
      gitdir,
      oid,
    });
    return result
  } catch (err) {
    err.caller = 'git.readTag';
    throw err
  }
}
12918
12919// @ts-check
12920
12921/**
12922 *
12923 * @typedef {Object} ReadTreeResult - The object returned has the following schema:
12924 * @property {string} oid - SHA-1 object id of this tree
12925 * @property {TreeObject} tree - the parsed tree object
12926 */
12927
12928/**
12929 * Read a tree object directly
12930 *
12931 * @param {object} args
12932 * @param {FsClient} args.fs - a file system client
12933 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
12934 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
12935 * @param {string} args.oid - The SHA-1 object id to get. Annotated tags and commits are peeled.
12936 * @param {string} [args.filepath] - Don't return the object with `oid` itself, but resolve `oid` to a tree and then return the tree object at that filepath.
12937 * @param {object} [args.cache] - a [cache](cache.md) object
12938 *
12939 * @returns {Promise<ReadTreeResult>} Resolves successfully with a git tree object
12940 * @see ReadTreeResult
12941 * @see TreeObject
12942 * @see TreeEntry
12943 *
12944 */
async function readTree({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  oid,
  filepath = undefined,
  cache = {},
}) {
  try {
    // Validate required arguments up front.
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('oid', oid);

    // Delegate to the internal command with a wrapped file system client.
    const result = await _readTree({
      fs: new FileSystem(fs),
      cache,
      gitdir,
      oid,
      filepath,
    });
    return result
  } catch (err) {
    err.caller = 'git.readTree';
    throw err
  }
}
12970
12971// @ts-check
12972
12973/**
12974 * Remove a file from the git index (aka staging area)
12975 *
12976 * Note that this does NOT delete the file in the working directory.
12977 *
12978 * @param {object} args
12979 * @param {FsClient} args.fs - a file system client
12980 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
12981 * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
12982 * @param {string} args.filepath - The path to the file to remove from the index
12983 * @param {object} [args.cache] - a [cache](cache.md) object
12984 *
12985 * @returns {Promise<void>} Resolves successfully once the git index has been updated
12986 *
12987 * @example
12988 * await git.remove({ fs, dir: '/tutorial', filepath: 'README.md' })
12989 * console.log('done')
12990 *
12991 */
async function remove({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  filepath,
  cache = {},
}) {
  try {
    // Validate required arguments up front.
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('filepath', filepath);

    const fs = new FileSystem(_fs);
    // Take the index lock and drop the entry; the working tree is untouched.
    await GitIndexManager.acquire({ fs, gitdir, cache }, async index => {
      index.delete({ filepath });
    });
  } catch (err) {
    err.caller = 'git.remove';
    throw err
  }
}
13015
13016// @ts-check
13017
13018/**
13019 * @param {object} args
13020 * @param {import('../models/FileSystem.js').FileSystem} args.fs
13021 * @param {object} args.cache
13022 * @param {SignCallback} [args.onSign]
13023 * @param {string} [args.dir]
13024 * @param {string} [args.gitdir=join(dir,'.git')]
13025 * @param {string} [args.ref]
13026 * @param {string} args.oid
13027 * @param {Object} args.author
13028 * @param {string} args.author.name
13029 * @param {string} args.author.email
13030 * @param {number} args.author.timestamp
13031 * @param {number} args.author.timezoneOffset
13032 * @param {Object} args.committer
13033 * @param {string} args.committer.name
13034 * @param {string} args.committer.email
13035 * @param {number} args.committer.timestamp
13036 * @param {number} args.committer.timezoneOffset
13037 * @param {string} [args.signingKey]
13038 *
13039 * @returns {Promise<string>}
13040 */
13041
async function _removeNote({
  fs,
  cache,
  onSign,
  gitdir,
  ref = 'refs/notes/commits',
  oid,
  author,
  committer,
  signingKey,
}) {
  // Get the current note commit, if the notes ref exists at all.
  let parent;
  try {
    parent = await GitRefManager.resolve({ gitdir, fs, ref });
  } catch (err) {
    // A missing notes ref just means there are no notes yet;
    // any other failure is a real error.
    if (!(err instanceof NotFoundError)) {
      throw err
    }
  }

  // I'm using the "empty tree" magic number here for brevity
  const result = await _readTree({
    fs,
    // BUGFIX: forward the cache (as every other _readTree call site does) so
    // notes stored in packfiles resolve correctly instead of bypassing the cache.
    cache,
    gitdir,
    oid: parent || '4b825dc642cb6eb9a060e54bf8d69288fbee4904',
  });
  let tree = result.tree;

  // Remove the note blob entry from the tree
  tree = tree.filter(entry => entry.path !== oid);

  // Create the new note tree
  const treeOid = await _writeTree({
    fs,
    gitdir,
    tree,
  });

  // Create the new note commit
  const commitOid = await _commit({
    fs,
    cache,
    onSign,
    gitdir,
    ref,
    tree: treeOid,
    parent: parent && [parent],
    message: `Note removed by 'isomorphic-git removeNote'\n`,
    author,
    committer,
    signingKey,
  });

  return commitOid
}
13098
13099// @ts-check
13100
13101/**
13102 * Remove an object note
13103 *
13104 * @param {object} args
13105 * @param {FsClient} args.fs - a file system client
13106 * @param {SignCallback} [args.onSign] - a PGP signing implementation
13107 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
13108 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
13109 * @param {string} [args.ref] - The notes ref to look under
13110 * @param {string} args.oid - The SHA-1 object id of the object to remove the note from.
13111 * @param {Object} [args.author] - The details about the author.
13112 * @param {string} [args.author.name] - Default is `user.name` config.
13113 * @param {string} [args.author.email] - Default is `user.email` config.
13114 * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
13115 * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`.
13116 * @param {Object} [args.committer = author] - The details about the note committer, in the same format as the author parameter. If not specified, the author details are used.
13117 * @param {string} [args.committer.name] - Default is `user.name` config.
13118 * @param {string} [args.committer.email] - Default is `user.email` config.
13119 * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00).
13120 * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`.
13121 * @param {string} [args.signingKey] - Sign the tag object using this private PGP key.
13122 * @param {object} [args.cache] - a [cache](cache.md) object
13123 *
13124 * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the commit object for the note removal.
13125 */
13126
async function removeNote({
  fs: _fs,
  onSign,
  dir,
  gitdir = join(dir, '.git'),
  ref = 'refs/notes/commits',
  oid,
  author: _author,
  committer: _committer,
  signingKey,
  cache = {},
}) {
  try {
    // Validate required arguments up front.
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('oid', oid);

    const fs = new FileSystem(_fs);

    // Fill in missing author/committer details from git config;
    // a missing name is a hard error.
    const author = await normalizeAuthorObject({ fs, gitdir, author: _author });
    if (!author) throw new MissingNameError('author')
    const committer = await normalizeCommitterObject({
      fs,
      gitdir,
      author,
      committer: _committer,
    });
    if (!committer) throw new MissingNameError('committer')

    // Delegate the tree rewrite + commit to the internal command.
    return await _removeNote({
      fs,
      cache,
      onSign,
      gitdir,
      ref,
      oid,
      author,
      committer,
      signingKey,
    })
  } catch (err) {
    err.caller = 'git.removeNote';
    throw err
  }
}
13173
13174// @ts-check
13175
13176/**
13177 * Rename a branch
13178 *
13179 * @param {object} args
13180 * @param {import('../models/FileSystem.js').FileSystem} args.fs
13181 * @param {string} args.gitdir
13182 * @param {string} args.ref - The name of the new branch
13183 * @param {string} args.oldref - The name of the old branch
13184 * @param {boolean} [args.checkout = false]
13185 *
13186 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
13187 */
async function _renameBranch({
  fs,
  gitdir,
  oldref,
  ref,
  checkout = false,
}) {
  // Both branch names must already be in canonical (clean) form.
  for (const name of [ref, oldref]) {
    const cleaned = cleanGitRef.clean(name);
    if (name !== cleaned) {
      throw new InvalidRefNameError(name, cleaned)
    }
  }

  const srcRef = `refs/heads/${oldref}`;
  const dstRef = `refs/heads/${ref}`;

  // Refuse to clobber an existing branch.
  if (await GitRefManager.exists({ fs, gitdir, ref: dstRef })) {
    throw new AlreadyExistsError('branch', ref, false)
  }

  // Copy the old ref's value to the new name, then remove the old ref.
  const value = await GitRefManager.resolve({
    fs,
    gitdir,
    ref: srcRef,
    depth: 1,
  });
  await GitRefManager.writeRef({ fs, gitdir, ref: dstRef, value });
  await GitRefManager.deleteRef({ fs, gitdir, ref: srcRef });

  // Re-point HEAD when the renamed branch was checked out (or when asked to).
  const headBranchRef = await _currentBranch({
    fs,
    gitdir,
    fullname: true,
  });
  if (checkout || headBranchRef === srcRef) {
    await GitRefManager.writeSymbolicRef({
      fs,
      gitdir,
      ref: 'HEAD',
      value: dstRef,
    });
  }
}
13239
13240// @ts-check
13241
13242/**
13243 * Rename a branch
13244 *
13245 * @param {object} args
13246 * @param {FsClient} args.fs - a file system implementation
13247 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
13248 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
13249 * @param {string} args.ref - What to name the branch
13250 * @param {string} args.oldref - What the name of the branch was
13251 * @param {boolean} [args.checkout = false] - Update `HEAD` to point at the newly created branch
13252 *
13253 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
13254 *
13255 * @example
13256 * await git.renameBranch({ fs, dir: '/tutorial', ref: 'main', oldref: 'master' })
13257 * console.log('done')
13258 *
13259 */
async function renameBranch({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  ref,
  oldref,
  checkout = false,
}) {
  try {
    // Validate required arguments up front.
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('ref', ref);
    assertParameter('oldref', oldref);

    // Delegate to the internal command with a wrapped file system client.
    const result = await _renameBranch({
      fs: new FileSystem(fs),
      gitdir,
      ref,
      oldref,
      checkout,
    });
    return result
  } catch (err) {
    err.caller = 'git.renameBranch';
    throw err
  }
}
13285
async function hashObject$1({ gitdir, type, object }) {
  // Wrap the content in the standard git object header, then SHA-1 it.
  const wrapped = GitObject.wrap({ type, object });
  return shasum(wrapped)
}
13289
13290// @ts-check
13291
13292/**
13293 * Reset a file in the git index (aka staging area)
13294 *
13295 * Note that this does NOT modify the file in the working directory.
13296 *
13297 * @param {object} args
13298 * @param {FsClient} args.fs - a file system client
13299 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
13300 * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
13301 * @param {string} args.filepath - The path to the file to reset in the index
13302 * @param {string} [args.ref = 'HEAD'] - A ref to the commit to use
13303 * @param {object} [args.cache] - a [cache](cache.md) object
13304 *
13305 * @returns {Promise<void>} Resolves successfully once the git index has been updated
13306 *
13307 * @example
13308 * await git.resetIndex({ fs, dir: '/tutorial', filepath: 'README.md' })
13309 * console.log('done')
13310 *
13311 */
async function resetIndex({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  filepath,
  ref,
  cache = {},
}) {
  try {
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('filepath', filepath);

    const fs = new FileSystem(_fs);

    // oid of the blob the index entry should be reset to (null => "deleted").
    let oid;
    // oid of the blob currently in the working directory, if any.
    let workdirOid;

    try {
      // Resolve commit
      oid = await GitRefManager.resolve({ fs, gitdir, ref: ref || 'HEAD' });
    } catch (e) {
      if (ref) {
        // Only throw the error if a ref is explicitly provided
        throw e
      }
    }

    // Not having an oid at this point means `resetIndex()` was called without explicit `ref` on a new git
    // repository. If that happens, we can skip resolving the file path.
    if (oid) {
      try {
        // Resolve blob
        oid = await resolveFilepath({
          fs,
          cache,
          gitdir,
          oid,
          filepath,
        });
      } catch (e) {
        // This means we're resetting the file to a "deleted" state
        oid = null;
      }
    }

    // For files that aren't in the workdir use zeros
    let stats = {
      ctime: new Date(0),
      mtime: new Date(0),
      dev: 0,
      ino: 0,
      mode: 0,
      uid: 0,
      gid: 0,
      size: 0,
    };
    // If the file exists in the workdir...
    // (only possible when a working tree directory was provided)
    const object = dir && (await fs.read(join(dir, filepath)));
    if (object) {
      // ... and has the same hash as the desired state...
      workdirOid = await hashObject$1({
        gitdir,
        type: 'blob',
        object,
      });
      if (oid === workdirOid) {
        // ... use the workdir Stats object
        // so the entry is not reported as modified on the next status check.
        stats = await fs.lstat(join(dir, filepath));
      }
    }
    // Replace (or remove) the index entry under the index lock.
    await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
      index.delete({ filepath });
      if (oid) {
        index.insert({ filepath, stats, oid });
      }
    });
  } catch (err) {
    err.caller = 'git.reset';
    throw err
  }
}
13394
13395// @ts-check
13396
13397/**
13398 * Get the value of a symbolic ref or resolve a ref to its SHA-1 object id
13399 *
13400 * @param {object} args
13401 * @param {FsClient} args.fs - a file system client
13402 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
13403 * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
13404 * @param {string} args.ref - The ref to resolve
13405 * @param {number} [args.depth = undefined] - How many symbolic references to follow before returning
13406 *
13407 * @returns {Promise<string>} Resolves successfully with a SHA-1 object id or the value of a symbolic ref
13408 *
13409 * @example
13410 * let currentCommit = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD' })
13411 * console.log(currentCommit)
13412 * let currentBranch = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD', depth: 2 })
13413 * console.log(currentBranch)
13414 *
13415 */
async function resolveRef({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  ref,
  depth,
}) {
  try {
    // Validate required arguments up front.
    assertParameter('fs', fs);
    assertParameter('gitdir', gitdir);
    assertParameter('ref', ref);

    // GitRefManager does the actual lookup and symbolic-ref following.
    return await GitRefManager.resolve({
      fs: new FileSystem(fs),
      gitdir,
      ref,
      depth,
    })
  } catch (err) {
    err.caller = 'git.resolveRef';
    throw err
  }
}
13440
13441// @ts-check
13442
13443/**
13444 * Write an entry to the git config files.
13445 *
13446 * *Caveats:*
13447 * - Currently only the local `$GIT_DIR/config` file can be read or written. However support for the global `~/.gitconfig` and system `$(prefix)/etc/gitconfig` will be added in the future.
13448 * - The current parser does not support the more exotic features of the git-config file format such as `[include]` and `[includeIf]`.
13449 *
13450 * @param {Object} args
13451 * @param {FsClient} args.fs - a file system implementation
13452 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
13453 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
13454 * @param {string} args.path - The key of the git config entry
13455 * @param {string | boolean | number | void} args.value - A value to store at that path. (Use `undefined` as the value to delete a config entry.)
13456 * @param {boolean} [args.append = false] - If true, will append rather than replace when setting (use with multi-valued config options).
13457 *
13458 * @returns {Promise<void>} Resolves successfully when operation completed
13459 *
13460 * @example
13461 * // Write config value
13462 * await git.setConfig({
13463 * fs,
13464 * dir: '/tutorial',
13465 * path: 'user.name',
13466 * value: 'Mr. Test'
13467 * })
13468 *
13469 * // Print out config file
13470 * let file = await fs.promises.readFile('/tutorial/.git/config', 'utf8')
13471 * console.log(file)
13472 *
13473 * // Delete a config entry
13474 * await git.setConfig({
13475 * fs,
13476 * dir: '/tutorial',
13477 * path: 'user.name',
13478 * value: undefined
13479 * })
13480 *
13481 * // Print out config file
13482 * file = await fs.promises.readFile('/tutorial/.git/config', 'utf8')
13483 * console.log(file)
13484 */
async function setConfig({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  path,
  value,
  append = false,
}) {
  try {
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('path', path);
    // Note: `value` is deliberately NOT asserted — passing `undefined`
    // is the documented way to unset/delete a config entry.

    const fs = new FileSystem(_fs);
    // Read-modify-write the local $GIT_DIR/config file.
    const config = await GitConfigManager.get({ fs, gitdir });
    if (append) {
      // Add another value for a multi-valued key.
      await config.append(path, value);
    } else {
      // Replace (or delete, when value is undefined) the entry.
      await config.set(path, value);
    }
    await GitConfigManager.save({ fs, gitdir, config });
  } catch (err) {
    // Tag the error with the public API entry point before rethrowing.
    err.caller = 'git.setConfig';
    throw err
  }
}
13512
13513// @ts-check
13514
13515/**
13516 * Tell whether a file has been changed
13517 *
13518 * The possible resolve values are:
13519 *
13520 * | status | description |
13521 * | --------------------- | ------------------------------------------------------------------------------------- |
13522 * | `"ignored"` | file ignored by a .gitignore rule |
13523 * | `"unmodified"` | file unchanged from HEAD commit |
13524 * | `"*modified"` | file has modifications, not yet staged |
13525 * | `"*deleted"` | file has been removed, but the removal is not yet staged |
13526 * | `"*added"` | file is untracked, not yet staged |
13527 * | `"absent"` | file not present in HEAD commit, staging area, or working dir |
13528 * | `"modified"` | file has modifications, staged |
13529 * | `"deleted"` | file has been removed, staged |
13530 * | `"added"` | previously untracked file, staged |
13531 * | `"*unmodified"` | working dir and HEAD commit match, but index differs |
13532 * | `"*absent"` | file not present in working dir or HEAD commit, but present in the index |
13533 * | `"*undeleted"` | file was deleted from the index, but is still in the working dir |
13534 * | `"*undeletemodified"` | file was deleted from the index, but is present with modifications in the working dir |
13535 *
13536 * @param {object} args
13537 * @param {FsClient} args.fs - a file system client
13538 * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
13539 * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
13540 * @param {string} args.filepath - The path to the file to query
13541 * @param {object} [args.cache] - a [cache](cache.md) object
13542 *
13543 * @returns {Promise<'ignored'|'unmodified'|'*modified'|'*deleted'|'*added'|'absent'|'modified'|'deleted'|'added'|'*unmodified'|'*absent'|'*undeleted'|'*undeletemodified'>} Resolves successfully with the file's git status
13544 *
13545 * @example
13546 * let status = await git.status({ fs, dir: '/tutorial', filepath: 'README.md' })
13547 * console.log(status)
13548 *
13549 */
async function status({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  filepath,
  cache = {},
}) {
  try {
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('filepath', filepath);

    const fs = new FileSystem(_fs);
    // Ignored files short-circuit: no need to compare HEAD/index/workdir.
    const ignored = await GitIgnoreManager.isIgnored({
      fs,
      gitdir,
      dir,
      filepath,
    });
    if (ignored) {
      return 'ignored'
    }
    // Gather the three states of the file:
    // 1) the blob oid at `filepath` in the HEAD commit tree (null if absent),
    const headTree = await getHeadTree({ fs, cache, gitdir });
    const treeOid = await getOidAtPath({
      fs,
      cache,
      gitdir,
      tree: headTree,
      path: filepath,
    });
    // 2) the index (staging area) entry for the path (null if absent),
    const indexEntry = await GitIndexManager.acquire(
      { fs, gitdir, cache },
      async function(index) {
        // Linear scan of the index for the matching path.
        for (const entry of index) {
          if (entry.path === filepath) return entry
        }
        return null
      }
    );
    // 3) the working directory stats (null if the file does not exist on disk).
    const stats = await fs.lstat(join(dir, filepath));

    // Presence flags for the three "trees" being compared.
    const H = treeOid !== null; // head
    const I = indexEntry !== null; // index
    const W = stats !== null; // working dir

    // Lazily compute the workdir blob oid. When the cached index stats still
    // match the on-disk stats, reuse the index oid to avoid re-hashing the file.
    const getWorkdirOid = async () => {
      if (I && !compareStats(indexEntry, stats)) {
        return indexEntry.oid
      } else {
        const object = await fs.read(join(dir, filepath));
        const workdirOid = await hashObject$1({
          gitdir,
          type: 'blob',
          object,
        });
        // If the oid in the index === working dir oid but stats differed update cache
        if (I && indexEntry.oid === workdirOid) {
          // and as long as our fs.stats aren't bad.
          // size of -1 happens over a BrowserFS HTTP Backend that doesn't serve Content-Length headers
          // (like the Karma webserver) because BrowserFS HTTP Backend uses HTTP HEAD requests to do fs.stat
          if (stats.size !== -1) {
            // We don't await this so we can return faster for one-off cases.
            GitIndexManager.acquire({ fs, gitdir, cache }, async function(
              index
            ) {
              index.insert({ filepath, stats, oid: workdirOid });
            });
          }
        }
        return workdirOid
      }
    };

    // Enumerate the H/I/W combinations. In the trailing comments the three
    // letters encode HEAD / INDEX / WORKDIR content: '-' means absent, and
    // A, B label distinct blob contents (e.g. -AB = absent in HEAD, content A
    // staged, different content B in the workdir).
    if (!H && !W && !I) return 'absent' // ---
    if (!H && !W && I) return '*absent' // -A-
    if (!H && W && !I) return '*added' // --A
    if (!H && W && I) {
      const workdirOid = await getWorkdirOid();
      // @ts-ignore
      return workdirOid === indexEntry.oid ? 'added' : '*added' // -AA : -AB
    }
    if (H && !W && !I) return 'deleted' // A--
    if (H && !W && I) {
      // Both AA- and AB- report '*deleted': the workdir deletion is unstaged
      // either way, so the ternary intentionally has identical branches.
      // @ts-ignore
      return treeOid === indexEntry.oid ? '*deleted' : '*deleted' // AA- : AB-
    }
    if (H && W && !I) {
      const workdirOid = await getWorkdirOid();
      return workdirOid === treeOid ? '*undeleted' : '*undeletemodified' // A-A : A-B
    }
    if (H && W && I) {
      const workdirOid = await getWorkdirOid();
      if (workdirOid === treeOid) {
        // @ts-ignore
        return workdirOid === indexEntry.oid ? 'unmodified' : '*unmodified' // AAA : ABA
      } else {
        // @ts-ignore
        return workdirOid === indexEntry.oid ? 'modified' : '*modified' // ABB : AAB
      }
    }
    /*
    ---
    -A-
    --A
    -AA
    -AB
    A--
    AA-
    AB-
    A-A
    A-B
    AAA
    ABA
    ABB
    AAB
    */
  } catch (err) {
    err.caller = 'git.status';
    throw err
  }
}
13671
/**
 * Walk `tree` (a parsed HEAD commit tree) down to `path` and return the oid
 * of the entry found there, or `null` if no entry exists at that path.
 * `path` may be a 'a/b/c' string or an array of remaining path segments.
 */
async function getOidAtPath({ fs, cache, gitdir, tree, path }) {
  const segments = typeof path === 'string' ? path.split('/') : path;
  const [name, ...rest] = segments;
  for (const entry of tree) {
    if (entry.path !== name) continue
    // Last segment matched: this entry is the target.
    if (rest.length === 0) {
      return entry.oid
    }
    const { type, object } = await _readObject({
      fs,
      cache,
      gitdir,
      oid: entry.oid,
    });
    if (type === 'tree') {
      // Recurse into the subdirectory with the remaining segments.
      return getOidAtPath({
        fs,
        cache,
        gitdir,
        tree: GitTree.from(object),
        path: rest,
      })
    }
    if (type === 'blob') {
      // A file was found where a directory component was expected.
      throw new ObjectTypeError(entry.oid, type, 'blob', rest.join('/'))
    }
  }
  return null
}
13697
/**
 * Return the parsed tree of the HEAD commit, or an empty array when HEAD
 * points at a branch with no commits yet (fresh repository).
 */
async function getHeadTree({ fs, cache, gitdir }) {
  // Get the tree from the HEAD commit.
  let oid;
  try {
    oid = await GitRefManager.resolve({ fs, gitdir, ref: 'HEAD' });
  } catch (e) {
    // Handle fresh branches with no commits
    if (e instanceof NotFoundError) {
      return []
    }
    // Previously any other error was silently swallowed, leaving `oid`
    // undefined and causing a confusing secondary failure in _readTree.
    // Rethrow so the caller sees the real cause.
    throw e
  }
  const { tree } = await _readTree({ fs, cache, gitdir, oid });
  return tree
}
13712
13713// @ts-check
13714
13715/**
13716 * Efficiently get the status of multiple files at once.
13717 *
13718 * The returned `StatusMatrix` is admittedly not the easiest format to read.
13719 * However it conveys a large amount of information in dense format that should make it easy to create reports about the current state of the repository;
13720 * without having to do multiple, time-consuming isomorphic-git calls.
13721 * My hope is that the speed and flexibility of the function will make up for the learning curve of interpreting the return value.
13722 *
13723 * ```js live
13724 * // get the status of all the files in 'src'
13725 * let status = await git.statusMatrix({
13726 * fs,
13727 * dir: '/tutorial',
13728 * filter: f => f.startsWith('src/')
13729 * })
13730 * console.log(status)
13731 * ```
13732 *
13733 * ```js live
13734 * // get the status of all the JSON and Markdown files
13735 * let status = await git.statusMatrix({
13736 * fs,
13737 * dir: '/tutorial',
13738 * filter: f => f.endsWith('.json') || f.endsWith('.md')
13739 * })
13740 * console.log(status)
13741 * ```
13742 *
13743 * The result is returned as a 2D array.
13744 * The outer array represents the files and/or blobs in the repo, in alphabetical order.
13745 * The inner arrays describe the status of the file:
13746 * the first value is the filepath, and the next three are integers
13747 * representing the HEAD status, WORKDIR status, and STAGE status of the entry.
13748 *
13749 * ```js
13750 * // example StatusMatrix
13751 * [
13752 * ["a.txt", 0, 2, 0], // new, untracked
13753 * ["b.txt", 0, 2, 2], // added, staged
13754 * ["c.txt", 0, 2, 3], // added, staged, with unstaged changes
13755 * ["d.txt", 1, 1, 1], // unmodified
13756 * ["e.txt", 1, 2, 1], // modified, unstaged
13757 * ["f.txt", 1, 2, 2], // modified, staged
13758 * ["g.txt", 1, 2, 3], // modified, staged, with unstaged changes
13759 * ["h.txt", 1, 0, 1], // deleted, unstaged
13760 * ["i.txt", 1, 0, 0], // deleted, staged
13761 * ]
13762 * ```
13763 *
13764 * - The HEAD status is either absent (0) or present (1).
13765 * - The WORKDIR status is either absent (0), identical to HEAD (1), or different from HEAD (2).
13766 * - The STAGE status is either absent (0), identical to HEAD (1), identical to WORKDIR (2), or different from WORKDIR (3).
13767 *
13768 * ```ts
13769 * type Filename = string
13770 * type HeadStatus = 0 | 1
13771 * type WorkdirStatus = 0 | 1 | 2
13772 * type StageStatus = 0 | 1 | 2 | 3
13773 *
13774 * type StatusRow = [Filename, HeadStatus, WorkdirStatus, StageStatus]
13775 *
13776 * type StatusMatrix = StatusRow[]
13777 * ```
13778 *
13779 * > Think of the natural progression of file modifications as being from HEAD (previous) -> WORKDIR (current) -> STAGE (next).
13780 * > Then HEAD is "version 1", WORKDIR is "version 2", and STAGE is "version 3".
13781 * > Then, imagine a "version 0" which is before the file was created.
13782 * > Then the status value in each column corresponds to the oldest version of the file it is identical to.
13783 * > (For a file to be identical to "version 0" means the file is deleted.)
13784 *
13785 * Here are some examples of queries you can answer using the result:
13786 *
13787 * #### Q: What files have been deleted?
13788 * ```js
13789 * const FILE = 0, WORKDIR = 2
13790 *
13791 * const filenames = (await statusMatrix({ dir }))
13792 * .filter(row => row[WORKDIR] === 0)
13793 * .map(row => row[FILE])
13794 * ```
13795 *
13796 * #### Q: What files have unstaged changes?
13797 * ```js
13798 * const FILE = 0, WORKDIR = 2, STAGE = 3
13799 *
13800 * const filenames = (await statusMatrix({ dir }))
13801 * .filter(row => row[WORKDIR] !== row[STAGE])
13802 * .map(row => row[FILE])
13803 * ```
13804 *
13805 * #### Q: What files have been modified since the last commit?
13806 * ```js
13807 * const FILE = 0, HEAD = 1, WORKDIR = 2
13808 *
13809 * const filenames = (await statusMatrix({ dir }))
13810 * .filter(row => row[HEAD] !== row[WORKDIR])
13811 * .map(row => row[FILE])
13812 * ```
13813 *
13814 * #### Q: What files will NOT be changed if I commit right now?
13815 * ```js
13816 * const FILE = 0, HEAD = 1, STAGE = 3
13817 *
13818 * const filenames = (await statusMatrix({ dir }))
13819 * .filter(row => row[HEAD] === row[STAGE])
13820 * .map(row => row[FILE])
13821 * ```
13822 *
13823 * For reference, here are all possible combinations:
13824 *
13825 * | HEAD | WORKDIR | STAGE | `git status --short` equivalent |
13826 * | ---- | ------- | ----- | ------------------------------- |
13827 * | 0 | 0 | 0 | `` |
13828 * | 0 | 0 | 3 | `AD` |
13829 * | 0 | 2 | 0 | `??` |
13830 * | 0 | 2 | 2 | `A ` |
13831 * | 0 | 2 | 3 | `AM` |
13832 * | 1 | 0 | 0 | `D ` |
13833 * | 1 | 0 | 1 | ` D` |
13834 * | 1 | 0 | 3 | `MD` |
13835 * | 1 | 1 | 0 | `D ` + `??` |
13836 * | 1 | 1 | 1 | `` |
13837 * | 1 | 1 | 3 | `MM` |
13838 * | 1 | 2 | 0 | `D ` + `??` |
13839 * | 1 | 2 | 1 | ` M` |
13840 * | 1 | 2 | 2 | `M ` |
13841 * | 1 | 2 | 3 | `MM` |
13842 *
13843 * @param {object} args
13844 * @param {FsClient} args.fs - a file system client
13845 * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
13846 * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
13847 * @param {string} [args.ref = 'HEAD'] - Optionally specify a different commit to compare against the workdir and stage instead of the HEAD
13848 * @param {string[]} [args.filepaths = ['.']] - Limit the query to the given files and directories
13849 * @param {function(string): boolean} [args.filter] - Filter the results to only those whose filepath matches a function.
13850 * @param {object} [args.cache] - a [cache](cache.md) object
13851 * @param {boolean} [args.ignored = false] - include ignored files in the result
13852 *
13853 * @returns {Promise<Array<StatusRow>>} Resolves with a status matrix, described below.
13854 * @see StatusRow
13855 */
async function statusMatrix({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  ref = 'HEAD',
  filepaths = ['.'],
  filter,
  cache = {},
  ignored: shouldIgnore = false,
}) {
  try {
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('ref', ref);

    const fs = new FileSystem(_fs);
    // Walk the commit tree, working directory, and index in lockstep;
    // `map` turns each visited path into one [filepath, head, workdir, stage] row.
    return await _walk({
      fs,
      cache,
      dir,
      gitdir,
      trees: [TREE({ ref }), WORKDIR(), STAGE()],
      map: async function(filepath, [head, workdir, stage]) {
        // Ignore ignored files, but only if they are not already tracked.
        if (!head && !stage && workdir) {
          if (!shouldIgnore) {
            const isIgnored = await GitIgnoreManager.isIgnored({
              fs,
              dir,
              filepath,
            });
            if (isIgnored) {
              // Returning null also prunes the walk below an ignored directory.
              return null
            }
          }
        }
        // match against base paths
        if (!filepaths.some(base => worthWalking(filepath, base))) {
          return null
        }
        // Late filter against file names
        if (filter) {
          // NOTE: returning undefined (not null) drops this row from the
          // results but still allows the walk to descend into children.
          if (!filter(filepath)) return
        }

        // Query all three entry types concurrently; absent entries stay falsy.
        const [headType, workdirType, stageType] = await Promise.all([
          head && head.type(),
          workdir && workdir.type(),
          stage && stage.type(),
        ]);

        const isBlob = [headType, workdirType, stageType].includes('blob');

        // For now, bail on directories unless the file is also a blob in another tree
        if ((headType === 'tree' || headType === 'special') && !isBlob) return
        // 'commit' entries represent submodules; skip them entirely.
        if (headType === 'commit') return null

        if ((workdirType === 'tree' || workdirType === 'special') && !isBlob)
          return

        if (stageType === 'commit') return null
        if ((stageType === 'tree' || stageType === 'special') && !isBlob) return

        // Figure out the oids for files, using the staged oid for the working dir oid if the stats match.
        const headOid = headType === 'blob' ? await head.oid() : undefined;
        const stageOid = stageType === 'blob' ? await stage.oid() : undefined;
        let workdirOid;
        if (
          headType !== 'blob' &&
          workdirType === 'blob' &&
          stageType !== 'blob'
        ) {
          // We don't actually NEED the sha. Any sha will do
          // TODO: update this logic to handle N trees instead of just 3.
          workdirOid = '42';
        } else if (workdirType === 'blob') {
          workdirOid = await workdir.oid();
        }
        // Convert oids into the 0-3 status codes documented above: each column
        // becomes the index of the FIRST entry with an identical value, so
        // matching oids share a code and `undefined` in slot 0 maps "absent" to 0.
        const entry = [undefined, headOid, workdirOid, stageOid];
        const result = entry.map(value => entry.indexOf(value));
        result.shift(); // remove leading undefined entry
        return [filepath, ...result]
      },
    })
  } catch (err) {
    err.caller = 'git.statusMatrix';
    throw err
  }
}
13945
13946// @ts-check
13947
13948/**
13949 * Create a lightweight tag
13950 *
13951 * @param {object} args
13952 * @param {FsClient} args.fs - a file system client
13953 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
13954 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
13955 * @param {string} args.ref - What to name the tag
13956 * @param {string} [args.object = 'HEAD'] - What oid the tag refers to. (Will resolve to oid if value is a ref.) By default, the commit object which is referred by the current `HEAD` is used.
13957 * @param {boolean} [args.force = false] - Instead of throwing an error if a tag named `ref` already exists, overwrite the existing tag.
13958 *
13959 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
13960 *
13961 * @example
13962 * await git.tag({ fs, dir: '/tutorial', ref: 'test-tag' })
13963 * console.log('done')
13964 *
13965 */
async function tag({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  ref,
  object,
  force = false,
}) {
  try {
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('ref', ref);

    const fs = new FileSystem(_fs);

    if (ref === undefined) {
      throw new MissingParameterError('ref')
    }

    // Fully qualify the tag name under refs/tags/ if it isn't already.
    const qualifiedRef = ref.startsWith('refs/tags/')
      ? ref
      : `refs/tags/${ref}`;

    // Resolve the object the tag will point at (defaults to HEAD).
    const value = await GitRefManager.resolve({
      fs,
      gitdir,
      ref: object || 'HEAD',
    });

    // Unless overwriting is forced, refuse to clobber an existing tag.
    const exists = await GitRefManager.exists({ fs, gitdir, ref: qualifiedRef });
    if (exists && !force) {
      throw new AlreadyExistsError('tag', qualifiedRef)
    }

    await GitRefManager.writeRef({ fs, gitdir, ref: qualifiedRef, value });
  } catch (err) {
    // Tag the error with the public API entry point before rethrowing.
    err.caller = 'git.tag';
    throw err
  }
}
14004
14005// @ts-check
14006
14007/**
14008 * Register file contents in the working tree or object database to the git index (aka staging area).
14009 *
14010 * @param {object} args
14011 * @param {FsClient} args.fs - a file system client
14012 * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
14013 * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
14014 * @param {string} args.filepath - File to act upon.
14015 * @param {string} [args.oid] - OID of the object in the object database to add to the index with the specified filepath.
14016 * @param {number} [args.mode = 100644] - The file mode to add the file to the index.
14017 * @param {boolean} [args.add] - Adds the specified file to the index if it does not yet exist in the index.
14018 * @param {boolean} [args.remove] - Remove the specified file from the index if it does not exist in the workspace anymore.
14019 * @param {boolean} [args.force] - Remove the specified file from the index, even if it still exists in the workspace.
14020 * @param {object} [args.cache] - a [cache](cache.md) object
14021 *
14022 * @returns {Promise<string | void>} Resolves successfully with the SHA-1 object id of the object written or updated in the index, or nothing if the file was removed.
14023 *
14024 * @example
14025 * await git.updateIndex({
14026 * fs,
14027 * dir: '/tutorial',
14028 * filepath: 'readme.md'
14029 * })
14030 *
14031 * @example
14032 * // Manually create a blob in the object database.
14033 * let oid = await git.writeBlob({
14034 * fs,
14035 * dir: '/tutorial',
14036 * blob: new Uint8Array([])
14037 * })
14038 *
14039 * // Write the object in the object database to the index.
14040 * await git.updateIndex({
14041 * fs,
14042 * dir: '/tutorial',
14043 * add: true,
14044 * filepath: 'readme.md',
14045 * oid
14046 * })
14047 */
async function updateIndex({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  cache = {},
  filepath,
  oid,
  mode,
  add,
  remove,
  force,
}) {
  try {
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('filepath', filepath);

    const fs = new FileSystem(_fs);

    // Removal path: drop the entry from the index. Without `force`, the
    // removal is skipped while the file still exists in the workdir.
    if (remove) {
      return await GitIndexManager.acquire(
        { fs, gitdir, cache },
        async function(index) {
          let fileStats;

          if (!force) {
            // Check if the file is still present in the working directory
            fileStats = await fs.lstat(join(dir, filepath));

            if (fileStats) {
              if (fileStats.isDirectory()) {
                // Removing directories should not work
                throw new InvalidFilepathError('directory')
              }

              // Do nothing if we don't force and the file still exists in the workdir
              return
            }
          }

          // Directories are not allowed, so we make sure the provided filepath exists in the index
          if (index.has({ filepath })) {
            index.delete({
              filepath,
            });
          }
        }
      )
    }

    // Test if it is a file and exists on disk if `remove` is not provided, only if no oid is provided
    let fileStats;

    if (!oid) {
      fileStats = await fs.lstat(join(dir, filepath));

      if (!fileStats) {
        throw new NotFoundError(
          `file at "${filepath}" on disk and "remove" not set`
        )
      }

      if (fileStats.isDirectory()) {
        throw new InvalidFilepathError('directory')
      }
    }

    return await GitIndexManager.acquire({ fs, gitdir, cache }, async function(
      index
    ) {
      if (!add && !index.has({ filepath })) {
        // If the index does not contain the filepath yet and `add` is not set, we should throw
        throw new NotFoundError(
          `file at "${filepath}" in index and "add" not set`
        )
      }

      // By default we use 0 for the stats of the index file
      // (used when an explicit `oid` was supplied, so there may be no file
      // on disk to stat).
      let stats = {
        ctime: new Date(0),
        mtime: new Date(0),
        dev: 0,
        ino: 0,
        mode,
        uid: 0,
        gid: 0,
        size: 0,
      };

      if (!oid) {
        stats = fileStats;

        // Write the file to the object database
        // (a symlink stores its target path as the blob content).
        const object = stats.isSymbolicLink()
          ? await fs.readlink(join(dir, filepath))
          : await fs.read(join(dir, filepath));

        oid = await _writeObject({
          fs,
          gitdir,
          type: 'blob',
          format: 'content',
          object,
        });
      }

      index.insert({
        filepath,
        oid: oid,
        stats,
      });

      return oid
    })
  } catch (err) {
    err.caller = 'git.updateIndex';
    throw err
  }
}
14167
14168// @ts-check
14169
14170/**
14171 * Return the version number of isomorphic-git
14172 *
14173 * I don't know why you might need this. I added it just so I could check that I was getting
14174 * the correct version of the library and not a cached version.
14175 *
14176 * @returns {string} the version string taken from package.json at publication time
14177 *
14178 * @example
14179 * console.log(git.version())
14180 *
14181 */
function version() {
  // `pkg.version` is baked in from package.json at publication time.
  let versionString;
  try {
    versionString = pkg.version;
  } catch (err) {
    // Tag the error with the public API entry point before rethrowing.
    err.caller = 'git.version';
    throw err
  }
  return versionString
}
14190
14191// @ts-check
14192
14193/**
14194 * @callback WalkerMap
14195 * @param {string} filename
14196 * @param {Array<WalkerEntry | null>} entries
14197 * @returns {Promise<any>}
14198 */
14199
14200/**
14201 * @callback WalkerReduce
14202 * @param {any} parent
14203 * @param {any[]} children
14204 * @returns {Promise<any>}
14205 */
14206
14207/**
14208 * @callback WalkerIterateCallback
14209 * @param {WalkerEntry[]} entries
14210 * @returns {Promise<any[]>}
14211 */
14212
14213/**
14214 * @callback WalkerIterate
14215 * @param {WalkerIterateCallback} walk
14216 * @param {IterableIterator<WalkerEntry[]>} children
14217 * @returns {Promise<any[]>}
14218 */
14219
14220/**
14221 * A powerful recursive tree-walking utility.
14222 *
14223 * The `walk` API simplifies gathering detailed information about a tree or comparing all the filepaths in two or more trees.
 * Trees can be git commits, the working directory, or the git index (staging area).
14225 * As long as a file or directory is present in at least one of the trees, it will be traversed.
14226 * Entries are traversed in alphabetical order.
14227 *
14228 * The arguments to `walk` are the `trees` you want to traverse, and 3 optional transform functions:
14229 * `map`, `reduce`, and `iterate`.
14230 *
14231 * ## `TREE`, `WORKDIR`, and `STAGE`
14232 *
14233 * Tree walkers are represented by three separate functions that can be imported:
14234 *
14235 * ```js
14236 * import { TREE, WORKDIR, STAGE } from 'isomorphic-git'
14237 * ```
14238 *
14239 * These functions return opaque handles called `Walker`s.
14240 * The only thing that `Walker` objects are good for is passing into `walk`.
14241 * Here are the three `Walker`s passed into `walk` by the `statusMatrix` command for example:
14242 *
14243 * ```js
14244 * let ref = 'HEAD'
14245 *
14246 * let trees = [TREE({ ref }), WORKDIR(), STAGE()]
14247 * ```
14248 *
14249 * For the arguments, see the doc pages for [TREE](./TREE.md), [WORKDIR](./WORKDIR.md), and [STAGE](./STAGE.md).
14250 *
14251 * `map`, `reduce`, and `iterate` allow you control the recursive walk by pruning and transforming `WalkerEntry`s into the desired result.
14252 *
14253 * ## WalkerEntry
14254 *
14255 * {@link WalkerEntry typedef}
14256 *
14257 * `map` receives an array of `WalkerEntry[]` as its main argument, one `WalkerEntry` for each `Walker` in the `trees` argument.
14258 * The methods are memoized per `WalkerEntry` so calling them multiple times in a `map` function does not adversely impact performance.
 * By only computing these values if needed, you can build lean, mean, efficient walking machines.
14260 *
14261 * ### WalkerEntry#type()
14262 *
14263 * Returns the kind as a string. This is normally either `tree` or `blob`.
14264 *
14265 * `TREE`, `STAGE`, and `WORKDIR` walkers all return a string.
14266 *
14267 * Possible values:
14268 *
14269 * - `'tree'` directory
14270 * - `'blob'` file
14271 * - `'special'` used by `WORKDIR` to represent irregular files like sockets and FIFOs
14272 * - `'commit'` used by `TREE` to represent submodules
14273 *
14274 * ```js
14275 * await entry.type()
14276 * ```
14277 *
14278 * ### WalkerEntry#mode()
14279 *
14280 * Returns the file mode as a number. Use this to distinguish between regular files, symlinks, and executable files.
14281 *
14282 * `TREE`, `STAGE`, and `WORKDIR` walkers all return a number for all `type`s of entries.
14283 *
14284 * It has been normalized to one of the 4 values that are allowed in git commits:
14285 *
14286 * - `0o40000` directory
14287 * - `0o100644` file
14288 * - `0o100755` file (executable)
14289 * - `0o120000` symlink
14290 *
14291 * Tip: to make modes more readable, you can print them to octal using `.toString(8)`.
14292 *
14293 * ```js
14294 * await entry.mode()
14295 * ```
14296 *
14297 * ### WalkerEntry#oid()
14298 *
14299 * Returns the SHA-1 object id for blobs and trees.
14300 *
14301 * `TREE` walkers return a string for `blob` and `tree` entries.
14302 *
14303 * `STAGE` and `WORKDIR` walkers return a string for `blob` entries and `undefined` for `tree` entries.
14304 *
14305 * ```js
14306 * await entry.oid()
14307 * ```
14308 *
14309 * ### WalkerEntry#content()
14310 *
14311 * Returns the file contents as a Buffer.
14312 *
14313 * `TREE` and `WORKDIR` walkers return a Buffer for `blob` entries and `undefined` for `tree` entries.
14314 *
14315 * `STAGE` walkers always return `undefined` since the file contents are never stored in the stage.
14316 *
14317 * ```js
14318 * await entry.content()
14319 * ```
14320 *
14321 * ### WalkerEntry#stat()
14322 *
14323 * Returns a normalized subset of filesystem Stat data.
14324 *
14325 * `WORKDIR` walkers return a `Stat` for `blob` and `tree` entries.
14326 *
14327 * `STAGE` walkers return a `Stat` for `blob` entries and `undefined` for `tree` entries.
14328 *
14329 * `TREE` walkers return `undefined` for all entry types.
14330 *
14331 * ```js
14332 * await entry.stat()
14333 * ```
14334 *
14335 * {@link Stat typedef}
14336 *
14337 * ## map(string, Array<WalkerEntry|null>) => Promise<any>
14338 *
14339 * {@link WalkerMap typedef}
14340 *
14341 * This is the function that is called once per entry BEFORE visiting the children of that node.
14342 *
14343 * If you return `null` for a `tree` entry, then none of the children of that `tree` entry will be walked.
14344 *
14345 * This is a good place for query logic, such as examining the contents of a file.
14346 * Ultimately, compare all the entries and return any values you are interested in.
14347 * If you do not return a value (or return undefined) that entry will be filtered from the results.
14348 *
14349 * Example 1: Find all the files containing the word 'foo'.
14350 * ```js
14351 * async function map(filepath, [head, workdir]) {
14352 * let content = (await workdir.content()).toString('utf8')
14353 * if (content.contains('foo')) {
14354 * return {
14355 * filepath,
14356 * content
14357 * }
14358 * }
14359 * }
14360 * ```
14361 *
14362 * Example 2: Return the difference between the working directory and the HEAD commit
14363 * ```js
14364 * const map = async (filepath, [head, workdir]) => {
14365 * return {
14366 * filepath,
14367 * oid: await head?.oid(),
14368 * diff: diff(
14369 * (await head?.content())?.toString('utf8') || '',
14370 * (await workdir?.content())?.toString('utf8') || ''
14371 * )
14372 * }
14373 * }
14374 * ```
14375 *
14376 * Example 3:
14377 * ```js
14378 * let path = require('path')
14379 * // Only examine files in the directory `cwd`
14380 * let cwd = 'src/app'
14381 * async function map (filepath, [head, workdir, stage]) {
14382 * if (
14383 * // don't skip the root directory
14384 * head.fullpath !== '.' &&
14385 * // return true for 'src' and 'src/app'
14386 * !cwd.startsWith(filepath) &&
14387 * // return true for 'src/app/*'
14388 * path.dirname(filepath) !== cwd
14389 * ) {
14390 * return null
14391 * } else {
14392 * return filepath
14393 * }
14394 * }
14395 * ```
14396 *
14397 * ## reduce(parent, children)
14398 *
14399 * {@link WalkerReduce typedef}
14400 *
14401 * This is the function that is called once per entry AFTER visiting the children of that node.
14402 *
14403 * Default: `async (parent, children) => parent === undefined ? children.flat() : [parent, children].flat()`
14404 *
14405 * The default implementation of this function returns all directories and children in a giant flat array.
14406 * You can define a different accumulation method though.
14407 *
14408 * Example: Return a hierarchical structure
14409 * ```js
14410 * async function reduce (parent, children) {
14411 * return Object.assign(parent, { children })
14412 * }
14413 * ```
14414 *
14415 * ## iterate(walk, children)
14416 *
14417 * {@link WalkerIterate typedef}
14418 *
14419 * {@link WalkerIterateCallback typedef}
14420 *
14421 * Default: `(walk, children) => Promise.all([...children].map(walk))`
14422 *
14423 * The default implementation recurses all children concurrently using Promise.all.
14424 * However you could use a custom function to traverse children serially or use a global queue to throttle recursion.
14425 *
14426 * @param {object} args
14427 * @param {FsClient} args.fs - a file system client
14428 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
14429 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
14430 * @param {Walker[]} args.trees - The trees you want to traverse
14431 * @param {WalkerMap} [args.map] - Transform `WalkerEntry`s into a result form
14432 * @param {WalkerReduce} [args.reduce] - Control how mapped entries are combined with their parent result
14433 * @param {WalkerIterate} [args.iterate] - Fine-tune how entries within a tree are iterated over
14434 * @param {object} [args.cache] - a [cache](cache.md) object
14435 *
14436 * @returns {Promise<any>} The finished tree-walking result
14437 */
async function walk({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  trees,
  map,
  reduce,
  iterate,
  cache = {},
}) {
  try {
    // Required arguments must be present before handing off to the
    // internal walker (checked in the same order as the other commands).
    for (const [name, value] of Object.entries({ fs, gitdir, trees })) {
      assertParameter(name, value);
    }

    const result = await _walk({
      fs: new FileSystem(fs),
      cache,
      dir,
      gitdir,
      trees,
      map,
      reduce,
      iterate,
    });
    return result;
  } catch (err) {
    // Tag the error with the public API entry point for nicer diagnostics.
    err.caller = 'git.walk';
    throw err;
  }
}
14468
14469// @ts-check
14470
14471/**
14472 * Write a blob object directly
14473 *
14474 * @param {object} args
14475 * @param {FsClient} args.fs - a file system client
14476 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
14477 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
14478 * @param {Uint8Array} args.blob - The blob object to write
14479 *
14480 * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly written object
14481 *
14482 * @example
14483 * // Manually create a blob.
14484 * let oid = await git.writeBlob({
14485 * fs,
14486 * dir: '/tutorial',
14487 * blob: new Uint8Array([])
14488 * })
14489 *
14490 * console.log('oid', oid) // should be 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'
14491 *
14492 */
// Write a blob object directly to the object database.
// Resolves with the SHA-1 object id of the newly written object.
async function writeBlob({ fs, dir, gitdir = join(dir, '.git'), blob }) {
  try {
    for (const [name, value] of Object.entries({ fs, gitdir, blob })) {
      assertParameter(name, value);
    }

    const objectId = await _writeObject({
      fs: new FileSystem(fs),
      gitdir,
      type: 'blob',
      object: blob,
      format: 'content',
    });
    return objectId;
  } catch (err) {
    err.caller = 'git.writeBlob';
    throw err;
  }
}
14511
14512// @ts-check
14513
14514/**
14515 * @param {object} args
14516 * @param {import('../models/FileSystem.js').FileSystem} args.fs
14517 * @param {string} args.gitdir
14518 * @param {CommitObject} args.commit
14519 *
14520 * @returns {Promise<string>}
14521 * @see CommitObject
14522 *
14523 */
// Serialize a CommitObject into its canonical git representation, persist
// it, and resolve with the resulting SHA-1 object id.
async function _writeCommit({ fs, gitdir, commit }) {
  const serialized = GitCommit.from(commit).toObject();
  return _writeObject({
    fs,
    gitdir,
    type: 'commit',
    object: serialized,
    format: 'content',
  });
}
14536
14537// @ts-check
14538
14539/**
14540 * Write a commit object directly
14541 *
14542 * @param {object} args
14543 * @param {FsClient} args.fs - a file system client
14544 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
14545 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
14546 * @param {CommitObject} args.commit - The object to write
14547 *
14548 * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly written object
14549 * @see CommitObject
14550 *
14551 */
// Public wrapper: validate arguments, then delegate to _writeCommit.
async function writeCommit({
  fs,
  dir,
  gitdir = join(dir, '.git'),
  commit,
}) {
  try {
    for (const [name, value] of Object.entries({ fs, gitdir, commit })) {
      assertParameter(name, value);
    }

    const oid = await _writeCommit({
      fs: new FileSystem(fs),
      gitdir,
      commit,
    });
    return oid;
  } catch (err) {
    err.caller = 'git.writeCommit';
    throw err;
  }
}
14573
14574// @ts-check
14575
14576/**
14577 * Write a git object directly
14578 *
14579 * `format` can have the following values:
14580 *
14581 * | param | description |
14582 * | ---------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- |
14583 * | 'deflated' | Treat `object` as the raw deflate-compressed buffer for an object, meaning can be written to `.git/objects/**` as-is. |
14584 * | 'wrapped' | Treat `object` as the inflated object buffer wrapped in the git object header. This is the raw buffer used when calculating the SHA-1 object id of a git object. |
14585 * | 'content' | Treat `object` as the object buffer without the git header. |
14586 * | 'parsed' | Treat `object` as a parsed representation of the object. |
14587 *
14588 * If `format` is `'parsed'`, then `object` must match one of the schemas for `CommitObject`, `TreeObject`, `TagObject`, or a `string` (for blobs).
14589 *
14590 * {@link CommitObject typedef}
14591 *
14592 * {@link TreeObject typedef}
14593 *
14594 * {@link TagObject typedef}
14595 *
14596 * If `format` is `'content'`, `'wrapped'`, or `'deflated'`, `object` should be a `Uint8Array`.
14597 *
14598 * @deprecated
14599 * > This command is overly complicated.
14600 * >
14601 * > If you know the type of object you are writing, use [`writeBlob`](./writeBlob.md), [`writeCommit`](./writeCommit.md), [`writeTag`](./writeTag.md), or [`writeTree`](./writeTree.md).
14602 *
14603 * @param {object} args
14604 * @param {FsClient} args.fs - a file system client
14605 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
14606 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
14607 * @param {string | Uint8Array | CommitObject | TreeObject | TagObject} args.object - The object to write.
14608 * @param {'blob'|'tree'|'commit'|'tag'} [args.type] - The kind of object to write.
14609 * @param {'deflated' | 'wrapped' | 'content' | 'parsed'} [args.format = 'parsed'] - What format the object is in. The possible choices are listed below.
14610 * @param {string} [args.oid] - If `format` is `'deflated'` then this param is required. Otherwise it is calculated.
14611 * @param {string} [args.encoding] - If `type` is `'blob'` then `object` will be converted to a Uint8Array using `encoding`.
14612 *
14613 * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly written object.
14614 *
14615 * @example
14616 * // Manually create an annotated tag.
14617 * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD' })
14618 * console.log('commit', sha)
14619 *
14620 * let oid = await git.writeObject({
14621 * fs,
14622 * dir: '/tutorial',
14623 * type: 'tag',
14624 * object: {
14625 * object: sha,
14626 * type: 'commit',
14627 * tag: 'my-tag',
14628 * tagger: {
14629 * name: 'your name',
14630 * email: 'email@example.com',
14631 * timestamp: Math.floor(Date.now()/1000),
14632 * timezoneOffset: new Date().getTimezoneOffset()
14633 * },
14634 * message: 'Optional message'
14635 * }
14636 * })
14637 *
14638 * console.log('tag', oid)
14639 *
14640 */
/**
 * Write a git object directly (deprecated; prefer writeBlob/writeCommit/writeTag/writeTree).
 *
 * If `format` is 'parsed', `object` is first serialized to a content buffer
 * according to `type`; 'content', 'wrapped', and 'deflated' buffers are
 * passed through to the object writer as-is.
 */
async function writeObject({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  type,
  object,
  format = 'parsed',
  oid,
  encoding = undefined,
}) {
  try {
    // Validate required parameters up front, consistent with the other
    // public commands (writeBlob, writeCommit, writeRef, ...), so a missing
    // argument produces a clear MissingParameterError instead of failing
    // deeper in the object writer.
    assertParameter('fs', _fs);
    assertParameter('gitdir', gitdir);
    assertParameter('object', object);

    const fs = new FileSystem(_fs);
    // 'parsed' objects must first be serialized into a content buffer.
    if (format === 'parsed') {
      switch (type) {
        case 'commit':
          object = GitCommit.from(object).toObject();
          break
        case 'tree':
          object = GitTree.from(object).toObject();
          break
        case 'blob':
          // Blobs may be given as strings; convert using the requested encoding
          // (Buffer.from defaults to utf8 when encoding is undefined).
          object = Buffer.from(object, encoding);
          break
        case 'tag':
          object = GitAnnotatedTag.from(object).toObject();
          break
        default:
          throw new ObjectTypeError(oid || '', type, 'blob|commit|tag|tree')
      }
      // GitObjectManager does not know how to serialize content, so we tweak that parameter before passing it.
      format = 'content';
    }
    oid = await _writeObject({
      fs,
      gitdir,
      type,
      object,
      oid,
      format,
    });
    return oid
  } catch (err) {
    err.caller = 'git.writeObject';
    throw err
  }
}
14688
14689// @ts-check
14690
14691/**
14692 * Write a ref which refers to the specified SHA-1 object id, or a symbolic ref which refers to the specified ref.
14693 *
14694 * @param {object} args
14695 * @param {FsClient} args.fs - a file system client
14696 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
14697 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
14698 * @param {string} args.ref - The name of the ref to write
14699 * @param {string} args.value - When `symbolic` is false, a ref or an SHA-1 object id. When true, a ref starting with `refs/`.
14700 * @param {boolean} [args.force = false] - Instead of throwing an error if a ref named `ref` already exists, overwrite the existing ref.
14701 * @param {boolean} [args.symbolic = false] - Whether the ref is symbolic or not.
14702 *
14703 * @returns {Promise<void>} Resolves successfully when filesystem operations are complete
14704 *
14705 * @example
14706 * await git.writeRef({
14707 * fs,
14708 * dir: '/tutorial',
14709 * ref: 'refs/heads/another-branch',
14710 * value: 'HEAD'
14711 * })
14712 * await git.writeRef({
14713 * fs,
14714 * dir: '/tutorial',
14715 * ref: 'HEAD',
14716 * value: 'refs/heads/another-branch',
14717 * force: true,
14718 * symbolic: true
14719 * })
14720 * console.log('done')
14721 *
14722 */
// Write a ref pointing at an SHA-1 object id, or (when `symbolic` is true)
// a symbolic ref pointing at another ref.
async function writeRef({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  ref,
  value,
  force = false,
  symbolic = false,
}) {
  try {
    for (const [name, val] of Object.entries({ fs: _fs, gitdir, ref, value })) {
      assertParameter(name, val);
    }

    const fs = new FileSystem(_fs);

    // Reject ref names that git itself would refuse.
    const cleaned = cleanGitRef.clean(ref);
    if (cleaned !== ref) {
      throw new InvalidRefNameError(ref, cleaned);
    }

    // Unless forced, refuse to clobber an existing ref. The existence check
    // is skipped entirely when `force` is set.
    if (!force) {
      const taken = await GitRefManager.exists({ fs, gitdir, ref });
      if (taken) {
        throw new AlreadyExistsError('ref', ref);
      }
    }

    if (symbolic) {
      await GitRefManager.writeSymbolicRef({ fs, gitdir, ref, value });
    } else {
      // Resolve the provided ref/oid down to a concrete SHA-1 before writing.
      const resolved = await GitRefManager.resolve({ fs, gitdir, ref: value });
      await GitRefManager.writeRef({ fs, gitdir, ref, value: resolved });
    }
  } catch (err) {
    err.caller = 'git.writeRef';
    throw err;
  }
}
14773
14774// @ts-check
14775
14776/**
14777 * @param {object} args
14778 * @param {import('../models/FileSystem.js').FileSystem} args.fs
14779 * @param {string} args.gitdir
14780 * @param {TagObject} args.tag
14781 *
14782 * @returns {Promise<string>}
14783 */
// Serialize a TagObject into its canonical git representation, persist it,
// and resolve with the resulting SHA-1 object id.
async function _writeTag({ fs, gitdir, tag }) {
  const serialized = GitAnnotatedTag.from(tag).toObject();
  return _writeObject({
    fs,
    gitdir,
    type: 'tag',
    object: serialized,
    format: 'content',
  });
}
14796
14797// @ts-check
14798
14799/**
14800 * Write an annotated tag object directly
14801 *
14802 * @param {object} args
14803 * @param {FsClient} args.fs - a file system client
14804 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
14805 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
14806 * @param {TagObject} args.tag - The object to write
14807 *
14808 * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly written object
14809 * @see TagObject
14810 *
14811 * @example
14812 * // Manually create an annotated tag.
14813 * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD' })
14814 * console.log('commit', sha)
14815 *
14816 * let oid = await git.writeTag({
14817 * fs,
14818 * dir: '/tutorial',
14819 * tag: {
14820 * object: sha,
14821 * type: 'commit',
14822 * tag: 'my-tag',
14823 * tagger: {
14824 * name: 'your name',
14825 * email: 'email@example.com',
14826 * timestamp: Math.floor(Date.now()/1000),
14827 * timezoneOffset: new Date().getTimezoneOffset()
14828 * },
14829 * message: 'Optional message'
14830 * }
14831 * })
14832 *
14833 * console.log('tag', oid)
14834 *
14835 */
// Public wrapper: validate arguments, then delegate to _writeTag.
async function writeTag({ fs, dir, gitdir = join(dir, '.git'), tag }) {
  try {
    for (const [name, value] of Object.entries({ fs, gitdir, tag })) {
      assertParameter(name, value);
    }

    const oid = await _writeTag({
      fs: new FileSystem(fs),
      gitdir,
      tag,
    });
    return oid;
  } catch (err) {
    err.caller = 'git.writeTag';
    throw err;
  }
}
14852
14853// @ts-check
14854
14855/**
14856 * Write a tree object directly
14857 *
14858 * @param {object} args
14859 * @param {FsClient} args.fs - a file system client
14860 * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path
14861 * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path
14862 * @param {TreeObject} args.tree - The object to write
14863 *
14864 * @returns {Promise<string>} Resolves successfully with the SHA-1 object id of the newly written object.
14865 * @see TreeObject
14866 * @see TreeEntry
14867 *
14868 */
// Public wrapper: validate arguments, then delegate to _writeTree.
async function writeTree({ fs, dir, gitdir = join(dir, '.git'), tree }) {
  try {
    for (const [name, value] of Object.entries({ fs, gitdir, tree })) {
      assertParameter(name, value);
    }

    const oid = await _writeTree({
      fs: new FileSystem(fs),
      gitdir,
      tree,
    });
    return oid;
  } catch (err) {
    err.caller = 'git.writeTree';
    throw err;
  }
}
14885
// default export
// Aggregate object exposing the full public API surface so consumers can
// `import git from 'isomorphic-git'` and call `git.clone(...)`, etc.
// NOTE: every command listed here is also available as a named export below.
var index = {
  Errors,
  STAGE,
  TREE,
  WORKDIR,
  add,
  abortMerge,
  addNote,
  addRemote,
  annotatedTag,
  branch,
  checkout,
  clone,
  commit,
  getConfig,
  getConfigAll,
  setConfig,
  currentBranch,
  deleteBranch,
  deleteRef,
  deleteRemote,
  deleteTag,
  expandOid,
  expandRef,
  fastForward,
  fetch,
  findMergeBase,
  findRoot,
  getRemoteInfo,
  getRemoteInfo2,
  hashBlob,
  indexPack,
  init,
  isDescendent,
  isIgnored,
  listBranches,
  listFiles,
  listNotes,
  listRemotes,
  listServerRefs,
  listTags,
  log,
  merge,
  packObjects,
  pull,
  push,
  readBlob,
  readCommit,
  readNote,
  readObject,
  readTag,
  readTree,
  remove,
  removeNote,
  renameBranch,
  resetIndex,
  updateIndex,
  resolveRef,
  status,
  statusMatrix,
  tag,
  version,
  walk,
  writeBlob,
  writeCommit,
  writeObject,
  writeRef,
  writeTag,
  writeTree,
};

export default index;
// Named exports mirror the default-export object above (tree-shakeable).
export { Errors, STAGE, TREE, WORKDIR, abortMerge, add, addNote, addRemote, annotatedTag, branch, checkout, clone, commit, currentBranch, deleteBranch, deleteRef, deleteRemote, deleteTag, expandOid, expandRef, fastForward, fetch, findMergeBase, findRoot, getConfig, getConfigAll, getRemoteInfo, getRemoteInfo2, hashBlob, indexPack, init, isDescendent, isIgnored, listBranches, listFiles, listNotes, listRemotes, listServerRefs, listTags, log, merge, packObjects, pull, push, readBlob, readCommit, readNote, readObject, readTag, readTree, remove, removeNote, renameBranch, resetIndex, resolveRef, setConfig, status, statusMatrix, tag, updateIndex, version, walk, writeBlob, writeCommit, writeObject, writeRef, writeTag, writeTree };