1 | 'use strict';
|
2 |
|
// Unwrap a transpiled ES-module namespace to its default export; anything
// else (plain CJS exports, primitives, null) passes through unchanged.
function _interopDefault (ex) {
	if (ex && typeof ex === 'object' && 'default' in ex) {
		return ex['default'];
	}
	return ex;
}
|
4 |
|
5 | var fs = _interopDefault(require('fs'));
|
6 | var path = _interopDefault(require('path'));
|
7 | var homeOrTmp = _interopDefault(require('home-or-tmp'));
|
8 | var tar = _interopDefault(require('tar'));
|
9 | var EventEmitter = _interopDefault(require('events'));
|
10 | var https = _interopDefault(require('https'));
|
11 | var child_process = _interopDefault(require('child_process'));
|
12 |
|
/**
 * Domain error for degit operations. Extra properties passed in `opts`
 * (e.g. `code`, `ref`, `url`, `original`) are copied onto the instance so
 * callers can inspect them programmatically.
 */
class DegitError extends Error {
	/**
	 * @param {string} message - human-readable description
	 * @param {object} [opts] - extra properties to attach, e.g. { code: 'BAD_SRC' }
	 */
	constructor(message, opts) {
		super(message);
		// Without this, instances stringify as "Error: …" — set the proper name
		// so logs and stack traces identify the error class.
		this.name = 'DegitError';
		Object.assign(this, opts);
	}
}
|
19 |
|
// Best-effort require: returns the module's exports, or null when the file
// is missing or fails to load (callers treat that as "no cached data").
function tryRequire(file) {
	let result = null;
	try {
		result = require(file);
	} catch (err) {
		// deliberately swallowed — absence is an expected, non-fatal condition
	}
	return result;
}
|
27 |
|
// Promise wrapper around child_process.exec. Resolves with the captured
// { stdout, stderr }; rejects with the exec error (non-zero exit, spawn
// failure, etc.).
function exec(command) {
	return new Promise((fulfil, reject) => {
		child_process.exec(command, (err, stdout, stderr) => {
			if (err) return reject(err);
			fulfil({ stdout, stderr });
		});
	});
}
|
40 |
|
// Create `dir` and any missing ancestors, like `mkdir -p`. Existing
// directories are tolerated; any other mkdir failure propagates.
function mkdirp(dir) {
	// Collect the chain of directories from the shallowest missing ancestor
	// down to `dir` itself. The filesystem root (where dirname(x) === x) is
	// never created.
	const chain = [];
	let current = dir;
	while (path.dirname(current) !== current) {
		chain.unshift(current);
		current = path.dirname(current);
	}

	for (const segment of chain) {
		try {
			fs.mkdirSync(segment);
		} catch (err) {
			// already exists → fine; anything else is a real failure
			if (err.code !== 'EEXIST') throw err;
		}
	}
}
|
53 |
|
// Download `url` to the file at `dest`, following HTTP redirects. The
// returned promise resolves once the file has been fully written.
function fetch(url, dest) {
	return new Promise((fulfil, reject) => {
		https.get(url, response => {
			const code = response.statusCode;
			if (code >= 400) {
				// Reject with a real Error (the original rejected with a plain
				// object, which loses the stack trace). Keep the HTTP status on
				// err.code so callers can still inspect it.
				const err = new Error(response.statusMessage);
				err.code = code;
				reject(err);
			} else if (code >= 300) {
				// redirect — recurse on the Location target
				fetch(response.headers.location, dest).then(fulfil, reject);
			} else {
				response.pipe(fs.createWriteStream(dest))
					.on('finish', () => fulfil())
					.on('error', reject);
			}
		}).on('error', reject);
	});
}
|
70 |
|
// Root of degit's on-disk cache (<home-or-tmp>/.degit): per-repo tarballs
// and map.json ref->hash maps live under here.
const base = path.join(homeOrTmp, '.degit');
|
72 |
|
// Public factory: `degit('user/repo#ref', opts)` returns a Degit instance
// whose clone(dest) performs the actual download/extract.
function degit(src, opts) {
	return new Degit(src, opts);
}
|
76 |
|
// Orchestrates a single clone: resolves the requested ref to a commit hash,
// downloads (or reuses) the tarball for that commit, updates the local cache,
// and extracts into the destination. Emits 'info' events describing progress;
// verbose-only events go through _verbose.
class Degit extends EventEmitter {
	/**
	 * @param {string} src - repo specifier, e.g. 'user/repo#ref' (see parse())
	 * @param {object} [opts]
	 * @param {boolean} [opts.cache] - resolve the ref from the local cache only (no network)
	 * @param {boolean} [opts.force] - allow cloning into a non-empty directory
	 * @param {boolean} [opts.verbose] - emit verbose 'info' events as well
	 */
	constructor(src, opts = {}) {
		super();

		this.src = src;
		this.cache = opts.cache;
		this.force = opts.force;
		this.verbose = opts.verbose;

		// throws DegitError (BAD_SRC / UNSUPPORTED_HOST) on invalid input
		this.repo = parse(src);
	}

	// Clone into `dest`: resolve ref -> hash, fetch the tarball unless it is
	// already cached, record the mapping, then extract. Throws DegitError
	// with codes DEST_NOT_EMPTY, MISSING_REF or COULD_NOT_DOWNLOAD.
	async clone(dest) {
		this._checkDirIsEmpty(dest);

		const repo = this.repo;

		// per-repo cache directory, e.g. <base>/github/<user>/<name>
		const dir = path.join(base, repo.site, repo.user, repo.name);
		// map.json holds ref -> commit-hash mappings from previous runs
		const cached = tryRequire(path.join(dir, 'map.json')) || {};

		// offline mode (opts.cache) skips the network lookup entirely
		const hash = this.cache ?
			this._getHashFromCache(repo, cached) :
			await this._getHash(repo, cached);

		if (!hash) {
			// ref not found remotely and not in the cache
			throw new DegitError(`could not find commit hash for ${repo.ref}`, {
				code: 'MISSING_REF',
				ref: repo.ref
			});
		}

		// one tarball per commit hash
		const file = `${dir}/${hash}.tar.gz`;
		// each host serves commit tarballs from a different URL shape
		const url = (
			repo.site === 'gitlab' ? `${repo.url}/repository/archive.tar.gz?ref=${hash}` :
			repo.site === 'bitbucket' ? `${repo.url}/get/${hash}.tar.gz` :
			`${repo.url}/archive/${hash}.tar.gz`
		);

		try {
			if (!this.cache) {
				try {
					// statSync throws when the file is absent, which routes us
					// to the download branch below
					fs.statSync(file);
					this._verbose({
						code: 'FILE_EXISTS',
						message: `${file} already exists locally`
					});
				} catch (err) {
					mkdirp(path.dirname(file));
					this._verbose({
						code: 'DOWNLOADING',
						message: `downloading ${url} to ${file}`
					});

					await fetch(url, file);
				}
			}
		} catch (err) {
			throw new DegitError(`could not download ${url}`, {
				code: 'COULD_NOT_DOWNLOAD',
				url,
				original: err
			});
		}

		// persist ref -> hash so later runs (and --cache mode) can reuse it
		updateCache(dir, repo, hash, cached);

		this._verbose({
			code: 'EXTRACTING',
			message: `extracting ${file} to ${dest}`
		});

		mkdirp(dest);
		await untar(file, dest);

		this._info({
			code: 'SUCCESS',
			message: `cloned ${repo.user}/${repo.name}#${repo.ref}${dest !== '.' ? ` to ${dest}` : ''}`,
			repo,
			dest
		});
	}

	// Refuse to clone into a non-empty directory unless opts.force is set.
	// A missing directory (ENOENT) is fine — it is created during clone().
	_checkDirIsEmpty(dir) {
		try {
			const files = fs.readdirSync(dir);
			if (files.length > 0) {
				if (this.force) {
					this._info({
						code: 'DEST_NOT_EMPTY',
						message: `destination directory is not empty. Using options.force, continuing`
					});
				} else {
					throw new DegitError(`destination directory is not empty, aborting. Use options.force to override`, {
						code: 'DEST_NOT_EMPTY'
					});
				}
			} else {
				this._verbose({
					code: 'DEST_IS_EMPTY',
					message: `destination directory is empty`
				});
			}
		} catch (err) {
			// the DegitError above is rethrown here too (its code isn't ENOENT)
			if (err.code !== 'ENOENT') throw err;
		}
	}

	// Emit a progress event; the CLI listens on 'info' and prints these.
	_info(info) {
		this.emit('info', info);
	}

	// Emit a progress event only when opts.verbose is set.
	_verbose(info) {
		if (this.verbose) this._info(info);
	}

	// Resolve repo.ref to a commit hash via the remote ref list; on any
	// failure (e.g. no network) fall back to the local cache, best-effort.
	async _getHash(repo, cached) {
		try {
			const refs = await fetchRefs(repo);
			return this._selectRef(refs, repo.ref);
		} catch (err) {
			return this._getHashFromCache(repo, cached);
		}
	}

	// Look up repo.ref in the cached ref->hash map; undefined on a miss.
	_getHashFromCache(repo, cached) {
		if (repo.ref in cached) {
			const hash = cached[repo.ref];
			this._info({
				code: 'USING_CACHE',
				message: `using cached commit hash ${hash}`
			});
			return hash;
		}
	}

	// Pick the commit hash whose ref name equals the selector; failing that,
	// treat the selector as a commit-hash prefix (8+ chars to avoid ambiguous
	// short prefixes). Returns null/undefined when nothing matches.
	_selectRef(refs, selector) {
		for (const ref of refs) {
			if (ref.name === selector) {
				this._verbose({
					code: 'FOUND_MATCH',
					message: `found matching commit hash: ${ref.hash}`
				});
				return ref.hash;
			}
		}

		// too short to be treated as an unambiguous hash prefix
		if (selector.length < 8) return null;

		for (const ref of refs) {
			if (ref.hash.startsWith(selector)) return ref.hash;
		}
	}
}
|
231 |
|
// Hosts degit can build archive URLs for; parse() rejects anything else.
const supported = new Set(['github', 'gitlab', 'bitbucket']);
|
233 |
|
// Parse a source specifier into { site, user, name, ref, url }.
// Accepted shapes: https://host/user/repo, git@host:user/repo,
// host:user/repo, or bare user/repo (defaults to GitHub), each with an
// optional #ref suffix. Throws DegitError (BAD_SRC / UNSUPPORTED_HOST).
function parse(src) {
	const pattern = /^(?:https:\/\/([^/]+)\/|git@([^/]+):|([^/]+):)?([^/\s]+)\/([^/\s#]+)(?:#(.+))?/;
	const match = pattern.exec(src);
	if (!match) {
		throw new DegitError(`could not parse ${src}`, {
			code: 'BAD_SRC'
		});
	}

	// whichever host form matched; strip a trailing .com/.org TLD
	const host = match[1] || match[2] || match[3] || 'github';
	const site = host.replace(/\.(com|org)$/, '');
	if (!supported.has(site)) {
		throw new DegitError(`degit supports GitHub, GitLab and BitBucket`, {
			code: 'UNSUPPORTED_HOST'
		});
	}

	const user = match[4];
	const name = match[5].replace(/\.git$/, '');
	const ref = match[6] || 'master';

	// Bitbucket lives at .org; the other supported hosts at .com
	const tld = site === 'bitbucket' ? 'org' : 'com';
	const url = `https://${site}.${tld}/${user}/${name}`;

	return { site, user, name, ref, url };
}
|
257 |
|
// Extract the tarball into `dest`. `strip: 1` drops the top-level wrapper
// directory that host-generated archives put around the repo contents.
// Returns the promise from tar.extract.
async function untar(file, dest) {
	return tar.extract({
		C: dest,
		strip: 1,
		file
	});
}
|
265 |
|
// List the remote's refs via `git ls-remote`. Each output row has the form
// "<hash>\t<ref>"; returns an array of { type, name?, hash } records where
// type is 'HEAD', 'branch', 'tag', etc. Throws DegitError (BAD_REF) on an
// unrecognised ref format.
async function fetchRefs(repo) {
	const result = await exec(`git ls-remote ${repo.url}`);
	const rows = result.stdout.split('\n');

	const refs = [];
	for (const row of rows) {
		if (!row) continue; // skip the trailing blank line

		const [hash, ref] = row.split('\t');

		if (ref === 'HEAD') {
			refs.push({
				type: 'HEAD',
				hash
			});
			continue;
		}

		const match = /refs\/(\w+)\/(.+)/.exec(ref);
		if (!match) throw new DegitError(`could not parse ${ref}`, { code: 'BAD_REF' });

		// normalise git's namespace names to friendlier labels
		let type = match[1];
		if (type === 'heads') type = 'branch';
		else if (type === 'refs') type = 'ref';

		refs.push({
			type,
			name: match[2],
			hash
		});
	}

	return refs;
}
|
293 |
|
// Record that `repo.ref` now resolves to `hash` in the mutable `cached` map
// and persist it to <dir>/map.json, garbage-collecting the previous commit's
// tarball when no other ref still references it.
function updateCache(dir, repo, hash, cached) {
	// nothing changed — avoid a pointless rewrite of map.json
	if (cached[repo.ref] === hash) return;

	const oldHash = cached[repo.ref];
	if (oldHash) {
		// The old tarball may only be deleted if no OTHER ref still resolves
		// to oldHash. (The previous code tested the NEW hash here, which could
		// delete a tarball another ref still needed.)
		let used = false;
		for (const key in cached) {
			if (key !== repo.ref && cached[key] === oldHash) {
				used = true;
				break;
			}
		}

		if (!used) {
			try {
				fs.unlinkSync(path.join(dir, `${oldHash}.tar.gz`));
			} catch (err) {
				// best-effort cleanup — the tarball may already be gone
			}
		}
	}

	cached[repo.ref] = hash;
	fs.writeFileSync(path.join(dir, 'map.json'), JSON.stringify(cached, null, ' '));
}
|
320 |
|
// Public API: consumers call degit(src, opts) and then .clone(dest).
module.exports = degit;
|
322 |
|