1 |
|
2 |
|
3 |
|
4 |
|
5 |
|
6 |
|
7 |
|
8 |
|
9 |
|
10 |
|
11 |
|
12 |
|
13 | 'use strict';
|
14 |
|
15 | const { join: joinPaths, relative: relativePaths } = require('path');
|
16 |
|
17 | const Archiver = require('archiver');
|
18 | const async = require('async');
|
19 | const fse = require('fs-extra');
|
20 | const klaw = require('klaw');
|
21 | const Ignore = require('ignore');
|
22 | const { debug } = require('@adobe/helix-log');
|
23 |
|
24 | const {
|
25 | isCheckedOut,
|
26 | createBlobReadStream,
|
27 | resolveCommit,
|
28 | getObject,
|
29 | } = require('./git');
|
30 | const { resolveRepositoryPath } = require('./utils');
|
31 |
|
// directory where generated archives of committed refs are cached
const CACHE_DIR = './tmp';
|
33 |
|
34 |
|
35 |
|
36 |
|
37 |
|
38 |
|
39 |
|
40 |
|
41 |
|
42 |
|
/**
 * Recursively collects all entries of a git tree into a flat list.
 *
 * @param {string} repPath file system path of the git repository
 * @param {object} tree tree object as returned by getObject ({ entries: [...] })
 * @param {Array<object>} result accumulator receiving { oid, type, mode, path } records
 * @param {string} treePath path prefix of the current tree ('' for the root tree)
 * @returns {Promise<Array<object>>} the populated result array (same instance as result)
 */
async function collectTreeEntries(repPath, tree, result, treePath) {
  // prefix every entry's path with the current tree path;
  // the mapping is synchronous, no Promise machinery needed
  const entries = tree.entries.map(({
    oid, type, mode, path,
  }) => ({
    oid, type, mode, path: joinPaths(treePath, path),
  }));
  result.push(...entries);

  // recurse into sub-trees sequentially (deterministic order, bounded memory)
  const treeEntries = entries.filter((entry) => entry.type === 'tree');
  for (let i = 0; i < treeEntries.length; i += 1) {
    const { oid, path } = treeEntries[i];
    // eslint-disable-next-line no-await-in-loop
    const { object: subTree } = await getObject(repPath, oid);
    // eslint-disable-next-line no-await-in-loop
    await collectTreeEntries(repPath, subTree, result, path);
  }
  return result;
}
|
60 |
|
61 |
|
62 |
|
63 |
|
64 |
|
65 |
|
66 |
|
67 |
|
68 |
|
/**
 * Serializes the specified git tree into an archive (zip/tgz).
 * 'tree' (directory) and 'commit' (submodule) entries are appended as empty
 * directory entries; blob entries are streamed from the git object store.
 *
 * @param {string} repPath file system path of the git repository
 * @param {object} tree git tree object to serialize
 * @param {object} archive Archiver instance receiving the entries
 * @returns {Promise<object>} the archive with all entries appended
 */
async function archiveGitTree(repPath, tree, archive) {
  // recursively collect all entries of the tree
  const allEntries = await collectTreeEntries(repPath, tree, [], '');

  // append entries one at a time to keep a deterministic archive order
  // (replaces async.eachSeries/asyncify — plain await does the same job)
  for (const { type, oid, path } of allEntries) {
    if (type === 'tree' || type === 'commit') {
      // directory or submodule: append an (empty) directory entry
      archive.append(null, { name: `${path}/` });
    } else {
      // regular file: stream the blob's content
      // eslint-disable-next-line no-await-in-loop
      const stream = await createBlobReadStream(repPath, oid);
      archive.append(stream, { name: path });
    }
  }
  return archive;
}
|
98 |
|
99 |
|
100 |
|
101 |
|
102 |
|
103 |
|
104 |
|
105 |
|
/**
 * Recursively walks dirPath and collects all file system entries that are
 * not masked by the repository's .gitignore rules (the .git directory is
 * always skipped).
 *
 * @param {string} dirPath root directory to walk
 * @param {Array<object>} allEntries accumulator receiving the klaw items
 * @returns {Promise<Array<object>>} the populated allEntries array
 */
async function collectFSEntries(dirPath, allEntries) {
  // build the ignore rule set from .gitignore (if present) plus '.git'
  const ignoreRules = Ignore();
  const gitignorePath = joinPaths(dirPath, '.gitignore');
  const hasGitignore = await fse.pathExists(gitignorePath);
  if (hasGitignore) {
    const raw = await fse.readFile(gitignorePath);
    ignoreRules.add(raw.toString());
  }
  ignoreRules.add('.git');

  // keep an item only if its repo-relative path is not ignored
  const isIncluded = (item) => {
    const rel = relativePaths(dirPath, item);
    return !ignoreRules.ignores(rel);
  };

  return new Promise((resolve, reject) => {
    const walker = klaw(dirPath, { filter: isIncluded });
    walker.on('readable', function onAvail() {
      // drain everything currently buffered on the stream
      for (let entry = this.read(); entry; entry = this.read()) {
        allEntries.push(entry);
      }
    });
    walker.on('error', reject);
    walker.on('end', () => resolve(allEntries));
  });
}
|
131 |
|
132 |
|
133 |
|
134 |
|
135 |
|
136 |
|
137 |
|
138 |
|
/**
 * Serializes the git working directory into an archive (zip/tgz),
 * honoring the repository's .gitignore rules.
 *
 * @param {string} dirPath file system path of the git working directory
 * @param {object} archive Archiver instance receiving the entries
 * @returns {Promise<object>} the archive with all entries appended
 */
async function archiveWorkingDir(dirPath, archive) {
  // recursively collect all entries not masked by .gitignore
  const allEntries = await collectFSEntries(dirPath, []);

  // append entries sequentially (replaces async.eachSeries callback plumbing)
  for (const entry of allEntries) {
    const name = relativePaths(dirPath, entry.path);
    // the walk includes dirPath itself, whose relative path is '' — skip it
    if (name.length) {
      if (entry.stats.isDirectory()) {
        // append an (empty) directory entry
        archive.append(null, { name: `${name}/` });
      } else {
        // stream the file's content from disk
        archive.append(fse.createReadStream(entry.path), { name });
      }
    }
  }
  return archive;
}
|
169 |
|
170 |
|
171 |
|
172 |
|
173 |
|
174 |
|
175 |
|
176 |
|
/**
 * Express middleware factory handling codeload archive requests,
 * i.e. GET /:owner/:repo/.../:ref served as a zip or tar.gz download.
 *
 * Archives of committed refs are built once and cached under CACHE_DIR;
 * a currently checked-out ref is archived from the working directory
 * (including uncommitted changes) and is never cached.
 *
 * @param {object} options server configuration (see resolveRepositoryPath)
 * @param {string} archiveFormat 'zip' or tar.gz (anything non-'zip')
 * @returns {function} Express middleware (req, res, next)
 */
function createMiddleware(options, archiveFormat) {
  return async (req, res, next) => {
    const { owner } = req.params;
    const repoName = req.params.repo;
    const refName = req.params.ref;

    const repPath = resolveRepositoryPath(options, owner, repoName);

    // NOTE: the whole pipeline runs inside try/catch so that ANY failure
    // (including isCheckedOut) is routed to next(err); the original mixed
    // await with a .then() chain and leaked early rejections.
    try {
      // project uncommitted changes only if the requested ref is checked out
      const serveUncommitted = await isCheckedOut(repPath, refName);

      const commitSha = await resolveCommit(repPath, refName);
      const { object: commit } = await getObject(repPath, commitSha);
      const { object: tree } = await getObject(repPath, commit.tree);

      const archiveFileName = `${owner}-${repoName}-${serveUncommitted ? 'SNAPSHOT' : commitSha}${archiveFormat === 'zip' ? '.zip' : '.tgz'}`;
      const archiveFilePath = joinPaths(CACHE_DIR, archiveFileName);
      await fse.ensureDir(CACHE_DIR);

      let archiveStream;
      if (!serveUncommitted && await fse.pathExists(archiveFilePath)) {
        // committed ref and archive already cached: serve the cache file
        archiveStream = fse.createReadStream(archiveFilePath);
      } else {
        let archive;
        if (archiveFormat === 'zip') {
          archive = new Archiver('zip', {
            zlib: { level: 9 }, // compression level
          });
        } else {
          archive = new Archiver('tar', {
            gzip: true,
            gzipOptions: {
              level: 9, // compression level
            },
          });
        }
        if (serveUncommitted) {
          // serve the working directory state (never cached)
          archive = await archiveWorkingDir(repPath, archive);
        } else {
          archive = await archiveGitTree(repPath, tree, archive);
        }

        archiveStream = await new Promise((resolve, reject) => {
          if (serveUncommitted) {
            // stream the archive directly to the client
            archive.finalize();
            resolve(archive);
          } else {
            // write to the cache file first, then serve the cache file
            archive.pipe(fse.createWriteStream(archiveFilePath))
              .on('finish', () => resolve(fse.createReadStream(archiveFilePath)))
              .on('error', (err) => reject(err));
            archive.finalize();
          }
        });
      }

      const mimeType = archiveFormat === 'zip' ? 'application/zip' : 'application/x-gzip';
      res.writeHead(200, {
        'Content-Type': mimeType,
        'Content-Disposition': `attachment; filename=${archiveFileName}`,
      });
      archiveStream.pipe(res);
    } catch (err) {
      debug(`[archiveHandler] code: ${err.code} message: ${err.message} stack: ${err.stack}`);
      next(err);
    }
  };
}
|
// export the middleware factory
module.exports = createMiddleware;
|