1 |
|
2 |
|
3 |
|
4 |
|
5 |
|
6 |
|
7 |
|
8 |
|
9 |
|
10 |
|
11 |
|
12 |
|
13 |
|
14 |
|
15 |
|
16 |
|
17 | buildNode6IfNecessary();
|
18 |
|
19 | if (process.env.PUPPETEER_SKIP_CHROMIUM_DOWNLOAD) {
|
20 | console.log('**INFO** Skipping Chromium download. "PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" environment variable was found.');
|
21 | return;
|
22 | }
|
23 | if (process.env.NPM_CONFIG_PUPPETEER_SKIP_CHROMIUM_DOWNLOAD || process.env.npm_config_puppeteer_skip_chromium_download) {
|
24 | console.log('**INFO** Skipping Chromium download. "PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" was set in npm config.');
|
25 | return;
|
26 | }
|
27 |
|
28 | const downloadHost = process.env.PUPPETEER_DOWNLOAD_HOST || process.env.npm_config_puppeteer_download_host;
|
29 |
|
30 | const puppeteer = require('./index');
|
31 | const browserFetcher = puppeteer.createBrowserFetcher({ host: downloadHost });
|
32 |
|
33 | const revision = process.env.PUPPETEER_CHROMIUM_REVISION || process.env.npm_config_puppeteer_chromium_revision
|
34 | || require('./package.json').puppeteer.chromium_revision;
|
35 |
|
36 | const revisionInfo = browserFetcher.revisionInfo(revision);
|
37 |
|
38 |
|
39 | if (revisionInfo.local)
|
40 | return;
|
41 |
|
42 |
|
43 | const NPM_HTTPS_PROXY = process.env.npm_config_https_proxy || process.env.npm_config_proxy;
|
44 | const NPM_HTTP_PROXY = process.env.npm_config_http_proxy || process.env.npm_config_proxy;
|
45 | const NPM_NO_PROXY = process.env.npm_config_no_proxy;
|
46 |
|
47 | if (NPM_HTTPS_PROXY)
|
48 | process.env.HTTPS_PROXY = NPM_HTTPS_PROXY;
|
49 | if (NPM_HTTP_PROXY)
|
50 | process.env.HTTP_PROXY = NPM_HTTP_PROXY;
|
51 | if (NPM_NO_PROXY)
|
52 | process.env.NO_PROXY = NPM_NO_PROXY;
|
53 |
|
54 | browserFetcher.download(revisionInfo.revision, onProgress)
|
55 | .then(() => browserFetcher.localRevisions())
|
56 | .then(onSuccess)
|
57 | .catch(onError);
|
58 |
|
59 |
|
60 |
|
61 |
|
62 |
|
/**
 * Reports where Chromium was unpacked, then removes every other locally
 * cached revision so only the one just downloaded remains on disk.
 * @param {!Array<string>} localRevisions
 * @return {!Promise} resolves once all stale revisions are deleted
 */
function onSuccess(localRevisions) {
  console.log('Chromium downloaded to ' + revisionInfo.folderPath);
  const staleRevisions = localRevisions.filter(r => r !== revisionInfo.revision);
  return Promise.all(staleRevisions.map(r => browserFetcher.remove(r)));
}
|
70 |
|
71 |
|
72 |
|
73 |
|
/**
 * Logs the download failure with a hint about the skip-download escape hatch,
 * then terminates the install so npm reports it as failed.
 * @param {!Error} error
 */
function onError(error) {
  const hint = `ERROR: Failed to download Chromium r${revision}! Set "PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" env variable to skip download.`;
  console.error(hint);
  console.error(error);
  process.exit(1);
}
|
79 |
|
// Progress-bar state shared across onProgress invocations: the bar itself
// (created lazily on the first callback) and the byte count already reported.
let progressBar = null;
let lastDownloadedBytes = 0;

/**
 * Download progress callback. Lazily builds a console progress bar on the
 * first invocation (when totalBytes is known) and advances it by the bytes
 * received since the previous call.
 * @param {number} downloadedBytes cumulative bytes downloaded so far
 * @param {number} totalBytes total archive size
 */
function onProgress(downloadedBytes, totalBytes) {
  if (progressBar === null) {
    const ProgressBar = require('progress');
    const label = `Downloading Chromium r${revision} - ${toMegabytes(totalBytes)} [:bar] :percent :etas `;
    progressBar = new ProgressBar(label, {
      complete: '=',
      incomplete: ' ',
      width: 20,
      total: totalBytes,
    });
  }
  progressBar.tick(downloadedBytes - lastDownloadedBytes);
  lastDownloadedBytes = downloadedBytes;
}
|
96 |
|
/**
 * Formats a byte count as megabytes rounded to one decimal, e.g. "170.5 Mb".
 * @param {number} bytes
 * @return {string}
 */
function toMegabytes(bytes) {
  const megabytes = bytes / (1024 * 1024);
  return `${Math.round(megabytes * 10) / 10} Mb`;
}
|
101 |
|
/**
 * Transpiles the sources for Node 6 when the current runtime lacks native
 * async/await. No-ops when the transform tooling is absent (it only ships in
 * a development checkout, not in the published package).
 */
function buildNode6IfNecessary() {
  const fs = require('fs');
  const path = require('path');

  // Nothing to do unless the transform tooling is present in the checkout.
  if (!fs.existsSync(path.join('utils', 'node6-transform')))
    return;

  // Feature-detect async/await: parsing the snippet throws a SyntaxError on
  // runtimes (Node 6) that do not support it. `new Function` is deliberate
  // here — it is the parse attempt itself that performs the detection.
  let supportsAsyncAwait = true;
  try {
    new Function('async function test(){await 1}');
  } catch (error) {
    supportsAsyncAwait = false;
  }
  if (supportsAsyncAwait)
    return;

  console.log('Building Puppeteer for Node 6');
  require(path.join(__dirname, 'utils', 'node6-transform'));
}
|