'use strict';

const fs = require('fs');
const path = require('path');
const knox = require('knox');
const mime = require('mime');
const async = require('async');

// Set up our custom mime types for fonts
mime.define({
  'application/x-font-opentype': ['otf', 'eot', 'ttf'],
  'image/svg+xml': ['svg'],
  'application/font-woff': ['woff']
});

/**
 * Constructs a wrapped S3 client for the given bucket and credentials
 * @param {string} bucket bucket for the S3 client (default: env.AMAZON_S3_BUCKET)
 * @param {string} key AWS credentials key (default: env.AMAZON_S3_KEY)
 * @param {string} secret AWS credentials secret (default: env.AMAZON_S3_SECRET)
 * @constructor
 */
function S3(bucket, key, secret) {
  // fall back to the documented environment variables when arguments are omitted
  bucket = bucket || process.env.AMAZON_S3_BUCKET;
  key = key || process.env.AMAZON_S3_KEY;
  secret = secret || process.env.AMAZON_S3_SECRET;

  const config = {
    key: key,
    secret: secret,
    bucket: bucket
  };
  this.bucket = bucket;

  if (bucket && key && secret) {
    this.client = knox.createClient(config);
  }
}

/**
 * Enables mapping of the bucket to CloudFront.
 * If the value is non-null, urlWithBucket() will map to a CloudFront URL.
 * @param {string} cloudfrontMapping the CloudFront subdomain the S3 bucket is mapped to
 */
S3.prototype.setCloudfrontMapping = function(cloudfrontMapping) {
  this.cloudfrontMapping = cloudfrontMapping;
};

/**
 * Gets the bucket in use by the S3 client
 * @returns {string} the name of the bucket in use
 */
S3.prototype.getBucket = function() {
  return this.bucket;
};

/**
 * Uploads the given text contents to S3
 * @param {string|Buffer} contents the text or buffer to upload
 * @param {string} path the path the contents should be saved to
 * @param {boolean|Object=} makePrivateOrHeaders whether or not to make the file private on S3, or the full set of headers to use
 * @param {function} callback function(err, url)
 */
S3.prototype.putBuffer = function(contents, path, makePrivateOrHeaders, callback) {

  if (typeof makePrivateOrHeaders === 'function') {
    callback = makePrivateOrHeaders;
    makePrivateOrHeaders = false;
  }

  path = _cleanPath(path);

  const self = this;
  const buffer = contents instanceof Buffer ? contents : Buffer.from(contents);

  const maxTries = 3;
  let numTries = 0;
  let fileUpload = null;

  async.until(
    () => fileUpload || numTries > maxTries,
    untilNext => {
      self.client.putBuffer(buffer, path, _createHeaders(makePrivateOrHeaders), function(err, result) {

        fileUpload = result && self.urlWithBucket(path);
        let delayTimeoutMs = 0;

        if (++numTries <= maxTries && err) {
          delayTimeoutMs = Math.pow(2, 10 + numTries); // exponential backoff before retrying
          console.log('S3 Upload error, will retry', {err, numTries, delayTimeoutMs});
          err = null; // clear the error so async.until keeps retrying
        }

        setTimeout(() => {
          untilNext(err);
        }, delayTimeoutMs);
      });
    },
    err => {
      callback(err, fileUpload);
    }
  );
};
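
// Example (sketch, not part of the module): uploading an in-memory string.
// Assumes an `s3` instance constructed with valid credentials; the path and
// contents below are placeholders.
//
//   s3.putBuffer('hello world', 'docs/hello.txt', false, function(err, url) {
//     if (err) { return console.error('upload failed', err); }
//     console.log('uploaded to', url);
//   });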

/**
 * Uploads a file to S3
 * @param {string} sourcePath the path of the file to upload
 * @param {string} targetPath the path of where the file should be saved on S3
 * @param {boolean|Object=} makePrivateOrHeaders whether or not to make the file private on S3, or the full set of headers to use
 * @param {function} callback function(err, url)
 */
S3.prototype.putFile = function(sourcePath, targetPath, makePrivateOrHeaders, callback) {

  targetPath = _cleanPath(targetPath);

  const self = this;

  const headers = _createHeaders(makePrivateOrHeaders, targetPath);
  headers['Content-Length'] = fs.statSync(sourcePath).size;

  const maxTries = 3;
  let numTries = 0;
  let fileUpload = null;

  async.until(
    () => fileUpload || numTries > maxTries,
    untilNext => {
      // create a fresh read stream for every attempt; a stream that was already
      // consumed by a failed upload cannot be replayed
      const stream = fs.createReadStream(sourcePath);

      self.client.putStream(stream, targetPath, headers, function(err, result) {

        fileUpload = result && self.urlWithBucket(targetPath);
        let delayTimeoutMs = 0;

        if (++numTries <= maxTries && err) {
          delayTimeoutMs = Math.pow(2, 10 + numTries); // exponential backoff before retrying
          console.warn('S3 Upload error, will retry', {err, numTries, delayTimeoutMs, fileUpload});
          err = null; // clear the error so async.until keeps retrying
        }

        setTimeout(() => {
          untilNext(err);
        }, delayTimeoutMs);
      });
    },
    err => {
      callback(err, fileUpload);
    }
  );
};
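
// Example (sketch, not part of the module): uploading a file from disk and
// keeping it private (no public-read ACL is set when `true` is passed). The
// source and target paths are placeholders.
//
//   s3.putFile('/tmp/report.pdf', 'reports/report.pdf', true, function(err, url) {
//     if (err) { return console.error('upload failed', err); }
//     console.log('uploaded to', url);
//   });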

/**
 * Checks whether or not a file exists on S3
 * @param {string} path the path within the S3 bucket to check
 * @param {function} callback function(err, fileExists)
 */
S3.prototype.fileExists = function(path, callback) {

  const self = this;

  path = _cleanPath(path);

  // we check the file by retrieving its header information
  self.client.headFile(path, function(err, response) {
    callback(err, response && response.statusCode === 200);
  });

};

/**
 * Returns the full URL given an S3 path
 * @param {string} path path within the S3 bucket
 * @returns {string} URL to the S3 path (or mapped CloudFront path)
 */
S3.prototype.urlWithBucket = function(path) {
  path = path || '';
  // for the web, forward slashes are king
  path = path.replace(/\\/g, '/');
  if (path[0] === '/') {
    path = path.substr(1);
  }
  return this.cloudfrontMapping
    ? 'https://' + this.cloudfrontMapping + '.cloudfront.net/' + path
    : 'https://s3.amazonaws.com/' + this.bucket.toLowerCase() + '/' + path;
};
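
// Example (sketch): with bucket 'my-bucket' and no CloudFront mapping,
//   s3.urlWithBucket('images\\logo.png')
// returns 'https://s3.amazonaws.com/my-bucket/images/logo.png'; after
// s3.setCloudfrontMapping('d1234abcd') it would instead return
// 'https://d1234abcd.cloudfront.net/images/logo.png'. The bucket name and
// subdomain are placeholders.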

function _createHeaders(makePrivateOrHeaders, targetPath) {

  let headers = {};
  const contentType = targetPath && mime.lookup(targetPath);

  if (typeof makePrivateOrHeaders === 'object') {
    headers = makePrivateOrHeaders;
  } else if (!makePrivateOrHeaders) {
    headers['x-amz-acl'] = 'public-read';
  }

  // some intelligent header setting based on the path
  if (!headers['Content-Type'] && contentType) {
    headers['Content-Type'] = contentType;
  }

  return headers;
}

function _cleanPath(path) {
  // cleans the path in case it was generated in a Windows environment
  return path && path.replace(/\\/g, '/');
}

module.exports = S3;
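
// Usage sketch (illustrative only): the require path, bucket name, and
// credentials below are placeholders; real values would typically come from
// the AMAZON_S3_* environment variables mentioned in the constructor docs.
//
//   const S3 = require('./s3');
//   const s3 = new S3('my-bucket', process.env.AMAZON_S3_KEY, process.env.AMAZON_S3_SECRET);
//   s3.setCloudfrontMapping('d1234abcd'); // optional CloudFront subdomain
//
//   s3.putFile('/tmp/logo.png', 'images/logo.png', false, (err, url) => {
//     if (err) { return console.error(err); }
//     s3.fileExists('images/logo.png', (err, exists) => {
//       console.log(url, 'exists on S3?', exists);
//     });
//   });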