'use strict'

import {jsonStableSerialize} from './util';

import {
   MUTABLE_DATUM_DIR_TYPE,
   MUTABLE_DATUM_FILE_TYPE,
   MUTABLE_DATUM_INODE_HEADER_SCHEMA,
   URI_RECORD_SCHEMA,
   MUTABLE_DATUM_DIR_IDATA_SCHEMA,
} from './schemas';

const assert = require('assert');
const crypto = require('crypto');
const EC = require('elliptic').ec;
const ec = EC('secp256k1');
const Ajv = require('ajv');

const BLOCKSTACK_STORAGE_PROTO_VERSION = 1;

/*
 * Hash an inode payload and its length.
 * Specifically, hash `${payload.length}:${payload},`
 *
 * @param payload_buffer (Buffer) the buffer to hash
 *
 * Returns the hex-encoded sha256 digest
 */
export function hashDataPayload( payload_buffer ) {
   const hash = crypto.createHash('sha256');

   hash.update(`${payload_buffer.length}:`);
   hash.update(payload_buffer);
   hash.update(',');

   return hash.digest('hex');
}
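
/*
 * Example (illustrative sketch, not part of the original module):
 *
 *   // hashes the 11-byte payload "hello world" as the string "11:hello world,"
 *   const digest = hashDataPayload(Buffer.from('hello world'));
 *   console.log(digest);   // 64-character hex string
 */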

/*
 * Hash raw data
 * @param payload_buffer (Buffer) the buffer to hash
 *
 * Returns the hex-encoded sha256 digest
 */
export function hashRawData( payload_buffer ) {
   const hash = crypto.createHash('sha256');

   hash.update(payload_buffer);

   return hash.digest('hex');
}


/*
 * Decode a hex string into a byte buffer.
 *
 * @param hex (String) a string of hex digits.
 *
 * Returns a Buffer with the raw bytes
 */
function decodeHexString( hex ) {
   const bytes = [];
   for(let i = 0; i < hex.length - 1; i += 2) {
      bytes.push(parseInt(hex.substr(i, 2), 16));
   }
   return Buffer.from(bytes);
}


/*
 * Decode an ECDSA private key into a byte buffer
 * (compatible with Bitcoin's 'compressed' flag quirk)
 *
 * @param privatekey_hex (String) a hex-encoded ECDSA private key on secp256k1; optionally ending in '01'
 *
 * Returns a Buffer with the private key data
 */
export function decodePrivateKey( privatekey_hex ) {
   if( privatekey_hex.length === 66 && privatekey_hex.slice(64, 66) === '01' ) {
      // truncate the '01', which is a hint to Bitcoin to expect a compressed public key
      privatekey_hex = privatekey_hex.slice(0, 64);
   }
   return decodeHexString(privatekey_hex);
}
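
/*
 * Example (illustrative sketch; the all-'ff' key below is a placeholder for the
 * byte layout only, not a usable signing key):
 *
 *   const plain = decodePrivateKey('ff'.repeat(32));              // 32-byte Buffer
 *   const compressed = decodePrivateKey('ff'.repeat(32) + '01');  // same 32 bytes; '01' suffix dropped
 *   assert(plain.equals(compressed));
 */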


/*
 * Sign a string of data.
 *
 * @param payload_buffer (Buffer) the buffer to sign
 * @param privkey_hex (String) the hex-encoded ECDSA private key
 * @param hash (String) optional; the hash of the payload. payload_buffer can be null if hash is given.
 *
 * Returns the base64-encoded signature
 */
export function signRawData( payload_buffer, privkey_hex, hash ) {

   const privkey = decodePrivateKey(privkey_hex);

   if( !hash ) {
      hash = hashRawData(payload_buffer);
   }

   const sig = ec.sign(hash, privkey, {canonical: true});

   // use signature encoding compatible with Blockstack:
   // r and s, each left-padded to 32 bytes (64 hex characters)
   let r_buf = Buffer.from(sig.r.toArray()).toString('hex');
   let s_buf = Buffer.from(sig.s.toArray()).toString('hex');

   while(r_buf.length < 64) {
      r_buf = "0" + r_buf;
   }

   while(s_buf.length < 64) {
      s_buf = "0" + s_buf;
   }

   const sig_buf_hex = r_buf + s_buf;

   assert(sig_buf_hex.length === 128);

   const sigb64 = Buffer.from(sig_buf_hex, 'hex').toString('base64');
   return sigb64;
}
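
/*
 * Example (illustrative sketch; the key pair is generated on the fly with the
 * `ec` instance above, purely for demonstration):
 *
 *   const keyPair = ec.genKeyPair();
 *   const privkeyHex = keyPair.getPrivate('hex');
 *   const sigb64 = signRawData(Buffer.from('some bytes'), privkeyHex);
 *   // sigb64 is the base64 encoding of the 64-byte r||s signature
 */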


/*
 * Sign a data payload and its length.
 * Specifically, sign `${payload.length}:${payload},`
 *
 * @param payload_string (String) the string to sign
 * @param privkey_hex (String) the hex-encoded private key
 *
 * Returns the base64-encoded signature
 */
export function signDataPayload( payload_string, privkey_hex ) {
   return signRawData( Buffer.concat( [Buffer.from(`${payload_string.length}:`), Buffer.from(payload_string), Buffer.from(',')] ), privkey_hex );
}
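
/*
 * Example (illustrative sketch; `privkeyHex` is assumed to be a hex-encoded
 * secp256k1 private key, e.g. from `ec.genKeyPair().getPrivate('hex')`):
 *
 *   const payload = JSON.stringify({hello: 'world'});
 *   const sigb64 = signDataPayload(payload, privkeyHex);
 *   // for ASCII payloads this signs the same `${length}:${payload},` string
 *   // whose hash hashDataPayload(Buffer.from(payload)) computes
 */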


/*
 * Make a fully-qualified data ID (i.e. includes the device ID)
 * equivalent to this in Python: urllib.quote(str('{}:{}'.format(device_id, data_id).replace('/', '\\x2f')))
 *
 * @param device_id (String) the device ID
 * @param data_id (String) the device-agnostic part of the data ID
 *
 * Returns the fully-qualified data ID
 */
export function makeFullyQualifiedDataId( device_id, data_id ) {
   // use a global regex so that, like Python's str.replace, every '/' is replaced
   return escape(`${device_id}:${data_id}`.replace(/\//g, '\\x2f'));
}
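
/*
 * Example (illustrative sketch with made-up IDs):
 *
 *   const fqid = makeFullyQualifiedDataId('my-device', 'app/data.json');
 *   // -> 'my-device%3Aapp%5Cx2fdata.json' ('/' escaped to '\x2f', then URL-escaped)
 */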


/*
 * Make a mutable data payload
 *
 * @param data_id (String) the data identifier (not fully qualified)
 * @param data_payload (String) the data payload to store
 * @param device_id (String) the ID of the device creating this data
 * @param version (Int) the version number
 *
 * Returns a mutable data payload object.
 */
export function makeMutableDataInfo( data_id, data_payload, device_id, version ) {
   const fq_data_id = makeFullyQualifiedDataId( device_id, data_id );
   const timestamp = new Date().getTime();

   const ret = {
      'fq_data_id': fq_data_id,
      'data': data_payload,
      'version': version,
      'timestamp': timestamp,
   };

   return ret;
}
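
/*
 * Example (illustrative sketch with made-up IDs):
 *
 *   const info = makeMutableDataInfo('profile', '{"name": "alice"}', 'my-device', 1);
 *   // info.fq_data_id === makeFullyQualifiedDataId('my-device', 'profile')
 *   // info.version === 1; info.timestamp is in milliseconds
 */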


/*
 * Make a single datum tombstone.
 *
 * @param tombstone_payload (String) the string that encodes the tombstone
 *
 * Returns the tombstone (to be fed into the storage driver)
 */
export function makeDataTombstone( tombstone_payload ) {
   const now = Math.floor(new Date().getTime() / 1000);
   return `delete-${now}:${tombstone_payload}`;
}


/*
 * Make a list of mutable data tombstones.
 *
 * @param device_ids (Array) the list of device IDs
 * @param data_id (String) the datum ID
 *
 * Returns a list of tombstones.
 */
export function makeMutableDataTombstones( device_ids, data_id ) {
   const ts = [];
   for (let device_id of device_ids) {
      ts.push( makeDataTombstone( makeFullyQualifiedDataId(device_id, data_id) ));
   }
   return ts;
}


/*
 * Make a list of inode tombstones.
 *
 * @param datastore_id (String) the datastore ID
 * @param inode_uuid (String) the inode ID
 * @param device_ids (Array) the list of device IDs
 *
 * Returns a list of tombstones.
 */
export function makeInodeTombstones( datastore_id, inode_uuid, device_ids ) {
   assert(device_ids.length > 0);

   const header_id = `${datastore_id}.${inode_uuid}.hdr`;
   const header_tombstones = makeMutableDataTombstones( device_ids, header_id );

   const idata_id = `${datastore_id}.${inode_uuid}`;
   const idata_tombstones = makeMutableDataTombstones( device_ids, idata_id );

   return header_tombstones.concat(idata_tombstones);
}
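
/*
 * Example (illustrative sketch with made-up IDs):
 *
 *   const tombstones = makeInodeTombstones('datastore-addr', 'inode-uuid', ['device-1', 'device-2']);
 *   // -> 4 strings of the form `delete-${timestamp}:${fq_data_id}`: one per device
 *   //    for the inode header ('<datastore>.<uuid>.hdr') and one per device for the inode data
 */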


/*
 * Sign a datum tombstone
 *
 * @param tombstone (String) the tombstone string
 * @param privkey (String) the hex-encoded private key
 *
 * Returns the signed tombstone as a String
 */
export function signDataTombstone( tombstone, privkey ) {
   const sigb64 = signRawData( tombstone, privkey );
   return `${tombstone}:${sigb64}`;
}


/*
 * Sign a list of mutable data tombstones
 *
 * @param tombstones (Array) the list of per-device tombstones
 * @param privkey (String) the hex-encoded private key
 *
 * Returns the list of signed tombstones as an Array.
 */
export function signMutableDataTombstones( tombstones, privkey ) {
   const sts = [];
   for (let ts of tombstones) {
      sts.push( signDataTombstone(ts, privkey) );
   }
   return sts;
}
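
/*
 * Example (illustrative sketch; `privkeyHex` is assumed to be a hex-encoded
 * secp256k1 private key):
 *
 *   const tombstones = makeMutableDataTombstones(['device-1'], 'profile');
 *   const signed = signMutableDataTombstones(tombstones, privkeyHex);
 *   // each entry has the form `delete-${timestamp}:${fq_data_id}:${base64_signature}`
 */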


/*
 * Make an inode header blob.
 *
 * @param datastore_id (String) the ID of the datastore for this inode
 * @param inode_type (Int) 1 for file, 2 for directory
 * @param owner_id (String) a string that encodes the owner of this inode (i.e. pass datastore_id for now)
 * @param inode_uuid (String) the inode ID
 * @param data_hash (String) the hex-encoded sha256 of the data
 * @param device_id (String) the ID of this device
 * @param version (Int) the version of this inode
 *
 * Returns a serialized mutable-data blob (String) that wraps the inode header.
 */
export function makeInodeHeaderBlob( datastore_id, inode_type, owner_id, inode_uuid, data_hash, device_id, version ) {

   const header = {
      'type': inode_type,
      'owner': owner_id,
      'uuid': inode_uuid,
      'readers': [],    // unused for now
      'data_hash': data_hash,
      'version': version,
      'proto_version': BLOCKSTACK_STORAGE_PROTO_VERSION,
   };

   // sanity check against the inode header schema
   let valid = null;
   const ajv = new Ajv();
   try {
      valid = ajv.validate(MUTABLE_DATUM_INODE_HEADER_SCHEMA, header);
      assert(valid);
   }
   catch(e) {
      console.log('header: ' + JSON.stringify(header));
      console.log('schema: ' + JSON.stringify(MUTABLE_DATUM_INODE_HEADER_SCHEMA));
      console.log(e.stack);
      throw e;
   }

   const inode_data_id = `${datastore_id}.${inode_uuid}.hdr`;
   const inode_data_payload = jsonStableSerialize(header);
   const inode_header_blob = makeMutableDataInfo( inode_data_id, inode_data_payload, device_id, version );
   return jsonStableSerialize(inode_header_blob);
}
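
/*
 * Example (illustrative sketch with made-up IDs; hashDataPayload and the type
 * constants come from this module):
 *
 *   const dataHash = hashDataPayload(Buffer.from('file contents'));
 *   const headerBlob = makeInodeHeaderBlob(
 *      'datastore-addr', MUTABLE_DATUM_FILE_TYPE, 'datastore-addr',
 *      'inode-uuid', dataHash, 'device-1', 1);
 *   // headerBlob is a stable-serialized JSON string; JSON.parse(headerBlob).data
 *   // is itself the stable-serialized inode header
 */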


/*
 * Make a directory inode header for a particular datastore and owner.
 *
 * @param datastore_id (String) the ID of the datastore for this inode
 * @param owner_id (String) a string that encodes the owner of this directory (i.e. pass datastore_id for now)
 * @param inode_uuid (String) the ID of the inode
 * @param dir_listing (Object) a MUTABLE_DATUM_DIR_IDATA_SCHEMA-conformant object that describes the directory listing.
 * @param device_id (String) this device ID
 * @param version (Int) optional; the version of this inode (defaults to 1)
 *
 * Returns an object encoding a directory's header and idata
 */
export function makeDirInodeBlob( datastore_id, owner_id, inode_uuid, dir_listing, device_id, version ) {

   // sanity check against the directory-listing schema
   const ajv = new Ajv();
   let valid = null;
   try {
      valid = ajv.validate(MUTABLE_DATUM_DIR_IDATA_SCHEMA.properties.children, dir_listing);
      assert(valid);
   }
   catch(e) {
      console.log('dir listing: ' + JSON.stringify(dir_listing));
      console.log('schema: ' + JSON.stringify(MUTABLE_DATUM_DIR_IDATA_SCHEMA));
      throw e;
   }

   if(!version) {
      version = 1;
   }

   // build a preliminary header with an all-zero data hash so the idata payload
   // (which embeds the header) can be serialized and hashed
   const empty_hash = '0000000000000000000000000000000000000000000000000000000000000000';
   const internal_header_blob = makeInodeHeaderBlob( datastore_id, MUTABLE_DATUM_DIR_TYPE, owner_id, inode_uuid, empty_hash, device_id, version );

   // recover header
   const internal_header = JSON.parse( JSON.parse(internal_header_blob).data );
   const idata_payload = {
      children: dir_listing,
      header: internal_header,
   };

   const idata_payload_str = jsonStableSerialize(idata_payload);
   const idata_hash = hashDataPayload(idata_payload_str);

   // the final header commits to the hash of the idata payload
   const header_blob = makeInodeHeaderBlob( datastore_id, MUTABLE_DATUM_DIR_TYPE, owner_id, inode_uuid, idata_hash, device_id, version );
   return {'header': header_blob, 'idata': idata_payload_str};
}
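
/*
 * Example (illustrative sketch with made-up IDs; the child entry follows the
 * same {uuid, type, version} shape that inodeDirLink() builds below):
 *
 *   const listing = {
 *      'notes.txt': { uuid: 'child-uuid', type: MUTABLE_DATUM_FILE_TYPE, version: 1 },
 *   };
 *   const dirBlob = makeDirInodeBlob(
 *      'datastore-addr', 'datastore-addr', 'dir-uuid', listing, 'device-1', 1);
 *   // dirBlob.header is the directory's header blob; dirBlob.idata is the
 *   // serialized {children, header} payload whose hash the header commits to
 */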


/*
 * Make a file inode header for a particular datastore and owner.
 *
 * @param datastore_id (String) the ID of the datastore for this inode
 * @param owner_id (String) a string that encodes the owner of this file (i.e. pass datastore_id for now)
 * @param inode_uuid (String) the ID of the inode
 * @param data_hash (String) the hash of the file data
 * @param device_id (String) this device ID
 * @param version (Int) the version of this inode
 *
 * Returns an object encoding a file's header
 */
export function makeFileInodeBlob( datastore_id, owner_id, inode_uuid, data_hash, device_id, version ) {

   const header_blob = makeInodeHeaderBlob( datastore_id, MUTABLE_DATUM_FILE_TYPE, owner_id, inode_uuid, data_hash, device_id, version );
   return {'header': header_blob};
}


/*
 * Get the version of a child inode from a directory listing.
 *
 * @param parent_dir (Object) directory inode
 * @param child_name (String) name of the child inode
 *
 * Returns the child's version.
 * Throws if there is no such child.
 */
export function getChildVersion(parent_dir, child_name) {
   assert(parent_dir['idata']['children'][child_name]);
   return parent_dir['idata']['children'][child_name].version;
}


/*
 * Insert an entry into a directory's listing.
 *
 * @param parent_dir (Object) a directory inode structure
 * @param child_type (Int) 1 for file, 2 for directory
 * @param child_name (String) the name of the child inode (must be unique in this directory)
 * @param child_uuid (String) the ID of the child inode.
 * @param exists (Bool) if given, and if True, then expect the child to exist.
 *
 * Returns the new parent directory inode object.
 */
export function inodeDirLink( parent_dir, child_type, child_name, child_uuid, exists ) {

   assert(parent_dir['type'] === MUTABLE_DATUM_DIR_TYPE);
   assert(parent_dir['idata']);
   assert(parent_dir['idata']['children']);

   if( !exists ) {
      assert(!Object.keys(parent_dir['idata']['children']).includes(child_name));
   }

   const new_dirent = {
      uuid: child_uuid,
      type: child_type,
      version: 1,
   };

   // if the child already exists, bump its version instead of restarting at 1
   if(parent_dir['idata']['children'][child_name]) {
      new_dirent.version = parent_dir['idata']['children'][child_name].version + 1;
   }

   parent_dir['idata']['children'][child_name] = new_dirent;
   parent_dir['version'] += 1;
   return parent_dir;
}
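
/*
 * Example (illustrative sketch; `dirInode` is assumed to be a directory inode
 * object with `type`, `version`, and `idata.children` fields):
 *
 *   inodeDirLink(dirInode, MUTABLE_DATUM_FILE_TYPE, 'notes.txt', 'child-uuid');
 *   getChildVersion(dirInode, 'notes.txt');   // -> 1
 *   inodeDirLink(dirInode, MUTABLE_DATUM_FILE_TYPE, 'notes.txt', 'child-uuid', true);
 *   getChildVersion(dirInode, 'notes.txt');   // -> 2 (existing entry's version is bumped)
 */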


/*
 * Detach an inode from a directory.
 *
 * @param parent_dir (Object) a directory inode structure
 * @param child_name (String) the name of the child to detach
 *
 * Returns the new parent directory inode object.
 */
export function inodeDirUnlink( parent_dir, child_name ) {

   assert(parent_dir['type'] === MUTABLE_DATUM_DIR_TYPE);
   assert(parent_dir['idata']);
   assert(parent_dir['idata']['children']);

   assert(Object.keys(parent_dir['idata']['children']).includes(child_name));

   delete parent_dir['idata']['children'][child_name];
   parent_dir['version'] += 1;
   return parent_dir;
}
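
/*
 * Example (illustrative sketch, continuing the `dirInode` object from the
 * inodeDirLink() example above):
 *
 *   inodeDirUnlink(dirInode, 'notes.txt');
 *   // dirInode.idata.children no longer has 'notes.txt'; dirInode.version is incremented
 */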