1"use strict";
2Object.defineProperty(exports, "__esModule", { value: true });
3const sha256_1 = require("@noble/hashes/sha256");
4const sha512_1 = require("@noble/hashes/sha512");
5const pbkdf2_1 = require("@noble/hashes/pbkdf2");
6const utils_1 = require("@noble/hashes/utils");
7const _wordlists_1 = require("./_wordlists");
8let DEFAULT_WORDLIST = _wordlists_1._default;
9const INVALID_MNEMONIC = 'Invalid mnemonic';
10const INVALID_ENTROPY = 'Invalid entropy';
11const INVALID_CHECKSUM = 'Invalid mnemonic checksum';
12const WORDLIST_REQUIRED = 'A wordlist is required but a default could not be found.\n' +
13 'Please pass a 2048 word array explicitly.';
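// BIP-39 requires NFKD Unicode normalization of the mnemonic and passphrase strings.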
function normalize(str) {
    return (str || '').normalize('NFKD');
}
function lpad(str, padString, length) {
    while (str.length < length) {
        str = padString + str;
    }
    return str;
}
function binaryToByte(bin) {
    return parseInt(bin, 2);
}
function bytesToBinary(bytes) {
    return bytes.map((x) => lpad(x.toString(2), '0', 8)).join('');
}
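// Checksum is the first ENT/32 bits of SHA-256(entropy), as specified by BIP-39.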
function deriveChecksumBits(entropyBuffer) {
    const ENT = entropyBuffer.length * 8;
    const CS = ENT / 32;
    const hash = sha256_1.sha256(Uint8Array.from(entropyBuffer));
    return bytesToBinary(Array.from(hash)).slice(0, CS);
}
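// The PBKDF2 salt is the string "mnemonic" concatenated with the (optional) passphrase.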
function salt(password) {
    return 'mnemonic' + (password || '');
}
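// Derives a 64-byte seed from the mnemonic (and optional passphrase) using
// PBKDF2-HMAC-SHA512 with 2048 iterations, synchronously.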
function mnemonicToSeedSync(mnemonic, password) {
    const mnemonicBuffer = Uint8Array.from(Buffer.from(normalize(mnemonic), 'utf8'));
    const saltBuffer = Uint8Array.from(Buffer.from(salt(normalize(password)), 'utf8'));
    const res = pbkdf2_1.pbkdf2(sha512_1.sha512, mnemonicBuffer, saltBuffer, {
        c: 2048,
        dkLen: 64,
    });
    return Buffer.from(res);
}
exports.mnemonicToSeedSync = mnemonicToSeedSync;
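// Asynchronous variant of mnemonicToSeedSync; resolves to the same 64-byte seed Buffer.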
function mnemonicToSeed(mnemonic, password) {
    const mnemonicBuffer = Uint8Array.from(Buffer.from(normalize(mnemonic), 'utf8'));
    const saltBuffer = Uint8Array.from(Buffer.from(salt(normalize(password)), 'utf8'));
    return pbkdf2_1.pbkdf2Async(sha512_1.sha512, mnemonicBuffer, saltBuffer, {
        c: 2048,
        dkLen: 64,
    }).then((res) => Buffer.from(res));
}
exports.mnemonicToSeed = mnemonicToSeed;
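// Decodes a mnemonic back into its entropy (returned as a hex string),
// validating word count, wordlist membership and checksum along the way.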
function mnemonicToEntropy(mnemonic, wordlist) {
    wordlist = wordlist || DEFAULT_WORDLIST;
    if (!wordlist) {
        throw new Error(WORDLIST_REQUIRED);
    }
    const words = normalize(mnemonic).split(' ');
    if (words.length % 3 !== 0) {
        throw new Error(INVALID_MNEMONIC);
    }
    // convert word indices to 11 bit binary strings
    const bits = words
        .map((word) => {
            const index = wordlist.indexOf(word);
            if (index === -1) {
                throw new Error(INVALID_MNEMONIC);
            }
            return lpad(index.toString(2), '0', 11);
        })
        .join('');
    // split the binary string into ENT/CS
    const dividerIndex = Math.floor(bits.length / 33) * 32;
    const entropyBits = bits.slice(0, dividerIndex);
    const checksumBits = bits.slice(dividerIndex);
    // calculate the checksum and compare
    const entropyBytes = entropyBits.match(/(.{1,8})/g).map(binaryToByte);
    if (entropyBytes.length < 16) {
        throw new Error(INVALID_ENTROPY);
    }
    if (entropyBytes.length > 32) {
        throw new Error(INVALID_ENTROPY);
    }
    if (entropyBytes.length % 4 !== 0) {
        throw new Error(INVALID_ENTROPY);
    }
    const entropy = Buffer.from(entropyBytes);
    const newChecksum = deriveChecksumBits(entropy);
    if (newChecksum !== checksumBits) {
        throw new Error(INVALID_CHECKSUM);
    }
    return entropy.toString('hex');
}
exports.mnemonicToEntropy = mnemonicToEntropy;
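// Encodes 16-32 bytes of entropy (Buffer or hex string) into a mnemonic:
// entropy bits plus checksum bits are split into 11-bit groups, each indexing a word.
// The Japanese wordlist joins words with an ideographic space (U+3000).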
function entropyToMnemonic(entropy, wordlist) {
    if (!Buffer.isBuffer(entropy)) {
        entropy = Buffer.from(entropy, 'hex');
    }
    wordlist = wordlist || DEFAULT_WORDLIST;
    if (!wordlist) {
        throw new Error(WORDLIST_REQUIRED);
    }
    // 128 <= ENT <= 256
    if (entropy.length < 16) {
        throw new TypeError(INVALID_ENTROPY);
    }
    if (entropy.length > 32) {
        throw new TypeError(INVALID_ENTROPY);
    }
    if (entropy.length % 4 !== 0) {
        throw new TypeError(INVALID_ENTROPY);
    }
    const entropyBits = bytesToBinary(Array.from(entropy));
    const checksumBits = deriveChecksumBits(entropy);
    const bits = entropyBits + checksumBits;
    const chunks = bits.match(/(.{1,11})/g);
    const words = chunks.map((binary) => {
        const index = binaryToByte(binary);
        return wordlist[index];
    });
    return wordlist[0] === '\u3042\u3044\u3053\u304f\u3057\u3093' // Japanese wordlist
        ? words.join('\u3000')
        : words.join(' ');
}
exports.entropyToMnemonic = entropyToMnemonic;
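// Generates a new mnemonic from `strength` bits of entropy (default 128, i.e. 12 words),
// using the supplied rng or @noble/hashes' randomBytes.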
function generateMnemonic(strength, rng, wordlist) {
    strength = strength || 128;
    if (strength % 32 !== 0) {
        throw new TypeError(INVALID_ENTROPY);
    }
    rng = rng || ((size) => Buffer.from(utils_1.randomBytes(size)));
    return entropyToMnemonic(rng(strength / 8), wordlist);
}
exports.generateMnemonic = generateMnemonic;
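// Returns true only if the mnemonic decodes cleanly with a valid checksum.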
function validateMnemonic(mnemonic, wordlist) {
    try {
        mnemonicToEntropy(mnemonic, wordlist);
    }
    catch (e) {
        return false;
    }
    return true;
}
exports.validateMnemonic = validateMnemonic;
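// Switches the default wordlist to the named language (must exist in ./_wordlists).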
function setDefaultWordlist(language) {
    const result = _wordlists_1.wordlists[language];
    if (result) {
        DEFAULT_WORDLIST = result;
    }
    else {
        throw new Error('Could not find wordlist for language "' + language + '"');
    }
}
exports.setDefaultWordlist = setDefaultWordlist;
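// Returns the language key of the current default wordlist by comparing it
// word-for-word against the bundled wordlists (the 'JA' and 'EN' keys are skipped).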
function getDefaultWordlist() {
    if (!DEFAULT_WORDLIST) {
        throw new Error('No Default Wordlist set');
    }
    return Object.keys(_wordlists_1.wordlists).filter((lang) => {
        if (lang === 'JA' || lang === 'EN') {
            return false;
        }
        return _wordlists_1.wordlists[lang].every((word, index) => word === DEFAULT_WORDLIST[index]);
    })[0];
}
exports.getDefaultWordlist = getDefaultWordlist;
var _wordlists_2 = require("./_wordlists");
exports.wordlists = _wordlists_2.wordlists;
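// Usage sketch (assuming this file is consumed as the published `bip39` package;
// the function names below are the exports defined above):
//   const bip39 = require('bip39');
//   const mnemonic = bip39.generateMnemonic(256);                 // 24-word mnemonic
//   if (bip39.validateMnemonic(mnemonic)) {
//     const seed = bip39.mnemonicToSeedSync(mnemonic, 'passphrase'); // 64-byte Buffer
//   }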