// TypeScript `__rest` emit helper: copy every own enumerable property of
// `s` (string keys and symbols) into a new object, skipping keys listed in `e`.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) {
        if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) {
            t[p] = s[p];
        }
    }
    if (s != null && typeof Object.getOwnPropertySymbols === "function") {
        var symbols = Object.getOwnPropertySymbols(s);
        for (var i = 0; i < symbols.length; i++) {
            var sym = symbols[i];
            // Only copy symbols that are enumerable and not excluded.
            if (e.indexOf(sym) < 0 && Object.prototype.propertyIsEnumerable.call(s, sym)) {
                t[sym] = s[sym];
            }
        }
    }
    return t;
};
|
12 | import fetch, { Request } from "node-fetch";
|
13 | import CachePolicy from "http-cache-semantics";
|
14 | import { promises as fsp } from "fs";
|
15 | import { join } from "path";
|
16 | import { createHash } from "crypto";
|
/**
 * Build a prefetch function that computes a base64 cache key for a resource.
 *
 * Local paths (anything not starting with "http") are keyed by a file hash
 * ({ type: "fs", key }). URLs are keyed by HTTP cache semantics
 * ({ type: "network", policy, response }) using a HEAD request so no body is
 * ever downloaded.
 *
 * @param {string|null|undefined} previousKey base64 key from an earlier run, or falsy.
 * @returns {(info: string|Request, init?: object) => Promise<string>} async prefetch.
 */
export function createPrefetch(previousKey) {
    return async function prefetch(info, init = {}) {
        // Filesystem resource: key is a content/path hash, no HTTP involved.
        if (typeof info === "string" && !info.startsWith("http")) {
            const key = await createFileKey(info);
            return Buffer.from(JSON.stringify({ type: "fs", key })).toString("base64");
        }
        // HEAD keeps prefetching cheap — headers are all CachePolicy needs.
        let req = new Request(info, Object.assign(Object.assign({}, init), { method: "HEAD" }));
        let previousPolicy = null;
        let previousRes = null;
        if (previousKey) {
            const { policy, response } = JSON.parse(Buffer.from(previousKey, "base64").toString("utf8"));
            previousPolicy = CachePolicy.fromObject(policy);
            previousRes = response;
        }
        // Still fresh: reuse the stored response without touching the network.
        if (previousPolicy && previousPolicy.satisfiesWithoutRevalidation(req)) {
            previousRes.headers = previousPolicy.responseHeaders();
            const response = !previousPolicy.storable()
                ? null
                : serializeRes(previousRes);
            return Buffer.from(JSON.stringify({
                type: "network",
                policy: previousPolicy.toObject(),
                response,
            })).toString("base64");
        }
        // Stale: revalidate with conditional headers (If-None-Match / If-Modified-Since).
        if (previousPolicy) {
            req = new Request(info, Object.assign(Object.assign({}, init), { method: "HEAD", headers: Object.assign(Object.assign({}, req.headers), previousPolicy.revalidationHeaders(serializeReq(req))) }));
            const res = await fetch(req);
            const { policy, modified } = previousPolicy.revalidatedPolicy(req, res);
            const response = !policy.storable() ? null : serializeRes(res);
            if (modified) {
                // Resource changed: adopt the new policy.
                return Buffer.from(JSON.stringify({
                    type: "network",
                    policy: policy.toObject(),
                    response,
                })).toString("base64");
            }
            // 304-style result: keep the previous policy.
            return Buffer.from(JSON.stringify({
                type: "network",
                policy: previousPolicy.toObject(),
                response,
            })).toString("base64");
        }
        // First fetch: derive a fresh policy from the HEAD response.
        const res = await fetch(req);
        const policy = createPolicy(req, res);
        const response = !policy.storable() ? null : serializeRes(res);
        // BUG FIX: include type: "network" like every other network branch so
        // keys produced on the first fetch match the shape isKeyValid expects.
        return Buffer.from(JSON.stringify({ type: "network", policy: policy.toObject(), response })).toString("base64");
    };
}
|
/**
 * Decide whether a freshly computed key still matches a previous one.
 *
 * fs keys are content hashes, so they are valid only when byte-identical.
 * network keys are valid when the current serialized response carries a
 * 200 or 304 status.
 *
 * @param {string|null} previousKey base64 key from an earlier run.
 * @param {string|null} key base64 key from the current run.
 * @returns {boolean}
 */
export const isKeyValid = (previousKey, key) => {
    if (!previousKey || !key)
        return false;
    const { type } = JSON.parse(Buffer.from(previousKey, "base64").toString("utf8"));
    if (type === "fs")
        return previousKey === key;
    const current = JSON.parse(Buffer.from(key, "base64").toString("utf8"));
    // BUG FIX: `response` is serialized as null when the policy was not
    // storable; the original dereferenced `current.response.status`
    // unconditionally and threw a TypeError on such keys.
    if (current == null || current.response == null)
        return false;
    return current.response.status === 200 || current.response.status === 304;
};
|
// Reduce a fetch Request to the plain { url, method, headers } shape that
// CachePolicy consumes; headers become a plain string-keyed object.
const serializeReq = (request) => {
    return {
        url: request.url,
        method: request.method,
        headers: iterableToObject(request.headers),
    };
};
|
// Reduce a fetch Response (or stored response record) to the plain
// { status, headers } shape that CachePolicy and JSON serialization expect.
const serializeRes = (response) => {
    return {
        status: response.status,
        headers: iterableToObject(response.headers),
    };
};
|
// Convert a keyed iterable (Map, fetch Headers) into a plain object.
// Values without a keys() method — already-plain header objects from a
// deserialized key — pass through unchanged.
const iterableToObject = (iter) => {
    if (typeof iter.keys !== "function")
        return iter;
    const pairs = [...iter.keys()].map((key) => [key, iter.get(key)]);
    return Object.fromEntries(pairs);
};
|
// Build a CachePolicy for a request/response pair. shared: false selects
// private-cache semantics (keys are per-user, not CDN-shared).
const createPolicy = (request, response) => {
    const options = { shared: false };
    return new CachePolicy(serializeReq(request), serializeRes(response), options);
};
|
/**
 * Hash a filesystem path into a base64 sha1 digest.
 *
 * A regular file is hashed by its content. A directory is hashed by the
 * recursive list of entry paths it contains.
 * NOTE(review): directory mode feeds only paths into the hash, not file
 * contents, so editing a file inside a directory does not change the key —
 * confirm this is intended.
 *
 * @param {string} p path to a file or directory.
 * @returns {Promise<string>} base64 sha1 digest.
 */
async function createFileKey(p) {
    const hash = createHash("sha1");
    const stat = await fsp.stat(p);
    if (stat.isDirectory()) {
        const entries = await addDir(p, { mode: "dir" });
        entries.forEach((entry) => hash.update(entry));
    } else {
        const content = await addFile(p, { mode: "file" });
        hash.update(content);
    }
    return hash.digest("base64");
}
|
// Recursively walk directory `p`, delegating each entry to addDir (for
// subdirectories) or addFile (for files), and flatten the results into a
// single array. Entries at the same level are processed in parallel.
const addDir = async (p, { mode }) => {
    const ents = await fsp.readdir(p, { withFileTypes: true });
    const nested = await Promise.all(ents.map((ent) => {
        const fullPath = join(p, ent.name);
        return ent.isDirectory()
            ? addDir(fullPath, { mode })
            : addFile(fullPath, { mode });
    }));
    return nested.flat();
};
|
// Contribute one entry for hashing: in "dir" mode the path itself, in
// "file" mode the file's bytes. Any other mode resolves to undefined,
// matching the original's implicit fall-through.
const addFile = async (p, { mode }) => {
    switch (mode) {
        case "dir":
            return p;
        case "file":
            return fsp.readFile(p);
    }
};
|