// @graphql-hive/client v0.18.5 — prebuilt CommonJS bundle (~42.1 kB), retrieved via UNPKG.
'use strict';

// Mark this CommonJS module as a transpiled ES module for downstream interop.
Object.defineProperty(exports, '__esModule', { value: true });

// Unwraps the `default` export from packages that were transpiled from ES modules;
// returns the module object unchanged otherwise.
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }

const axios = _interopDefault(require('axios'));
const graphql = require('graphql');
const LRU = _interopDefault(require('tiny-lru'));
const core = require('@graphql-hive/core');
const retry = _interopDefault(require('async-retry'));
const crypto = require('crypto');
const utils = require('@graphql-tools/utils');

// Client version; sent in the User-Agent and graphql-client-version headers.
const version = '0.18.5';
/**
 * Creates a buffering transport agent shared by usage and schema reporting.
 *
 * Events are accumulated in the caller-supplied `data` store and flushed to
 * `options.endpoint` as an HTTP POST (payload produced by `body()`), either on a
 * recurring `sendInterval` timer, immediately once `maxSize` is reached, or on
 * demand via `sendImmediately`/`dispose`. Failed sends are retried with
 * exponential backoff (factor 2) up to `maxRetries` times.
 *
 * @param pluginOptions - user overrides for timeout/retry/interval/logger/etc.
 * @param prefix  - log prefix, e.g. 'usage' or 'reporting'
 * @param data    - accumulator with set/size/clear
 * @param body    - produces the (possibly async) request payload
 * @param headers - extra request headers, merged over the defaults
 * @returns { capture, sendImmediately, dispose }
 */
function createAgent(pluginOptions, { prefix, data, body, headers = () => ({}), }) {
  const options = Object.assign({ timeout: 30000, debug: false, enabled: true, minTimeout: 200, maxRetries: 3, sendInterval: 10000, maxSize: 25, logger: console, name: 'Hive' }, pluginOptions);
  // Only `enabled === false` disables; undefined/true both mean enabled.
  const enabled = options.enabled !== false;
  let timeoutID = null;
  // (Re)arm the periodic flush timer; any pending timer is replaced.
  function schedule() {
    if (timeoutID) {
      clearTimeout(timeoutID);
    }
    timeoutID = setTimeout(send, options.sendInterval);
  }
  function debugLog(msg) {
    if (options.debug) {
      options.logger.info(`[hive][${prefix}]${enabled ? '' : '[DISABLED]'} ${msg}`);
    }
  }
  let scheduled = false;
  // Buffer one event; flush out-of-band when the buffer reaches maxSize.
  function capture(event) {
    // Calling capture starts the schedule
    if (!scheduled) {
      scheduled = true;
      schedule();
    }
    data.set(event);
    if (data.size() >= options.maxSize) {
      debugLog('Sending immediately');
      // setImmediate defers past the current tick; errors are logged, not thrown.
      setImmediate(() => send({ runOnce: true, throwOnError: false }));
    }
  }
  // Buffer one event and flush right away, propagating any send failure.
  function sendImmediately(event) {
    data.set(event);
    debugLog('Sending immediately');
    return send({ runOnce: true, throwOnError: true });
  }
  // Flush the buffer. With runOnce=false the periodic timer is re-armed in
  // every exit path (empty buffer, success, failure).
  async function send(sendOptions) {
    var _a;
    const runOnce = (_a = sendOptions === null || sendOptions === void 0 ? void 0 : sendOptions.runOnce) !== null && _a !== void 0 ? _a : false;
    if (!data.size()) {
      if (!runOnce) {
        schedule();
      }
      return null;
    }
    try {
      // Snapshot the payload, then clear the buffer before the network call so
      // events captured during the request land in the next batch.
      const buffer = await body();
      const dataToSend = data.size();
      data.clear();
      const sendReport = async (_bail, attempt) => {
        debugLog(`Sending (queue ${dataToSend}) (attempt ${attempt})`);
        if (!enabled) {
          // Disabled agents pretend the send succeeded.
          return {
            status: 200,
            data: null,
          };
        }
        const response = await axios
          .post(options.endpoint, buffer, {
            headers: Object.assign({ accept: 'application/json', 'content-type': 'application/json', Authorization: `Bearer ${options.token}`, 'User-Agent': `${options.name}@${version}` }, headers()),
            responseType: 'json',
            timeout: options.timeout,
          })
          .catch(error => {
            debugLog(`Attempt ${attempt} failed: ${error.message}`);
            return Promise.reject(error);
          });
        if (response.status >= 200 && response.status < 300) {
          return response;
        }
        // Non-2xx: throw so async-retry schedules another attempt.
        debugLog(`Attempt ${attempt} failed: ${response.status}`);
        throw new Error(`${response.status}: ${response.statusText}`);
      };
      const response = await retry(sendReport, {
        retries: options.maxRetries,
        minTimeout: options.minTimeout,
        factor: 2,
      });
      if (response.status < 200 || response.status >= 300) {
        throw new Error(`[hive][${prefix}] Failed to send data (HTTP status ${response.status}): ${response.data}`);
      }
      debugLog(`Sent!`);
      if (!runOnce) {
        schedule();
      }
      return response.data;
    }
    catch (error) {
      if (!runOnce) {
        schedule();
      }
      if (sendOptions === null || sendOptions === void 0 ? void 0 : sendOptions.throwOnError) {
        throw error;
      }
      // Best-effort mode: log and swallow. NOTE(review): the cleared buffer is
      // not restored, so these events are dropped after retries are exhausted.
      options.logger.error(`[hive][${prefix}] Failed to send data: ${error.message}`);
      return null;
    }
  }
  // Stop the timer and attempt one final best-effort flush.
  async function dispose() {
    debugLog('Disposing');
    if (timeoutID) {
      clearTimeout(timeoutID);
    }
    await send({
      runOnce: true,
      throwOnError: false,
    });
  }
  return {
    capture,
    sendImmediately,
    dispose,
  };
}
128
/**
 * Builds a sampling predicate for usage collection.
 *
 * @param sampleRate - fraction of operations to keep, in [0, 1]
 * @returns a zero-argument function that randomly answers true/false
 * @throws Error when sampleRate is outside [0, 1]
 */
function randomSampling(sampleRate) {
  const outOfRange = sampleRate > 1 || sampleRate < 0;
  if (outOfRange) {
    throw new Error(`Expected usage.sampleRate to be 0 <= x <= 1, received ${sampleRate}`);
  }
  return function shouldInclude() {
    const roll = Math.random();
    return roll <= sampleRate;
  };
}
137
/**
 * Returns true when `value` is async-iterable (e.g. an incremental-delivery
 * result), i.e. exposes a callable Symbol.asyncIterator.
 */
function isAsyncIterableIterator(value) {
  if (value === null || value === undefined) {
    return false;
  }
  return typeof value[Symbol.asyncIterator] === 'function';
}
/**
 * Single-slot memoization: remembers only the most recent key/result pair.
 * `fn` is re-invoked only when `cacheKeyFn(arg)` differs from the last key.
 * Note: both slots start as null, so a first call whose key is null returns
 * the initial null result without invoking `fn` (original behavior preserved).
 */
function memo(fn, cacheKeyFn) {
  let lastKey = null;
  let lastResult = null;
  return (arg) => {
    const key = cacheKeyFn(arg);
    if (key !== lastKey) {
      lastKey = key;
      lastResult = fn(arg);
    }
    return lastResult;
  };
}
/**
 * Wraps a two-argument function with an external cache map.
 * Returns `{ key, value, cacheHit }`; `fn` runs only on a miss.
 * Cached null/undefined values are treated as misses (original behavior).
 */
function cache(fn, cacheKeyFn, cacheMap) {
  return (arg, arg2) => {
    const key = cacheKeyFn(arg, arg2);
    const existing = cacheMap.get(key);
    const isHit = existing !== null && existing !== undefined;
    if (isHit) {
      return { key, value: existing, cacheHit: true };
    }
    const value = fn(arg, arg2);
    cacheMap.set(key, value);
    return { key, value, cacheHit: false };
  };
}
/**
 * Builds a stable md5-hex cache key from a GraphQL document AST and,
 * optionally, the *shape* of its variables. The JSON replacer blanks out
 * primitive variable values (they become '') while keeping non-empty
 * objects/arrays, so operations that differ only in scalar variable values
 * share the same key.
 */
function cacheDocumentKey(doc, variables) {
  const hash = crypto.createHash('md5');
  hash.update(JSON.stringify(doc));
  if (variables) {
    const shapeOnly = (_key, value) => {
      const keepsStructure = (value && typeof value === 'object' && Object.keys(value).length) ||
        (Array.isArray(value) && value.length);
      return keepsStructure ? value : '';
    };
    hash.update(JSON.stringify(variables, shapeOnly));
  }
  return hash.digest('hex');
}
const HR_TO_NS = 1e9;
const NS_TO_MS = 1e6;
/**
 * Measures elapsed time since a process.hrtime() snapshot.
 * Returns `{ ns }` plus a lazy `ms` getter derived from `ns`.
 */
function deltaFrom(hrtime) {
  const [seconds, nanoseconds] = process.hrtime(hrtime);
  const ns = seconds * HR_TO_NS + nanoseconds;
  return {
    ns,
    get ms() {
      return ns / NS_TO_MS;
    },
  };
}
/**
 * Starts a high-resolution stopwatch. The returned `end()` yields the elapsed
 * time in nanoseconds since `measureDuration()` was called.
 */
function measureDuration() {
  const startedAt = process.hrtime();
  return function end() {
    const { ns } = deltaFrom(startedAt);
    return ns;
  };
}
/**
 * Distinguishes an already-constructed Hive client from a plain options bag:
 * only a client instance carries an `operationsStore` property.
 */
function isHiveClient(clientOrOptions) {
  const looksLikeClient = 'operationsStore' in clientOrOptions;
  return looksLikeClient;
}
/** Invokes `logFn(message)` only when `condition` is truthy. */
function logIf(condition, message, logFn) {
  if (!condition) {
    return;
  }
  logFn(message);
}
213
/**
 * Creates the usage-reporting side of the client.
 *
 * Batches executed-operation records into a `report` (operations list plus a
 * de-duplicated operation map keyed by document hash) and hands them to an
 * agent that POSTs to the usage endpoint. Returns a no-op implementation when
 * usage reporting is disabled.
 *
 * @returns { collect(args) -> complete(result), dispose() }
 */
function createUsage(pluginOptions) {
  var _a, _b, _c, _d, _e, _f;
  if (!pluginOptions.usage || pluginOptions.enabled === false) {
    // Disabled: return inert collect/dispose so call sites need no branching.
    return {
      collect() {
        return () => { };
      },
      async dispose() { },
    };
  }
  // Mutable batch; swapped out wholesale by the agent's clear().
  let report = {
    size: 0,
    map: {},
    operations: [],
  };
  const options = typeof pluginOptions.usage === 'boolean' ? {} : pluginOptions.usage;
  const logger = (_b = (_a = pluginOptions.agent) === null || _a === void 0 ? void 0 : _a.logger) !== null && _b !== void 0 ? _b : console;
  // One collector per schema instance (memo keyed on args.schema identity).
  const collector = memo(createCollector, arg => arg.schema);
  const excludeSet = new Set((_c = options.exclude) !== null && _c !== void 0 ? _c : []);
  const agent = createAgent(Object.assign(Object.assign({ logger }, ((_d = pluginOptions.agent) !== null && _d !== void 0 ? _d : {
    maxSize: 1500,
  })), { endpoint: (_e = options.endpoint) !== null && _e !== void 0 ? _e : 'https://app.graphql-hive.com/usage', token: pluginOptions.token, enabled: pluginOptions.enabled, debug: pluginOptions.debug }), {
    prefix: 'usage',
    data: {
      // Append one execution record; register its operation shape once per key.
      set(operation) {
        report.operations.push({
          operationMapKey: operation.key,
          timestamp: operation.timestamp,
          execution: {
            ok: operation.execution.ok,
            duration: operation.execution.duration,
            errorsTotal: operation.execution.errorsTotal,
            errors: operation.execution.errors,
          },
          metadata: {
            client: operation.client,
          },
        });
        report.size += 1;
        if (!report.map[operation.key]) {
          report.map[operation.key] = {
            operation: operation.operation,
            operationName: operation.operationName,
            fields: operation.fields,
          };
        }
      },
      size() {
        return report.size;
      },
      clear() {
        report = {
          size: 0,
          map: {},
          operations: [],
        };
      },
    },
    headers() {
      return {
        'Content-Type': 'application/json',
        'graphql-client-name': 'Hive Client',
        'graphql-client-version': version,
      };
    },
    body() {
      return JSON.stringify(report);
    },
  });
  logIf(typeof pluginOptions.token !== 'string' || pluginOptions.token.length === 0, '[hive][usage] token is missing', logger.error);
  const shouldInclude = randomSampling((_f = options.sampleRate) !== null && _f !== void 0 ? _f : 1.0);
  return {
    dispose: agent.dispose,
    // Starts timing immediately; the returned `complete` is called with the
    // execution result and records the operation (unless excluded or sampled out).
    collect(args) {
      const finish = measureDuration();
      return function complete(result) {
        var _a, _b, _c, _d, _e, _f;
        try {
          if (isAsyncIterableIterator(result)) {
            // Incremental delivery (@stream/@defer) is not collected.
            logger.info('@stream @defer is not supported');
            finish();
            return;
          }
          const rootOperation = args.document.definitions.find(o => o.kind === graphql.Kind.OPERATION_DEFINITION);
          const document = args.document;
          const operationName = args.operationName || ((_a = rootOperation.name) === null || _a === void 0 ? void 0 : _a.value) || 'anonymous';
          const duration = finish();
          if (!excludeSet.has(operationName) && shouldInclude()) {
            // Keep only message + dotted path from each GraphQL error.
            const errors = (_c = (_b = result.errors) === null || _b === void 0 ? void 0 : _b.map(error => {
              var _a;
              return ({
                message: error.message,
                path: (_a = error.path) === null || _a === void 0 ? void 0 : _a.join('.'),
              });
            })) !== null && _c !== void 0 ? _c : [];
            const collect = collector({
              schema: args.schema,
              max: (_d = options.max) !== null && _d !== void 0 ? _d : 1000,
              ttl: options.ttl,
              processVariables: (_e = options.processVariables) !== null && _e !== void 0 ? _e : false,
            });
            const { key, value: info } = collect(document, (_f = args.variableValues) !== null && _f !== void 0 ? _f : null);
            agent.capture({
              key,
              timestamp: Date.now(),
              operationName,
              operation: info.document,
              fields: info.fields,
              execution: {
                ok: errors.length === 0,
                duration,
                errorsTotal: errors.length,
                errors,
              },
              // TODO: operationHash is ready to accept hashes of persisted operations
              client: typeof args.contextValue !== 'undefined' && typeof options.clientInfo !== 'undefined'
                ? options.clientInfo(args.contextValue)
                : null,
            });
          }
        }
        catch (error) {
          // Usage collection must never break execution; log and continue.
          logger.error(`Failed to collect operation`, error);
        }
      };
    },
  };
}
/**
 * Builds a cached collector that, given a parsed operation (and optionally its
 * variable values), walks the document with graphql's TypeInfo and returns:
 *  - `document`: the normalized operation (literals hidden, aliases removed)
 *  - `fields`: every used schema coordinate as 'Type.field' / 'Type.field.arg' ids
 * Results are memoized in an LRU keyed by a hash of the document (and variable
 * shapes when `processVariables` is on).
 */
function createCollector({ schema, max, ttl, processVariables = false, }) {
  const typeInfo = new graphql.TypeInfo(schema);
  function collect(doc, variables) {
    const entries = new Set();
    const collected_entire_named_types = new Set();
    const shouldAnalyzeVariableValues = processVariables === true && variables !== null;
    function markAsUsed(id) {
      if (!entries.has(id)) {
        entries.add(id);
      }
    }
    // Schema-coordinate id, e.g. 'Type.field' or 'Type.field.arg'.
    function makeId(...names) {
      return names.join('.');
    }
    // Input types seen during the visit: either entire type usage (`all`)
    // or a set of specific field/enum-value names.
    const collectedInputTypes = {};
    function collectInputType(inputType, fieldName) {
      if (!collectedInputTypes[inputType]) {
        collectedInputTypes[inputType] = {
          all: false,
          fields: new Set(),
        };
      }
      if (fieldName) {
        collectedInputTypes[inputType].fields.add(fieldName);
      }
      else {
        collectedInputTypes[inputType].all = true;
      }
    }
    // Records input-type usage for an argument/object-field value node.
    function collectNode(node) {
      const inputType = typeInfo.getInputType();
      const inputTypeName = resolveTypeName(inputType);
      if (node.value.kind === graphql.Kind.ENUM) {
        // Collect only a specific enum value
        collectInputType(inputTypeName, node.value.value);
      }
      else if (node.value.kind !== graphql.Kind.OBJECT && node.value.kind !== graphql.Kind.LIST) {
        // When processing of variables is enabled,
        // we want to skip collecting full input types of variables
        // and only collect specific fields.
        // That's why the following condition is added.
        // Otherwise we would mark entire input types as used, and not granular fields.
        if (node.value.kind === graphql.Kind.VARIABLE && shouldAnalyzeVariableValues) {
          return;
        }
        collectInputType(inputTypeName);
      }
    }
    // Marks a type and (recursively) everything reachable from it as used.
    function markEntireTypeAsUsed(type) {
      const namedType = unwrapType(type);
      if (collected_entire_named_types.has(namedType.name)) {
        // No need to mark this type as used again
        return;
      }
      else {
        // Add this type to the set of types that have been marked as used
        // to avoid infinite loops
        collected_entire_named_types.add(namedType.name);
      }
      if (graphql.isScalarType(namedType)) {
        markAsUsed(makeId(namedType.name));
        return;
      }
      if (graphql.isEnumType(namedType)) {
        namedType.getValues().forEach(value => {
          markAsUsed(makeId(namedType.name, value.name));
        });
        return;
      }
      const fieldsMap = namedType.getFields();
      for (const fieldName in fieldsMap) {
        const field = fieldsMap[fieldName];
        markAsUsed(makeId(namedType.name, field.name));
        markEntireTypeAsUsed(field.type);
      }
    }
    // Walks a runtime variable value against its input type, collecting only
    // the fields that actually appear in the value.
    function collectVariable(namedType, variableValue) {
      const variableValueArray = Array.isArray(variableValue) ? variableValue : [variableValue];
      if (graphql.isInputObjectType(namedType)) {
        variableValueArray.forEach(variable => {
          if (variable) {
            // Collect only the used fields
            for (const fieldName in variable) {
              const field = namedType.getFields()[fieldName];
              if (field) {
                collectInputType(namedType.name, fieldName);
                collectVariable(unwrapType(field.type), variable[fieldName]);
              }
            }
          }
          else {
            // Collect the entire type
            collectInputType(namedType.name);
          }
        });
      }
      else {
        collectInputType(namedType.name);
      }
    }
    graphql.visit(doc, graphql.visitWithTypeInfo(typeInfo, {
      Field() {
        const parent = typeInfo.getParentType();
        const field = typeInfo.getFieldDef();
        markAsUsed(makeId(parent.name, field.name));
      },
      VariableDefinition(node) {
        const inputType = typeInfo.getInputType();
        if (shouldAnalyzeVariableValues) {
          // Granular collection of variable values is enabled
          const variableName = node.variable.name.value;
          const variableValue = variables[variableName];
          const namedType = unwrapType(inputType);
          collectVariable(namedType, variableValue);
        }
        else {
          // Collect the entire type without processing the variables
          collectInputType(resolveTypeName(inputType));
        }
      },
      Argument(node) {
        const parent = typeInfo.getParentType();
        const field = typeInfo.getFieldDef();
        const arg = typeInfo.getArgument();
        markAsUsed(makeId(parent.name, field.name, arg.name));
        collectNode(node);
      },
      ListValue(node) {
        const inputType = typeInfo.getInputType();
        const inputTypeName = resolveTypeName(inputType);
        node.values.forEach(value => {
          if (value.kind !== graphql.Kind.OBJECT) {
            // if a value is not an object we need to collect all fields
            collectInputType(inputTypeName);
          }
        });
      },
      ObjectField(node) {
        const parentInputType = typeInfo.getParentInputType();
        const parentInputTypeName = resolveTypeName(parentInputType);
        collectNode(node);
        collectInputType(parentInputTypeName, node.name.value);
      },
    }));
    // Expand the gathered input-type usage into concrete 'Type.field' entries.
    for (const inputTypeName in collectedInputTypes) {
      const { fields, all } = collectedInputTypes[inputTypeName];
      if (all) {
        markEntireTypeAsUsed(schema.getType(inputTypeName));
      }
      else {
        fields.forEach(field => {
          markAsUsed(makeId(inputTypeName, field));
        });
      }
    }
    return {
      document: core.normalizeOperation({
        document: doc,
        hideLiterals: true,
        removeAliases: true,
      }),
      fields: Array.from(entries),
    };
  }
  // Variables participate in the cache key only when processVariables is on.
  return cache(collect, function cacheKey(doc, variables) {
    return cacheDocumentKey(doc, processVariables === true ? variables : null);
  }, LRU(max, ttl));
}
/** Returns the innermost named type's name for a (possibly wrapped) input type. */
function resolveTypeName(inputType) {
  const namedType = unwrapType(inputType);
  return namedType.name;
}
/** Strips NonNull/List wrappers until the underlying named type is reached. */
function unwrapType(type) {
  let current = type;
  while (graphql.isNonNullType(current) || graphql.isListType(current)) {
    current = current.ofType;
  }
  return current;
}
519
/**
 * Creates the schema-reporting side of the client.
 *
 * Holds at most one pending schema (the latest wins) and publishes it via the
 * `schemaPublish` mutation through the shared agent. Returns no-op report/dispose
 * when reporting is disabled.
 */
function createReporting(pluginOptions) {
  var _a, _b, _c, _d;
  if (!pluginOptions.reporting || pluginOptions.enabled === false) {
    return {
      async report() { },
      async dispose() { },
    };
  }
  const token = pluginOptions.token;
  const reportingOptions = pluginOptions.reporting;
  const logger = (_b = (_a = pluginOptions.agent) === null || _a === void 0 ? void 0 : _a.logger) !== null && _b !== void 0 ? _b : console;
  logIf(typeof reportingOptions.author !== 'string' || reportingOptions.author.length === 0, '[hive][reporting] author is missing', logger.error);
  logIf(typeof reportingOptions.commit !== 'string' || reportingOptions.commit.length === 0, '[hive][reporting] commit is missing', logger.error);
  logIf(typeof token !== 'string' || token.length === 0, '[hive][reporting] token is missing', logger.error);
  // Single-slot "buffer": only the most recently reported schema is kept.
  let currentSchema = null;
  const agent = createAgent(Object.assign(Object.assign({ logger }, ((_c = pluginOptions.agent) !== null && _c !== void 0 ? _c : {})), { endpoint: (_d = reportingOptions.endpoint) !== null && _d !== void 0 ? _d : 'https://app.graphql-hive.com/graphql', token: token, enabled: pluginOptions.enabled, debug: pluginOptions.debug }), {
    prefix: 'reporting',
    data: {
      set(incomingSchema) {
        currentSchema = incomingSchema;
      },
      size() {
        return currentSchema ? 1 : 0;
      },
      clear() {
        currentSchema = null;
      },
    },
    headers() {
      return {
        'Content-Type': 'application/json',
        'graphql-client-name': 'Hive Client',
        'graphql-client-version': version,
      };
    },
    // Serializes the pending schema as a schemaPublish mutation payload.
    async body() {
      var _a, _b;
      return JSON.stringify({
        query,
        operationName: 'schemaPublish',
        variables: {
          input: {
            sdl: await printToSDL(currentSchema),
            author: reportingOptions.author,
            commit: reportingOptions.commit,
            service: (_a = reportingOptions.serviceName) !== null && _a !== void 0 ? _a : null,
            url: (_b = reportingOptions.serviceUrl) !== null && _b !== void 0 ? _b : null,
            force: true,
          },
        },
      });
    },
  });
  return {
    // Publishes the schema immediately; failures are logged, never thrown.
    async report({ schema }) {
      var _a;
      try {
        const result = await agent.sendImmediately(schema);
        if (result === null) {
          throw new Error('Empty response');
        }
        if (Array.isArray(result.errors)) {
          throw new Error(result.errors.map(error => error.message).join('\n'));
        }
        const data = result.data.schemaPublish;
        // Dispatch on the mutation result union type.
        switch (data.__typename) {
          case 'SchemaPublishSuccess': {
            logger.info(`[hive][reporting] ${(_a = data.successMessage) !== null && _a !== void 0 ? _a : 'Published schema'}`);
            return;
          }
          case 'SchemaPublishMissingServiceError': {
            throw new Error('Service name is not defined');
          }
          case 'SchemaPublishMissingUrlError': {
            throw new Error('Service url is not defined');
          }
          case 'SchemaPublishError': {
            // Forced publish succeeded despite validation errors; surface a few.
            logger.info(`[hive][reporting] Published schema (forced with ${data.errors.total} errors)`);
            data.errors.nodes.slice(0, 5).forEach(error => {
              logger.info(` - ${error.message}`);
            });
            return;
          }
        }
      }
      catch (error) {
        logger.error(`[hive][reporting] Failed to report schema: ${error instanceof Error && 'message' in error ? error.message : error}`);
      }
    },
    dispose: agent.dispose,
  };
}
// GraphQL mutation used by createReporting to publish a schema to the registry.
// stripIgnoredCharacters minifies the document before it is embedded in request bodies.
const query = graphql.stripIgnoredCharacters(/* GraphQL */ `
  mutation schemaPublish($input: SchemaPublishInput!) {
    schemaPublish(input: $input) {
      __typename
      ... on SchemaPublishSuccess {
        initial
        valid
        successMessage: message
      }
      ... on SchemaPublishError {
        valid
        errors {
          nodes {
            message
          }
          total
        }
      }
      ... on SchemaPublishMissingServiceError {
        missingServiceError: message
      }
      ... on SchemaPublishMissingUrlError {
        missingUrlError: message
      }
    }
  }
`);
/**
 * It's a bit tricky to detect if a schema is federated or not.
 * For now, we just check if the schema has a _service that resolves to `_Service!` (as described in federation spec).
 * This may lead to issues if the schema is not a federated schema but something made by the user (I don't think we will hit that issue soon).
 */
function isFederatedSchema(schema) {
  const queryType = schema.getQueryType();
  if (!queryType) {
    return false;
  }
  const serviceField = queryType.getFields()._service;
  return Boolean(serviceField) && serviceField.type.toString() === `_Service!`;
}
/**
 * Extracts the SDL of a federated service from a GraphQLSchema object
 * We do it to not send federated schema to the registry but only the original schema provided by user
 */
async function extractFederationServiceSDL(schema) {
  const fields = schema.getQueryType().getFields();
  const { sdl } = await fields._service.resolve();
  return sdl;
}
/**
 * True when every defined root type uses its conventional name
 * (Query / Mutation / Subscription); absent root types are ignored.
 */
function isSchemaOfCommonNames(schema) {
  const roots = [
    [schema.getQueryType(), 'Query'],
    [schema.getMutationType(), 'Mutation'],
    [schema.getSubscriptionType(), 'Subscription'],
  ];
  return roots.every(([type, conventionalName]) => !type || type.name === conventionalName);
}
/**
 * Prints the schema (directives included) as SDL. When the schema has no
 * description and all root types use conventional names, the redundant
 * `schema { ... }` definition is dropped so it is not sent to the registry
 * (which may be unwanted by federated services).
 */
function printSchemaWithDirectives(schema) {
  const doc = utils.getDocumentNodeFromSchema(schema);
  const canDropSchemaDefinition = schema.description == null && isSchemaOfCommonNames(schema);
  if (!canDropSchemaDefinition) {
    return graphql.print(doc);
  }
  const definitions = doc.definitions.filter(def => def.kind !== graphql.Kind.SCHEMA_DEFINITION);
  return graphql.print({ kind: graphql.Kind.DOCUMENT, definitions });
}
/**
 * Serializes a schema to minified SDL; federated schemas yield only the
 * original service SDL rather than the full gateway-augmented schema.
 */
async function printToSDL(schema) {
  const sdl = isFederatedSchema(schema)
    ? await extractFederationServiceSDL(schema)
    : printSchemaWithDirectives(schema);
  return graphql.stripIgnoredCharacters(sdl);
}
694
/**
 * Creates the persisted-operations store: loads stored operations from the
 * registry and serves them by key. Returns an inert implementation when the
 * store is disabled.
 */
function createOperationsStore(pluginOptions) {
  const operationsStoreOptions = pluginOptions.operationsStore;
  const token = pluginOptions.token;
  if (!operationsStoreOptions || pluginOptions.enabled === false) {
    return {
      canHandle() {
        return false;
      },
      get() {
        return null;
      },
      async load() { },
      async reload() { },
    };
  }
  // key (operation hash) -> parsed DocumentNode
  const store = new Map();
  // A key is a persisted-operation hash, never a raw GraphQL document —
  // the '{' check cheaply rejects inline documents.
  const canHandle = key => {
    return typeof key === 'string' && !key.includes('{');
  };
  const get = key => {
    return store.get(key);
  };
  // Fetches all stored operations and atomically replaces the in-memory map.
  const load = async () => {
    var _a;
    const response = await axios.post((_a = operationsStoreOptions.endpoint) !== null && _a !== void 0 ? _a : 'https://app.graphql-hive.com/graphql', {
      query: query$1,
      operationName: 'loadStoredOperations',
    }, {
      responseType: 'json',
      headers: {
        'content-type': 'application/json',
        Authorization: `Bearer ${token}`,
      },
    });
    const parsedData = await response.data;
    store.clear();
    parsedData.data.storedOperations.forEach(({ key, document }) => {
      store.set(key, graphql.parse(document, {
        noLocation: true,
      }));
    });
  };
  // reload is an alias of load; a full refetch replaces the whole store.
  const reload = load;
  return {
    canHandle,
    get,
    load,
    reload,
  };
}
// Query used by createOperationsStore to fetch all persisted operations
// (operationHash -> document content) from the registry.
const query$1 = graphql.stripIgnoredCharacters(/* GraphQL */ `
  query loadStoredOperations {
    storedOperations {
      key: operationHash
      document: content
    }
  }
`);
753
/**
 * Assembles the full Hive client: usage collection, schema reporting, the
 * persisted-operations store, token diagnostics (`info`) and disposal.
 * A missing token force-disables the client.
 */
function createHive(options) {
  var _a, _b;
  const logger = (_b = (_a = options === null || options === void 0 ? void 0 : options.agent) === null || _a === void 0 ? void 0 : _a.logger) !== null && _b !== void 0 ? _b : console;
  if (!options.enabled) {
    logIf(options.debug === true, '[hive] is not enabled.', logger.info);
  }
  if (!options.token && options.enabled) {
    // NOTE: mutates the caller's options object to disable the client.
    options.enabled = false;
    logger.info('[hive] Missing token, disabling.');
  }
  const usage = createUsage(options);
  const schemaReporter = createReporting(options);
  const operationsStore = createOperationsStore(options);
  // Fire-and-forget schema publish (reporting logs its own failures).
  function reportSchema({ schema }) {
    void schemaReporter.report({ schema });
  }
  function collectUsage(args) {
    return usage.collect(args);
  }
  async function dispose() {
    await Promise.all([schemaReporter.dispose(), usage.dispose()]);
  }
  // Queries the registry for token details and logs them; never throws.
  async function info() {
    var _a, _b, _c, _d;
    if (options.enabled !== true) {
      return;
    }
    try {
      let endpoint = 'https://app.graphql-hive.com/graphql';
      if (options.reporting && options.reporting.endpoint) {
        endpoint = options.reporting.endpoint;
      }
      const query = /* GraphQL */ `
        query myTokenInfo {
          tokenInfo {
            __typename
            ... on TokenInfo {
              token {
                name
              }
              organization {
                name
                cleanId
              }
              project {
                name
                type
                cleanId
              }
              target {
                name
                cleanId
              }
              canReportSchema: hasTargetScope(scope: REGISTRY_WRITE)
              canCollectUsage: hasTargetScope(scope: REGISTRY_WRITE)
              canReadOperations: hasProjectScope(scope: OPERATIONS_STORE_READ)
            }
            ... on TokenNotFoundError {
              message
            }
          }
        }
      `;
      const response = await axios.post(endpoint, JSON.stringify({
        query,
        operationName: 'myTokenInfo',
      }), {
        headers: {
          'content-type': 'application/json',
          Authorization: `Bearer ${options.token}`,
        },
        timeout: 30000,
        decompress: true,
        responseType: 'json',
      });
      if (response.status >= 200 && response.status < 300) {
        const result = await response.data;
        if (((_a = result.data) === null || _a === void 0 ? void 0 : _a.tokenInfo.__typename) === 'TokenInfo') {
          const { tokenInfo } = result.data;
          const { organization, project, target, canReportSchema, canCollectUsage, canReadOperations } = tokenInfo;
          // Column printer padded to the widest of the four names.
          const print = createPrinter([tokenInfo.token.name, organization.name, project.name, target.name]);
          const organizationUrl = `https://app.graphql-hive.com/${organization.cleanId}`;
          const projectUrl = `${organizationUrl}/${project.cleanId}`;
          const targetUrl = `${projectUrl}/${target.cleanId}`;
          logger.info([
            '[hive][info] Token details',
            '',
            `Token name: ${print(tokenInfo.token.name)}`,
            `Organization: ${print(organization.name, organizationUrl)}`,
            `Project: ${print(project.name, projectUrl)}`,
            `Target: ${print(target.name, targetUrl)}`,
            '',
            `Can report schema? ${print(canReportSchema ? 'Yes' : 'No')}`,
            `Can collect usage? ${print(canCollectUsage ? 'Yes' : 'No')}`,
            `Can read operations? ${print(canReadOperations ? 'Yes' : 'No')}`,
            '',
          ].join('\n'));
        }
        else if ((_b = result.data) === null || _b === void 0 ? void 0 : _b.tokenInfo.message) {
          // TokenNotFoundError branch.
          logger.error(`[hive][info] Token not found. Reason: ${(_c = result.data) === null || _c === void 0 ? void 0 : _c.tokenInfo.message}`);
          logger.info(`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`);
        }
        else {
          logger.error(`[hive][info] ${result.errors[0].message}`);
          logger.info(`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`);
        }
      }
      else {
        logger.error(`[hive][info] Error ${response.status}: ${response.statusText}`);
      }
    }
    catch (error) {
      logger.error(`[hive][info] Error ${(_d = error === null || error === void 0 ? void 0 : error.message) !== null && _d !== void 0 ? _d : error}`);
    }
  }
  return {
    info,
    reportSchema,
    collectUsage,
    operationsStore,
    dispose,
  };
}
/**
 * Builds a column formatter: pads `base` to the width of the longest value
 * plus four spaces, then appends the optional `extra` suffix.
 */
function createPrinter(values) {
  const columnWidth = Math.max(...values.map((value) => value.length)) + 4;
  return (base, extra) => {
    const padded = base.padEnd(columnWidth, ' ');
    return padded + (extra || '');
  };
}
883
/**
 * Envelop plugin entry point. Accepts either an existing Hive client or raw
 * options (in which case a client is created with the 'HiveEnvelop' agent
 * name as the default), then wires schema reporting and usage collection
 * into the Envelop lifecycle.
 */
function useHive(clientOrOptions) {
  let hive;
  if (isHiveClient(clientOrOptions)) {
    hive = clientOrOptions;
  }
  else {
    const providedAgent = clientOrOptions.agent != null ? clientOrOptions.agent : {};
    hive = createHive(Object.assign({}, clientOrOptions, {
      agent: Object.assign({ name: 'HiveEnvelop' }, providedAgent),
    }));
  }
  // Fire-and-forget token diagnostics.
  void hive.info();
  return {
    onSchemaChange({ schema }) {
      hive.reportSchema({ schema });
    },
    onExecute({ args }) {
      const finishCollecting = hive.collectUsage(args);
      return {
        onExecuteDone({ result }) {
          finishCollecting(result);
        },
      };
    },
  };
}
904
/**
 * Returns a fetcher that downloads the supergraph SDL from the Hive CDN
 * (`GET {endpoint}/supergraph`, authenticated via the X-Hive-CDN-Key header)
 * and tags it with a sha256 id of its contents.
 */
function createSupergraphSDLFetcher({ endpoint, key }) {
  return async function supergraphSDLFetcher() {
    const response = await axios.get(`${endpoint}/supergraph`, {
      headers: {
        'X-Hive-CDN-Key': key,
      },
    });
    const succeeded = response.status >= 200 && response.status < 300;
    if (!succeeded) {
      throw new Error(`Failed to fetch supergraph [${response.status}]`);
    }
    const supergraphSdl = response.data;
    const id = crypto.createHash('sha256').update(supergraphSdl).digest('base64');
    return { id, supergraphSdl };
  };
}
/**
 * Apollo Gateway supergraph manager backed by the Hive CDN: fetches the
 * supergraph once on initialize, then polls every `pollIntervalInMs`
 * (default 30s), pushing updates through `hooks.update`.
 *
 * Fix: polling now stops permanently after cleanup(). Previously, if
 * cleanup() ran while a fetch was in flight, the timeout callback would
 * unconditionally re-arm the timer and polling continued forever.
 */
function createSupergraphManager(options) {
  const pollIntervalInMs = options.pollIntervalInMs != null ? options.pollIntervalInMs : 30000;
  const fetchSupergraph = createSupergraphSDLFetcher({ endpoint: options.endpoint, key: options.key });
  let timer = null;
  // Set by cleanup(); prevents the in-flight callback from re-arming the timer.
  let disposed = false;
  return {
    async initialize(hooks) {
      const initialResult = await fetchSupergraph();
      function poll() {
        timer = setTimeout(async () => {
          try {
            const result = await fetchSupergraph();
            if (result.supergraphSdl && hooks.update) {
              hooks.update(result.supergraphSdl);
            }
          }
          catch (error) {
            // Polling is best-effort: log and keep trying on the next tick.
            console.error(`Failed to update supergraph: ${error instanceof Error ? error.message : error}`);
          }
          if (!disposed) {
            poll();
          }
        }, pollIntervalInMs);
      }
      poll();
      return {
        supergraphSdl: initialResult.supergraphSdl,
        cleanup: async () => {
          disposed = true;
          if (timer) {
            clearTimeout(timer);
          }
        },
      };
    },
  };
}
/**
 * Apollo Server plugin. Accepts an existing Hive client or raw options
 * (defaulting the agent name to 'HiveApollo'). Supports both legacy Apollo
 * Server v0 (sync hook results, `context`/`engine` fields) and v3+
 * (promise-returning hooks, schemaDidLoadOrUpdate).
 */
function hiveApollo(clientOrOptions) {
  var _a;
  const hive = isHiveClient(clientOrOptions)
    ? clientOrOptions
    : createHive(Object.assign(Object.assign({}, clientOrOptions), { agent: Object.assign({ name: 'HiveApollo' }, ((_a = clientOrOptions.agent) !== null && _a !== void 0 ? _a : {})) }));
  // Fire-and-forget token diagnostics.
  void hive.info();
  return {
    requestDidStart(context) {
      // `overallCachePolicy` does not exist in v0
      const isLegacyV0 = !('overallCachePolicy' in context);
      // The parsed document only becomes available in willSendResponse, so
      // collectUsage reads it lazily through this getter.
      let doc;
      const complete = hive.collectUsage({
        schema: context.schema,
        get document() {
          return doc;
        },
        operationName: context.operationName,
        contextValue: context.context,
        variableValues: context.request.variables,
      });
      if (isLegacyV0) {
        // v0 expects a plain object of lifecycle handlers.
        return {
          willSendResponse(ctx) {
            doc = ctx.document;
            complete(ctx.response);
          },
        };
      }
      // v3+ expects a promise of the handlers object.
      return Promise.resolve({
        async willSendResponse(ctx) {
          doc = ctx.document;
          complete(ctx.response);
        },
      });
    },
    serverWillStart(ctx) {
      // `engine` does not exist in v3
      const isLegacyV0 = 'engine' in ctx;
      hive.reportSchema({ schema: ctx.schema });
      if (isLegacyV0) {
        return {
          async serverWillStop() {
            await hive.dispose();
          },
        };
      }
      return Promise.resolve({
        async serverWillStop() {
          await hive.dispose();
        },
        // v3+: re-report whenever the API schema changes after startup.
        schemaDidLoadOrUpdate(schemaContext) {
          if (ctx.schema !== schemaContext.apiSchema) {
            hive.reportSchema({ schema: schemaContext.apiSchema });
          }
        },
      });
    },
  };
}
1019
/**
 * Returns a function that GETs `{endpoint}/schema` from the Hive CDN
 * (authenticated via X-Hive-CDN-Key) and resolves with the JSON body,
 * or rejects with an error naming the HTTP status.
 */
function createFetcher({ endpoint, key }) {
  return async function fetcher() {
    const response = await axios.get(`${endpoint}/schema`, {
      headers: {
        'X-Hive-CDN-Key': key,
        accept: 'application/json',
      },
      responseType: 'json',
    });
    const succeeded = response.status >= 200 && response.status < 300;
    if (!succeeded) {
      throw new Error(`Failed to fetch [${response.status}]`);
    }
    return response.data;
  };
}
/**
 * Fetches a single service's schema from the CDN and prepends an `id`
 * derived from a sha256 of its sdl, url, and name.
 */
function createSchemaFetcher({ endpoint, key }) {
  const fetcher = createFetcher({ endpoint, key });
  return function schemaFetcher() {
    return fetcher().then(schema => {
      const id = crypto
        .createHash('sha256')
        .update(schema.sdl)
        .update(schema.url)
        .update(schema.name)
        .digest('base64');
      return Object.assign({ id }, schema);
    });
  };
}
/**
 * Fetches the list of services from the CDN, tagging each with an `id`
 * derived from a sha256 of its sdl, url, and name.
 *
 * Fix: renamed the returned function from `schemaFetcher` (a copy-paste
 * leftover from createSchemaFetcher) to `servicesFetcher` for accurate
 * stack traces; callers are unaffected since they never use the name.
 */
function createServicesFetcher({ endpoint, key }) {
  const fetcher = createFetcher({ endpoint, key });
  return function servicesFetcher() {
    return fetcher().then(services => services.map(service => (Object.assign({ id: crypto.createHash('sha256').update(service.sdl).update(service.url).update(service.name).digest('base64') }, service))));
  };
}
1050
// Public API of the @graphql-hive/client bundle.
exports.createHive = createHive;
exports.createSchemaFetcher = createSchemaFetcher;
exports.createServicesFetcher = createServicesFetcher;
exports.createSupergraphManager = createSupergraphManager;
exports.createSupergraphSDLFetcher = createSupergraphSDLFetcher;
exports.hiveApollo = hiveApollo;
exports.useHive = useHive;