import axios from 'axios';
import { Kind, TypeInfo, visit, visitWithTypeInfo, isNonNullType, isListType, isScalarType, isEnumType, isInputObjectType, stripIgnoredCharacters, print, parse } from 'graphql';
import LRU from 'tiny-lru';
import { normalizeOperation } from '@graphql-hive/core';
import retry from 'async-retry';
import { createHash } from 'crypto';
import { getDocumentNodeFromSchema } from '@graphql-tools/utils';

const version = '0.18.5';

function createAgent(pluginOptions, { prefix, data, body, headers = () => ({}) }) {
    const options = {
        timeout: 30000,
        debug: false,
        enabled: true,
        minTimeout: 200,
        maxRetries: 3,
        sendInterval: 10000,
        maxSize: 25,
        logger: console,
        name: 'Hive',
        ...pluginOptions,
    };
    const enabled = options.enabled !== false;
    let timeoutID = null;
    function schedule() {
        if (timeoutID) {
            clearTimeout(timeoutID);
        }
        timeoutID = setTimeout(send, options.sendInterval);
    }
    function debugLog(msg) {
        if (options.debug) {
            options.logger.info(`[hive][${prefix}]${enabled ? '' : '[DISABLED]'} ${msg}`);
        }
    }
    let scheduled = false;
    function capture(event) {
        // Calling capture starts the schedule
        if (!scheduled) {
            scheduled = true;
            schedule();
        }
        data.set(event);
        if (data.size() >= options.maxSize) {
            debugLog('Sending immediately');
            setImmediate(() => send({ runOnce: true, throwOnError: false }));
        }
    }
    function sendImmediately(event) {
        data.set(event);
        debugLog('Sending immediately');
        return send({ runOnce: true, throwOnError: true });
    }
    async function send(sendOptions) {
        const runOnce = sendOptions?.runOnce ?? false;
        if (!data.size()) {
            if (!runOnce) {
                schedule();
            }
            return null;
        }
        try {
            const buffer = await body();
            const dataToSend = data.size();
            data.clear();
            const sendReport = async (_bail, attempt) => {
                debugLog(`Sending (queue ${dataToSend}) (attempt ${attempt})`);
                if (!enabled) {
                    return {
                        status: 200,
                        data: null,
                    };
                }
                const response = await axios
                    .post(options.endpoint, buffer, {
                        headers: {
                            accept: 'application/json',
                            'content-type': 'application/json',
                            Authorization: `Bearer ${options.token}`,
                            'User-Agent': `${options.name}@${version}`,
                            ...headers(),
                        },
                        responseType: 'json',
                        timeout: options.timeout,
                    })
                    .catch(error => {
                        debugLog(`Attempt ${attempt} failed: ${error.message}`);
                        return Promise.reject(error);
                    });
                if (response.status >= 200 && response.status < 300) {
                    return response;
                }
                debugLog(`Attempt ${attempt} failed: ${response.status}`);
                throw new Error(`${response.status}: ${response.statusText}`);
            };
            const response = await retry(sendReport, {
                retries: options.maxRetries,
                minTimeout: options.minTimeout,
                factor: 2,
            });
            if (response.status < 200 || response.status >= 300) {
                throw new Error(`[hive][${prefix}] Failed to send data (HTTP status ${response.status}): ${response.data}`);
            }
            debugLog(`Sent!`);
            if (!runOnce) {
                schedule();
            }
            return response.data;
        }
        catch (error) {
            if (!runOnce) {
                schedule();
            }
            if (sendOptions?.throwOnError) {
                throw error;
            }
            options.logger.error(`[hive][${prefix}] Failed to send data: ${error.message}`);
            return null;
        }
    }
    async function dispose() {
        debugLog('Disposing');
        if (timeoutID) {
            clearTimeout(timeoutID);
        }
        await send({
            runOnce: true,
            throwOnError: false,
        });
    }
    return {
        capture,
        sendImmediately,
        dispose,
    };
}
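
/**
 * Usage sketch (illustrative only; `createAgent` is internal to this module).
 * `data` is any queue-like object exposing `set`, `size` and `clear`, and
 * `body` serializes the queued events for the HTTP report:
 *
 *   const events = [];
 *   const agent = createAgent(
 *     { token: 'YOUR_HIVE_TOKEN', endpoint: 'https://app.graphql-hive.com/usage' },
 *     {
 *       prefix: 'example',
 *       data: { set: e => events.push(e), size: () => events.length, clear: () => (events.length = 0) },
 *       body: () => JSON.stringify(events),
 *     },
 *   );
 *   agent.capture({ hello: 'world' }); // batched; flushed every `sendInterval` ms
 *   await agent.dispose();             // flushes whatever is left on shutdown
 */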

function randomSampling(sampleRate) {
    if (sampleRate > 1 || sampleRate < 0) {
        throw new Error(`Expected usage.sampleRate to be 0 <= x <= 1, received ${sampleRate}`);
    }
    return function shouldInclude() {
        return Math.random() <= sampleRate;
    };
}

function isAsyncIterableIterator(value) {
    return typeof value?.[Symbol.asyncIterator] === 'function';
}
function memo(fn, cacheKeyFn) {
    let memoizedResult = null;
    let memoizedKey = null;
    return (arg) => {
        const currentKey = cacheKeyFn(arg);
        if (memoizedKey === currentKey) {
            return memoizedResult;
        }
        memoizedKey = currentKey;
        memoizedResult = fn(arg);
        return memoizedResult;
    };
}
function cache(fn, cacheKeyFn, cacheMap) {
    return (arg, arg2) => {
        const key = cacheKeyFn(arg, arg2);
        const cachedValue = cacheMap.get(key);
        if (cachedValue !== null && typeof cachedValue !== 'undefined') {
            return {
                key,
                value: cachedValue,
                cacheHit: true,
            };
        }
        const value = fn(arg, arg2);
        cacheMap.set(key, value);
        return {
            key,
            value,
            cacheHit: false,
        };
    };
}
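
/**
 * Behavior sketch for `memo` and `cache` (illustrative, not exported):
 *
 *   const square = memo(n => n * n, n => n); // remembers only the last key
 *   square(2); // computed -> 4
 *   square(2); // memoized -> 4
 *
 *   const cachedAdd = cache((a, b) => a + b, (a, b) => `${a}:${b}`, new Map());
 *   cachedAdd(1, 2); // { key: '1:2', value: 3, cacheHit: false }
 *   cachedAdd(1, 2); // { key: '1:2', value: 3, cacheHit: true }
 */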
function cacheDocumentKey(doc, variables) {
    const hasher = createHash('md5').update(JSON.stringify(doc));
    if (variables) {
        hasher.update(JSON.stringify(variables, (_, value) => {
            if ((value && typeof value === 'object' && Object.keys(value).length) ||
                (Array.isArray(value) && value.length)) {
                return value;
            }
            return '';
        }));
    }
    return hasher.digest('hex');
}
const HR_TO_NS = 1e9;
const NS_TO_MS = 1e6;
function deltaFrom(hrtime) {
    const delta = process.hrtime(hrtime);
    const ns = delta[0] * HR_TO_NS + delta[1];
    return {
        ns,
        get ms() {
            return ns / NS_TO_MS;
        },
    };
}
function measureDuration() {
    const startAt = process.hrtime();
    return function end() {
        return deltaFrom(startAt).ns;
    };
}
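
/**
 * Timing sketch (illustrative): `measureDuration` returns a closure that
 * reports the elapsed time in nanoseconds when invoked.
 *
 *   const finish = measureDuration();
 *   await doSomeWork();          // hypothetical workload
 *   const durationNs = finish(); // e.g. ~1_234_567 ns (~1.23 ms)
 */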
function isHiveClient(clientOrOptions) {
    return 'operationsStore' in clientOrOptions;
}
function logIf(condition, message, logFn) {
    if (condition) {
        logFn(message);
    }
}

function createUsage(pluginOptions) {
    if (!pluginOptions.usage || pluginOptions.enabled === false) {
        return {
            collect() {
                return () => { };
            },
            async dispose() { },
        };
    }
    let report = {
        size: 0,
        map: {},
        operations: [],
    };
    const options = typeof pluginOptions.usage === 'boolean' ? {} : pluginOptions.usage;
    const logger = pluginOptions.agent?.logger ?? console;
    const collector = memo(createCollector, arg => arg.schema);
    const excludeSet = new Set(options.exclude ?? []);
    const agent = createAgent({
        logger,
        ...(pluginOptions.agent ?? { maxSize: 1500 }),
        endpoint: options.endpoint ?? 'https://app.graphql-hive.com/usage',
        token: pluginOptions.token,
        enabled: pluginOptions.enabled,
        debug: pluginOptions.debug,
    }, {
        prefix: 'usage',
        data: {
            set(operation) {
                report.operations.push({
                    operationMapKey: operation.key,
                    timestamp: operation.timestamp,
                    execution: {
                        ok: operation.execution.ok,
                        duration: operation.execution.duration,
                        errorsTotal: operation.execution.errorsTotal,
                        errors: operation.execution.errors,
                    },
                    metadata: {
                        client: operation.client,
                    },
                });
                report.size += 1;
                if (!report.map[operation.key]) {
                    report.map[operation.key] = {
                        operation: operation.operation,
                        operationName: operation.operationName,
                        fields: operation.fields,
                    };
                }
            },
            size() {
                return report.size;
            },
            clear() {
                report = {
                    size: 0,
                    map: {},
                    operations: [],
                };
            },
        },
        headers() {
            return {
                'Content-Type': 'application/json',
                'graphql-client-name': 'Hive Client',
                'graphql-client-version': version,
            };
        },
        body() {
            return JSON.stringify(report);
        },
    });
    logIf(typeof pluginOptions.token !== 'string' || pluginOptions.token.length === 0, '[hive][usage] token is missing', logger.error);
    const shouldInclude = randomSampling(options.sampleRate ?? 1.0);
    return {
        dispose: agent.dispose,
        collect(args) {
            const finish = measureDuration();
            return function complete(result) {
                try {
                    if (isAsyncIterableIterator(result)) {
                        logger.info('@stream @defer is not supported');
                        finish();
                        return;
                    }
                    const rootOperation = args.document.definitions.find(o => o.kind === Kind.OPERATION_DEFINITION);
                    const document = args.document;
                    const operationName = args.operationName || rootOperation.name?.value || 'anonymous';
                    const duration = finish();
                    if (!excludeSet.has(operationName) && shouldInclude()) {
                        const errors = result.errors?.map(error => ({
                            message: error.message,
                            path: error.path?.join('.'),
                        })) ?? [];
                        const collect = collector({
                            schema: args.schema,
                            max: options.max ?? 1000,
                            ttl: options.ttl,
                            processVariables: options.processVariables ?? false,
                        });
                        const { key, value: info } = collect(document, args.variableValues ?? null);
                        agent.capture({
                            key,
                            timestamp: Date.now(),
                            operationName,
                            operation: info.document,
                            fields: info.fields,
                            execution: {
                                ok: errors.length === 0,
                                duration,
                                errorsTotal: errors.length,
                                errors,
                            },
                            // TODO: operationHash is ready to accept hashes of persisted operations
                            client: typeof args.contextValue !== 'undefined' && typeof options.clientInfo !== 'undefined'
                                ? options.clientInfo(args.contextValue)
                                : null,
                        });
                    }
                }
                catch (error) {
                    logger.error(`Failed to collect operation`, error);
                }
            };
        },
    };
}
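
/**
 * Options sketch for the usage reporter (values are illustrative):
 *
 *   const usage = createUsage({
 *     token: 'YOUR_HIVE_TOKEN',
 *     enabled: true,
 *     usage: {
 *       sampleRate: 0.5,                 // report roughly half of all operations
 *       exclude: ['IntrospectionQuery'], // skip operations by name
 *       clientInfo: ctx => ({ name: ctx.clientName, version: ctx.clientVersion }),
 *     },
 *   });
 *   const complete = usage.collect({ schema, document, operationName, contextValue, variableValues });
 *   // ...execute the operation...
 *   complete(executionResult);
 */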
function createCollector({ schema, max, ttl, processVariables = false }) {
    const typeInfo = new TypeInfo(schema);
    function collect(doc, variables) {
        const entries = new Set();
        const collectedEntireNamedTypes = new Set();
        const shouldAnalyzeVariableValues = processVariables === true && variables !== null;
        function markAsUsed(id) {
            if (!entries.has(id)) {
                entries.add(id);
            }
        }
        function makeId(...names) {
            return names.join('.');
        }
        const collectedInputTypes = {};
        function collectInputType(inputType, fieldName) {
            if (!collectedInputTypes[inputType]) {
                collectedInputTypes[inputType] = {
                    all: false,
                    fields: new Set(),
                };
            }
            if (fieldName) {
                collectedInputTypes[inputType].fields.add(fieldName);
            }
            else {
                collectedInputTypes[inputType].all = true;
            }
        }
        function collectNode(node) {
            const inputType = typeInfo.getInputType();
            const inputTypeName = resolveTypeName(inputType);
            if (node.value.kind === Kind.ENUM) {
                // Collect only a specific enum value
                collectInputType(inputTypeName, node.value.value);
            }
            else if (node.value.kind !== Kind.OBJECT && node.value.kind !== Kind.LIST) {
                // When processing of variables is enabled,
                // we want to skip collecting full input types of variables
                // and only collect specific fields.
                // That's why the following condition is added.
                // Otherwise we would mark entire input types as used, and not granular fields.
                if (node.value.kind === Kind.VARIABLE && shouldAnalyzeVariableValues) {
                    return;
                }
                collectInputType(inputTypeName);
            }
        }
        function markEntireTypeAsUsed(type) {
            const namedType = unwrapType(type);
            if (collectedEntireNamedTypes.has(namedType.name)) {
                // No need to mark this type as used again
                return;
            }
            // Add this type to the set of types that have been marked as used
            // to avoid infinite loops
            collectedEntireNamedTypes.add(namedType.name);
            if (isScalarType(namedType)) {
                markAsUsed(makeId(namedType.name));
                return;
            }
            if (isEnumType(namedType)) {
                namedType.getValues().forEach(value => {
                    markAsUsed(makeId(namedType.name, value.name));
                });
                return;
            }
            const fieldsMap = namedType.getFields();
            for (const fieldName in fieldsMap) {
                const field = fieldsMap[fieldName];
                markAsUsed(makeId(namedType.name, field.name));
                markEntireTypeAsUsed(field.type);
            }
        }
        function collectVariable(namedType, variableValue) {
            const variableValueArray = Array.isArray(variableValue) ? variableValue : [variableValue];
            if (isInputObjectType(namedType)) {
                variableValueArray.forEach(variable => {
                    if (variable) {
                        // Collect only the used fields
                        for (const fieldName in variable) {
                            const field = namedType.getFields()[fieldName];
                            if (field) {
                                collectInputType(namedType.name, fieldName);
                                collectVariable(unwrapType(field.type), variable[fieldName]);
                            }
                        }
                    }
                    else {
                        // Collect the entire type
                        collectInputType(namedType.name);
                    }
                });
            }
            else {
                collectInputType(namedType.name);
            }
        }
        visit(doc, visitWithTypeInfo(typeInfo, {
            Field() {
                const parent = typeInfo.getParentType();
                const field = typeInfo.getFieldDef();
                markAsUsed(makeId(parent.name, field.name));
            },
            VariableDefinition(node) {
                const inputType = typeInfo.getInputType();
                if (shouldAnalyzeVariableValues) {
                    // Granular collection of variable values is enabled
                    const variableName = node.variable.name.value;
                    const variableValue = variables[variableName];
                    const namedType = unwrapType(inputType);
                    collectVariable(namedType, variableValue);
                }
                else {
                    // Collect the entire type without processing the variables
                    collectInputType(resolveTypeName(inputType));
                }
            },
            Argument(node) {
                const parent = typeInfo.getParentType();
                const field = typeInfo.getFieldDef();
                const arg = typeInfo.getArgument();
                markAsUsed(makeId(parent.name, field.name, arg.name));
                collectNode(node);
            },
            ListValue(node) {
                const inputType = typeInfo.getInputType();
                const inputTypeName = resolveTypeName(inputType);
                node.values.forEach(value => {
                    if (value.kind !== Kind.OBJECT) {
                        // if a value is not an object we need to collect all fields
                        collectInputType(inputTypeName);
                    }
                });
            },
            ObjectField(node) {
                const parentInputType = typeInfo.getParentInputType();
                const parentInputTypeName = resolveTypeName(parentInputType);
                collectNode(node);
                collectInputType(parentInputTypeName, node.name.value);
            },
        }));
        for (const inputTypeName in collectedInputTypes) {
            const { fields, all } = collectedInputTypes[inputTypeName];
            if (all) {
                markEntireTypeAsUsed(schema.getType(inputTypeName));
            }
            else {
                fields.forEach(field => {
                    markAsUsed(makeId(inputTypeName, field));
                });
            }
        }
        return {
            document: normalizeOperation({
                document: doc,
                hideLiterals: true,
                removeAliases: true,
            }),
            fields: Array.from(entries),
        };
    }
    return cache(collect, function cacheKey(doc, variables) {
        return cacheDocumentKey(doc, processVariables === true ? variables : null);
    }, LRU(max, ttl));
}
function resolveTypeName(inputType) {
    return unwrapType(inputType).name;
}
function unwrapType(type) {
    if (isNonNullType(type) || isListType(type)) {
        return unwrapType(type.ofType);
    }
    return type;
}
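
/**
 * Collector sketch (illustrative; `createCollector` is internal). Given a schema
 * with `type Query { ping: String }`:
 *
 *   const collect = createCollector({ schema, max: 100 });
 *   const { value, cacheHit } = collect(parse('{ ping }'), null);
 *   // value.fields   -> ['Query.ping']
 *   // value.document -> normalized operation with literals hidden and aliases removed
 */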

function createReporting(pluginOptions) {
    if (!pluginOptions.reporting || pluginOptions.enabled === false) {
        return {
            async report() { },
            async dispose() { },
        };
    }
    const token = pluginOptions.token;
    const reportingOptions = pluginOptions.reporting;
    const logger = pluginOptions.agent?.logger ?? console;
    logIf(typeof reportingOptions.author !== 'string' || reportingOptions.author.length === 0, '[hive][reporting] author is missing', logger.error);
    logIf(typeof reportingOptions.commit !== 'string' || reportingOptions.commit.length === 0, '[hive][reporting] commit is missing', logger.error);
    logIf(typeof token !== 'string' || token.length === 0, '[hive][reporting] token is missing', logger.error);
    let currentSchema = null;
    const agent = createAgent({
        logger,
        ...(pluginOptions.agent ?? {}),
        endpoint: reportingOptions.endpoint ?? 'https://app.graphql-hive.com/graphql',
        token,
        enabled: pluginOptions.enabled,
        debug: pluginOptions.debug,
    }, {
        prefix: 'reporting',
        data: {
            set(incomingSchema) {
                currentSchema = incomingSchema;
            },
            size() {
                return currentSchema ? 1 : 0;
            },
            clear() {
                currentSchema = null;
            },
        },
        headers() {
            return {
                'Content-Type': 'application/json',
                'graphql-client-name': 'Hive Client',
                'graphql-client-version': version,
            };
        },
        async body() {
            return JSON.stringify({
                query,
                operationName: 'schemaPublish',
                variables: {
                    input: {
                        sdl: await printToSDL(currentSchema),
                        author: reportingOptions.author,
                        commit: reportingOptions.commit,
                        service: reportingOptions.serviceName ?? null,
                        url: reportingOptions.serviceUrl ?? null,
                        force: true,
                    },
                },
            });
        },
    });
    return {
        async report({ schema }) {
            try {
                const result = await agent.sendImmediately(schema);
                if (result === null) {
                    throw new Error('Empty response');
                }
                if (Array.isArray(result.errors)) {
                    throw new Error(result.errors.map(error => error.message).join('\n'));
                }
                const data = result.data.schemaPublish;
                switch (data.__typename) {
                    case 'SchemaPublishSuccess': {
                        logger.info(`[hive][reporting] ${data.successMessage ?? 'Published schema'}`);
                        return;
                    }
                    case 'SchemaPublishMissingServiceError': {
                        throw new Error('Service name is not defined');
                    }
                    case 'SchemaPublishMissingUrlError': {
                        throw new Error('Service url is not defined');
                    }
                    case 'SchemaPublishError': {
                        logger.info(`[hive][reporting] Published schema (forced with ${data.errors.total} errors)`);
                        data.errors.nodes.slice(0, 5).forEach(error => {
                            logger.info(` - ${error.message}`);
                        });
                        return;
                    }
                }
            }
            catch (error) {
                logger.error(`[hive][reporting] Failed to report schema: ${error instanceof Error && 'message' in error ? error.message : error}`);
            }
        },
        dispose: agent.dispose,
    };
}
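
/**
 * Reporting sketch (illustrative): schema publishing is attributed to an author
 * and a commit, typically taken from the deployment environment.
 *
 *   const reporting = createReporting({
 *     token: 'YOUR_HIVE_TOKEN',
 *     enabled: true,
 *     reporting: { author: 'deploy-bot', commit: process.env.GIT_SHA ?? 'unknown' },
 *   });
 *   await reporting.report({ schema }); // sends the SDL via the `schemaPublish` mutation below
 */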
const query = stripIgnoredCharacters(/* GraphQL */ `
  mutation schemaPublish($input: SchemaPublishInput!) {
    schemaPublish(input: $input) {
      __typename
      ... on SchemaPublishSuccess {
        initial
        valid
        successMessage: message
      }
      ... on SchemaPublishError {
        valid
        errors {
          nodes {
            message
          }
          total
        }
      }
      ... on SchemaPublishMissingServiceError {
        missingServiceError: message
      }
      ... on SchemaPublishMissingUrlError {
        missingUrlError: message
      }
    }
  }
`);
/**
 * Detecting whether a schema is federated is a bit tricky.
 * For now, we just check if the schema has a `_service` field that resolves to `_Service!`
 * (as described in the federation spec).
 * A non-federated schema could define the same field by hand and trigger a false positive,
 * but that is unlikely in practice.
 */
function isFederatedSchema(schema) {
    const queryType = schema.getQueryType();
    if (queryType) {
        const fields = queryType.getFields();
        if (fields._service && fields._service.type.toString() === `_Service!`) {
            return true;
        }
    }
    return false;
}
/**
 * Extracts the SDL of a federated service from a GraphQLSchema object.
 * We do this to avoid sending the fully expanded federated schema to the registry,
 * and instead send only the original schema provided by the user.
 */
async function extractFederationServiceSDL(schema) {
    const queryType = schema.getQueryType();
    const serviceField = queryType.getFields()._service;
    const resolved = await serviceField.resolve();
    return resolved.sdl;
}
function isSchemaOfCommonNames(schema) {
    const queryType = schema.getQueryType();
    if (queryType && queryType.name !== 'Query') {
        return false;
    }
    const mutationType = schema.getMutationType();
    if (mutationType && mutationType.name !== 'Mutation') {
        return false;
    }
    const subscriptionType = schema.getSubscriptionType();
    if (subscriptionType && subscriptionType.name !== 'Subscription') {
        return false;
    }
    return true;
}
function printSchemaWithDirectives(schema) {
    const doc = getDocumentNodeFromSchema(schema);
    if (schema.description == null && isSchemaOfCommonNames(schema)) {
        // Remove the schema definition if it's the default one.
        // We do this to avoid sending a schema definition to the registry,
        // which may be unwanted by federated services.
        return print({
            kind: Kind.DOCUMENT,
            definitions: doc.definitions.filter(def => def.kind !== Kind.SCHEMA_DEFINITION),
        });
    }
    return print(doc);
}
async function printToSDL(schema) {
    return stripIgnoredCharacters(isFederatedSchema(schema) ? await extractFederationServiceSDL(schema) : printSchemaWithDirectives(schema));
}

function createOperationsStore(pluginOptions) {
    const operationsStoreOptions = pluginOptions.operationsStore;
    const token = pluginOptions.token;
    if (!operationsStoreOptions || pluginOptions.enabled === false) {
        return {
            canHandle() {
                return false;
            },
            get() {
                return null;
            },
            async load() { },
            async reload() { },
        };
    }
    const store = new Map();
    const canHandle = key => {
        return typeof key === 'string' && !key.includes('{');
    };
    const get = key => {
        return store.get(key);
    };
    const load = async () => {
        const response = await axios.post(operationsStoreOptions.endpoint ?? 'https://app.graphql-hive.com/graphql', {
            query: query$1,
            operationName: 'loadStoredOperations',
        }, {
            responseType: 'json',
            headers: {
                'content-type': 'application/json',
                Authorization: `Bearer ${token}`,
            },
        });
        const parsedData = response.data;
        store.clear();
        parsedData.data.storedOperations.forEach(({ key, document }) => {
            store.set(key, parse(document, {
                noLocation: true,
            }));
        });
    };
    const reload = load;
    return {
        canHandle,
        get,
        load,
        reload,
    };
}
const query$1 = stripIgnoredCharacters(/* GraphQL */ `
  query loadStoredOperations {
    storedOperations {
      key: operationHash
      document: content
    }
  }
`);
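
/**
 * Operations store sketch (illustrative): `canHandle` treats any string without
 * a '{' (i.e. not inline GraphQL) as a potential persisted-operation key.
 *
 *   const store = createOperationsStore({ token: 'YOUR_HIVE_TOKEN', operationsStore: {} });
 *   await store.load();                 // fetch and parse all stored operations
 *   if (store.canHandle(incomingKey)) { // `incomingKey` is hypothetical
 *     const documentNode = store.get(incomingKey); // undefined when the key is unknown
 *   }
 */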

function createHive(options) {
    const logger = options?.agent?.logger ?? console;
    if (!options.enabled) {
        logIf(options.debug === true, '[hive] is not enabled.', logger.info);
    }
    if (!options.token && options.enabled) {
        options.enabled = false;
        logger.info('[hive] Missing token, disabling.');
    }
    const usage = createUsage(options);
    const schemaReporter = createReporting(options);
    const operationsStore = createOperationsStore(options);
    function reportSchema({ schema }) {
        void schemaReporter.report({ schema });
    }
    function collectUsage(args) {
        return usage.collect(args);
    }
    async function dispose() {
        await Promise.all([schemaReporter.dispose(), usage.dispose()]);
    }
    async function info() {
        if (options.enabled !== true) {
            return;
        }
        try {
            let endpoint = 'https://app.graphql-hive.com/graphql';
            if (options.reporting && options.reporting.endpoint) {
                endpoint = options.reporting.endpoint;
            }
            const query = /* GraphQL */ `
                query myTokenInfo {
                    tokenInfo {
                        __typename
                        ... on TokenInfo {
                            token {
                                name
                            }
                            organization {
                                name
                                cleanId
                            }
                            project {
                                name
                                type
                                cleanId
                            }
                            target {
                                name
                                cleanId
                            }
                            canReportSchema: hasTargetScope(scope: REGISTRY_WRITE)
                            canCollectUsage: hasTargetScope(scope: REGISTRY_WRITE)
                            canReadOperations: hasProjectScope(scope: OPERATIONS_STORE_READ)
                        }
                        ... on TokenNotFoundError {
                            message
                        }
                    }
                }
            `;
            const response = await axios.post(endpoint, JSON.stringify({
                query,
                operationName: 'myTokenInfo',
            }), {
                headers: {
                    'content-type': 'application/json',
                    Authorization: `Bearer ${options.token}`,
                },
                timeout: 30000,
                decompress: true,
                responseType: 'json',
            });
            if (response.status >= 200 && response.status < 300) {
                const result = response.data;
                if (result.data?.tokenInfo.__typename === 'TokenInfo') {
                    const { tokenInfo } = result.data;
                    const { organization, project, target, canReportSchema, canCollectUsage, canReadOperations } = tokenInfo;
                    const print = createPrinter([tokenInfo.token.name, organization.name, project.name, target.name]);
                    const organizationUrl = `https://app.graphql-hive.com/${organization.cleanId}`;
                    const projectUrl = `${organizationUrl}/${project.cleanId}`;
                    const targetUrl = `${projectUrl}/${target.cleanId}`;
                    logger.info([
                        '[hive][info] Token details',
                        '',
                        `Token name: ${print(tokenInfo.token.name)}`,
                        `Organization: ${print(organization.name, organizationUrl)}`,
                        `Project: ${print(project.name, projectUrl)}`,
                        `Target: ${print(target.name, targetUrl)}`,
                        '',
                        `Can report schema? ${print(canReportSchema ? 'Yes' : 'No')}`,
                        `Can collect usage? ${print(canCollectUsage ? 'Yes' : 'No')}`,
                        `Can read operations? ${print(canReadOperations ? 'Yes' : 'No')}`,
                        '',
                    ].join('\n'));
                }
                else if (result.data?.tokenInfo.message) {
                    logger.error(`[hive][info] Token not found. Reason: ${result.data?.tokenInfo.message}`);
                    logger.info(`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`);
                }
                else {
                    logger.error(`[hive][info] ${result.errors[0].message}`);
                    logger.info(`[hive][info] How to create a token? https://docs.graphql-hive.com/features/tokens`);
                }
            }
            else {
                logger.error(`[hive][info] Error ${response.status}: ${response.statusText}`);
            }
        }
        catch (error) {
            logger.error(`[hive][info] Error ${error?.message ?? error}`);
        }
    }
    return {
        info,
        reportSchema,
        collectUsage,
        operationsStore,
        dispose,
    };
}
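
/**
 * End-to-end sketch (illustrative values): a client that publishes the schema
 * and reports usage for every executed operation.
 *
 *   const hive = createHive({
 *     enabled: true,
 *     token: 'YOUR_HIVE_TOKEN',
 *     reporting: { author: 'deploy-bot', commit: 'abcdef0' },
 *     usage: true,
 *   });
 *   hive.reportSchema({ schema });
 *   const complete = hive.collectUsage({ schema, document, operationName, contextValue, variableValues });
 *   // ...execute...
 *   complete(result);
 *   await hive.dispose(); // flush pending reports on shutdown
 */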
function createPrinter(values) {
    const maxLen = Math.max(...values.map(v => v.length)) + 4;
    return (base, extra) => {
        return base.padEnd(maxLen, ' ') + (extra || '');
    };
}

function useHive(clientOrOptions) {
    const hive = isHiveClient(clientOrOptions)
        ? clientOrOptions
        : createHive({
            ...clientOrOptions,
            agent: { name: 'HiveEnvelop', ...(clientOrOptions.agent ?? {}) },
        });
    void hive.info();
    return {
        onSchemaChange({ schema }) {
            hive.reportSchema({ schema });
        },
        onExecute({ args }) {
            const complete = hive.collectUsage(args);
            return {
                onExecuteDone({ result }) {
                    complete(result);
                },
            };
        },
    };
}
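
/**
 * Envelop usage sketch (illustrative; assumes `@envelop/core` is installed):
 *
 *   import { envelop } from '@envelop/core';
 *
 *   const getEnveloped = envelop({
 *     plugins: [useHive({ enabled: true, token: 'YOUR_HIVE_TOKEN', usage: true })],
 *   });
 */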

function createSupergraphSDLFetcher({ endpoint, key }) {
    return function supergraphSDLFetcher() {
        return axios
            .get(endpoint + '/supergraph', {
                headers: {
                    'X-Hive-CDN-Key': key,
                },
            })
            .then(response => {
                if (response.status >= 200 && response.status < 300) {
                    return response.data;
                }
                return Promise.reject(new Error(`Failed to fetch supergraph [${response.status}]`));
            })
            .then(supergraphSdl => ({
                id: createHash('sha256').update(supergraphSdl).digest('base64'),
                supergraphSdl,
            }));
    };
}
function createSupergraphManager(options) {
    const pollIntervalInMs = options.pollIntervalInMs ?? 30000;
    const fetchSupergraph = createSupergraphSDLFetcher({ endpoint: options.endpoint, key: options.key });
    let timer = null;
    return {
        async initialize(hooks) {
            const initialResult = await fetchSupergraph();
            function poll() {
                timer = setTimeout(async () => {
                    try {
                        const result = await fetchSupergraph();
                        if (result.supergraphSdl) {
                            hooks.update?.(result.supergraphSdl);
                        }
                    }
                    catch (error) {
                        console.error(`Failed to update supergraph: ${error instanceof Error ? error.message : error}`);
                    }
                    poll();
                }, pollIntervalInMs);
            }
            poll();
            return {
                supergraphSdl: initialResult.supergraphSdl,
                cleanup: async () => {
                    if (timer) {
                        clearTimeout(timer);
                    }
                },
            };
        },
    };
}
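
/**
 * Apollo Gateway usage sketch (illustrative; the CDN endpoint and key are
 * placeholders for your Hive target's values):
 *
 *   import { ApolloGateway } from '@apollo/gateway';
 *
 *   const gateway = new ApolloGateway({
 *     supergraphSdl: createSupergraphManager({
 *       endpoint: 'https://cdn.graphql-hive.com/YOUR_TARGET_ID',
 *       key: 'YOUR_CDN_KEY',
 *       pollIntervalInMs: 15000,
 *     }),
 *   });
 */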
function hiveApollo(clientOrOptions) {
    const hive = isHiveClient(clientOrOptions)
        ? clientOrOptions
        : createHive({
            ...clientOrOptions,
            agent: { name: 'HiveApollo', ...(clientOrOptions.agent ?? {}) },
        });
    void hive.info();
    return {
        requestDidStart(context) {
            // `overallCachePolicy` does not exist in v0
            const isLegacyV0 = !('overallCachePolicy' in context);
            let doc;
            const complete = hive.collectUsage({
                schema: context.schema,
                get document() {
                    return doc;
                },
                operationName: context.operationName,
                contextValue: context.context,
                variableValues: context.request.variables,
            });
            if (isLegacyV0) {
                return {
                    willSendResponse(ctx) {
                        doc = ctx.document;
                        complete(ctx.response);
                    },
                };
            }
            return Promise.resolve({
                async willSendResponse(ctx) {
                    doc = ctx.document;
                    complete(ctx.response);
                },
            });
        },
        serverWillStart(ctx) {
            // `engine` does not exist in v3
            const isLegacyV0 = 'engine' in ctx;
            hive.reportSchema({ schema: ctx.schema });
            if (isLegacyV0) {
                return {
                    async serverWillStop() {
                        await hive.dispose();
                    },
                };
            }
            return Promise.resolve({
                async serverWillStop() {
                    await hive.dispose();
                },
                schemaDidLoadOrUpdate(schemaContext) {
                    if (ctx.schema !== schemaContext.apiSchema) {
                        hive.reportSchema({ schema: schemaContext.apiSchema });
                    }
                },
            });
        },
    };
}
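
/**
 * Apollo Server usage sketch (illustrative; the plugin detects the legacy v0
 * and the v3 plugin APIs at runtime, as shown above):
 *
 *   import { ApolloServer } from 'apollo-server';
 *
 *   const server = new ApolloServer({
 *     typeDefs,
 *     resolvers,
 *     plugins: [hiveApollo({ enabled: true, token: 'YOUR_HIVE_TOKEN', usage: true })],
 *   });
 */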

function createFetcher({ endpoint, key }) {
    return function fetcher() {
        return axios
            .get(endpoint + '/schema', {
                headers: {
                    'X-Hive-CDN-Key': key,
                    accept: 'application/json',
                },
                responseType: 'json',
            })
            .then(response => {
                if (response.status >= 200 && response.status < 300) {
                    return response.data;
                }
                return Promise.reject(new Error(`Failed to fetch [${response.status}]`));
            });
    };
}
function createSchemaFetcher({ endpoint, key }) {
    const fetcher = createFetcher({ endpoint, key });
    return function schemaFetcher() {
        return fetcher().then(schema => ({
            id: createHash('sha256').update(schema.sdl).update(schema.url).update(schema.name).digest('base64'),
            ...schema,
        }));
    };
}
function createServicesFetcher({ endpoint, key }) {
    const fetcher = createFetcher({ endpoint, key });
    return function servicesFetcher() {
        return fetcher().then(services => services.map(service => ({
            id: createHash('sha256').update(service.sdl).update(service.url).update(service.name).digest('base64'),
            ...service,
        })));
    };
}
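
/**
 * CDN fetcher sketch (illustrative; endpoint and key are placeholders):
 *
 *   const fetchSchema = createSchemaFetcher({
 *     endpoint: 'https://cdn.graphql-hive.com/YOUR_TARGET_ID',
 *     key: 'YOUR_CDN_KEY',
 *   });
 *   const { id, sdl, url, name } = await fetchSchema();
 *   // `id` is a base64-encoded sha256 of sdl+url+name, handy as a cache key
 */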

export { createHive, createSchemaFetcher, createServicesFetcher, createSupergraphManager, createSupergraphSDLFetcher, hiveApollo, useHive };