1 | 'use strict';
|
2 |
|
3 | const Fs = require('fs');
|
4 | const Path = require('path');
|
5 | const { expect } = require('code');
|
6 | const { graphql } = require('graphi');
|
7 | const Hapi = require('hapi');
|
8 | const Lab = require('lab');
|
9 | const CloudApiGql = require('../lib/');
|
// Raw GraphQL schema text (Buffer) used to enumerate expected resolvers.
const schema = Fs.readFileSync(Path.join(__dirname, '../lib/schema.graphql'));


const lab = Lab.script();
exports.lab = lab;
const { it } = lab;
|
14 |
|
15 |
|
// Smoke test: the plugin registers against a bare hapi server without throwing.
it('can be registered with hapi', async () => {

    const keyPath = Path.join(__dirname, 'test.key');
    const server = new Hapi.Server();
    await server.register({ plugin: CloudApiGql, options: { keyPath } });
});
|
20 |
|
// Verifies that every field declared on the schema's root Query and Mutation
// types has a matching route (graphi registers one route per resolver, whose
// path is '/<fieldName>').
it('has a resolver for every query and mutation in the schema', async () => {

    // Collect all field names from the Query and Mutation object type definitions.
    const fields = [];
    const parsed = graphql.parse(schema.toString());
    for (const def of parsed.definitions) {
        if (def.kind !== 'ObjectTypeDefinition' || (def.name.value !== 'Query' && def.name.value !== 'Mutation')) {
            continue;
        }

        fields.push(...def.fields.map((field) => field.name.value));
    }

    // Guard against a vacuous pass: an empty field list would make the
    // assertions below trivially succeed even if schema parsing broke.
    expect(fields).to.not.be.empty();

    const server = new Hapi.Server();
    await server.register({ plugin: CloudApiGql, options: { keyPath: Path.join(__dirname, 'test.key') } });
    await server.initialize();

    // Strip the leading '/' so paths compare directly against field names.
    // (slice(1) replaces the deprecated String.prototype.substr.)
    const paths = server.table().map((route) => route.path.slice(1));

    for (const field of fields) {
        expect(paths).to.contain(field);
    }
});
|
45 |
|
46 |
|