import * as fs from 'fs';
import * as path from 'path';
import * as rimraf from 'rimraf';

import { loadSchema, loadSchemaFromConfig, loadAndMergeQueryDocuments } from 'apollo-codegen-core/lib/loading';
import { validateQueryDocument } from './validation';
import { compileToIR } from 'apollo-codegen-core/lib/compiler';
import { compileToLegacyIR } from 'apollo-codegen-core/lib/compiler/legacyIR';
import serializeToJSON from 'apollo-codegen-core/lib/serializeToJSON';
import { BasicGeneratedFile } from 'apollo-codegen-core/lib/utilities/CodeGenerator';

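// Per-target code generators; each package exposes a generateSource entry point.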
import { generateSource as generateSwiftSource } from 'apollo-codegen-swift';
import { generateSource as generateTypescriptLegacySource } from 'apollo-codegen-typescript-legacy';
import { generateSource as generateFlowLegacySource } from 'apollo-codegen-flow-legacy';
import { generateSource as generateFlowSource } from 'apollo-codegen-flow';
import { generateSource as generateTypescriptSource } from 'apollo-codegen-typescript';
import { generateSource as generateScalaSource } from 'apollo-codegen-scala';

type TargetType = 'json' | 'swift' | 'ts-legacy' | 'typescript-legacy'
  | 'flow-legacy' | 'scala' | 'flow' | 'typescript'
  | 'ts';

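/**
 * Entry point for code generation: loads the schema and the query documents,
 * validates them, and writes generated code for the requested target.
 *
 * When `schemaPath` is null the schema is resolved from the project's GraphQL
 * config via `projectName`. `tagName` is the template-literal tag used to find
 * operations embedded in source files, and `only` is passed through to the
 * Swift generator to restrict which input file's definitions are emitted.
 */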
export default function generate(
  inputPaths: string[],
  schemaPath: string,
  outputPath: string,
  only: string,
  target: TargetType,
  tagName: string,
  projectName: string,
  options: any
) {
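  // Load the schema from an explicit path, or fall back to the project's GraphQL config.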
  const schema = schemaPath == null
    ? loadSchemaFromConfig(projectName)
    : loadSchema(schemaPath);

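  // Extract and merge all operations and fragments from the input files into a single document.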
  const document = loadAndMergeQueryDocuments(inputPaths, tagName);

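  // Fail fast if the merged document doesn't validate against the schema.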
  validateQueryDocument(schema, document);

  if (target === 'swift') {
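    // Force __typename into every selection set; the Swift target's generated code expects it.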
    options.addTypename = true;
    const context = compileToIR(schema, document, options);

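    // Treat an existing directory as a request for one file per definition; otherwise emit a single file.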
    const outputIndividualFiles = fs.existsSync(outputPath) && fs.statSync(outputPath).isDirectory();

    const generator = generateSwiftSource(context, outputIndividualFiles, only);

    if (outputIndividualFiles) {
      writeGeneratedFiles(generator.generatedFiles, outputPath);
    } else {
      fs.writeFileSync(outputPath, generator.output);
    }

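    // Optionally write a manifest mapping operation IDs to their source, e.g. for persisted queries.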
    if (options.generateOperationIds) {
      writeOperationIdsMap(context);
    }
  }
  else if (target === 'flow' || target === 'typescript' || target === 'ts') {
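    // Flow and TypeScript share the new IR; both generators return per-file contents plus a
    // `common` chunk that is appended to whatever gets written out.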
    const context = compileToIR(schema, document, options);
    const { generatedFiles, common } = target === 'flow'
      ? generateFlowSource(context)
      : generateTypescriptSource(context);

    const outFiles: {
      [fileName: string]: BasicGeneratedFile
    } = {};

    const outputIndividualFiles = fs.existsSync(outputPath) && fs.statSync(outputPath).isDirectory();

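    // Directory output: one file per generated document, each made self-contained by appending
    // the common code. Single-file output: concatenate everything and append common once.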
    if (outputIndividualFiles) {
      Object.keys(generatedFiles)
        .forEach((filePath: string) => {
          outFiles[path.basename(filePath)] = {
            output: generatedFiles[filePath].fileContents + common
          };
        });

      writeGeneratedFiles(
        outFiles,
        outputPath
      );
    } else {
      fs.writeFileSync(
        outputPath,
        Object.values(generatedFiles).map(v => v.fileContents).join("\n") + common
      );
    }
  }
  else {
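    // All remaining targets go through the older legacy IR and its single-output generators.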
    let output;
    const context = compileToLegacyIR(schema, document, options);
    switch (target) {
      case 'json':
        output = serializeToJSON(context);
        break;
      case 'ts-legacy':
      case 'typescript-legacy':
        output = generateTypescriptLegacySource(context);
        break;
      case 'flow-legacy':
        output = generateFlowLegacySource(context);
        break;
      case 'scala':
        output = generateScalaSource(context);
    }

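    // Write to the requested path, or print to stdout when no output path was given.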
    if (outputPath) {
      fs.writeFileSync(outputPath, output);
    } else {
      console.log(output);
    }
  }
}

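// Replaces the contents of outputDirectory with the given set of generated files.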
function writeGeneratedFiles(
  generatedFiles: { [fileName: string]: BasicGeneratedFile },
  outputDirectory: string
) {
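  // Delete and recreate the directory so stale files from a previous run don't linger.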
  rimraf.sync(outputDirectory);

  fs.mkdirSync(outputDirectory);

  for (const [fileName, generatedFile] of Object.entries(generatedFiles)) {
    fs.writeFileSync(path.join(outputDirectory, fileName), generatedFile.output);
  }
}

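// Shape of each entry in the operation IDs manifest produced below.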
interface OperationIdsMap {
  name: string;
  source: string;
}

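// Writes a JSON manifest keyed by operation ID to context.options.operationIdsPath.
// Illustrative shape (names are hypothetical):
//   { "<operationId>": { "name": "HeroName", "source": "query HeroName { ... }" } }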
function writeOperationIdsMap(context: any) {
  let operationIdsMap: { [id: string]: OperationIdsMap } = {};
  Object.keys(context.operations).map(k => context.operations[k]).forEach(operation => {
    operationIdsMap[operation.operationId] = {
      name: operation.operationName,
      source: operation.sourceWithFragments
    };
  });
  fs.writeFileSync(context.options.operationIdsPath, JSON.stringify(operationIdsMap, null, 2));
}