1 |
|
2 | import tokenize from "glsl-tokenizer/string";
|
3 | import print from "glsl-token-string";
|
4 |
|
// Minimal shape of a glsl-tokenizer token — only the two fields this
// module reads/writes (other tokenizer fields are ignored).
type Token = {
  type: string,
  data: string,
};
|
9 |
|
// Conversion result: `data.glsl` is the transformed shader source
// (nullable via Flow's maybe type), `errors` collects issues that
// require a manual fix (e.g. mixed uniform declarations).
type Result = {
  data: ?{
    glsl: string,
  },
  errors: Array<*>,
};
|
16 |
|
17 | const tokenWithType = (token: Token, type: string): boolean =>
|
18 | token.type === type;
|
19 |
|
20 | const tokenWithTypeAndData = (
|
21 | token: Token,
|
22 | type: string,
|
23 | data: string
|
24 | ): boolean => token.type === type && token.data === data;
|
25 |
|
26 | const isMainFunctionDefinitionAt = (tokens: Array<*>, i: number): boolean => {
|
27 | if (!tokenWithTypeAndData(tokens[i], "keyword", "void")) return false;
|
28 | for (
|
29 | i = i + 1;
|
30 | i < tokens.length && tokenWithType(tokens[i], "whitespace");
|
31 | i++
|
32 | );
|
33 | if (i >= tokens.length) return false;
|
34 | if (!tokenWithTypeAndData(tokens[i], "ident", "main")) return false;
|
35 | return true;
|
36 | };
|
37 |
|
38 | const isFromOrToTexture2DCallAt = (tokens: Array<*>, i: number): boolean => {
|
39 | if (!tokenWithTypeAndData(tokens[i], "builtin", "texture2D")) return false;
|
40 | for (
|
41 | i = i + 1;
|
42 | i < tokens.length && tokenWithType(tokens[i], "whitespace");
|
43 | i++
|
44 | );
|
45 | if (i >= tokens.length) return false;
|
46 | if (tokenWithType(tokens[i], "operator", "(")) {
|
47 | i++;
|
48 | }
|
49 | for (
|
50 | i = i + 1;
|
51 | i < tokens.length && tokenWithType(tokens[i], "whitespace");
|
52 | i++
|
53 | );
|
54 | if (i >= tokens.length) return false;
|
55 | i--;
|
56 | const token = tokens[i];
|
57 | if (
|
58 | token.type === "ident" &&
|
59 | (token.data === "from" || token.data === "to")
|
60 | ) {
|
61 | return true;
|
62 | }
|
63 | return false;
|
64 | };
|
65 |
|
// Uniforms the old glsl-transition protocol declared explicitly but which
// gl-transitions now provides implicitly — their declarations are stripped.
const uniformsToRemove = ["from", "to", "progress", "resolution"];
|
67 |
|
68 | export default (glsl: string): Result => {
|
69 | const errors = [];
|
70 |
|
71 | const tokens: Array<Token> = tokenize(glsl);
|
72 | const newTokens: Array<Token> = [];
|
73 |
|
74 | let eatWhitespaces = false;
|
75 | for (let i = 0; i < tokens.length; i++) {
|
76 | const token = tokens[i];
|
77 | if (eatWhitespaces) {
|
78 | if (tokenWithType(token, "whitespace")) {
|
79 | continue;
|
80 | } else {
|
81 | eatWhitespaces = false;
|
82 | }
|
83 | }
|
84 | if (token.type === "preprocessor" && token.data.indexOf("#ifdef") === 0) {
|
85 | while (i < tokens.length) {
|
86 | const token = tokens[i];
|
87 | i++;
|
88 | if (token.type === "preprocessor" && token.data === "#endif") {
|
89 | break;
|
90 | }
|
91 | }
|
92 | eatWhitespaces = true;
|
93 | } else if (token.type === "keyword" && token.data === "uniform") {
|
94 | let foundToRemove = [], foundToKeep = [];
|
95 | let j;
|
96 | for (
|
97 | j = i + 1;
|
98 | j < tokens.length && !tokenWithTypeAndData(tokens[j], "operator", ";");
|
99 | j++
|
100 | ) {
|
101 | if (tokens[j].type === "ident") {
|
102 | const ident = tokens[j];
|
103 | if (uniformsToRemove.includes(ident.data)) {
|
104 | foundToRemove.push(ident);
|
105 | } else {
|
106 | foundToKeep.push(ident);
|
107 | }
|
108 | }
|
109 | }
|
110 | if (foundToRemove.length > 0) {
|
111 | if (foundToKeep.length > 0) {
|
112 | errors.push({
|
113 | type: "old_glsl_mixed_uniforms",
|
114 | message: "Please manually removes uniforms: " +
|
115 | foundToRemove.map(n => n.data).join(", "),
|
116 | });
|
117 |
|
118 | newTokens.push(token);
|
119 | } else {
|
120 | i = j;
|
121 | eatWhitespaces = true;
|
122 | }
|
123 | } else {
|
124 |
|
125 | newTokens.push(token);
|
126 | }
|
127 | } else if (isMainFunctionDefinitionAt(tokens, i)) {
|
128 | newTokens.push({
|
129 | type: "keyword",
|
130 | data: "vec4",
|
131 | });
|
132 | for (
|
133 | i = i + 1;
|
134 | i < tokens.length && tokenWithType(tokens[i], "whitespace");
|
135 | i++
|
136 | ) {
|
137 | newTokens.push(tokens[i]);
|
138 | }
|
139 | newTokens.push({
|
140 | type: "ident",
|
141 | data: "transition",
|
142 | });
|
143 | for (
|
144 | i = i + 1;
|
145 | i < tokens.length && tokenWithType(tokens[i], "whitespace");
|
146 | i++
|
147 | ) {
|
148 | newTokens.push(tokens[i]);
|
149 | }
|
150 | if (i >= tokens.length) break;
|
151 | newTokens.push(tokens[i]);
|
152 | newTokens.push({
|
153 | type: "keyword",
|
154 | data: "vec2",
|
155 | });
|
156 | newTokens.push({
|
157 | type: "whitespace",
|
158 | data: " ",
|
159 | });
|
160 | newTokens.push({
|
161 | type: "ident",
|
162 | data: "uv",
|
163 | });
|
164 |
|
165 |
|
166 | } else if (tokenWithTypeAndData(token, "builtin", "gl_FragCoord")) {
|
167 | newTokens.push({
|
168 | type: "ident",
|
169 | data: "uv",
|
170 | });
|
171 | } else if (tokenWithTypeAndData(token, "ident", "resolution")) {
|
172 | newTokens.push({
|
173 | type: "keyword",
|
174 | data: "vec2",
|
175 | });
|
176 | newTokens.push({
|
177 | type: "operator",
|
178 | data: "(",
|
179 | });
|
180 | newTokens.push({
|
181 | type: "literal",
|
182 | data: "1.0",
|
183 | });
|
184 | newTokens.push({
|
185 | type: "operator",
|
186 | data: ")",
|
187 | });
|
188 | } else if (tokenWithTypeAndData(token, "builtin", "gl_FragColor")) {
|
189 | newTokens.push({
|
190 | type: "keyword",
|
191 | data: "return",
|
192 | });
|
193 | for (
|
194 | i = i + 1;
|
195 | i < tokens.length && tokenWithType(tokens[i], "whitespace");
|
196 | i++
|
197 | ) {
|
198 | newTokens.push(tokens[i]);
|
199 | }
|
200 | if (i >= tokens.length) return;
|
201 | if (tokenWithType(tokens[i], "operator", "=")) {
|
202 | i++;
|
203 | }
|
204 | for (
|
205 | i = i + 1;
|
206 | i < tokens.length && tokenWithType(tokens[i], "whitespace");
|
207 | i++
|
208 | ) {
|
209 | newTokens.push(tokens[i]);
|
210 | }
|
211 | i--;
|
212 | } else if (isFromOrToTexture2DCallAt(tokens, i)) {
|
213 | let identData;
|
214 | for (i = i + 1; i < tokens.length; i++) {
|
215 | const t = tokens[i];
|
216 | if (t.type === "whitespace") {
|
217 | } else if (t.type === "ident") {
|
218 | identData = t.data;
|
219 | } else if (t.type === "operator") {
|
220 | if (t.data === ",") break;
|
221 | }
|
222 | }
|
223 | for (
|
224 | i = i + 1;
|
225 | i < tokens.length && tokenWithType(tokens[i], "whitespace");
|
226 | i++
|
227 | );
|
228 | i--;
|
229 | newTokens.push({
|
230 | type: "keyword",
|
231 | data: identData === "to" ? "getToColor" : "getFromColor",
|
232 | });
|
233 | newTokens.push({
|
234 | type: "operator",
|
235 | data: "(",
|
236 | });
|
237 | } else {
|
238 |
|
239 | newTokens.push(token);
|
240 | }
|
241 | }
|
242 |
|
243 | const head = `\
|
244 | // Author:
|
245 | // License: MIT
|
246 | `;
|
247 |
|
248 | return {
|
249 | data: {
|
250 | glsl: head + print(newTokens),
|
251 | },
|
252 | errors,
|
253 | };
|
254 | };
|