# Source scraped via unpkg (original header: "UNPKG — 10.2 kB — text/coffeescript — View Raw")
#(docs are generated from top-level comments, keep an eye on the formatting!)

### tensor =====================================================================

Tags
----
scripting, JS, internal, treenode, general concept

General description
-------------------
Tensors are a strange in-between of matrices and "computer"
rectangular data structures.

Tensors, unlike matrices, and like rectangular data structures,
can have an arbitrary number of dimensions (rank), although a tensor with
rank zero is just a scalar.

Tensors, like matrices and unlike many computer rectangular data structures,
must be "contiguous", i.e. have no empty spaces within their size, and
"uniform", i.e. each element must have the same shape and hence the same rank.

Also, tensors necessarily make a distinction between row vectors and
column vectors (which have a rank of 2) and uni-dimensional vectors (rank 1).
They look very similar but they are fundamentally different.

Tensors with elements that are also tensors get promoted to a higher rank;
this is so we can represent and get the rank of a matrix correctly.
Example:
start with a tensor of rank 1 with 2 elements (i.e. shape: 2);
if you put another 2 tensors of rank 1 with 2 elements (i.e. shape: 2)
into both of its elements,
then the result is a tensor of rank 2 with shape 2,2,
i.e. the dimension of a tensor at all times must be
the number of nested tensors in it.
Also, all tensors must be "uniform", i.e. they must be accessed
uniformly, which means that all existing elements of a tensor
must be contiguous and have the same shape.
The implication of all this is that you can't put arbitrary
tensors inside tensors (as you would do to represent block matrices);
rather, all tensors inside tensors must have the same shape (and hence, rank).

Limitations
-----------
n.a.

Implementation info
-------------------
Tensors are implemented...

###

52# Called from the "eval" module to evaluate tensor elements.
53# p1 points to the tensor operand.
54Eval_tensor = ->
55 i = 0
56 ndim = 0
57 nelem = 0
58 #U **a, **b
59
60 #---------------------------------------------------------------------
61 #
62 # create a new tensor for the result
63 #
64 #---------------------------------------------------------------------
65
66 check_tensor_dimensions p1
67
68 nelem = p1.tensor.nelem
69
70 ndim = p1.tensor.ndim
71
72 p2 = alloc_tensor(nelem)
73
74 p2.tensor.ndim = ndim
75
76 for i in [0...ndim]
77 p2.tensor.dim[i] = p1.tensor.dim[i]
78
79 #---------------------------------------------------------------------
80 #
81 # b = Eval(a)
82 #
83 #---------------------------------------------------------------------
84
85 a = p1.tensor.elem
86 b = p2.tensor.elem
87
88 check_tensor_dimensions p2
89
90 for i in [0...nelem]
91 #console.log "push/pop: pushing element a of " + i
92 push(a[i])
93 Eval()
94 #console.log "push/pop: popping into element b of " + i
95 b[i] = pop()
96
97 check_tensor_dimensions p1
98 check_tensor_dimensions p2
99 #---------------------------------------------------------------------
100 #
101 # push the result
102 #
103 #---------------------------------------------------------------------
104
105 push(p2)
106
107 promote_tensor()
108
109#-----------------------------------------------------------------------------
110#
111# Add tensors
112#
113# Input: Operands on stack
114#
115# Output: Result on stack
116#
117#-----------------------------------------------------------------------------
118
119tensor_plus_tensor = ->
120 i = 0
121 ndim = 0
122 nelem = 0
123 #U **a, **b, **c
124
125 save()
126
127 p2 = pop()
128 p1 = pop()
129
130 # are the dimension lists equal?
131
132 ndim = p1.tensor.ndim
133
134 if (ndim != p2.tensor.ndim)
135 push(symbol(NIL))
136 restore()
137 return
138
139 for i in [0...ndim]
140 if (p1.tensor.dim[i] != p2.tensor.dim[i])
141 push(symbol(NIL))
142 restore()
143 return
144
145 # create a new tensor for the result
146
147 nelem = p1.tensor.nelem
148
149 p3 = alloc_tensor(nelem)
150
151 p3.tensor.ndim = ndim
152
153 for i in [0...ndim]
154 p3.tensor.dim[i] = p1.tensor.dim[i]
155
156 # c = a + b
157
158 a = p1.tensor.elem
159 b = p2.tensor.elem
160 c = p3.tensor.elem
161
162 for i in [0...nelem]
163 push(a[i])
164 push(b[i])
165 add()
166 c[i] = pop()
167
168 # push the result
169
170 push(p3)
171
172 restore()
173
174#-----------------------------------------------------------------------------
175#
176# careful not to reorder factors
177#
178#-----------------------------------------------------------------------------
179
180tensor_times_scalar = ->
181 i = 0
182 ndim = 0
183 nelem = 0
184 #U **a, **b
185
186 save()
187
188 p2 = pop()
189 p1 = pop()
190
191 ndim = p1.tensor.ndim
192 nelem = p1.tensor.nelem
193
194 p3 = alloc_tensor(nelem)
195
196 p3.tensor.ndim = ndim
197
198 for i in [0...ndim]
199 p3.tensor.dim[i] = p1.tensor.dim[i]
200
201 a = p1.tensor.elem
202 b = p3.tensor.elem
203
204 for i in [0...nelem]
205 push(a[i])
206 push(p2)
207 multiply()
208 b[i] = pop()
209
210 push(p3)
211 restore()
212
213scalar_times_tensor = ->
214 i = 0
215 ndim = 0
216 nelem = 0
217 #U **a, **b
218
219 save()
220
221 p2 = pop()
222 p1 = pop()
223
224 ndim = p2.tensor.ndim
225 nelem = p2.tensor.nelem
226
227 p3 = alloc_tensor(nelem)
228
229 p3.tensor.ndim = ndim
230
231 for i in [0...ndim]
232 p3.tensor.dim[i] = p2.tensor.dim[i]
233
234 a = p2.tensor.elem
235 b = p3.tensor.elem
236
237 for i in [0...nelem]
238 push(p1)
239 push(a[i])
240 multiply()
241 b[i] = pop()
242
243 push(p3)
244
245 restore()
246
247check_tensor_dimensions = (p) ->
248 if p.tensor.nelem != p.tensor.elem.length
249 console.log "something wrong in tensor dimensions"
250 debugger
251
252is_square_matrix = (p) ->
253 if (istensor(p) && p.tensor.ndim == 2 && p.tensor.dim[0] == p.tensor.dim[1])
254 return 1
255 else
256 return 0
257
258#-----------------------------------------------------------------------------
259#
260# gradient of tensor
261#
262#-----------------------------------------------------------------------------
263
264d_tensor_tensor = ->
265 i = 0
266 j = 0
267 ndim = 0
268 nelem = 0
269 #U **a, **b, **c
270
271 ndim = p1.tensor.ndim
272 nelem = p1.tensor.nelem
273
274 if (ndim + 1 >= MAXDIM)
275 push_symbol(DERIVATIVE)
276 push(p1)
277 push(p2)
278 list(3)
279 return
280
281 p3 = alloc_tensor(nelem * p2.tensor.nelem)
282
283 p3.tensor.ndim = ndim + 1
284
285 for i in [0...ndim]
286 p3.tensor.dim[i] = p1.tensor.dim[i]
287
288 p3.tensor.dim[ndim] = p2.tensor.dim[0]
289
290 a = p1.tensor.elem
291 b = p2.tensor.elem
292 c = p3.tensor.elem
293
294 for i in [0...nelem]
295 for j in [0...p2.tensor.nelem]
296 push(a[i])
297 push(b[j])
298 derivative()
299 c[i * p2.tensor.nelem + j] = pop()
300
301 push(p3)
302
303
304#-----------------------------------------------------------------------------
305#
306# gradient of scalar
307#
308#-----------------------------------------------------------------------------
309
310d_scalar_tensor = ->
311 #U **a, **b
312
313 p3 = alloc_tensor(p2.tensor.nelem)
314
315 p3.tensor.ndim = 1
316
317 p3.tensor.dim[0] = p2.tensor.dim[0]
318
319 a = p2.tensor.elem
320 b = p3.tensor.elem
321
322 for i in [0...p2.tensor.nelem]
323 push(p1)
324 push(a[i])
325 derivative()
326 b[i] = pop()
327
328 push(p3)
329
330#-----------------------------------------------------------------------------
331#
332# Derivative of tensor
333#
334#-----------------------------------------------------------------------------
335
336d_tensor_scalar = ->
337 i = 0
338 #U **a, **b
339
340 p3 = alloc_tensor(p1.tensor.nelem)
341
342 p3.tensor.ndim = p1.tensor.ndim
343
344 for i in [0...p1.tensor.ndim]
345 p3.tensor.dim[i] = p1.tensor.dim[i]
346
347 a = p1.tensor.elem
348 b = p3.tensor.elem
349
350 for i in [0...p1.tensor.nelem]
351 push(a[i])
352 push(p2)
353 derivative()
354 b[i] = pop()
355
356 push(p3)
357
358compare_tensors = (p1, p2) ->
359
360 i = 0
361 if (p1.tensor.ndim < p2.tensor.ndim)
362 return -1
363
364 if (p1.tensor.ndim > p2.tensor.ndim)
365 return 1
366
367 for i in [0...p1.tensor.ndim]
368 if (p1.tensor.dim[i] < p2.tensor.dim[i])
369 return -1
370 if (p1.tensor.dim[i] > p2.tensor.dim[i])
371 return 1
372
373 for i in [0...p1.tensor.nelem]
374 if (equal(p1.tensor.elem[i], p2.tensor.elem[i]))
375 continue
376 if (lessp(p1.tensor.elem[i], p2.tensor.elem[i]))
377 return -1
378 else
379 return 1
380
381 return 0
382
383#-----------------------------------------------------------------------------
384#
385# Raise a tensor to a power
386#
387# Input: p1 tensor
388#
389# p2 exponent
390#
391# Output: Result on stack
392#
393#-----------------------------------------------------------------------------
394
395power_tensor = ->
396 i = 0
397 k = 0
398 n = 0
399
400 # first and last dims must be equal
401
402 k = p1.tensor.ndim - 1
403
404 if (p1.tensor.dim[0] != p1.tensor.dim[k])
405 push_symbol(POWER)
406 push(p1)
407 push(p2)
408 list(3)
409 return
410
411 push(p2)
412
413 n = pop_integer()
414
415 if (isNaN(n))
416 push_symbol(POWER)
417 push(p1)
418 push(p2)
419 list(3)
420 return
421
422 if (n == 0)
423 if (p1.tensor.ndim != 2)
424 stop("power(tensor,0) with tensor rank not equal to 2")
425 n = p1.tensor.dim[0]
426 p1 = alloc_tensor(n * n)
427 p1.tensor.ndim = 2
428 p1.tensor.dim[0] = n
429 p1.tensor.dim[1] = n
430 for i in [0...n]
431 p1.tensor.elem[n * i + i] = one
432
433 check_tensor_dimensions p1
434
435 push(p1)
436 return
437
438 if (n < 0)
439 n = -n
440 push(p1)
441 inv()
442 p1 = pop()
443
444 push(p1)
445
446 for i in [1...n]
447 push(p1)
448 inner()
449 if (iszero(stack[tos - 1]))
450 break
451
452copy_tensor = ->
453 i = 0
454
455 save()
456
457 p1 = pop()
458
459 p2 = alloc_tensor(p1.tensor.nelem)
460
461 p2.tensor.ndim = p1.tensor.ndim
462
463 for i in [0...p1.tensor.ndim]
464 p2.tensor.dim[i] = p1.tensor.dim[i]
465
466 for i in [0...p1.tensor.nelem]
467 p2.tensor.elem[i] = p1.tensor.elem[i]
468
469 check_tensor_dimensions p1
470 check_tensor_dimensions p2
471
472 push(p2)
473
474 restore()
475
476# Tensors with elements that are also tensors get promoted to a higher rank.
477
478promote_tensor = ->
479 i = 0
480 j = 0
481 k = 0
482 nelem = 0
483 ndim = 0
484
485 save()
486
487 p1 = pop()
488
489 if (!istensor(p1))
490 push(p1)
491 restore()
492 return
493
494 p2 = p1.tensor.elem[0]
495
496 for i in [1...p1.tensor.nelem]
497 if (!compatible(p2, p1.tensor.elem[i]))
498 stop("Cannot promote tensor due to inconsistent tensor components.")
499
500 if (!istensor(p2))
501 push(p1)
502 restore()
503 return
504
505 ndim = p1.tensor.ndim + p2.tensor.ndim
506
507 if (ndim > MAXDIM)
508 stop("tensor rank > " + MAXDIM)
509
510 nelem = p1.tensor.nelem * p2.tensor.nelem
511
512 p3 = alloc_tensor(nelem)
513
514 p3.tensor.ndim = ndim
515
516 for i in [0...p1.tensor.ndim]
517 p3.tensor.dim[i] = p1.tensor.dim[i]
518
519 for j in [0...p2.tensor.ndim]
520 p3.tensor.dim[i + j] = p2.tensor.dim[j]
521
522 k = 0
523
524 for i in [0...p1.tensor.nelem]
525 p2 = p1.tensor.elem[i]
526 for j in [0...p2.tensor.nelem]
527 p3.tensor.elem[k++] = p2.tensor.elem[j]
528
529 check_tensor_dimensions p2
530 check_tensor_dimensions p3
531
532 push(p3)
533
534 restore()
535
536compatible = (p,q) ->
537
538 if (!istensor(p) && !istensor(q))
539 return 1
540
541 if (!istensor(p) || !istensor(q))
542 return 0
543
544 if (p.tensor.ndim != q.tensor.ndim)
545 return 0
546
547 for i in [0...p.tensor.ndim]
548 if (p.tensor.dim[i] != q.tensor.dim[i])
549 return 0
550
551 return 1
552
553