1{"version":3,"file":"gradients_test.js","sourceRoot":"","sources":["../src/gradients_test.ts"],"names":[],"mappings":"AACA;;;;;;;;;;;;;;;GAeG;AAEH,OAAO,EAAC,MAAM,EAAC,MAAM,UAAU,CAAC;AAChC,OAAO,KAAK,EAAE,MAAM,SAAS,CAAC;AAC9B,OAAO,EAAC,QAAQ,EAAE,iBAAiB,EAAC,MAAM,gBAAgB,CAAC;AAE3D,OAAO,EAAC,iBAAiB,EAAC,MAAM,aAAa,CAAC;AAE9C,iBAAiB,CAAC,WAAW,EAAE,QAAQ,EAAE,GAAG,EAAE;IAC5C,EAAE,CAAC,eAAe,EAAE,KAAK,IAAI,EAAE;QAC7B,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QACxD,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAErD,MAAM,CAAC,EAAE,EAAE,EAAE,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAc,EAAE,CAAc,EAAE,EAAE;YAC3D,gBAAgB;YAChB,cAAc;YACd,aAAa;YACb,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YAC1B,MAAM,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YACrB,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACnB,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAEX,YAAY;QACZ,kBAAkB;QAClB,kCAAkC;QAClC,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAEtC,yBAAyB;QACzB,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;QAClC,IAAI,UAAU,GAAG,KAAK,CAAC;QACvB,IAAI,UAAU,GAAG,IAAI,CAAC;QACtB,iBAAiB,CACb,MAAM,EAAE,CAAC,IAAI,EAAE,EACf,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;QAE7D,yBAAyB;QACzB,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;QAClC,UAAU,GAAG,IAAI,CAAC;QAClB,UAAU,GAAG,KAAK,CAAC;QACnB,iBAAiB,CACb,MAAM,EAAE,CAAC,IAAI,EAAE,EACf,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;IAC/D,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,SAAS,EAAE,KAAK,IAAI,EAAE;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC;QACtC,MAAM,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;QAC3C,iBAAiB,CAAC,MAAM,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;IACnD,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,6BAA6B,EAAE,KAAK,IAAI,EAAE;QAC3C,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC;QAEtC,MAAM,MAAM,GAAG,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;QAC3C,MAAM,OAAO,GAAG,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;QAC5C,iBAAiB,CAAC,MAAM,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;QACjD,iBAAiB,CAAC,MAAM,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;IACpD,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,gDAAgD,EAAE,GAAG,EAAE;QACxD,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;YACvB,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;QACzC,CAAC,CAAC,CAAC;QACH,MAAM,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,YAAY,EAAE,CAAC;QAChD,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACxC,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,kDAAkD,EAAE,GAAG,EAAE;QAC1D,MAAM,QAAQ,GAAG,EAAE,CAAC,UAAU,CAAC,CAAC,CAAY,EAAE,EAAE;YAC9C,OAAO;gBACL,KAAK,EAAE,CAAC;gBACR,QAAQ,EAAE,GAAG,EAAE;oBACb,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;gBAC1C,CAAC;aACF,CAAC;QACJ,CAAC,CAAC,CAAC;QACH,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;QACvC,MAAM,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,YAAY,EAAE,CAAC;QAChD,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACxC,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,UAAU,EAAE,KAAK,IAAI,EAAE;QACxB,MAAM,KAAK,GAAG,EA
AE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC;QACxC,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QAC9C,iBAAiB,CAAC,MAAM,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;IACtD,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,8BAA8B,EAAE,KAAK,IAAI,EAAE;QAC5C,MAAM,KAAK,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC;QAExC,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QAC9C,MAAM,OAAO,GAAG,KAAK,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QAC/C,iBAAiB,CAAC,MAAM,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;QACpD,iBAAiB,CAAC,MAAM,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;IACvD,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,oBAAoB,EAAE,KAAK,IAAI,EAAE;QAClC,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAC5C,MAAM,QAAQ,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QAEpD,MAAM,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;YACrB,MAAM,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,CAAC;YACtB,MAAM,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;YAC9B,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACnB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QAEN,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QACjC,iBAAiB,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IACnD,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,yCAAyC,EAAE,GAAG,EAAE;QACjD,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAC5C,MAAM,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,CAAC;QACtB,MAAM,QAAQ,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QAEpD,MAAM,CAAC,GAAG,GAAG,EAAE;YACb,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;gBAChB,MAAM,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;gBAC9B,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACnB,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QACb,CAAC,CAAC;QACF,MAAM,CAAC,CAAC,CAAC,CAAC,YAAY,EAAE,CAAC;IAC3B,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,6DAA6D,EAC7D,KAAK,IAAI,EAAE;QACT,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,CAAC,CAAC;QAC5C,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,EAAE,MAAM,CAAC,CAAC;QAC7C,MAAM,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;YACrB,kDAAkD;YAClD,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YAChB,OAAO,CAAC,CAAC,GAAG,EAAE,CAAC;QACjB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACN,iBAAiB,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IAC7C,CAAC,CAAC,CAAC;IAEN,EAAE,CAAC,0CAA0C,EAAE,GAAG,EAAE;QAClD,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,CAAC,CAAC;QAC5C,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,EAAE,MAAM,CAAC,CAAC;QAC7C,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;YACvB,6DAA6D;YAC7D,OAAO,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QACzB,CAAC,CAAC,CAAC;QACH,MAAM,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,EAAE,CAAC;IACvC,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,mBAAmB,EAAE,KAAK,IAAI,EAAE;QACjC,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QACrD,MAAM,QAAQ,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QAE5D,MAAM,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;YACrB,MAAM,CAAC,GAAG
,CAAC,CAAC,OAAO,EAAE,CAAC;YACtB,MAAM,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;YAC9B,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACnB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QAEN,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QACjC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;QACpC,iBAAiB,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IACnD,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,2CAA2C,EAAE,GAAG,EAAE;QACnD,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QACrD,MAAM,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,CAAC;QACtB,MAAM,QAAQ,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QAE5D,MAAM,CAAC,GAAG,GAAG,EAAE;YACb,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;gBACV,MAAM,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;gBAC9B,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACnB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACR,CAAC,CAAC;QACF,MAAM,CAAC,CAAC,CAAC,CAAC,YAAY,EAAE,CAAC;IAC3B,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,iDAAiD,EAAE,GAAG,EAAE;QACzD,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC;QAClC,MAAM,SAAS,GAAG,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACpC,SAAS,CAAC,QAAQ,CAAC,GAAG,EAAE;YACtB,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC;YACrB,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC;YACrB,CAAC,CAAC,OAAO,EAAE,CAAC;YACZ,OAAO,CAAC,CAAC;QACX,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,wBAAwB,EAAE,GAAG,EAAE;QAChC,MAAM,MAAM,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,UAAU,CAAC;QACtC,MAAM,CAAC,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QACnC,CAAC,CAAC,OAAO,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,UAAU,CAAC;QACnC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC3B,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,iBAAiB,CAAC,mBAAmB,EAAE,QAAQ,EAAE,GAAG,EAAE;IACpD,EAAE,CAAC,eAAe,EAAE,KAAK,IAAI,EAAE;QAC7B,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QACxD,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAErD,MAAM,EAAC,KAAK,EAAE,KAAK,EAAC,GAChB,EAAE,CAAC,aAAa,CAAC,CAAC,CAAc,EAAE,CAAc,EAAE,EAAE;YAClD,gBAAgB;YAChB,cAAc;YACd,aAAa;YACb,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YAC1B,MAAM,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YACrB,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACnB,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAEf,iBAAiB,CAAC,MAAM,KAAK,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC;QAE1C,YAAY;QACZ,kBAAkB;QAClB,kCAAkC;QAClC,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAEtC,MAAM,CAAC,EAAE,EAAE,EAAE,CAAC,GAAG,KAAK,CAAC;QACvB,yBAAyB;QACzB,IAAI,UAAU,GAAG,KAAK,CAAC;QACvB,IAAI,UAAU,GAAG,IAAI,CAAC;QACtB,iBAAiB,CACb,MAAM,EAAE,CAAC,IAAI,EAAE,EACf,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;QAE7D,yBAAyB;QACzB,UAAU,GAAG,IAAI,CAAC;QAClB,UAAU,GAAG,KAAK,CAAC;QACnB,iBAAiB,CACb,MAAM,EAAE,CAAC,IAAI,EAAE,EACf,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;IAC/D,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,4BAA4B,EAAE,KAAK,IAAI,EAAE;QAC1C,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QACxD,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAA
C,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAErD,MAAM,EAAC,KAAK,EAAE,KAAK,EAAC,GAChB,EAAE,CAAC,aAAa,CAAC,CAAC,CAAc,EAAE,CAAc,EAAE,EAAE;YAClD,gBAAgB;YAChB,cAAc;YACd,aAAa;YACb,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YAC1B,OAAO,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE;gBAClB,MAAM,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;gBACrB,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACnB,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAEf,iBAAiB,CAAC,MAAM,KAAK,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC;QAE1C,YAAY;QACZ,kBAAkB;QAClB,kCAAkC;QAClC,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAEtC,MAAM,CAAC,EAAE,EAAE,EAAE,CAAC,GAAG,KAAK,CAAC;QACvB,yBAAyB;QACzB,IAAI,UAAU,GAAG,KAAK,CAAC;QACvB,IAAI,UAAU,GAAG,IAAI,CAAC;QACtB,iBAAiB,CACb,MAAM,EAAE,CAAC,IAAI,EAAE,EACf,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;QAE7D,yBAAyB;QACzB,UAAU,GAAG,IAAI,CAAC;QAClB,UAAU,GAAG,KAAK,CAAC;QACnB,iBAAiB,CACb,MAAM,EAAE,CAAC,IAAI,EAAE,EACf,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;IAC/D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,iBAAiB,CAAC,wBAAwB,EAAE,QAAQ,EAAE,GAAG,EAAE;IACzD,EAAE,CAAC,eAAe,EAAE,KAAK,IAAI,EAAE;QAC7B,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;QAChC,MAAM,MAAM,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,UAAU,CAAC;QACtC,MAAM,QAAQ,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACxD,MAAM,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;QAC3B,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAChD,iBAAiB,CAAC,MAAM,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC;IACpD,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,iBAAiB,EAAE,KAAK,IAAI,EAAE;QAC/B,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;QACvB,MAAM,QAAQ,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;QAC3B,iCAAiC;QACjC,iBAAiB,CAAC,MAAM,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;IAC9C,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,iBAAiB,EAAE,KAAK,IAAI,EAAE;QAC/B,MAAM,KAAK,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QAC7C,MAAM,UAAU,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QAChD,MAAM,MAAM,GAAG,UAAU,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QACnD,iBAAiB,CAAC,MAAM,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC;IACvD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,iBAAiB,CAAC,gBAAgB,EAAE,QAAQ,EAAE,GAAG,EAAE;IACjD,EAAE,CAAC,OAAO,EAAE,KAAK,IAAI,EAAE;QACrB,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;QACvB,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QAChC,MAAM,EAAE,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;QAExB,MAAM,SAAS,GAAG,EAAE,CAAC,UAAU,CAAC,CAAC,CAAY,EAAE,EAAE;YAC/C,MAAM,KAAK,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YAC3B,MAAM,QAAQ,GAAG,CAAC,EAAa,EAAE,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;YAC3D,OAAO,EAAC,KAAK,EAAE,QAAQ,EAAC,CAAC;QAC3B,CAAC,CAAC,CAAC;QAEH,MAAM,EAAC,KAAK,EAAE,IAAI,EAAC,GAAG,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAChE,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;QACrC,iBAAiB,CAAC,MAAM,KAAK,CAAC,IAAI,EAAE,EAAE,CA
AC,CAAC,CAAC,CAAC,CAAC;QAC3C,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;QACpC,iBAAiB,CAAC,MAAM,IAAI,CAAC,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;IAC7C,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,gDAAgD,EAAE,KAAK,IAAI,EAAE;QAC9D,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;QACvB,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QAEhC,MAAM,EAAE,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;QAExB,MAAM,SAAS,GAAG,EAAE,CAAC,UAAU,CAAC,CAAC,CAAY,EAAE,IAAqB,EAAE,EAAE;YACtE,MAAM,KAAK,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YAC3B,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YACV,MAAM,QAAQ,GAAG,CAAC,EAAa,EAAE,KAAe,EAAE,EAAE;gBAClD,MAAM,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;gBAClB,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACnB,CAAC,CAAC;YACF,OAAO,EAAC,KAAK,EAAE,QAAQ,EAAC,CAAC;QAC3B,CAAC,CAAC,CAAC;QAEH,MAAM,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QACvD,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;QAEnC,yCAAyC;QACzC,iBAAiB,CAAC,MAAM,GAAG,CAAC,IAAI,EAAE,EAAE,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;IACvD,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,2CAA2C,EAAE,KAAK,IAAI,EAAE;QACzD,MAAM,QAAQ,GAAG,EAAE,CAAC,UAAU,CAAC,CAAC,CAAY,EAAE,IAAqB,EAAE,EAAE;YACrE,8DAA8D;YAC9D,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YACV,OAAO;gBACL,KAAK,EAAE,CAAC,CAAC,MAAM,EAAE;gBACjB,QAAQ,EAAE,CAAC,EAAE,EAAE,KAAe,EAAE,EAAE;oBAChC,MAAM,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;oBAClB,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;gBACzB,CAAC;aACF,CAAC;QACJ,CAAC,CAAC,CAAC;QACH,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QACnC,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;QAEvC,iBAAiB,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QACnD,iBAAiB,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC","sourcesContent":["\n/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENGINE} from './engine';\nimport * as tf from './index';\nimport {ALL_ENVS, describeWithFlags} from './jasmine_util';\nimport {Tensor} from './tensor';\nimport {expectArraysClose} from './test_util';\n\ndescribeWithFlags('gradients', ALL_ENVS, () => {\n it('matmul + relu', async () => {\n const a = tf.tensor2d([-1, 2, -3, 10, -20, 30], [2, 3]);\n const b = tf.tensor2d([2, -3, 4, -1, 2, -3], [3, 2]);\n\n const [da, db] = tf.grads((a: tf.Tensor2D, b: tf.Tensor2D) => {\n // m = dot(a, b)\n // y = relu(m)\n // e = sum(y)\n const m = tf.matMul(a, b);\n const y = tf.relu(m);\n return tf.sum(y);\n })([a, b]);\n\n // de/dy = 1\n // dy/dm = step(m)\n // de/dm = de/dy * dy/dm = step(m)\n const dedm = tf.step(tf.matMul(a, b));\n\n // de/da = dot(de/dy, bT)\n expect(da.shape).toEqual(a.shape);\n let transposeA = false;\n let transposeB = true;\n expectArraysClose(\n await da.data(),\n await tf.matMul(dedm, b, transposeA, transposeB).data());\n\n // de/db = dot(aT, de/dy)\n expect(db.shape).toEqual(b.shape);\n transposeA = true;\n transposeB = false;\n expectArraysClose(\n await db.data(),\n await tf.matMul(a, dedm, transposeA, transposeB).data());\n });\n\n it('grad(f)', async () => {\n const grad = tf.grad(x => x.square());\n const result = grad(tf.tensor1d([.1, .2]));\n expectArraysClose(await result.data(), [.2, .4]);\n });\n\n it('calling grad(f) twice works', async () => {\n const grad = tf.grad(x => x.square());\n\n const result = grad(tf.tensor1d([.1, .2]));\n const result2 = grad(tf.tensor1d([.1, .4]));\n expectArraysClose(await result.data(), [.2, .4]);\n expectArraysClose(await result2.data(), [.2, .8]);\n });\n\n it('grad(f): throwing an error during forward pass', () => {\n const grad = tf.grad(x => {\n throw new Error('failed forward pass');\n });\n expect(() => grad(tf.zeros([]))).toThrowError();\n expect(ENGINE.isTapeOn()).toBe(false);\n });\n\n it('grad(f): throwing an error during backwards pass', () => {\n const customOp = tf.customGrad((x: tf.Tensor) => {\n return {\n value: x,\n gradFunc: () => {\n throw new Error('failed backward pass');\n }\n };\n });\n const grad = tf.grad(x => customOp(x));\n expect(() => grad(tf.zeros([]))).toThrowError();\n expect(ENGINE.isTapeOn()).toBe(false);\n });\n\n it('grads(f)', async () => {\n const grads = tf.grads(x => x.square());\n const result = grads([tf.tensor1d([.1, .2])]);\n expectArraysClose(await result[0].data(), [.2, .4]);\n });\n\n it('calling grads(f) twice works', async () => {\n const grads = tf.grads(x => x.square());\n\n const result = grads([tf.tensor1d([.1, .2])]);\n const result2 = grads([tf.tensor1d([.1, .4])]);\n expectArraysClose(await result[0].data(), [.2, .4]);\n expectArraysClose(await result2[0].data(), [.2, .8]);\n });\n\n it('works with reshape', async () => {\n const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n const exponent = tf.tensor1d([2, 2, 2, 
2], 'int32');\n\n const da = tf.grad(a => {\n const b = a.flatten();\n const m = tf.pow(b, exponent);\n return tf.sum(m);\n })(a);\n\n expect(da.shape).toEqual([2, 2]);\n expectArraysClose(await da.data(), [2, 4, 6, 8]);\n });\n\n it('reshape outside tf.grads() throws error', () => {\n const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n const b = a.flatten();\n const exponent = tf.tensor1d([2, 2, 2, 2], 'int32');\n\n const f = () => {\n tf.grads((a, b) => {\n const m = tf.pow(b, exponent);\n return tf.sum(m);\n })([a, b]);\n };\n expect(f).toThrowError();\n });\n\n it('does not error if irrelevant (pruned) ops are missing grads',\n async () => {\n const a = tf.tensor1d([true, true], 'bool');\n const b = tf.tensor1d([false, true], 'bool');\n const da = tf.grad(a => {\n // Logical has no gradients, but it is irrelevant.\n a.logicalAnd(b);\n return a.sum();\n })(a);\n expectArraysClose(await da.data(), [1, 1]);\n });\n\n it('errors if relevant ops are missing grads', () => {\n const a = tf.tensor1d([true, true], 'bool');\n const b = tf.tensor1d([false, true], 'bool');\n const dfda = tf.grad(a => {\n // Logical has no gradients, but it's relevant to the output.\n return a.logicalAnd(b);\n });\n expect(() => dfda(a)).toThrowError();\n });\n\n it('works with asType', async () => {\n const a = tf.tensor2d([1, 2, 3, 4], [2, 2], 'int32');\n const exponent = tf.tensor2d([2, 2, 2, 2], [2, 2], 'int32');\n\n const da = tf.grad(a => {\n const b = a.toFloat();\n const m = tf.pow(b, exponent);\n return tf.sum(m);\n })(a);\n\n expect(da.shape).toEqual([2, 2]);\n expect(da.dtype).toEqual('float32');\n expectArraysClose(await da.data(), [2, 4, 6, 8]);\n });\n\n it('asType outside of tf.grads() throws error', () => {\n const a = tf.tensor2d([1, 2, 3, 4], [2, 2], 'int32');\n const b = a.toFloat();\n const exponent = tf.tensor2d([2, 2, 2, 2], [2, 2], 'int32');\n\n const f = () => {\n tf.grad(a => {\n const m = tf.pow(b, exponent);\n return tf.sum(m);\n })(a);\n };\n expect(f).toThrowError();\n });\n\n it('saves tensors from the forward pass as expected', () => {\n const x = tf.scalar(1).variable();\n const optimizer = tf.train.sgd(0.1);\n optimizer.minimize(() => {\n const y = x.square();\n const z = y.square();\n y.dispose();\n return z;\n });\n });\n\n it('custom ops do not leak', () => {\n const before = tf.memory().numTensors;\n const x = tf.softmax([1, 2, 3, 4]);\n x.dispose();\n const now = tf.memory().numTensors;\n expect(now).toBe(before);\n });\n});\n\ndescribeWithFlags('valueAndGradients', ALL_ENVS, () => {\n it('matmul + relu', async () => {\n const a = tf.tensor2d([-1, 2, -3, 10, -20, 30], [2, 3]);\n const b = tf.tensor2d([2, -3, 4, -1, 2, -3], [3, 2]);\n\n const {value, grads} =\n tf.valueAndGrads((a: tf.Tensor2D, b: tf.Tensor2D) => {\n // m = dot(a, b)\n // y = relu(m)\n // e = sum(y)\n const m = tf.matMul(a, b);\n const y = tf.relu(m);\n return tf.sum(y);\n })([a, b]);\n\n expectArraysClose(await value.data(), 10);\n\n // de/dy = 1\n // dy/dm = step(m)\n // de/dm = de/dy * dy/dm = step(m)\n const dedm = tf.step(tf.matMul(a, b));\n\n const [da, db] = grads;\n // de/da = dot(de/dy, bT)\n let transposeA = false;\n let transposeB = true;\n expectArraysClose(\n await da.data(),\n await tf.matMul(dedm, b, transposeA, transposeB).data());\n\n // de/db = dot(aT, de/dy)\n transposeA = true;\n transposeB = false;\n expectArraysClose(\n await db.data(),\n await tf.matMul(a, dedm, transposeA, transposeB).data());\n });\n\n it('matmul + relu + inner tidy', async () => {\n const a = tf.tensor2d([-1, 2, -3, 10, -20, 
30], [2, 3]);\n const b = tf.tensor2d([2, -3, 4, -1, 2, -3], [3, 2]);\n\n const {value, grads} =\n tf.valueAndGrads((a: tf.Tensor2D, b: tf.Tensor2D) => {\n // m = dot(a, b)\n // y = relu(m)\n // e = sum(y)\n const m = tf.matMul(a, b);\n return tf.tidy(() => {\n const y = tf.relu(m);\n return tf.sum(y);\n });\n })([a, b]);\n\n expectArraysClose(await value.data(), 10);\n\n // de/dy = 1\n // dy/dm = step(m)\n // de/dm = de/dy * dy/dm = step(m)\n const dedm = tf.step(tf.matMul(a, b));\n\n const [da, db] = grads;\n // de/da = dot(de/dy, bT)\n let transposeA = false;\n let transposeB = true;\n expectArraysClose(\n await da.data(),\n await tf.matMul(dedm, b, transposeA, transposeB).data());\n\n // de/db = dot(aT, de/dy)\n transposeA = true;\n transposeB = false;\n expectArraysClose(\n await db.data(),\n await tf.matMul(a, dedm, transposeA, transposeB).data());\n });\n});\n\ndescribeWithFlags('higher-order gradients', ALL_ENVS, () => {\n it('grad(grad(f))', async () => {\n const x = tf.tensor1d([.1, .2]);\n const before = tf.memory().numTensors;\n const gradgrad = tf.grad(tf.grad(x => x.mul(x).mul(x)));\n const result = gradgrad(x);\n expect(tf.memory().numTensors).toBe(before + 1);\n expectArraysClose(await result.data(), [.6, 1.2]);\n });\n\n it('grad(grad(x^2))', async () => {\n const x = tf.scalar(3);\n const gradgrad = tf.grad(tf.grad(x => x.square()));\n const result = gradgrad(x);\n // grad(grad(x^2)) = grad(2x) = 2\n expectArraysClose(await result.data(), [2]);\n });\n\n it('grads(grads(f))', async () => {\n const grads = tf.grads(x => x.mul(x).mul(x));\n const gradsgrads = tf.grads(x => grads([x])[0]);\n const result = gradsgrads([tf.tensor1d([.1, .2])]);\n expectArraysClose(await result[0].data(), [.6, 1.2]);\n });\n});\n\ndescribeWithFlags('customGradient', ALL_ENVS, () => {\n it('basic', async () => {\n const a = tf.scalar(3);\n const b = tf.scalar(2, 'int32');\n const dy = tf.scalar(4);\n\n const customPow = tf.customGrad((a: tf.Tensor) => {\n const value = tf.pow(a, b);\n const gradFunc = (dy: tf.Tensor) => dy.mul(tf.scalar(0.1));\n return {value, gradFunc};\n });\n\n const {value, grad} = tf.valueAndGrad(a => customPow(a))(a, dy);\n expect(value.shape).toEqual(a.shape);\n expectArraysClose(await value.data(), [9]);\n expect(grad.shape).toEqual(a.shape);\n expectArraysClose(await grad.data(), [.4]);\n });\n\n it('second order derivative through customGradient', async () => {\n const a = tf.scalar(3);\n const b = tf.scalar(2, 'int32');\n\n const dy = tf.scalar(5);\n\n const customPow = tf.customGrad((a: tf.Tensor, save: tf.GradSaveFunc) => {\n const value = tf.pow(a, b);\n save([a]);\n const gradFunc = (dy: tf.Tensor, saved: Tensor[]) => {\n const [a] = saved;\n return dy.mul(a);\n };\n return {value, gradFunc};\n });\n\n const dda = tf.grad(tf.grad(a => customPow(a)))(a, dy);\n expect(dda.shape).toEqual(a.shape);\n\n // First order: dy * a. Second order: dy.\n expectArraysClose(await dda.data(), await dy.data());\n });\n\n it('calling gradient of custom op twice works', async () => {\n const customOp = tf.customGrad((x: tf.Tensor, save: tf.GradSaveFunc) => {\n // Override gradient of our custom x ^ 2 op to be dy * abs(x);\n save([x]);\n return {\n value: x.square(),\n gradFunc: (dy, saved: Tensor[]) => {\n const [x] = saved;\n return dy.mul(x.abs());\n }\n };\n });\n const x = tf.tensor1d([-1, -2, 3]);\n const grad = tf.grad(x => customOp(x));\n\n expectArraysClose(await grad(x).data(), [1, 2, 3]);\n expectArraysClose(await grad(x).data(), [1, 2, 3]);\n });\n});\n"]}