Stores a grain.autograd.BackProp object in the variables returned by the forward function.
Type-erased version of the backward function, stored inside a grain.autograd.BackProp object.
Tests LogSoftmax: a simple analytic case, a gradient check, and CPU/CUDA output equality.
import grain.testing;
import std.typecons;
import numir;
import mir.ndslice;
import mir.math;

// Analytic case: for each row [-1, 2, 3], log-softmax(x) = x - log(sum(exp(x))).
auto logZ = log(exp(-1.0) + exp(2.0) + exp(3.0));
auto input = [[-1.0f, 2.0f, 3.0f],
              [-1.0f, 2.0f, 3.0f],
              [-1.0f, 2.0f, 3.0f]].nparray;
LogSoftmax!float hfunc;
auto xvar = input.variable;
auto yvar = hfunc.forward(xvar);
assert(approxEqual(yvar.sliced, input - logZ));

// Numerical gradient check on random input (forward must run before backward:
// the functor caches its forward result for use in backward).
auto cpuX = uniform!float(2, 2).slice.variable;
auto cpuY = hfunc.forward(cpuX);
auto cpuGy = uniform!float(2, 2).slice.variable;
auto cpuGx = hfunc.backward(cpuGy);
gradCheck(hfunc, cpuX, cpuGy, 1e-3, 1e-3, 1e-3);

// CUDA path must agree with the CPU results above.
version (grain_cuda) {
    alias Storage = DeviceStorage;
    auto dfunc = LogSoftmax!float();
    auto devX = cpuX.to!Storage;
    auto devY = dfunc.forward(devX);
    assert(approxEqual(devY.to!HostStorage.sliced, cpuY.sliced));
    auto devGy = cpuGy.to!Storage;
    auto devGx = dfunc.backward(devGy);
    assert(approxEqual(devGx.to!HostStorage.sliced, cpuGx.sliced));
}