Store a grain.autograd.BackProp object in the variables returned from the forward function
Type-erased version of the backward function, stored in the grain.autograd.BackProp object
Test MatMul with numerical gradient checking and CPU/CUDA result equality
// Numerically gradient-check MatMul and, under CUDA, verify that the
// device forward/backward results match the host implementation for a
// range of (rows x cols) output shapes with a fixed inner dimension of 3.
foreach (rows; [2, 3, 4]) {
    foreach (cols; [2, 3, 4]) {
        import std.typecons : tuple;
        import numir : uniform;
        import mir.ndslice : slice;
        import grain.testing;

        auto inner = 3;
        auto lhs = uniform!float(rows, inner).slice.variable;
        auto rhs = uniform!float(inner, cols).slice.variable;
        auto outGrad = uniform!float(rows, cols).slice.variable;
        MatMul!float func;
        // finite-difference check of the analytic gradients (1e-3 tolerances)
        gradCheck(func, tuple(lhs, rhs), outGrad, 1e-3, 1e-3, 1e-3);
        version (grain_cuda) {
            import numir.testing;

            MatMul!float devFunc;
            auto hostOut = func.forward(lhs, rhs);
            auto devOut = devFunc.forward(lhs.to!DeviceStorage, rhs.to!DeviceStorage);
            assert(approxEqual(devOut.to!HostStorage.sliced, hostOut.sliced));
            auto hostGrads = func.backward(outGrad);
            auto devGrads = devFunc.backward(outGrad.to!DeviceStorage);
            // writefln!"%s vs %s"(devGrads[0].to!HostStorage.sliced, hostGrads[0].sliced);
            assert(approxEqual(devGrads[0].to!HostStorage.sliced, hostGrads[0].sliced));
            assert(approxEqual(devGrads[1].to!HostStorage.sliced, hostGrads[1].sliced));
        }
    }
}
Test matmul backpropagation through the autograd graph via Variable.backward
// Drive MatMul through the autograd machinery: applyForward records the
// backprop closure, Variable.backward replays it with a type-erased
// gradient, and the gradients accumulated on the inputs must equal the
// ones returned by a direct call to func.backward.
import std.typecons;
import numir;
import mir.ndslice;

static import grain.config;

grain.config.backprop = true;
auto func = new MatMul!float;
auto x = uniform!float(3, 4).slice.variable(true);
auto y = uniform!float(4, 2).slice.variable(true);
auto z = func.applyForward(x, y);
auto zGrad = uniform!float(3, 2).slice.variable;
auto erasedGrad = UntypedVariable(zGrad);
z.backward(&erasedGrad);
auto directGrads = func.backward(zGrad);
assert(x.gradSlice == directGrads[0].sliced);
assert(y.gradSlice == directGrads[1].sliced);
Matrix-Matrix multiplication (using cuBLAS)