computes gradients of creator variables w.r.t. the given gradient argument grad
computes gradients of creator variables w.r.t. this variable
detaches this variable from the computation graph used in backward
duplicates (deep copies) the variable
gets the gradient as a variable
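A minimal sketch tying the three helpers above to the backward pattern used in the tests below. Only .variable, backward, UntypedVariable, and gradSliced are confirmed by the tests in this section; the names gradVariable, dup, and detach are illustrative assumptions.

static import grain.config;
grain.config.backprop = true;

auto x = [1f, 2f].variable(true);   // requires grad
auto y = x + x;

auto g = [1f, 1f].variable;
auto u = UntypedVariable(g);
y.backward(&u);                     // gradients of creators w.r.t. the seed g
assert(x.gradSliced == [2f, 2f]);   // dy/dx = 2, seeded with [1, 1]

auto gx = x.gradVariable();         // hypothetical accessor: "get gradient as variable"
auto c = x.dup();                   // deep copy (method name assumed)
y.detach();                         // drop the graph used in backward (name/signature assumed)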
binary ops: b * this. TODO: implement contiguous layout conversion with mir.ndslice and cudnnTransformTensor
binary ops with a primitive scalar value (e.g., float, double)
binary ops: this op b
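A short sketch of the three operator forms documented above: variable op variable, variable op scalar, and scalar op variable (opBinaryRight). The variable-variable form follows the test below; the scalar forms are assumptions based only on these doc comments.

import mir.ndslice;
import numir;

auto a = uniform!float(3, 2).slice.variable(true);
auto b = uniform!float(3, 2).slice.variable(true);

auto c = a + b;       // this op b           -> opBinary
auto d = a * 2.0f;    // variable op scalar  -> primitive-scalar overload (assumed)
auto e = 2.0f * a;    // scalar op variable  -> opBinaryRight, "b * this" (assumed)
assert(approxEqual(d.sliced, e.sliced));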
checks that the data is not null
data pointer
test opBinary(string op)(Variable ...)
import mir.ndslice;
import numir;
import std.stdio;

static foreach (op; ["+", "*", "-", "/"]) {
    {
        auto a = uniform!float(3, 2).slice.variable(true);
        auto b = uniform!float(3, 2).slice.variable(true);
        // this is equivalent to `a + b` if op == "+"
        auto c = a.opBinary!op(b);
        // this is equivalent to `a.sliced.slice + b.sliced.slice` if op == "+"
        auto e = a.sliced.slice.opBinary!op(b.sliced.slice);
        assert(approxEqual(c.sliced, e));

        auto gc = uniform!float(3, 2).slice.variable(true);
        auto ugc = UntypedVariable(gc);
        c.backward(&ugc);

        version (grain_cuda) {
            auto da = a.to!DeviceStorage;
            auto db = b.to!DeviceStorage;
            auto dc = da.opBinary!op(db);
            assert(approxEqual(dc.to!HostStorage.sliced, c.sliced));

            import grain.cuda : zero_;
            da.grad.zero_();
            db.grad.zero_();
            auto dugc = UntypedVariable(gc.to!DeviceStorage);
            dc.backward(&dugc);
            assert(approxEqual(da.to!HostStorage.gradSliced, a.gradSliced));
        }
    }
}
test multiple additions
static import grain.config;
grain.config.backprop = true;

auto x = [1f, 2f].variable(true);
auto y = x + x; // y = 2x
auto z = y + y; // z = 4x, so dz/dx = 4

auto g = [0f, 1f].variable;
auto u = UntypedVariable(g);
z.backward(&u);
assert(x.gradSliced == [0f, 4f]);
test Variable.defined
Variable!(float, 1, HostStorage) h;
assert(!h.defined);
assert(0.variable.defined);
assert(0.1f.variable.defined);
assert([0].variable.defined);
assert([0.1f].variable.defined);

version (grain_cuda) {
    Variable!(float, 1, DeviceStorage) d;
    assert(!d.defined);
    assert(!h.to!DeviceStorage.defined);
    assert(0.variable.to!DeviceStorage.defined);
    assert(0.1f.variable.to!DeviceStorage.defined);
    assert([0].variable.to!DeviceStorage.defined);
    assert([0.1f].variable.to!DeviceStorage.defined);
}
A Variable adds autograd ability to mir.ndslice.Slice-like data (a construction sketch follows below)
TODO: add SliceKind
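A minimal construction sketch using only the helpers that already appear in the tests above (.variable, .sliced, defined, and the requires-grad flag); treat it as illustrative rather than a definitive API reference.

import mir.ndslice;
import numir;

// wrap a 3x2 mir slice as a Variable that tracks gradients
auto s = uniform!float(3, 2).slice;
auto v = s.variable(true);     // true -> requires grad, as in the tests above

// .sliced exposes the underlying data as a mir slice again
assert(v.sliced == s);

// scalars and arrays also convert via .variable (see the Variable.defined test)
assert(0.1f.variable.defined);
assert([0.1f].variable.defined);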