Variable

A variable provides autograd capability for mir.ndslice.Slice-like data

TODO: add SliceKind

Constructors

this
this(bool requiresGrad, uint[dim] shape, int[dim] strides, Storage!T data)

Members

Functions

backward
void backward(UntypedVariable* grad, size_t pos)

computes gradients of creator variables with respect to the given grad argument

backward
void backward()

computes gradients of creator variables w.r.t. this variable

detach
ref detach()

detach the computation graph used in backward

dup
auto dup()

duplicate (deep copy) variable

gradSliced
auto gradSliced()
gradVariable
auto gradVariable(bool requiresGrad)

get gradient as variable

opBinary
auto opBinary(Variable!(T, dim, Storage) b)

binary ops: this op b. TODO implement contiguous with mir.ndslice and cudnnTransformTensor

opBinary
auto opBinary(T b)

binary ops with primitive scalar value (e.g., float, double)

opBinaryRight
auto opBinaryRight(T b)

binary ops: b op this (right-hand-side overload)

sliced
auto sliced()
sliced
auto sliced()
toString
string toString()

Manifest constants

isHost
enum isHost;
Undocumented in source.

Properties

defined
bool defined [@property getter]

check data is not null

ptr
auto ptr [@property getter]

data pointer

Variables

bprop
BackProp bprop;
Undocumented in source.
data
Storage!T data;
Undocumented in source.
grad
Storage!T grad;
Undocumented in source.
offset
uint offset;
Undocumented in source.
requiresGrad
bool requiresGrad;
Undocumented in source.
shape
uint[dim] shape;
Undocumented in source.
strides
int[dim] strides;
Undocumented in source.

Examples

test opBinary(string op)(Variable ...)

import mir.ndslice;
import numir;
import std.stdio;

static foreach (op; ["+", "*", "-", "/"]) {
    {
        auto a = uniform!float(3, 2).slice.variable(true);
        auto b = uniform!float(3, 2).slice.variable(true);
        // this is equivalent to `a + b` if op == "+"
        auto c = a.opBinary!op(b);
        // this is equivalent to `a.sliced.slice + b.sliced.slice` if op == "+"
        auto e = a.sliced.slice.opBinary!op(b.sliced.slice);
        assert(approxEqual(c.sliced, e));

        auto gc = uniform!float(3, 2).slice.variable(true);
        auto ugc = UntypedVariable(gc);
        c.backward(&ugc);

        version (grain_cuda) {
            auto da = a.to!DeviceStorage;
            auto db = b.to!DeviceStorage;
            auto dc = da.opBinary!op(db);
            assert(approxEqual(dc.to!HostStorage.sliced, c.sliced));

            import grain.cuda : zero_;

            da.grad.zero_();
            db.grad.zero_();
            auto dugc = UntypedVariable(gc.to!DeviceStorage);
            dc.backward(&dugc);
            assert(approxEqual(da.to!HostStorage.gradSliced, a.gradSliced));
        }
    }
}

test multiple addition

static import grain.config;
grain.config.backprop = true;
auto x = [1f, 2f].variable(true);
auto y = x + x; // y = 2 x
auto z = y + y; // z = 4 x
auto g = [0f, 1f].variable;
auto u = UntypedVariable(g);
z.backward(&u);
assert(x.gradSliced == [0f, 4f]);

test Variable.defined

Variable!(float, 1, HostStorage) h;
assert(!h.defined);
assert(0.variable.defined);
assert(0.1f.variable.defined);
assert([0].variable.defined);
assert([0.1f].variable.defined);

version (grain_cuda) {
    Variable!(float, 1, DeviceStorage) d;
    assert(!d.defined);
    assert(!h.to!DeviceStorage.defined);
    assert(0.variable.to!DeviceStorage.defined);
    assert(0.1f.variable.to!DeviceStorage.defined);
    assert([0].variable.to!DeviceStorage.defined);
    assert([0.1f].variable.to!DeviceStorage.defined);
}

Meta