ReLU

Rectified linear unit (ReLU) nonlinearity (using cuDNN).

TODO: implement scale with cudnnScaleTensor
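
The element-wise semantics are simple: forward clamps negative inputs to zero, and backward passes the incoming gradient through wherever the stored input was non-negative (see the backward assertion in the example below). The following standalone D sketch illustrates those semantics only; it is not the library's actual SIMD/cuDNN code path, and the helper names are made up for illustration.

import std.algorithm : map;
import std.array : array;

// forward: y[i] = max(x[i], 0)
float[] reluForwardSketch(const(float)[] x)
{
    return x.map!(a => a > 0 ? a : 0.0f).array;
}

// backward: the gradient is passed through where the stored input was
// non-negative (this matches the host backward assertion in the example below)
float[] reluBackwardSketch(const(float)[] x, const(float)[] gy)
{
    auto gx = gy.dup;
    foreach (i; 0 .. gx.length)
        if (x[i] < 0) gx[i] = 0;
    return gx;
}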

Members

Functions

backward
auto backward(Variable!(T, dim, HostStorage) gy)
Computes the host-side gradient: the incoming gradient gy is passed through where the input cached in hx was non-negative and zeroed elsewhere.
backward
auto backward(Variable!(T, dim, DeviceStorage) gy)
Computes the device-side gradient from the cached device tensors, using the cuDNN activation backward path when useCuDNN is set.
forward
auto forward(Variable!(T, dim, HostStorage) x)
Applies the rectifier y = max(x, 0) element-wise on the CPU; overwrites x when inplace is set and caches the input in hx for the backward pass.
forward
auto forward(Variable!(T, dim, DeviceStorage) x)
Applies the rectifier element-wise on the GPU, via cuDNN when useCuDNN is set, and caches the device-side tensors for the backward pass.

Mixins

__anonymous
mixin FunctionCommon
Mixes in the shared function machinery (type checking, applyForward and applyBackward); see Mixed In Members below.

Variables

dx
Variable!(T, dim, DeviceStorage) dx;
Cached device-side input, kept for the backward pass.
dy
Variable!(T, dim, DeviceStorage) dy;
Cached device-side output, kept for the cuDNN backward pass.
hx
Variable!(T, dim, HostStorage) hx;
Cached host-side input, kept for the CPU backward pass.
inplace
bool inplace;
When true, forward overwrites its input buffer instead of allocating a new output.
useCuDNN
bool useCuDNN;
When true, the device code path uses the cuDNN activation kernels.

Mixed In Members

From mixin FunctionCommon

this(this)
this(this)
Undocumented in source.
DeviceRets
alias DeviceRets = Tuple!(Parameters!backward)
Undocumented in source.
DeviceArgs
alias DeviceArgs = Tuple!(Parameters!forward)
Undocumented in source.
__anonymous
mixin TypeChecker!(forward, backward)
Undocumented in source.
_mixin_dargs
DeviceArgs _mixin_dargs;
Undocumented in source.
HostRets
alias HostRets = Tuple!(Parameters!backward)
Undocumented in source.
HostArgs
alias HostArgs = Tuple!(Parameters!forward)
Undocumented in source.
__anonymous
mixin TypeChecker!(forward, backward)
Undocumented in source.
_mixin_hargs
HostArgs _mixin_hargs;
Undocumented in source.
applyForward
auto applyForward(Args args)

Calls forward and stores a grain.autograd.BackProp object in the returned variables so the computation graph can be traversed during backpropagation.

applyBackward
void applyBackward(UntypedVariable[] ugradOutputs)

Type-erased version of the backward function, used by the grain.autograd.BackProp object during backpropagation.
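
For orientation, a rough sketch of how these entry points are typically driven from user code: applyForward records the BackProp link, and calling backward on the result walks the graph back through applyBackward. This is an assumption-laden illustration; the exact Variable/UntypedVariable API (the requiresGrad flag on .variable, the UntypedVariable constructor, Variable.backward, and the module holding ReLU) should be checked against grain.autograd and grain.functions.

// hypothetical usage sketch; names marked "assumed" are not confirmed by this page
import grain.autograd;
import grain.functions : ReLU;          // assumed module for ReLU

void sketch()
{
    auto func = new ReLU!(float, 1);
    auto x = [-1.0f, 1.0f, 0.0f].variable(true); // assumed: true enables gradient tracking
    auto y = func.applyForward(x);               // forward + BackProp bookkeeping
    auto gy = UntypedVariable([1.0f, 2.0f, 3.0f].variable); // assumed constructor
    y.backward(&gy);                             // assumed: triggers applyBackward through the graph
}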

Examples

test relu

// This example comes from a unittest in the defining module, so Variable,
// ReLU, HostStorage/DeviceStorage and the .variable helper are already in scope.
import grain.testing : gradCheck;

foreach (inplace; [true, false]) {
    foreach (useCuDNN; [true, false]) {
        auto func = new ReLU!(float, 1);
        func.inplace = inplace;
        func.useCuDNN = useCuDNN;

        // test CPU
        {
            auto x = [-1.0f, 1.0f, 0.0f].variable;
            // gradCheck is skipped: the numerical check can fail because ReLU is not smooth at x = 0
            // gradCheck(func, x, [0.1f, 0.1f, 0.1f].variable);

            auto y = func.forward(x);
            assert(x.data == (inplace ? y.data : [-1.0f, 1.0f, 0.0f]));
            assert(y.data == [0.0f, 1.0f, 0.0f]);

            auto gy = [1.0f, 2.0f, 3.0f].variable;
            auto gx = func.backward(gy);
            assert(gx.data == [0.0f, 2.0f, 3.0f]);
        }

        // test CUDA
        version (grain_cuda) {
            auto x = [-1.0f, 1.0f, 0.0f].variable;
            auto xd = x.to!DeviceStorage;
            auto yd = func.forward(xd);
            x = xd.to!HostStorage;
            auto y = yd.to!HostStorage;
            assert(x.data == (inplace ? y.data : [-1.0f, 1.0f, 0.0f]));
            assert(y.data == [0.0f, 1.0f, 0.0f]);

            x = [-1.0f, 1.0f, 0.0f].variable;
            auto gy = [1.0f, 2.0f, 3.0f].variable;
            auto gxd = func.backward(gy.to!DeviceStorage);
            auto gx = gxd.to!HostStorage;
            assert(gx.data == [0.0, 2.0, 0.0]);
        }
    }
}

Meta