Modifier and Type | Method and Description |
---|---|
static int | JCudnn.cudnnActivationBackward(cudnnHandle handle, cudnnActivationDescriptor activationDesc, Pointer alpha, cudnnTensorDescriptor yDesc, Pointer y, cudnnTensorDescriptor dyDesc, Pointer dy, cudnnTensorDescriptor xDesc, Pointer x, Pointer beta, cudnnTensorDescriptor dxDesc, Pointer dx)<br>Function to perform backward activation. |
static int | JCudnn.cudnnActivationForward(cudnnHandle handle, cudnnActivationDescriptor activationDesc, Pointer alpha, cudnnTensorDescriptor xDesc, Pointer x, Pointer beta, cudnnTensorDescriptor yDesc, Pointer y)<br>Function to perform forward activation. |
static int | JCudnn.cudnnConvolutionBiasActivationForward(cudnnHandle handle, Pointer alpha1, cudnnTensorDescriptor xDesc, Pointer x, cudnnFilterDescriptor wDesc, Pointer w, cudnnConvolutionDescriptor convDesc, int algo, Pointer workSpace, long workSpaceSizeInBytes, Pointer alpha2, cudnnTensorDescriptor zDesc, Pointer z, cudnnTensorDescriptor biasDesc, Pointer bias, cudnnActivationDescriptor activationDesc, cudnnTensorDescriptor yDesc, Pointer y)<br>Fused conv/bias/activation operation: y = Act(alpha1 * conv(x) + alpha2 * z + bias) |
static int | JCudnn.cudnnCreateActivationDescriptor(cudnnActivationDescriptor activationDesc)<br>Creates an activation descriptor. Activation functions are all of the form "output = alpha * Op(inputs) + beta * output". |
static int | JCudnn.cudnnDestroyActivationDescriptor(cudnnActivationDescriptor activationDesc)<br>Destroys a previously created activation descriptor. |
static int | JCudnn.cudnnGetActivationDescriptor(cudnnActivationDescriptor activationDesc, int[] mode, int[] reluNanOpt, double[] coef)<br>Queries the mode, NaN propagation option, and coefficient (ceiling for clipped ReLU, alpha for ELU) of an activation descriptor. |
static int | JCudnn.cudnnSetActivationDescriptor(cudnnActivationDescriptor activationDesc, int mode, int reluNanOpt, double coef)<br>Sets the mode, NaN propagation option, and coefficient (ceiling for clipped ReLU, alpha for ELU) of an activation descriptor. |
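
The typical call sequence is: create an activation descriptor, configure it with cudnnSetActivationDescriptor, pass it to cudnnActivationForward (and cudnnActivationBackward during training), then destroy it. The sketch below is a minimal illustration of that sequence for a ReLU forward pass; it assumes the jcuda/jcudnn JARs and matching native libraries are available and that a CUDA device is present. The tensor shape and host data are illustrative only, and error checking is delegated to JCudnn's exception mode.

```java
import jcuda.Pointer;
import jcuda.Sizeof;
import jcuda.jcudnn.*;
import jcuda.runtime.*;

public class ActivationForwardExample
{
    public static void main(String[] args)
    {
        // Turn non-success status codes into exceptions instead of return values
        JCuda.setExceptionsEnabled(true);
        JCudnn.setExceptionsEnabled(true);

        // Create the cuDNN context
        cudnnHandle handle = new cudnnHandle();
        JCudnn.cudnnCreate(handle);

        // Describe a small NCHW float tensor (1 x 1 x 2 x 2), reused for x and y
        int n = 1, c = 1, h = 2, w = 2;
        cudnnTensorDescriptor xDesc = new cudnnTensorDescriptor();
        JCudnn.cudnnCreateTensorDescriptor(xDesc);
        JCudnn.cudnnSetTensor4dDescriptor(xDesc,
            cudnnTensorFormat.CUDNN_TENSOR_NCHW,
            cudnnDataType.CUDNN_DATA_FLOAT, n, c, h, w);

        // Allocate device memory for input x and output y, copy the input up
        float hostX[] = { -1.0f, 0.5f, -2.0f, 3.0f };
        Pointer x = new Pointer();
        Pointer y = new Pointer();
        JCuda.cudaMalloc(x, hostX.length * Sizeof.FLOAT);
        JCuda.cudaMalloc(y, hostX.length * Sizeof.FLOAT);
        JCuda.cudaMemcpy(x, Pointer.to(hostX), hostX.length * Sizeof.FLOAT,
            cudaMemcpyKind.cudaMemcpyHostToDevice);

        // Create and configure the activation descriptor: plain ReLU.
        // The coef argument is unused for this mode (it is the ceiling for
        // clipped ReLU or alpha for ELU).
        cudnnActivationDescriptor activationDesc = new cudnnActivationDescriptor();
        JCudnn.cudnnCreateActivationDescriptor(activationDesc);
        JCudnn.cudnnSetActivationDescriptor(activationDesc,
            cudnnActivationMode.CUDNN_ACTIVATION_RELU,
            cudnnNanPropagation.CUDNN_NOT_PROPAGATE_NAN, 0.0);

        // y = alpha * relu(x) + beta * y
        Pointer alpha = Pointer.to(new float[] { 1.0f });
        Pointer beta  = Pointer.to(new float[] { 0.0f });
        JCudnn.cudnnActivationForward(handle, activationDesc,
            alpha, xDesc, x, beta, xDesc, y);

        // Copy the result back; expected output: [0.0, 0.5, 0.0, 3.0]
        float hostY[] = new float[hostX.length];
        JCuda.cudaMemcpy(Pointer.to(hostY), y, hostY.length * Sizeof.FLOAT,
            cudaMemcpyKind.cudaMemcpyDeviceToHost);
        System.out.println(java.util.Arrays.toString(hostY));

        // Clean up in reverse order of creation
        JCudnn.cudnnDestroyActivationDescriptor(activationDesc);
        JCudnn.cudnnDestroyTensorDescriptor(xDesc);
        JCuda.cudaFree(x);
        JCuda.cudaFree(y);
        JCudnn.cudnnDestroy(handle);
    }
}
```

Each JCudnn method in the table returns an int status code; with setExceptionsEnabled(true), as in the sketch, a failing call throws instead, so the returned values can be ignored in simple examples like this one.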