mxnet

Classes  
struct  dnnl_rnn_desc_t 
A descriptor for an RNN operation. More...  
Enumerations  
enum  dnnl_rnn_flags_t { dnnl_rnn_flags_undef = 0x0 } 
Flags for RNN cell. More...  
enum  dnnl_rnn_direction_t { dnnl_unidirectional_left2right, dnnl_unidirectional_right2left, dnnl_bidirectional_concat, dnnl_bidirectional_sum, dnnl_unidirectional = dnnl_unidirectional_left2right } 
A direction of RNN primitive execution. More...  
Functions  
dnnl_status_t DNNL_API  dnnl_vanilla_rnn_forward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, const dnnl_alg_kind_t activation, const dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, unsigned flags, float alpha, float beta) 
dnnl_status_t DNNL_API  dnnl_vanilla_rnn_backward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, const dnnl_alg_kind_t activation, const dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *diff_src_layer_desc, const dnnl_memory_desc_t *diff_src_iter_desc, const dnnl_memory_desc_t *diff_weights_layer_desc, const dnnl_memory_desc_t *diff_weights_iter_desc, const dnnl_memory_desc_t *diff_bias_desc, const dnnl_memory_desc_t *diff_dst_layer_desc, const dnnl_memory_desc_t *diff_dst_iter_desc, unsigned flags, float alpha, float beta) 
dnnl_status_t DNNL_API  dnnl_lstm_forward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *src_iter_c_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *dst_iter_c_desc, unsigned flags) 
dnnl_status_t DNNL_API  dnnl_lstm_forward_desc_init_v2 (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *src_iter_c_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *weights_peephole_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *dst_iter_c_desc, unsigned flags) 
dnnl_status_t DNNL_API  dnnl_lstm_forward_desc_init_v3 (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *src_iter_c_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *weights_peephole_desc, const dnnl_memory_desc_t *weights_projection_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *dst_iter_c_desc, unsigned flags) 
dnnl_status_t DNNL_API  dnnl_lstm_backward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *src_iter_c_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *dst_iter_c_desc, const dnnl_memory_desc_t *diff_src_layer_desc, const dnnl_memory_desc_t *diff_src_iter_desc, const dnnl_memory_desc_t *diff_src_iter_c_desc, const dnnl_memory_desc_t *diff_weights_layer_desc, const dnnl_memory_desc_t *diff_weights_iter_desc, const dnnl_memory_desc_t *diff_bias_desc, const dnnl_memory_desc_t *diff_dst_layer_desc, const dnnl_memory_desc_t *diff_dst_iter_desc, const dnnl_memory_desc_t *diff_dst_iter_c_desc, unsigned flags) 
dnnl_status_t DNNL_API  dnnl_lstm_backward_desc_init_v2 (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *src_iter_c_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *weights_peephole_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *dst_iter_c_desc, const dnnl_memory_desc_t *diff_src_layer_desc, const dnnl_memory_desc_t *diff_src_iter_desc, const dnnl_memory_desc_t *diff_src_iter_c_desc, const dnnl_memory_desc_t *diff_weights_layer_desc, const dnnl_memory_desc_t *diff_weights_iter_desc, const dnnl_memory_desc_t *diff_weights_peephole_desc, const dnnl_memory_desc_t *diff_bias_desc, const dnnl_memory_desc_t *diff_dst_layer_desc, const dnnl_memory_desc_t *diff_dst_iter_desc, const dnnl_memory_desc_t *diff_dst_iter_c_desc, unsigned flags) 
dnnl_status_t DNNL_API  dnnl_lstm_backward_desc_init_v3 (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *src_iter_c_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *weights_peephole_desc, const dnnl_memory_desc_t *weights_projection_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *dst_iter_c_desc, const dnnl_memory_desc_t *diff_src_layer_desc, const dnnl_memory_desc_t *diff_src_iter_desc, const dnnl_memory_desc_t *diff_src_iter_c_desc, const dnnl_memory_desc_t *diff_weights_layer_desc, const dnnl_memory_desc_t *diff_weights_iter_desc, const dnnl_memory_desc_t *diff_weights_peephole_desc, const dnnl_memory_desc_t *diff_weights_projection_desc, const dnnl_memory_desc_t *diff_bias_desc, const dnnl_memory_desc_t *diff_dst_layer_desc, const dnnl_memory_desc_t *diff_dst_iter_desc, const dnnl_memory_desc_t *diff_dst_iter_c_desc, unsigned flags) 
dnnl_status_t DNNL_API  dnnl_gru_forward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, unsigned flags) 
dnnl_status_t DNNL_API  dnnl_gru_backward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *diff_src_layer_desc, const dnnl_memory_desc_t *diff_src_iter_desc, const dnnl_memory_desc_t *diff_weights_layer_desc, const dnnl_memory_desc_t *diff_weights_iter_desc, const dnnl_memory_desc_t *diff_bias_desc, const dnnl_memory_desc_t *diff_dst_layer_desc, const dnnl_memory_desc_t *diff_dst_iter_desc, unsigned flags) 
dnnl_status_t DNNL_API  dnnl_lbr_gru_forward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, unsigned flags) 
dnnl_status_t DNNL_API  dnnl_lbr_gru_backward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *diff_src_layer_desc, const dnnl_memory_desc_t *diff_src_iter_desc, const dnnl_memory_desc_t *diff_weights_layer_desc, const dnnl_memory_desc_t *diff_weights_iter_desc, const dnnl_memory_desc_t *diff_bias_desc, const dnnl_memory_desc_t *diff_dst_layer_desc, const dnnl_memory_desc_t *diff_dst_iter_desc, unsigned flags) 
enum dnnl_rnn_direction_t 
A direction of RNN primitive execution.
Enumerator  

dnnl_unidirectional_left2right 
Unidirectional execution of RNN primitive from left to right. 
dnnl_unidirectional_right2left 
Unidirectional execution of RNN primitive from right to left. 
dnnl_bidirectional_concat 
Bidirectional execution of RNN primitive with concatenation of the results. 
dnnl_bidirectional_sum 
Bidirectional execution of RNN primitive with summation of the results. 
dnnl_unidirectional 
Alias for dnnl_unidirectional_left2right. 
enum dnnl_rnn_flags_t 
dnnl_status_t DNNL_API dnnl_gru_backward_desc_init  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
const dnnl_memory_desc_t *  diff_src_layer_desc,  
const dnnl_memory_desc_t *  diff_src_iter_desc,  
const dnnl_memory_desc_t *  diff_weights_layer_desc,  
const dnnl_memory_desc_t *  diff_weights_iter_desc,  
const dnnl_memory_desc_t *  diff_bias_desc,  
const dnnl_memory_desc_t *  diff_dst_layer_desc,  
const dnnl_memory_desc_t *  diff_dst_iter_desc,  
unsigned  flags  
) 
Initializes a descriptor for GRU backward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc together with diff_src_iter_desc; bias_desc together with diff_bias_desc; dst_iter_desc together with diff_dst_iter_desc. This would then indicate that the GRU backward propagation primitive should not use them and should default to zero values instead.
rnn_desc  Output descriptor for GRU primitive. 
prop_kind  Propagation kind. Must be dnnl_backward. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
diff_src_layer_desc  Memory descriptor for the diff of input vector. 
diff_src_iter_desc  Memory descriptor for the diff of input recurrent hidden state vector. 
diff_weights_layer_desc  Memory descriptor for the diff of weights applied to the layer input. 
diff_weights_iter_desc  Memory descriptor for the diff of weights applied to the recurrent input. 
diff_bias_desc  Diff bias memory descriptor. 
diff_dst_layer_desc  Memory descriptor for the diff of output vector. 
diff_dst_iter_desc  Memory descriptor for the diff of output recurrent hidden state vector. 
flags  Unused. 
dnnl_status_t DNNL_API dnnl_gru_forward_desc_init  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
unsigned  flags  
) 
Initializes a descriptor for GRU forward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc, bias_desc, dst_iter_desc. This would then indicate that the GRU forward propagation primitive should not use them and should default to zero values instead.
rnn_desc  Output descriptor for GRU primitive. 
prop_kind  Propagation kind. Possible values are dnnl_forward_training and dnnl_forward_inference. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
flags  Unused. 
dnnl_status_t DNNL_API dnnl_lbr_gru_backward_desc_init  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
const dnnl_memory_desc_t *  diff_src_layer_desc,  
const dnnl_memory_desc_t *  diff_src_iter_desc,  
const dnnl_memory_desc_t *  diff_weights_layer_desc,  
const dnnl_memory_desc_t *  diff_weights_iter_desc,  
const dnnl_memory_desc_t *  diff_bias_desc,  
const dnnl_memory_desc_t *  diff_dst_layer_desc,  
const dnnl_memory_desc_t *  diff_dst_iter_desc,  
unsigned  flags  
) 
Initializes a descriptor for LBR GRU backward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc together with diff_src_iter_desc; bias_desc together with diff_bias_desc; dst_iter_desc together with diff_dst_iter_desc. This would then indicate that the LBR GRU backward propagation primitive should not use them and should default to zero values instead.
rnn_desc  Output descriptor for LBR GRU primitive. 
prop_kind  Propagation kind. Must be dnnl_backward. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
diff_src_layer_desc  Memory descriptor for the diff of input vector. 
diff_src_iter_desc  Memory descriptor for the diff of input recurrent hidden state vector. 
diff_weights_layer_desc  Memory descriptor for the diff of weights applied to the layer input. 
diff_weights_iter_desc  Memory descriptor for the diff of weights applied to the recurrent input. 
diff_bias_desc  Diff bias memory descriptor. 
diff_dst_layer_desc  Memory descriptor for the diff of output vector. 
diff_dst_iter_desc  Memory descriptor for the diff of output recurrent hidden state vector. 
flags  Unused. 
dnnl_status_t DNNL_API dnnl_lbr_gru_forward_desc_init  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
unsigned  flags  
) 
Initializes a descriptor for LBR GRU forward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc, bias_desc, dst_iter_desc. This would then indicate that the LBR GRU forward propagation primitive should not use them and should default to zero values instead.
rnn_desc  Output descriptor for LBR GRU primitive. 
prop_kind  Propagation kind. Possible values are dnnl_forward_training and dnnl_forward_inference. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
flags  Unused. 
dnnl_status_t DNNL_API dnnl_lstm_backward_desc_init  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  src_iter_c_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
const dnnl_memory_desc_t *  dst_iter_c_desc,  
const dnnl_memory_desc_t *  diff_src_layer_desc,  
const dnnl_memory_desc_t *  diff_src_iter_desc,  
const dnnl_memory_desc_t *  diff_src_iter_c_desc,  
const dnnl_memory_desc_t *  diff_weights_layer_desc,  
const dnnl_memory_desc_t *  diff_weights_iter_desc,  
const dnnl_memory_desc_t *  diff_bias_desc,  
const dnnl_memory_desc_t *  diff_dst_layer_desc,  
const dnnl_memory_desc_t *  diff_dst_iter_desc,  
const dnnl_memory_desc_t *  diff_dst_iter_c_desc,  
unsigned  flags  
) 
Initializes a descriptor for an LSTM backward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc together with src_iter_c_desc, diff_src_iter_desc, and diff_src_iter_c_desc; bias_desc together with diff_bias_desc; dst_iter_desc together with dst_iter_c_desc, diff_dst_iter_desc, and diff_dst_iter_c_desc. This would then indicate that the LSTM backward propagation primitive should not use them and should default to zero values instead.
rnn_desc  Output descriptor for LSTM primitive. 
prop_kind  Propagation kind. Must be dnnl_backward. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
src_iter_c_desc  Memory descriptor for the input recurrent cell state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
dst_iter_c_desc  Memory descriptor for the output recurrent cell state vector. 
diff_src_layer_desc  Memory descriptor for the diff of input vector. 
diff_src_iter_desc  Memory descriptor for the diff of input recurrent hidden state vector. 
diff_src_iter_c_desc  Memory descriptor for the diff of input recurrent cell state vector. 
diff_weights_layer_desc  Memory descriptor for the diff of weights applied to the layer input. 
diff_weights_iter_desc  Memory descriptor for the diff of weights applied to the recurrent input. 
diff_bias_desc  Diff bias memory descriptor. 
diff_dst_layer_desc  Memory descriptor for the diff of output vector. 
diff_dst_iter_desc  Memory descriptor for the diff of output recurrent hidden state vector. 
diff_dst_iter_c_desc  Memory descriptor for the diff of output recurrent cell state vector. 
flags  Unused. 
dnnl_status_t DNNL_API dnnl_lstm_backward_desc_init_v2  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  src_iter_c_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  weights_peephole_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
const dnnl_memory_desc_t *  dst_iter_c_desc,  
const dnnl_memory_desc_t *  diff_src_layer_desc,  
const dnnl_memory_desc_t *  diff_src_iter_desc,  
const dnnl_memory_desc_t *  diff_src_iter_c_desc,  
const dnnl_memory_desc_t *  diff_weights_layer_desc,  
const dnnl_memory_desc_t *  diff_weights_iter_desc,  
const dnnl_memory_desc_t *  diff_weights_peephole_desc,  
const dnnl_memory_desc_t *  diff_bias_desc,  
const dnnl_memory_desc_t *  diff_dst_layer_desc,  
const dnnl_memory_desc_t *  diff_dst_iter_desc,  
const dnnl_memory_desc_t *  diff_dst_iter_c_desc,  
unsigned  flags  
) 
Initializes a descriptor for an LSTM (with or without peephole) backward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc together with src_iter_c_desc, diff_src_iter_desc, and diff_src_iter_c_desc; weights_peephole_desc together with diff_weights_peephole_desc; bias_desc together with diff_bias_desc; dst_iter_desc together with dst_iter_c_desc, diff_dst_iter_desc, and diff_dst_iter_c_desc. This would then indicate that the LSTM backward propagation primitive should not use them and should default to zero values instead.
rnn_desc  Output descriptor for LSTM primitive. 
prop_kind  Propagation kind. Must be dnnl_backward. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
src_iter_c_desc  Memory descriptor for the input recurrent cell state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
weights_peephole_desc  Memory descriptor for the weights applied to the cell states (according to the Peephole LSTM formula). 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
dst_iter_c_desc  Memory descriptor for the output recurrent cell state vector. 
diff_src_layer_desc  Memory descriptor for the diff of input vector. 
diff_src_iter_desc  Memory descriptor for the diff of input recurrent hidden state vector. 
diff_src_iter_c_desc  Memory descriptor for the diff of input recurrent cell state vector. 
diff_weights_layer_desc  Memory descriptor for the diff of weights applied to the layer input. 
diff_weights_iter_desc  Memory descriptor for the diff of weights applied to the recurrent input. 
diff_weights_peephole_desc  Memory descriptor for the diff of weights applied to the cell states (according to the Peephole LSTM formula). 
diff_bias_desc  Diff bias memory descriptor. 
diff_dst_layer_desc  Memory descriptor for the diff of output vector. 
diff_dst_iter_desc  Memory descriptor for the diff of output recurrent hidden state vector. 
diff_dst_iter_c_desc  Memory descriptor for the diff of output recurrent cell state vector. 
flags  Unused. 
dnnl_status_t DNNL_API dnnl_lstm_backward_desc_init_v3  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  src_iter_c_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  weights_peephole_desc,  
const dnnl_memory_desc_t *  weights_projection_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
const dnnl_memory_desc_t *  dst_iter_c_desc,  
const dnnl_memory_desc_t *  diff_src_layer_desc,  
const dnnl_memory_desc_t *  diff_src_iter_desc,  
const dnnl_memory_desc_t *  diff_src_iter_c_desc,  
const dnnl_memory_desc_t *  diff_weights_layer_desc,  
const dnnl_memory_desc_t *  diff_weights_iter_desc,  
const dnnl_memory_desc_t *  diff_weights_peephole_desc,  
const dnnl_memory_desc_t *  diff_weights_projection_desc,  
const dnnl_memory_desc_t *  diff_bias_desc,  
const dnnl_memory_desc_t *  diff_dst_layer_desc,  
const dnnl_memory_desc_t *  diff_dst_iter_desc,  
const dnnl_memory_desc_t *  diff_dst_iter_c_desc,  
unsigned  flags  
) 
Initializes a descriptor for an LSTM (with or without peephole and with or without recurrent projection layer) backward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc together with src_iter_c_desc, diff_src_iter_desc, and diff_src_iter_c_desc; weights_peephole_desc together with diff_weights_peephole_desc; bias_desc together with diff_bias_desc; dst_iter_desc together with dst_iter_c_desc, diff_dst_iter_desc, and diff_dst_iter_c_desc. This would then indicate that the LSTM backward propagation primitive should not use them and should default to zero values instead.
The weights_projection_desc together with diff_weights_projection_desc could either be NULL or point to a zero memory descriptor. This would then indicate that the LSTM does not have a recurrent projection layer.
rnn_desc  Output descriptor for LSTM primitive. 
prop_kind  Propagation kind. Must be dnnl_backward. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
src_iter_c_desc  Memory descriptor for the input recurrent cell state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
weights_peephole_desc  Memory descriptor for the weights applied to the cell states (according to the Peephole LSTM formula). 
weights_projection_desc  Memory descriptor for the weights applied to the hidden states to get the recurrent projection (according to the Projection LSTM formula). 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
dst_iter_c_desc  Memory descriptor for the output recurrent cell state vector. 
diff_src_layer_desc  Memory descriptor for the diff of input vector. 
diff_src_iter_desc  Memory descriptor for the diff of input recurrent hidden state vector. 
diff_src_iter_c_desc  Memory descriptor for the diff of input recurrent cell state vector. 
diff_weights_layer_desc  Memory descriptor for the diff of weights applied to the layer input. 
diff_weights_iter_desc  Memory descriptor for the diff of weights applied to the recurrent input. 
diff_weights_peephole_desc  Memory descriptor for the diff of weights applied to the cell states (according to the Peephole LSTM formula). 
diff_weights_projection_desc  Memory descriptor for the diff of weights applied to the hidden states to get the recurrent projection (according to the Projection LSTM formula). 
diff_bias_desc  Diff bias memory descriptor. 
diff_dst_layer_desc  Memory descriptor for the diff of output vector. 
diff_dst_iter_desc  Memory descriptor for the diff of output recurrent hidden state vector. 
diff_dst_iter_c_desc  Memory descriptor for the diff of output recurrent cell state vector. 
flags  Unused. 
dnnl_status_t DNNL_API dnnl_lstm_forward_desc_init  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  src_iter_c_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
const dnnl_memory_desc_t *  dst_iter_c_desc,  
unsigned  flags  
) 
Initializes a descriptor for LSTM forward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc together with src_iter_c_desc; bias_desc; dst_iter_desc together with dst_iter_c_desc. This would then indicate that the LSTM forward propagation primitive should not use them and should default to zero values instead.
rnn_desc  Output descriptor for LSTM primitive. 
prop_kind  Propagation kind. Possible values are dnnl_forward_training and dnnl_forward_inference. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
src_iter_c_desc  Memory descriptor for the input recurrent cell state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
dst_iter_c_desc  Memory descriptor for the output recurrent cell state vector. 
flags  Unused. 
dnnl_status_t DNNL_API dnnl_lstm_forward_desc_init_v2  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  src_iter_c_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  weights_peephole_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
const dnnl_memory_desc_t *  dst_iter_c_desc,  
unsigned  flags  
) 
Initializes a descriptor for an LSTM (with or without peephole) forward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc together with src_iter_c_desc, weights_peephole_desc, bias_desc, and dst_iter_desc together with dst_iter_c_desc. This would then indicate that the LSTM forward propagation primitive should not use them and should default to zero values instead.
rnn_desc  Output descriptor for LSTM primitive. 
prop_kind  Propagation kind. Possible values are dnnl_forward_training and dnnl_forward_inference. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
src_iter_c_desc  Memory descriptor for the input recurrent cell state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
weights_peephole_desc  Memory descriptor for the weights applied to the cell states (according to the Peephole LSTM formula). 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
dst_iter_c_desc  Memory descriptor for the output recurrent cell state vector. 
flags  Unused. 
dnnl_status_t DNNL_API dnnl_lstm_forward_desc_init_v3  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  src_iter_c_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  weights_peephole_desc,  
const dnnl_memory_desc_t *  weights_projection_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
const dnnl_memory_desc_t *  dst_iter_c_desc,  
unsigned  flags  
) 
Initializes a descriptor for an LSTM (with or without peephole and with or without recurrent projection layer) forward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc together with src_iter_c_desc, weights_peephole_desc, bias_desc, and dst_iter_desc together with dst_iter_c_desc. This would then indicate that the LSTM forward propagation primitive should not use them and should default to zero values instead.
The weights_projection_desc may also either be NULL or point to a zero memory descriptor. This would then indicate that the LSTM does not have a recurrent projection layer.
rnn_desc  Output descriptor for LSTM primitive. 
prop_kind  Propagation kind. Possible values are dnnl_forward_training and dnnl_forward_inference. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
src_iter_c_desc  Memory descriptor for the input recurrent cell state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
weights_peephole_desc  Memory descriptor for the weights applied to the cell states (according to the Peephole LSTM formula). 
weights_projection_desc  Memory descriptor for the weights applied to the hidden states to get the recurrent projection (according to the Projection LSTM formula). 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
dst_iter_c_desc  Memory descriptor for the output recurrent cell state vector. 
flags  Unused. 
dnnl_status_t DNNL_API dnnl_vanilla_rnn_backward_desc_init  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
const dnnl_alg_kind_t  activation,  
const dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
const dnnl_memory_desc_t *  diff_src_layer_desc,  
const dnnl_memory_desc_t *  diff_src_iter_desc,  
const dnnl_memory_desc_t *  diff_weights_layer_desc,  
const dnnl_memory_desc_t *  diff_weights_iter_desc,  
const dnnl_memory_desc_t *  diff_bias_desc,  
const dnnl_memory_desc_t *  diff_dst_layer_desc,  
const dnnl_memory_desc_t *  diff_dst_iter_desc,  
unsigned  flags,  
float  alpha,  
float  beta  
) 
Initializes a descriptor for vanilla RNN backward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc together with diff_src_iter_desc, bias_desc together with diff_bias_desc, and dst_iter_desc together with diff_dst_iter_desc. This would then indicate that the RNN backward propagation primitive should not use the respective data and should use zero values instead.
rnn_desc  Output descriptor for vanilla RNN primitive. 
prop_kind  Propagation kind. Must be dnnl_backward. 
activation  Activation kind. Possible values are dnnl_eltwise_relu, dnnl_eltwise_tanh or dnnl_eltwise_logistic. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
diff_src_layer_desc  Memory descriptor for the diff of input vector. 
diff_src_iter_desc  Memory descriptor for the diff of input recurrent hidden state vector. 
diff_weights_layer_desc  Memory descriptor for the diff of weights applied to the layer input. 
diff_weights_iter_desc  Memory descriptor for the diff of weights applied to the recurrent input. 
diff_bias_desc  Diff bias memory descriptor. 
diff_dst_layer_desc  Memory descriptor for the diff of output vector. 
diff_dst_iter_desc  Memory descriptor for the diff of output recurrent hidden state vector. 
flags  Unused. 
alpha  Negative slope if activation is dnnl_eltwise_relu. 
beta  Unused. 
dnnl_status_t DNNL_API dnnl_vanilla_rnn_forward_desc_init  (  dnnl_rnn_desc_t *  rnn_desc, 
dnnl_prop_kind_t  prop_kind,  
const dnnl_alg_kind_t  activation,  
const dnnl_rnn_direction_t  direction,  
const dnnl_memory_desc_t *  src_layer_desc,  
const dnnl_memory_desc_t *  src_iter_desc,  
const dnnl_memory_desc_t *  weights_layer_desc,  
const dnnl_memory_desc_t *  weights_iter_desc,  
const dnnl_memory_desc_t *  bias_desc,  
const dnnl_memory_desc_t *  dst_layer_desc,  
const dnnl_memory_desc_t *  dst_iter_desc,  
unsigned  flags,  
float  alpha,  
float  beta  
) 
Initializes a descriptor for vanilla RNN forward propagation primitive.
The following arguments may either be NULL or point to a zero memory descriptor: src_iter_desc, bias_desc, and dst_iter_desc. This would then indicate that the RNN forward propagation primitive should not use them and should default to zero values instead.
rnn_desc  Output descriptor for vanilla RNN primitive. 
prop_kind  Propagation kind. Possible values are dnnl_forward_training and dnnl_forward_inference. 
activation  Activation kind. Possible values are dnnl_eltwise_relu, dnnl_eltwise_tanh or dnnl_eltwise_logistic. 
direction  RNN direction. See dnnl_rnn_direction_t for more info. 
src_layer_desc  Memory descriptor for the input vector. 
src_iter_desc  Memory descriptor for the input recurrent hidden state vector. 
weights_layer_desc  Memory descriptor for the weights applied to the layer input. 
weights_iter_desc  Memory descriptor for the weights applied to the recurrent input. 
bias_desc  Bias memory descriptor. 
dst_layer_desc  Memory descriptor for the output vector. 
dst_iter_desc  Memory descriptor for the output recurrent hidden state vector. 
flags  Unused. 
alpha  Negative slope if activation is dnnl_eltwise_relu. 
beta  Unused. 