org.apache.mxnet.javaapi

rmspropalex_updateParam

class rmspropalex_updateParam extends AnyRef

This Param Object is specifically used for rmspropalex_update.
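
For orientation, the update rule that rmspropalex_update is generally documented to implement (the RMSProp variant from Graves, 2013) is sketched below. The symbols n, g and delta correspond to the constructor arguments of the same names, gamma1, gamma2 and epsilon to the optional setters, and eta to the learning rate lr. This is a reference sketch only; consult the operator documentation of your MXNet version for the normative definition.

    % Sketch of the RMSPropAlex update (Graves 2013); n, g, delta are the state
    % NDArrays passed to the constructor, w is the weight, eta is the learning rate lr.
    \begin{align*}
      n_t      &= \gamma_1\, n_{t-1} + (1 - \gamma_1)\, \mathrm{grad}^2 \\
      g_t      &= \gamma_1\, g_{t-1} + (1 - \gamma_1)\, \mathrm{grad} \\
      \Delta_t &= \gamma_2\, \Delta_{t-1} - \frac{\eta\, \mathrm{grad}}{\sqrt{n_t - g_t^2 + \epsilon}} \\
      w_t      &= w_{t-1} + \Delta_t
    \end{align*}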

Linear Supertypes
AnyRef, Any

Instance Constructors

  1. new rmspropalex_updateParam(weight: NDArray, grad: NDArray, n: NDArray, g: NDArray, delta: NDArray, lr: Float)

    weight

    Weight

    grad

    Gradient

    n

    Running average of the squared gradient (optimizer state)

    g

    Running average of the gradient (optimizer state)

    delta

    Accumulated update applied to the weight (optimizer state)

    lr

    Learning rate
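
A minimal sketch of building this Param object from Java. It assumes the caller already holds NDArrays for the weight, the gradient, and the three optimizer-state arrays (n, g, delta); the class name RmspropAlexParamSketch, the helper method, and the learning-rate value are illustrative and not part of MXNet.

    import org.apache.mxnet.javaapi.NDArray;
    import org.apache.mxnet.javaapi.rmspropalex_updateParam;

    public class RmspropAlexParamSketch {
        // Builds the Param object from NDArrays the caller already maintains.
        // All six constructor arguments are required; everything else is set
        // later through the chainable setters listed under Value Members.
        static rmspropalex_updateParam buildParam(NDArray weight, NDArray grad,
                                                  NDArray n, NDArray g, NDArray delta) {
            float lr = 0.001f; // illustrative learning rate
            return new rmspropalex_updateParam(weight, grad, n, g, delta, lr);
        }
    }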

Value Members

  1. final def !=(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  2. final def ##(): Int

    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  5. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  6. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  7. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  8. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  9. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  10. def getClip_gradient(): Float

  11. def getClip_weights(): Float

  12. def getDelta(): NDArray

  13. def getEpsilon(): Float

  14. def getG(): NDArray

  15. def getGamma1(): Float

  16. def getGamma2(): Float

  17. def getGrad(): NDArray

  18. def getLr(): Float

  19. def getN(): NDArray

  20. def getOut(): mxnet.NDArray

  21. def getRescale_grad(): Float

  22. def getWd(): Float

  23. def getWeight(): NDArray

  24. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  25. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  26. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  27. final def notify(): Unit

    Definition Classes
    AnyRef
  28. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  29. def setClip_gradient(clip_gradient: Float): rmspropalex_updateParam

    clip_gradient

    Clip gradient to the range of [-clip_gradient, clip_gradient], i.e. grad = max(min(grad, clip_gradient), -clip_gradient). If clip_gradient <= 0, gradient clipping is turned off. (A usage sketch covering the optional setters follows this member list.)

  30. def setClip_weights(clip_weights: Float): rmspropalex_updateParam

    clip_weights

    Clip weights to the range of [-clip_weights, clip_weights], i.e. weights = max(min(weights, clip_weights), -clip_weights). If clip_weights <= 0, weight clipping is turned off.

  31. def setEpsilon(epsilon: Float): rmspropalex_updateParam

    epsilon

    A small constant for numerical stability.

  32. def setGamma1(gamma1: Float): rmspropalex_updateParam

    gamma1

    Decay rate.

  33. def setGamma2(gamma2: Float): rmspropalex_updateParam

    gamma2

    Decay rate.

  34. def setOut(out: NDArray): rmspropalex_updateParam

  35. def setRescale_grad(rescale_grad: Float): rmspropalex_updateParam

    rescale_grad

    Rescale gradient to grad = rescale_grad*grad.

  36. def setWd(wd: Float): rmspropalex_updateParam

    wd

    Weight decay augments the objective function with a regularization term that penalizes large weights. The penalty scales with the square of the magnitude of each weight.

  37. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  38. def toString(): String

    Definition Classes
    AnyRef → Any
  39. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  40. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  41. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
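
A sketch of how the optional fields might be set before running the update, as referenced from the setClip_gradient entry above. The setters and their return type are taken from the member list; the values and the class and method names are illustrative, and the commented-out invocation at the end is an assumption about how the generated operator is exposed, not something this page documents.

    import org.apache.mxnet.javaapi.NDArray;
    import org.apache.mxnet.javaapi.rmspropalex_updateParam;

    public class RmspropAlexUpdateSketch {
        static void configure(rmspropalex_updateParam param, NDArray weight) {
            // Each setter returns the same Param instance, so calls can be chained.
            param.setGamma1(0.95f)       // decay rate for the running averages
                 .setGamma2(0.9f)        // decay rate for the accumulated update
                 .setEpsilon(1e-8f)      // small constant for numerical stability
                 .setClip_gradient(5f)   // clip gradients to [-5, 5]
                 .setWd(1e-4f)           // weight decay
                 .setOut(weight);        // output array (passing the weight itself is the usual in-place pattern)

            // Assumed entry point (verify against your MXNet version): the generated
            // ops in the Java API are typically exposed as static-style methods on
            // NDArray that take the Param object directly, e.g.
            // NDArray.rmspropalex_update(param);
        }
    }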
