Documentation ¶
Index ¶
- Constants
- func AdamUpdate(params, grads, mean, variance *NDArray, lr, beta1, beta2, epsilon, wd float64)
- func GpuCount() int
- func LibVersion() fu.VersionType
- func Nograd(_hidden_nograd_)
- func RandomSeed(seed int)
- func SgdMomUpdate(params, grads, mom *NDArray, lr, momentum, wd float64)
- func SgdUpdate(params, grads *NDArray, lr, wd float64)
- type ActivationType
- type Context
- func (c Context) Array(tp Dtype, d Dimension, vals ...interface{}) *NDArray
- func (c Context) CopyAs(a *NDArray, dtype Dtype) *NDArray
- func (c Context) DevNo() int
- func (c Context) DevType() int
- func (c Context) IsGPU() bool
- func (c Context) RandomSeed(seed int)
- func (c Context) String() string
- func (c Context) Upgrade() Context
- type Dimension
- func (dim Dimension) Empty() bool
- func (dim Dimension) Good() bool
- func (dim Dimension) Like(b Dimension) Dimension
- func (dim Dimension) Push(i int) Dimension
- func (dim Dimension) SizeOf(dt Dtype) int
- func (dim Dimension) Skip(n int) Dimension
- func (dim Dimension) Slice() []int
- func (dim Dimension) String() string
- func (dim Dimension) Total() int
- type Dtype
- type Graph
- func (g *Graph) Backward()
- func (g *Graph) Forward(train bool)
- func (g *Graph) GetShapes(withLoss bool) map[string][]int
- func (g *Graph) Identity() GraphIdentity
- func (g *Graph) InitParam(name string)
- func (g *Graph) Initialize(seed int, inite func(*NDArray, string))
- func (g *Graph) LogSummary(withLoss bool)
- func (g *Graph) NextSymbolId() int
- func (g *Graph) PrintSummary(withLoss bool)
- func (g *Graph) Release()
- func (g *Graph) Summary(withLoss bool) Summary
- func (g *Graph) SummaryOut(withLoss bool, out func(string))
- func (g *Graph) ToJson(withLoss bool) []byte
- type GraphIdentity
- type GraphJs
- type Inite
- type Loss
- type NDArray
- func (a *NDArray) Cast(dt Dtype) *NDArray
- func (a *NDArray) Context() Context
- func (a *NDArray) CopyValuesTo(dst interface{})
- func (a *NDArray) Depth() int
- func (a *NDArray) Dim() Dimension
- func (a *NDArray) Dtype() Dtype
- func (a *NDArray) Fill(value float32) *NDArray
- func (a *NDArray) Len(d int) int
- func (a *NDArray) NewLikeThis() *NDArray
- func (a *NDArray) Normal(mean float32, scale float32) *NDArray
- func (a *NDArray) Ones() *NDArray
- func (a *NDArray) Raw() []byte
- func (a *NDArray) ReCopyValuesTo(dst interface{})
- func (a *NDArray) Release()
- func (a *NDArray) Reshape(dim Dimension) *NDArray
- func (a *NDArray) SetValues(vals ...interface{})
- func (a *NDArray) Size() int
- func (a *NDArray) String() string
- func (a *NDArray) Uniform(low float32, high float32) *NDArray
- func (a *NDArray) Values(dtype Dtype) interface{}
- func (a *NDArray) ValuesF32() []float32
- func (a *NDArray) Xavier(gaussian bool, factor int, magnitude float32) *NDArray
- func (a *NDArray) Zeros() *NDArray
- type Param
- type Summary
- type SummaryRow
- type SummryArg
- type Symbol
- func Abs(a *Symbol) *Symbol
- func Activation(a *Symbol, actType ActivationType) *Symbol
- func Add(lv interface{}, rv interface{}) *Symbol
- func And(a *Symbol, b *Symbol) *Symbol
- func BatchNorm(a, gamma, beta, rmean, rvar *Symbol, mom, eps float32, useGlobalStats bool, ...) *Symbol
- func BcastAdd(a, b *Symbol) *Symbol
- func BcastDiv(a, b *Symbol) *Symbol
- func BcastMul(a, b *Symbol) *Symbol
- func BcastSub(a, b *Symbol) *Symbol
- func BlockGrad(s *Symbol) *Symbol
- func Bound(a ...*Symbol) *Symbol
- func Channel(a *Symbol, ch int) *Symbol
- func Concat(a ...*Symbol) *Symbol
- func Conv(a, weight, bias *Symbol, channels int, kernel, stride, padding Dimension, ...) *Symbol
- func Cosh(a *Symbol) *Symbol
- func Depend(a ...*Symbol) *Symbol
- func Div(lv interface{}, rv interface{}) *Symbol
- func Dot(lv interface{}, rv interface{}) *Symbol
- func Dropout(a *Symbol, rate float32) *Symbol
- func EQ(a *Symbol, rv interface{}) *Symbol
- func Exp(a *Symbol) *Symbol
- func Flatten(a *Symbol) *Symbol
- func FullyConnected(a, weight, bias *Symbol, size int, flatten bool) *Symbol
- func GE(a *Symbol, rv interface{}) *Symbol
- func GenericOp1(op, opScalar capi.MxnetOp, l *Symbol, rv interface{}) *Symbol
- func GenericOp2(op, opScalar, opScalarR capi.MxnetOp, lv interface{}, rv interface{}) *Symbol
- func Greater(a *Symbol, rv interface{}) *Symbol
- func Group(a ...*Symbol) *Symbol
- func HardSigmoid(a *Symbol) *Symbol
- func Input(..._hidden_input_) *Symbol
- func LE(a *Symbol, rv interface{}) *Symbol
- func Lesser(a *Symbol, rv interface{}) *Symbol
- func Link(name string) *Symbol
- func Log(a *Symbol) *Symbol
- func LogCosh(a *Symbol) *Symbol
- func LogSoftmax(a *Symbol, axis ...int) *Symbol
- func MakeLoss(s *Symbol) *Symbol
- func Mean(a *Symbol, axis ...int) *Symbol
- func MeanKd(a *Symbol, axis ...int) *Symbol
- func MeanXl(a *Symbol, axis ...int) *Symbol
- func Minus(a *Symbol) *Symbol
- func Mul(lv interface{}, rv interface{}) *Symbol
- func NE(a *Symbol, rv interface{}) *Symbol
- func Normal(loc, scale float32, dim ...int) *Symbol
- func Not(a *Symbol) *Symbol
- func Ones(dim ...int) *Symbol
- func OnesLike(a *Symbol) *Symbol
- func Or(a *Symbol, b *Symbol) *Symbol
- func Output(a *Symbol, name string) *Symbol
- func Pick(a *Symbol, label *Symbol) *Symbol
- func Pool(a *Symbol, kernel, stride, padding Dimension, ceil bool, maxpool bool) *Symbol
- func Pow(lv interface{}, rv interface{}) *Symbol
- func ReLU(a *Symbol) *Symbol
- func Ref(name string, a ...*Symbol) *Symbol
- func Reshape(a *Symbol, dim ...int) *Symbol
- func ReshapeLike(a, b *Symbol) *Symbol
- func Sigmoid(a *Symbol) *Symbol
- func Sin(a *Symbol) *Symbol
- func Slice(a *Symbol, axis, begin, end int) *Symbol
- func Softmax(a *Symbol, axis ...int) *Symbol
- func SoftmaxActivation(a *Symbol, channel bool) *Symbol
- func SoftmaxCrossEntropy(a, b *Symbol, axis ...int) *Symbol
- func SoftmaxOutput(a *Symbol, l *Symbol, multiOutput bool) *Symbol
- func Sqrt(a *Symbol) *Symbol
- func Square(a *Symbol) *Symbol
- func Stack(a ...*Symbol) *Symbol
- func Stack1(a ...*Symbol) *Symbol
- func Sub(lv interface{}, rv interface{}) *Symbol
- func Sum(a *Symbol, axis ...int) *Symbol
- func Sum1(a *Symbol) *Symbol
- func SumNan(a *Symbol, axis ...int) *Symbol
- func SumXl(a *Symbol, axis ...int) *Symbol
- func SwapAxes(a *Symbol, x, y int) *Symbol
- func SymbolCast(i interface{}) (*Symbol, error)
- func Tanh(a *Symbol) *Symbol
- func Transpose(a *Symbol, axis ...int) *Symbol
- func Value(name string, a ...float32) *Symbol
- func Var(name string, opt ...interface{}) *Symbol
- func Xor(a *Symbol, b *Symbol) *Symbol
- func Zeros(dim ...int) *Symbol
- func ZerosLike(a *Symbol) *Symbol
Constants ¶
const (
    DimRow    = 0
    DimColumn = 1
    DimDepth  = 2
    DimDepth3 = 3
)
const (
    VersionMajor = 1
    VersionMinor = 5
    VersionPatch = 0
)
const (
    OpVar_    capi.MxnetOp = -1
    OpInput_  capi.MxnetOp = -2
    OpScalar_ capi.MxnetOp = -4
    OpNogVar_ capi.MxnetOp = -5
    OpGroup_  capi.MxnetOp = -7
    OpRef_    capi.MxnetOp = -8
    OpOutput_ capi.MxnetOp = -9
    OpBound_  capi.MxnetOp = -10
    OpDepend_ capi.MxnetOp = -11
    OpLink_   capi.MxnetOp = -12
)
const MaxDimensionCount = 4
Do not change this constant; code can assume exactly this value.
const Version fu.VersionType = VersionMajor*10000 + VersionMinor*100 + VersionPatch
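For example, with VersionMajor=1, VersionMinor=5 and VersionPatch=0 as above, Version evaluates to 1*10000 + 5*100 + 0 = 10500.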
Variables ¶
This section is empty.
Functions ¶
func AdamUpdate ¶
func AdamUpdate(params, grads, mean, variance *NDArray, lr, beta1, beta2, epsilon, wd float64)
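A minimal sketch of applying AdamUpdate to a graph's trainable parameters after a backward pass. The package alias mx, the hyperparameter values, and the caller-managed mean/variance state maps are illustrative assumptions, not part of this reference.

    mean := map[string]*mx.NDArray{}
    variance := map[string]*mx.NDArray{}
    for name, p := range g.Params {
        if !g.Autograd[name] {
            continue // parameter is not trainable, skip the update
        }
        if mean[name] == nil { // lazily allocate Adam state, one pair per parameter
            mean[name] = p.NewLikeThis().Zeros()
            variance[name] = p.NewLikeThis().Zeros()
        }
        mx.AdamUpdate(p, g.Grads[name], mean[name], variance[name],
            0.001, // lr
            0.9,   // beta1
            0.999, // beta2
            1e-8,  // epsilon
            0)     // wd (weight decay)
    }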
func LibVersion ¶
func LibVersion() fu.VersionType
func Nograd ¶
func Nograd(_hidden_nograd_)
Types ¶
type ActivationType ¶
type ActivationType int
const (
    ActivReLU ActivationType = iota
    ActivSoftReLU
    ActivSoftSign
    ActivSigmoid
    ActivTanh
)
type Dimension ¶
type Dimension struct {
    Shape [MaxDimensionCount]int `yaml:"shape,flow"`
    Len   int                    `yaml:"len"`
}
Dimension represents the dimensions (shape) of an array.
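A small sketch of constructing a Dimension directly and querying it. It assumes Slice returns the used shape entries, Total is their product, and Push appends one more axis; the exact behavior of these methods is not spelled out on this page.

    d := mx.Dimension{Shape: [mx.MaxDimensionCount]int{32, 10}, Len: 2}
    fmt.Println(d.Slice()) // []int{32, 10}, assuming Slice returns the used shape entries
    fmt.Println(d.Total()) // 320, assuming Total is the product of those entries
    d = d.Push(3)          // assumed to append one more axis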
type Graph ¶
type Graph struct {
    Ctx   Context
    Dtype Dtype

    Input  *NDArray // network input, references Params["_input"]
    Output *NDArray // references Outputs["_output_output"]
    Loss   *NDArray // references Outputs["_loss_loss"]
    Label  *NDArray // loss function label, references Params["_label"]

    Outputs  map[string]*NDArray  // references executor outputs except the loss
    Params   map[string]*NDArray  // network parameters
    Shapes   map[string]Dimension // predefined param shapes
    Autograd map[string]bool      // whether a param can be trained
    Grads    map[string]*NDArray  // training gradients

    Exec capi.ExecutorHandle

    Initializers map[string]Inite
    Initialized  bool
    // contains filtered or unexported fields
}
func Compose ¶
func (*Graph) Identity ¶
func (g *Graph) Identity() GraphIdentity
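A sketch of one training step using the Graph fields and methods listed here. It assumes the package is imported as mx, that g was built and initialized elsewhere (for example via Compose and Initialize), and that batch and labels are float32 slices matching the input and label shapes.

    g.Input.SetValues(batch)  // batch is assumed to be a []float32 matching the input shape
    g.Label.SetValues(labels) // labels for the loss function
    g.Forward(true)           // forward pass in training mode
    g.Backward()              // fills g.Grads with gradients
    for name, p := range g.Params {
        if g.Autograd[name] {
            mx.SgdUpdate(p, g.Grads[name], 0.01, 0.0001) // lr, wd
        }
    }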
type GraphIdentity ¶
type GraphIdentity [20]byte // SHA1
func (GraphIdentity) String ¶
func (identity GraphIdentity) String() string
type GraphJs ¶
type NDArray ¶
type NDArray struct {
// contains filtered or unexported fields
}
func (*NDArray) Xavier ¶
func (a *NDArray) Xavier(gaussian bool, factor int, magnitude float32) *NDArray
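A sketch of creating and initializing an NDArray through a Context. The Float32 Dtype constant name and the way ctx is obtained are assumptions; only the Dtype and Context types themselves are listed on this page.

    var ctx mx.Context // obtained elsewhere, e.g. a CPU or GPU context
    a := ctx.Array(mx.Float32, mx.Dimension{Shape: [mx.MaxDimensionCount]int{2, 3}, Len: 2})
    a.Xavier(false, 2, 3)          // uniform Xavier init; factor and magnitude are illustrative
    fmt.Println(a.Dim(), a.Dtype())
    fmt.Println(a.ValuesF32())     // copy values out as []float32
    a.Release()                    // release the underlying handle when done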
type Summary ¶
type Summary []SummaryRow
type SummaryRow ¶
type Symbol ¶
type Symbol struct {
    Op     capi.MxnetOp             `yaml:"op"`
    Value  string                   `yaml:"value"`
    Name   string                   `yaml:"name"`
    Args   []*Symbol                `yaml:"args"`
    Init   Inite                    `yaml:"-"`
    Attr   map[capi.MxnetKey]string `yaml:"attr"`
    Dim    Dimension                `yaml:"dim"`
    Output bool                     `yaml:"output"`
}
func Activation ¶
func Activation(a *Symbol, actType ActivationType) *Symbol
func BatchNorm ¶
func Conv ¶
func GenericOp2 ¶
func GenericOp2(op, opScalar, opScalarR capi.MxnetOp, lv interface{}, rv interface{}) *Symbol
func Pool ¶
func Pool(a *Symbol, kernel, stride, padding Dimension, ceil bool, maxpool bool) *Symbol
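As a closing sketch, the Symbol constructors above compose into expression graphs roughly like this. Input is assumed to be callable with no arguments, and the variable names and composition pattern are illustrative rather than prescribed by the package.

    x := mx.Input()                    // network input placeholder
    w := mx.Var("w")
    b := mx.Var("b")
    y := mx.Add(mx.Dot(x, w), b)       // y = x·w + b
    y = mx.Activation(y, mx.ActivTanh) // element-wise tanh
    y = mx.Output(y, "y")              // mark y as a named graph output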
Source Files ¶