gorgonia.org/gorgonia@v0.9.17/op_infidel.go

package gorgonia

import (
	"fmt"
	"hash"

	"github.com/chewxy/hm"
	"gorgonia.org/tensor"
)

/*
This file contains code for Ops that aren't really functions, in the sense that they aren't pure.

Since they're not adherents to the Church of Lambda, they are INFIDELS! A fatwa will be issued on them shortly.

*/

// stmtOp is the interface satisfied by statement Ops: Ops that act as
// statements (bindings, reads, device transfers) rather than as pure functions.
type stmtOp interface {
	Op
	isStmt() bool
}

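// NB: the helper below is an illustrative sketch only. isStatementOp is a
// hypothetical name, not part of the upstream source; it simply shows how a
// statement Op can be told apart from an ordinary, pure Op with a type
// assertion on stmtOp.
func isStatementOp(op Op) bool {
	s, ok := op.(stmtOp)
	return ok && s.isStmt()
}
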
// letOp is not really a function; it's more of a binding statement.
// However, it's implemented as an Op so that it can be counted in register
// allocation and liveness analysis.
type letOp struct{}

func (op letOp) Arity() int                                                      { return 0 }
func (op letOp) Type() hm.Type                                                   { return nil }
func (op letOp) ReturnsPtr() bool                                                { return true }
func (op letOp) OverwritesInput() int                                            { return 0 }
func (op letOp) CallsExtern() bool                                               { return false }
func (op letOp) InferShape(...DimSizer) (tensor.Shape, error)                    { return nil, nil }
func (op letOp) DiffWRT(int) []bool                                              { return nil }
func (op letOp) SymDiff(inputs Nodes, outputNode, gradNode *Node) (Nodes, error) { return nil, nil }
func (op letOp) Do(vals ...Value) (Value, error)                                 { return nil, nil }
func (op letOp) String() string                                                  { return "=" }
func (op letOp) WriteHash(h hash.Hash)                                           { h.Write([]byte("let")) }
func (op letOp) Hashcode() uint32                                                { return simpleHash(op) }

func (op letOp) isStmt() bool { return true }

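// NB: illustrative sketch only, not part of the upstream source. These
// compile-time assertions confirm that letOp satisfies both Op and stmtOp,
// which is what allows it to be scheduled and counted like any other Op.
var (
	_ Op     = letOp{}
	_ stmtOp = letOp{}
)
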
// readOp reads a value off its input. This op ensures that the value is
// actually used, and hence is not codegen'd away.
type readOp struct {
	into *Value // no, it's not a mistake. It's a pointer to a Value (which is itself an interface type)
}

func (op readOp) Arity() int                                                      { return 0 }
func (op readOp) Type() hm.Type                                                   { return nil }
func (op readOp) ReturnsPtr() bool                                                { return true }
func (op readOp) OverwritesInput() int                                            { return 0 }
func (op readOp) CallsExtern() bool                                               { return false }
func (op readOp) InferShape(...DimSizer) (tensor.Shape, error)                    { return nil, nil }
func (op readOp) DiffWRT(int) []bool                                              { return nil }
func (op readOp) SymDiff(inputs Nodes, outputNode, gradNode *Node) (Nodes, error) { return nil, nil }
func (op readOp) Do(vals ...Value) (Value, error)                                 { return nil, nil }
func (op readOp) String() string                                                  { return "print" }
func (op readOp) WriteHash(h hash.Hash)                                           { fmt.Fprintf(h, "print %p", op.into) }
func (op readOp) Hashcode() uint32                                                { return simpleHash(op) }

func (op readOp) isStmt() bool { return true }

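// NB: illustrative sketch only; captureRead is a hypothetical helper, not part
// of the upstream source. The *Value field exists so that whoever executes the
// op can copy the computed result out to a location the caller still holds; in
// practice that write would be performed by whichever VM executes the read.
func captureRead(op readOp, v Value) {
	if op.into != nil {
		*op.into = v
	}
}
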
// devTrans is a dummy Op used to aid in creating the program that is run in a
// *tapeMachine. It is inserted not into the graph, but into a slice of sorted
// nodes, and will not show up in the graph.
type devTrans struct {
	from, to Device
	toNode   *Node
}

func (op devTrans) Arity() int                                   { panic("not implemented") }
func (op devTrans) Type() hm.Type                                { panic("not implemented") }
func (op devTrans) InferShape(...DimSizer) (tensor.Shape, error) { panic("not implemented") }
func (op devTrans) Do(...Value) (Value, error)                   { panic("not implemented") }
func (op devTrans) ReturnsPtr() bool                             { return false }
func (op devTrans) CallsExtern() bool                            { return true }
func (op devTrans) OverwritesInput() int                         { return -1 }
func (op devTrans) WriteHash(h hash.Hash)                        { fmt.Fprintf(h, "from:%vto%v", op.from, op.to) }
func (op devTrans) Hashcode() uint32                             { return simpleHash(op) }

func (op devTrans) String() string { return fmt.Sprintf("[CP %v %v]", op.from, op.to) }
func (op devTrans) isStmt() bool   { return true }

func (op devTrans) CUDADo(extern External, dev Device, prealloc Value, inputs ...Value) (retVal Value, err error) {
	return nil, nil
}
func (op devTrans) CUDAFuncName() string { return op.String() }
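
// NB: illustrative sketch only; exampleDevTrans is a hypothetical helper, not
// part of the upstream source. A devTrans is just a marker describing a copy
// between two devices, and its String form is what shows up in a printed
// program. The device values below are assumptions for illustration; the exact
// output depends on how Device implements fmt.Stringer.
func exampleDevTrans() string {
	op := devTrans{from: CPU, to: Device(0)}
	return op.String() // something along the lines of "[CP CPU GPU(0)]"
}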