gorgonia.org/gorgonia@v0.9.17/operatorPointwise_unary.go (about)

     1  package gorgonia
     2  
     3  import (
     4  	"github.com/pkg/errors"
     5  	"gorgonia.org/tensor"
     6  )
     7  
     8  // a ʘUnaryOperator is essentially a function that takes a float32 or float64 and returns the same
     9  // pros : no overloading = clear understanding
    10  // cons : no overloading = a lot of extra code
    11  //
    12  // There are TWO ʘUnaryOperator types so far:
    13  //		sf32UnaryOperator - scalar float32 unary operator
    14  //		sf64UnaryOperator - scalar float64 unary operator
    15  //
    16  // Because TensorTypes are parameterized by a scalar type, it isn't necessary to create operators
    17  // that will work on TensorTypes. A simple type switch will do.
    18  //
    19  // n.b.: ʘ is used to denote pointwiseness of the operator.
    20  // if you want to type it, it's U+0298 - Latin Letter Bilabial Click
// ʘUnaryOperator is the common interface of scalar unary operators.
// unaryOpType reports the operator's type tag, which is used for
// dispatch in unaryCheckApply; String provides a human-readable name.
type ʘUnaryOperator interface {
	unaryOpType() ʘUnaryOperatorType
	String() string
}

// sf32UnaryOperator is a scalar float32 unary operator.
type sf32UnaryOperator func(float32) float32

// sf64UnaryOperator is a scalar float64 unary operator.
type sf64UnaryOperator func(float64) float64
    28  
    29  // unaryCheckApply checks in a interface is fulfilled. If it is, that engine is used instead
    30  func unaryCheckApply(op ʘUnaryOperator, t tensor.Tensor, opts ...tensor.FuncOpt) (retVal tensor.Tensor, err error) {
    31  	e := t.Engine()
    32  	switch op.unaryOpType() {
    33  	case absOpType:
    34  		if oe, ok := e.(tensor.Abser); ok {
    35  			return oe.Abs(t, opts...)
    36  		}
    37  	case signOpType:
    38  		if oe, ok := e.(tensor.Signer); ok {
    39  			return oe.Sign(t, opts...)
    40  		}
    41  	case ceilOpType:
    42  	case floorOpType:
    43  	case sinOpType:
    44  	case cosOpType:
    45  	case expOpType:
    46  		if oe, ok := e.(tensor.Exper); ok {
    47  			return oe.Exp(t, opts...)
    48  		}
    49  	case lnOpType:
    50  		if oe, ok := e.(tensor.Loger); ok {
    51  			return oe.Log(t, opts...)
    52  		}
    53  	case log2OpType:
    54  		if oe, ok := e.(tensor.Log2er); ok {
    55  			return oe.Log2(t, opts...)
    56  		}
    57  	case negOpType:
    58  		if oe, ok := e.(tensor.Neger); ok {
    59  			return oe.Neg(t, opts...)
    60  		}
    61  	case squareOpType:
    62  		if oe, ok := e.(tensor.Squarer); ok {
    63  			return oe.Square(t, opts...)
    64  		}
    65  	case sqrtOpType:
    66  		if oe, ok := e.(tensor.Sqrter); ok {
    67  			return oe.Sqrt(t, opts...)
    68  		}
    69  	case inverseOpType:
    70  		if oe, ok := e.(tensor.Inver); ok {
    71  			return oe.Inv(t, opts...)
    72  		}
    73  	case inverseSqrtOpType:
    74  		if oe, ok := e.(tensor.InvSqrter); ok {
    75  			return oe.InvSqrt(t, opts...)
    76  		}
    77  	case cubeOpType:
    78  		if oe, ok := e.(tensor.Cuber); ok {
    79  			return oe.Cube(t, opts...)
    80  		}
    81  	case tanhOpType:
    82  		if oe, ok := e.(tensor.Tanher); ok {
    83  			return oe.Tanh(t, opts...)
    84  		}
    85  	case sigmoidOpType:
    86  	case log1pOpType:
    87  	case expm1OpType:
    88  	case softplusOpType:
    89  	}
    90  
    91  	//default case:
    92  	var fn interface{}
    93  	switch opFn := op.(type) {
    94  	case *sf64UnaryOperator:
    95  		fn = (func(float64) float64)(*opFn)
    96  	case *sf32UnaryOperator:
    97  		fn = (func(float32) float32)(*opFn)
    98  	}
    99  
   100  	return t.Apply(fn, opts...)
   101  }
   102  
   103  /*
   104  DIFFERENTIATION EXPRESSIONS
   105  
   106  All the functions here are expressed in terms of *Node and/or Nodes
   107  
   108  */
   109  
// nondiffUnaryOpExpr is the symbolic-differentiation stub used for
// unary ops with no derivative (e.g. sign, ceil, floor); it always errors.
func nondiffUnaryOpExpr(x, y, gradY *Node) (*Node, error) {
	return nil, errors.Errorf("Nondifferentiable Function")
}
// nondiffUnaryOp is the runtime-autodiff stub for nondifferentiable
// unary ops; it signals failure with an AutoDiffError.
func nondiffUnaryOp(x, y *Node) error {
	return AutoDiffError{}
}
   116  
   117  // apparently abs is differentiable
   118  func absDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   119  	if retVal, err = Sign(x); err != nil {
   120  		return nil, errors.Wrap(err, "Failed to call Sign()")
   121  	}
   122  	WithGroupName(gradClust)(retVal)
   123  
   124  	if retVal, err = HadamardProd(gradY, retVal); err != nil {
   125  		return nil, errors.Wrap(err, hadamardProdFail)
   126  	}
   127  	return
   128  }
   129  
// absDiff performs the runtime (value-level) backprop for abs:
// it accumulates sign(x) * dL/dy into x's gradient.
func absDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	var d Value
	sign := newElemUnaryOp(signOpType, x)
	if d, err = sign.Do(xdv.Value); err == nil {
		// sign.Do may have allocated a scratch tensor; return it to the pool.
		if dT, ok := d.(tensor.Tensor); ok {
			defer returnTensor(dT)
		}

		mul := newElemBinOp(mulOpType, y, x)
		// xdv.d += d * ydv.d; checkErrSetDeriv handles the case where the
		// result must be set instead of incremented (e.g. scalar values).
		err = mul.IncrDo(xdv.d, d, ydv.d)
		if err = checkErrSetDeriv(err, xdv); err != nil {
			return errors.Wrapf(err, autodiffFail, x)
		}
	}
	return
}
   148  
   149  // Solution here
   150  // https://www.symbolab.com/solver/step-by-step/%5Cfrac%7Bd%7D%7Bdx%7D%5Cleft(sin%5Cleft(x%5Cright)%5Cright)
   151  func sinDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   152  	if retVal, err = Cos(x); err == nil {
   153  		WithGroupName(gradClust)(retVal)
   154  		retVal, err = HadamardProd(retVal, gradY)
   155  		if err != nil {
   156  			return nil, errors.Wrap(err, hadamardProdFail)
   157  		}
   158  	} else {
   159  		return nil, errors.Wrap(err, "Failed to carry Cos()")
   160  	}
   161  	return
   162  }
   163  
// sinDiff performs the runtime backprop for sin:
// it accumulates cos(x) * dL/dy into x's gradient.
func sinDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	cos := newElemUnaryOp(cosOpType, x)

	var d Value
	if d, err = cos.Do(xdv.Value); err == nil {
		// cos.Do may have allocated a scratch tensor; return it to the pool.
		if dT, ok := d.(tensor.Tensor); ok {
			defer returnTensor(dT)
		}

		mul := newElemBinOp(mulOpType, x, y)
		// xdv.d += d * ydv.d
		err = mul.IncrDo(xdv.d, d, ydv.d)
		if err = checkErrSetDeriv(err, xdv); err != nil {
			return errors.Wrapf(err, autodiffFail, x)
		}
	}
	return
}
   183  
   184  // Solution here (then apply chain rule to result by multiplying gradY):
   185  // https://www.symbolab.com/solver/step-by-step/%5Cfrac%7Bd%7D%7Bdx%7D%5Cleft(cos%5Cleft(x%5Cright)%5Cright)
   186  func cosDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   187  	if retVal, err = Sin(x); err == nil {
   188  		WithGroupName(gradClust)(retVal)
   189  		if retVal, err = Neg(retVal); err == nil {
   190  			WithGroupName(gradClust)(retVal)
   191  			retVal, err = HadamardProd(retVal, gradY)
   192  			if err != nil {
   193  				return nil, errors.Wrap(err, hadamardProdFail)
   194  			}
   195  		} else {
   196  			return nil, errors.Wrap(err, negFail)
   197  		}
   198  	} else {
   199  		return nil, errors.Wrap(err, "Failed to call Sin()")
   200  	}
   201  	return
   202  }
   203  
// cosDiff performs the runtime backprop for cos:
// it accumulates -sin(x) * dL/dy into x's gradient.
func cosDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	sin := newElemUnaryOp(sinOpType, x)

	var d Value
	if d, err = sin.Do(xdv.Value); err == nil {
		// sin.Do may have allocated a scratch tensor; return it to the pool.
		if dT, ok := d.(tensor.Tensor); ok {
			defer returnTensor(dT)
		}

		// negate in place — d is a scratch value we own.
		neg := newElemUnaryOp(negOpType, x)
		if d, err = neg.UnsafeDo(d); err == nil {
			mul := newElemBinOp(mulOpType, x, y)
			// xdv.d += d * ydv.d
			err = mul.IncrDo(xdv.d, d, ydv.d)
			if err = checkErrSetDeriv(err, xdv); err != nil {
				return errors.Wrapf(err, autodiffFail, x)
			}

		}
	}
	return
}
   227  
// expDiffExpr builds the symbolic gradient of exp:
// d(e^x)/dx = e^x = y, so the gradient is simply y * gradY.
func expDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	return HadamardProd(y, gradY)
}
   231  
// expDiff performs the runtime backprop for exp:
// since d(e^x)/dx = e^x = y, it accumulates y * dL/dy into x's gradient.
func expDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	mul := newElemBinOp(mulOpType, x, y)
	// xdv.d += ydv.Value * ydv.d
	err = mul.IncrDo(xdv.d, ydv.Value, ydv.d)
	if err = checkErrSetDeriv(err, xdv); err != nil {
		return errors.Wrapf(err, autodiffFail, x)
	}
	return
}
   242  
// lnDiffExpr builds the symbolic gradient of ln.
// The solution is 1/x; upon multiplying with gradY for the chain rule,
// it simply becomes gradY/x.
func lnDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	return HadamardDiv(gradY, x)
}
   248  
// lnDiff performs the runtime backprop for ln:
// it accumulates dL/dy / x into x's gradient.
func lnDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	div := newElemBinOp(divOpType, y, x)

	// xdv.d += ydv.d / xdv.Value
	err = div.IncrDo(xdv.d, ydv.d, xdv.Value)
	if err = checkErrSetDeriv(err, xdv); err != nil {
		return errors.Wrapf(err, autodiffFail, x)
	}

	return
}
   261  
   262  // 1/(x*ln(2))
   263  func log2DiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   264  	var log2 *Node
   265  	if log2, err = getConst(x, "log2"); err != nil {
   266  		return nil, errors.Wrap(err, "getConst failed")
   267  	}
   268  
   269  	if retVal, err = HadamardDiv(x, log2); err != nil {
   270  		return nil, errors.Wrap(err, hadamardProdFail)
   271  	}
   272  	WithGroupName(gradClust)(retVal)
   273  	if retVal, err = HadamardDiv(gradY, retVal); err != nil {
   274  		return nil, errors.Wrap(err, hadamardDivFail)
   275  	}
   276  	return
   277  }
   278  
// log2Diff performs the runtime backprop for log2:
// it accumulates dL/dy / (x * c) into x's gradient, where c is the
// "log2" constant (presumably ln(2) — TODO confirm against getConst).
func log2Diff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	var log2 *Node
	if log2, err = getConst(x, "log2"); err != nil {
		return errors.Wrap(err, "getConst failed")
	}

	// d = x * c (the denominator of the derivative)
	mul := newElemBinOp(mulOpType, x, log2)
	var d Value
	if d, err = mul.Do(xdv.Value, log2.boundTo); err != nil {
		return errors.Wrapf(err, doFail, mul)
	}

	// d is a scratch value; return it to the pool when done.
	if dT, ok := d.(tensor.Tensor); ok {
		defer returnTensor(dT)
	}

	// xdv.d += ydv.d / d
	div := newElemBinOp(divOpType, y, x)
	err = div.IncrDo(xdv.d, ydv.d, d)
	if err = checkErrSetDeriv(err, xdv); err != nil {
		return errors.Wrapf(err, autodiffFail, x)
	}

	return
}
   305  
// negDiffExpr builds the symbolic gradient of neg:
// d(-x)/dx = -1, so the gradient is simply -gradY.
func negDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	return Neg(gradY)
}
   309  
// negDiff performs the runtime backprop for neg:
// it subtracts dL/dy from x's gradient (d(-x)/dx = -1).
func negDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	// xdv.d - ydv.d, reusing xdv.d's storage where possible
	sub := newElemBinOp(subOpType, x, y)
	var d Value
	d, err = sub.UnsafeDo(xdv.d, ydv.d)

	// first we check if what essentially is a noIncrError is called
	if err = checkErrSetDeriv(err, xdv); err != nil {
		return errors.Wrapf(err, autodiffFail, x)
	}

	// then we set derivs, if d is a scalar
	// (scalars cannot be mutated in place, so the result must be stored explicitly)
	if _, ok := xdv.Value.(Scalar); ok {
		if err = xdv.SetDeriv(d); err != nil {
			return errors.Wrapf(err, autodiffFail, x)
		}
	}

	return
}
   331  
   332  func squareDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   333  	var two *Node
   334  	if two, err = getConst(x, "two"); err != nil {
   335  		return nil, errors.Wrap(err, "getConst failed")
   336  	}
   337  
   338  	// symdiffLogf("X %v and TWO %v", x.Shape(), two.Shape())
   339  	if retVal, err = HadamardProd(x, two); err == nil {
   340  		symdiffLogf("Spawned: %d", retVal.ID())
   341  		WithGroupName(gradClust)(retVal)
   342  		retVal, err = HadamardProd(retVal, gradY)
   343  		if err != nil {
   344  			return nil, errors.Wrap(err, hadamardProdFail)
   345  		}
   346  		symdiffLogf("Spawned: %d", retVal.ID())
   347  	} else {
   348  		return nil, errors.Wrap(err, hadamardProdFail)
   349  	}
   350  	return
   351  }
   352  
// squareDiff performs the runtime backprop for square:
// it accumulates 2x * dL/dy into x's gradient.
func squareDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	var two *Node
	if two, err = getConst(x, "two"); err != nil {
		return errors.Wrap(err, "getConst failed")
	}

	var d Value
	mul := newElemBinOp(mulOpType, x, y)
	// d = 2x
	if d, err = mul.Do(xdv.Value, two.boundTo); err == nil {
		// d is a scratch value; return it to the pool when done.
		if dT, ok := d.(tensor.Tensor); ok {
			defer returnTensor(dT)
		}

		// xdv.d += d * ydv.d
		err = mul.IncrDo(xdv.d, d, ydv.d)
		if err = checkErrSetDeriv(err, xdv); err != nil {
			return errors.Wrapf(err, autodiffFail, x)
		}
	}
	return
}
   375  
   376  func sqrtDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   377  	var two *Node
   378  	if two, err = getConst(x, "two"); err != nil {
   379  		return nil, errors.Wrap(err, "getConst failed")
   380  	}
   381  
   382  	if retVal, err = HadamardProd(two, y); err == nil {
   383  		WithGroupName(gradClust)(retVal)
   384  		retVal, err = HadamardDiv(gradY, retVal)
   385  		if err != nil {
   386  			return nil, errors.Wrap(err, hadamardDivFail)
   387  		}
   388  	} else {
   389  		return nil, errors.Wrap(err, hadamardProdFail)
   390  	}
   391  	return
   392  }
   393  
// sqrtDiff performs the runtime backprop for sqrt:
// it accumulates dL/dy / (2y) into x's gradient.
func sqrtDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	var two *Node
	if two, err = getConst(x, "two"); err != nil {
		return errors.Wrap(err, "getConst failed")
	}

	mul := newElemBinOp(mulOpType, x, y)

	var d Value
	// d = 2y (the denominator of the derivative)
	if d, err = mul.Do(ydv.Value, two.boundTo); err == nil {
		// d is a scratch value; return it to the pool when done.
		if dT, ok := d.(tensor.Tensor); ok {
			defer returnTensor(dT)
		}

		// xdv.d += ydv.d / d
		div := newElemBinOp(divOpType, y, x)
		err = div.IncrDo(xdv.d, ydv.d, d)
		if err = checkErrSetDeriv(err, xdv); err != nil {
			return errors.Wrapf(err, autodiffFail, x)
		}
	}
	return
}
   418  
   419  func inverseDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   420  	if retVal, err = HadamardProd(y, y); err == nil {
   421  		WithGroupName(gradClust)(retVal)
   422  		if retVal, err = Neg(retVal); err == nil {
   423  			WithGroupName(gradClust)(retVal)
   424  			retVal, err = HadamardProd(retVal, gradY)
   425  			if err != nil {
   426  				return nil, errors.Wrap(err, hadamardProdFail)
   427  			}
   428  		} else {
   429  			return nil, errors.Wrap(err, negFail)
   430  		}
   431  	} else {
   432  		return nil, errors.Wrap(err, hadamardProdFail)
   433  	}
   434  	return
   435  }
   436  
// inverseDiff performs the runtime backprop for inverse:
// it accumulates -y² * dL/dy into x's gradient (d(1/x)/dx = -1/x² = -y²).
func inverseDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	sq := newElemUnaryOp(squareOpType, y)

	var d Value
	// d = y²
	if d, err = sq.Do(ydv.Value); err != nil {
		return errors.Wrapf(err, doFail, sq)
	}

	// d = -y²
	neg := newElemUnaryOp(negOpType, y)
	if d, err = neg.Do(d); err != nil {
		return errors.Wrapf(err, doFail, neg)
	}
	// d is a scratch value; return it to the pool when done.
	if dT, ok := d.(tensor.Tensor); ok {
		defer returnTensor(dT)
	}

	// xdv.d += d * ydv.d
	mul := newElemBinOp(mulOpType, y, y)
	err = mul.IncrDo(xdv.d, d, ydv.d)
	if err = checkErrSetDeriv(err, xdv); err != nil {
		return errors.Wrapf(err, autodiffFail, x)
	}
	return
}
   462  
// inverseSqrtDiffExpr builds the symbolic gradient of inverseSqrt:
// d(x^-½)/dx = -½·x^-³ᐟ² = -y³/2, so with the chain rule the gradient
// is -gradY/(2y³).
func inverseSqrtDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	var two *Node
	if two, err = getConst(x, "two"); err != nil {
		return nil, errors.Wrap(err, "getConst failed")
	}
	// retVal = y³
	if retVal, err = Cube(y); err != nil {
		return nil, errors.Wrapf(err, cubeFail)
	}
	// retVal = 2y³
	if retVal, err = HadamardProd(two, retVal); err != nil {
		return nil, errors.Wrapf(err, hadamardProdFail)
	}
	// retVal = gradY/(2y³)
	if retVal, err = HadamardDiv(gradY, retVal); err != nil {
		return nil, errors.Wrapf(err, hadamardDivFail)
	}
	return Neg(retVal)
}
   479  
// inverseSqrtDiff performs the runtime backprop for inverseSqrt:
// the intended update is xdv.d -= ydv.d/(2y³).
func inverseSqrtDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)
	var two *Node
	if two, err = getConst(x, "two"); err != nil {
		return errors.Wrap(err, "getConst failed")
	}

	// d = y³
	cb := newElemUnaryOp(cubeOpType, y)
	var d Value
	if d, err = cb.Do(ydv.Value); err != nil {
		return errors.Wrapf(err, doFail, cb)
	}

	// d = 2y³
	mul := newElemBinOp(mulOpType, x, y)
	if d, err = mul.Do(two.boundTo, d); err != nil {
		return errors.Wrapf(err, doFail, mul)
	}

	// d = ydv.d/(2y³)
	div := newElemBinOp(divOpType, y, x)
	if d, err = div.Do(ydv.d, d); err != nil {
		return errors.Wrapf(err, doFail, div)
	}

	// NOTE(review): the result of sub.Do is discarded here — if Do is the
	// safe (allocating) variant, xdv.d is never actually updated. The
	// sibling Diff functions use IncrDo/UnsafeDo + checkErrSetDeriv for
	// this step; verify whether this should follow the same pattern.
	sub := newElemBinOp(subOpType, x, y)
	if _, err = sub.Do(xdv.d, d); err != nil {
		return errors.Wrapf(err, doFail, sub)
	}
	return nil
}
   509  
   510  func cubeDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   511  	var three *Node
   512  	if three, err = getConst(x, "three"); err != nil {
   513  		return nil, errors.Wrap(err, "getConst failed")
   514  	}
   515  
   516  	if retVal, err = HadamardProd(x, x); err == nil {
   517  		WithGroupName(gradClust)(retVal)
   518  		if retVal, err = HadamardProd(retVal, three); err == nil {
   519  			WithGroupName(gradClust)(retVal)
   520  			retVal, err = HadamardProd(retVal, gradY)
   521  			if err != nil {
   522  				return nil, errors.Wrap(err, hadamardProdFail)
   523  			}
   524  		} else {
   525  			return nil, errors.Wrap(err, hadamardProdFail)
   526  		}
   527  	} else {
   528  		return nil, errors.Wrap(err, hadamardProdFail)
   529  	}
   530  	return
   531  }
   532  
// cubeDiff performs the runtime backprop for cube:
// it accumulates 3x² * dL/dy into x's gradient.
func cubeDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	var three *Node
	if three, err = getConst(x, "three"); err != nil {
		return errors.Wrap(err, "getConst failed")
	}

	// d = x²
	mul := newElemBinOp(mulOpType, x, y)
	var d Value
	if d, err = mul.Do(xdv.Value, xdv.Value); err != nil {
		return errors.Wrapf(err, doFail, mul)
	}

	// d is a scratch value; return it to the pool when done.
	if dT, ok := d.(tensor.Tensor); ok {
		defer returnTensor(dT)
	}

	// d = 3x² — computed in place, since d is scratch storage we own
	if d, err = mul.UnsafeDo(d, three.boundTo); err != nil {
		return errors.Wrapf(err, unsafeDoFail, mul)
	}

	// xdv.d += d * ydv.d
	err = mul.IncrDo(xdv.d, d, ydv.d)
	if err = checkErrSetDeriv(err, xdv); err != nil {
		return errors.Wrapf(err, autodiffFail, x)
	}
	return
}
   561  
   562  func tanhDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   563  	var one *Node
   564  	if one, err = getConst(x, "one"); err != nil {
   565  		return nil, errors.Wrap(err, "getConst failed")
   566  	}
   567  
   568  	if retVal, err = HadamardProd(y, y); err == nil {
   569  		WithGroupName(gradClust)(retVal)
   570  		if retVal, err = Sub(one, retVal); err == nil {
   571  			WithGroupName(gradClust)(retVal)
   572  			retVal, err = HadamardProd(retVal, gradY)
   573  			if err != nil {
   574  				return nil, errors.Wrap(err, hadamardProdFail)
   575  			}
   576  		} else {
   577  			return nil, errors.Wrap(err, subFail)
   578  		}
   579  	} else {
   580  		return nil, errors.Wrap(err, hadamardProdFail)
   581  	}
   582  	return
   583  }
   584  
// tanhDiff performs the runtime backprop for tanh:
// it accumulates (1 - y²) * dL/dy into x's gradient.
func tanhDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	var one *Node
	if one, err = getConst(x, "one"); err != nil {
		return errors.Wrap(err, "getConst failed")
	}

	sq := newElemUnaryOp(squareOpType, y)

	// d = y²
	var d Value
	if d, err = sq.Do(ydv.Value); err != nil {
		return errors.Wrapf(err, doFail, sq)
	}

	// d is a scratch value; return it to the pool when done.
	if dT, ok := d.(tensor.Tensor); ok {
		defer returnTensor(dT)
	}

	// d = 1 - y²
	// NOTE(review): UnsafeDo is called with the constant's value as the
	// first operand — confirm it does not mutate one.boundTo in place.
	sub := newElemBinOp(subOpType, one, y)
	if d, err = sub.UnsafeDo(one.boundTo, d); err != nil {
		return errors.Wrapf(err, unsafeDoFail, sub)
	}

	// xdv.d += d * ydv.d
	mul := newElemBinOp(mulOpType, x, y)
	err = mul.IncrDo(xdv.d, d, ydv.d)
	if err = checkErrSetDeriv(err, xdv); err != nil {
		return errors.Wrapf(err, autodiffFail, x)
	}
	return
}
   616  
   617  func sigmoidDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   618  	var one *Node
   619  	if one, err = getConst(x, "one"); err != nil {
   620  		return nil, errors.Wrap(err, "getConst failed")
   621  	}
   622  
   623  	if retVal, err = Sub(one, y); err == nil {
   624  		WithGroupName(gradClust)(retVal)
   625  		if retVal, err = HadamardProd(y, retVal); err == nil {
   626  			WithGroupName(gradClust)(retVal)
   627  			retVal, err = HadamardProd(retVal, gradY)
   628  			if err != nil {
   629  				return nil, errors.Wrap(err, hadamardProdFail)
   630  			}
   631  		} else {
   632  			return nil, errors.Wrap(err, hadamardProdFail)
   633  		}
   634  	} else {
   635  		return nil, errors.Wrap(err, subFail)
   636  	}
   637  	return
   638  }
   639  
// sigmoidDiff performs the runtime backprop for sigmoid:
// it accumulates y(1-y) * dL/dy into x's gradient.
func sigmoidDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	var one *Node
	if one, err = getConst(x, "one"); err != nil {
		return errors.Wrap(err, "getConst failed")
	}

	sub := newElemBinOp(subOpType, one, y)

	// d = 1 - y
	var d Value
	if d, err = sub.Do(one.boundTo, ydv.Value); err != nil {
		return errors.Wrapf(err, doFail, sub)
	}

	// d is a scratch value; return it to the pool when done.
	if dT, ok := d.(tensor.Tensor); ok {
		defer returnTensor(dT)
	}

	// d = y(1 - y) — computed in place, since d is scratch storage we own
	mul := newElemBinOp(mulOpType, x, y)
	if d, err = mul.UnsafeDo(d, ydv.Value); err != nil {
		return errors.Wrapf(err, unsafeDoFail, mul)
	}

	// xdv.d += d * ydv.d
	err = mul.IncrDo(xdv.d, d, ydv.d)
	if err = checkErrSetDeriv(err, xdv); err != nil {
		return errors.Wrapf(err, autodiffFail, x)
	}
	return
}
   670  
   671  // 1/(x+1)
   672  func log1pDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   673  	var one *Node
   674  	if one, err = getConst(x, "one"); err != nil {
   675  		return nil, errors.Wrap(err, "getConst failed")
   676  	}
   677  
   678  	if retVal, err = Add(x, one); err == nil {
   679  		WithGroupName(gradClust)(retVal)
   680  		retVal, err = HadamardDiv(gradY, retVal)
   681  		if err != nil {
   682  			return nil, errors.Wrap(err, hadamardProdFail)
   683  		}
   684  	} else {
   685  		return nil, errors.Wrap(err, "Failed to carry Add()")
   686  	}
   687  	return
   688  }
   689  
// log1pDiff performs the runtime backprop for log1p:
// it accumulates dL/dy / (x+1) into x's gradient.
func log1pDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	var one *Node
	if one, err = getConst(x, "one"); err != nil {
		return errors.Wrap(err, "getConst failed")
	}

	add := newElemBinOp(addOpType, x, one)

	// d = x + 1 (the denominator of the derivative)
	var d Value
	if d, err = add.Do(xdv.Value, one.boundTo); err != nil {
		return errors.Wrapf(err, doFail, add)
	}

	// d is a scratch value; return it to the pool when done.
	if dT, ok := d.(tensor.Tensor); ok {
		defer returnTensor(dT)
	}

	// xdv.d += ydv.d / d
	div := newElemBinOp(divOpType, y, x)
	err = div.IncrDo(xdv.d, ydv.d, d)
	if err = checkErrSetDeriv(err, xdv); err != nil {
		return errors.Wrapf(err, autodiffFail, x)
	}
	return
}
   716  
   717  func expm1DiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   718  	if retVal, err = Exp(x); err == nil {
   719  		WithGroupName(gradClust)(retVal)
   720  		return HadamardProd(gradY, retVal)
   721  	}
   722  	return nil, errors.Wrap(err, "Failled to carry Exp()")
   723  }
   724  
// expm1Diff performs the runtime backprop for expm1:
// it accumulates e^x * dL/dy into x's gradient.
func expm1Diff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	exp := newElemUnaryOp(expOpType, x)

	// d = e^x
	var d Value
	if d, err = exp.Do(xdv.Value); err != nil {
		return errors.Wrapf(err, doFail, exp)
	}

	// d is a scratch value; return it to the pool when done.
	if dT, ok := d.(tensor.Tensor); ok {
		defer returnTensor(dT)
	}

	// xdv.d += d * ydv.d
	mul := newElemBinOp(mulOpType, x, y)
	err = mul.IncrDo(xdv.d, d, ydv.d)
	if err = checkErrSetDeriv(err, xdv); err != nil {
		return errors.Wrapf(err, autodiffFail, x)
	}
	return
}
   746  
   747  func softplusDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
   748  	if retVal, err = Sigmoid(x); err == nil {
   749  		WithGroupName(gradClust)(retVal)
   750  		return HadamardProd(retVal, gradY)
   751  	}
   752  	return nil, errors.Wrap(err, "Failed to carry Sigmoid()")
   753  }
   754  
// softplusDiff performs the runtime backprop for softplus:
// it accumulates σ(x) * dL/dy into x's gradient.
func softplusDiff(x, y *Node) (err error) {
	xdv, ydv := getDV(x, y)

	sigmoid := newElemUnaryOp(sigmoidOpType, x)

	// d = σ(x)
	var d Value
	if d, err = sigmoid.Do(xdv.Value); err != nil {
		return errors.Wrapf(err, doFail, sigmoid)
	}

	// d is a scratch value; return it to the pool when done.
	if dT, ok := d.(tensor.Tensor); ok {
		defer returnTensor(dT)
	}

	// xdv.d += d * ydv.d
	mul := newElemBinOp(mulOpType, x, y)
	err = mul.IncrDo(xdv.d, d, ydv.d)
	if err = checkErrSetDeriv(err, xdv); err != nil {
		return errors.Wrapf(err, autodiffFail, x)
	}
	return
}