Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class ManhattanDistance, method doDiff.
@Override
public List<SDVariable> doDiff(List<SDVariable> i_v1) {
    // ddist(x,y)/dxi = sign(xi - yi)
    SDVariable difference = larg().sub(rarg());
    SDVariable gradBroadcastable;
    // TODO shape may not always be defined?
    int origRank = Shape.rankFromShape(arg().getShape());
    if (dimensions.length == 1 && dimensions[0] == Integer.MAX_VALUE) {
        // 1x1 output case: full reduction, so the incoming gradient is a scalar
        // and broadcasts against the inputs as-is
        gradBroadcastable = i_v1.get(0);
    } else {
        // partial reduction: expand the reduced gradient back to a shape
        // broadcastable against the original inputs
        gradBroadcastable = f().reductionBroadcastableWithOrigShape(origRank, dimensions, i_v1.get(0));
    }
    SDVariable gradX = sameDiff.sign(difference).mul(gradBroadcastable);
    SDVariable gradY = f().neg(gradX);
    return Arrays.asList(gradX, gradY);
}
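For intuition, the rule being implemented is d/dxi sum_i |xi - yi| = sign(xi - yi), scaled by the upstream gradient. Below is a minimal, self-contained sketch of the same rule on plain double arrays; the class and method names are hypothetical illustrations, not nd4j API.

import java.util.Arrays;

public class ManhattanGradSketch {
    // d/dxi sum_i |xi - yi| = sign(xi - yi), multiplied by the upstream gradient,
    // mirroring sameDiff.sign(difference).mul(gradBroadcastable) above
    static double[] gradX(double[] x, double[] y, double upstream) {
        double[] grad = new double[x.length];
        for (int i = 0; i < x.length; i++) {
            grad[i] = Math.signum(x[i] - y[i]) * upstream;
        }
        return grad;
    }

    public static void main(String[] args) {
        double[] x = {1.0, -2.0, 3.0};
        double[] y = {0.5, 1.0, 3.0};
        // gradY would just be the negation of gradX, matching f().neg(gradX) above
        System.out.println(Arrays.toString(gradX(x, y, 1.0))); // [1.0, -1.0, 0.0]
    }
}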
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class BaseOp, method outputVariables.
@Override
public SDVariable[] outputVariables(String baseName) {
    if (zVertexId == null) {
        val outputNames = sameDiff.getOutputsForFunction(this);
        // no need to dynamically create if it already exists
        if (outputNames != null) {
            zVertexId = sameDiff.getVariable(outputNames[0]).getVarName();
            return new SDVariable[] {sameDiff.getVariable(outputNames[0])};
        }

        if (isInPlace()) {
            val newVars = sameDiff.generateOutputVariableForOp(this, null);
            val inputArr = x();
            // in-place op: the output array is the input array
            if (inputArr == null) {
                return newVars;
            }
            sameDiff.putArrayForVarName(newVars[0].getVarName(), inputArr);
            z = inputArr;
            if (sameDiff.getOutputsForFunction(this) == null) {
                sameDiff.addOutgoingFor(newVars, this);
            }
            return newVars;
        }

        val newVars = sameDiff.generateOutputVariableForOp(this, null);
        INDArray arr;
        if (newVars == null || newVars.length < 1 || newVars[0].getShape() == null) {
            // shape not known yet; defer allocation
            arr = null;
        } else if (newVars[0].getArr() == null) {
            arr = newVars[0].storeAndAllocateNewArray();
        } else {
            arr = newVars[0].getArr();
        }

        if (arr == null) {
            // shape was not available up front; try to compute it now and allocate
            val shapes = calculateOutputShape();
            if (shapes != null && !shapes.isEmpty() && shapes.get(0) != null) {
                sameDiff.putShapeForVarName(newVars[0].getVarName(), shapes.get(0));
                arr = newVars[0].storeAndAllocateNewArray();
            }
        }

        z = arr;
        if (sameDiff.getOutputsForFunction(this) == null) {
            sameDiff.addOutgoingFor(newVars, this);
        }
        return newVars;
    }
    return new SDVariable[] {sameDiff.getVariable(zVertexId)};
}
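Structurally, outputVariables is a memoized factory: the first call creates or resolves the output variables and caches zVertexId, and every later call returns the cached variable. Here is a minimal, self-contained sketch of that create-once, cache-the-id pattern; all names are hypothetical stand-ins, not nd4j API.

import java.util.HashMap;
import java.util.Map;

public class CachedOutputSketch {
    private final Map<String, double[]> variables = new HashMap<>();
    private String cachedOutputId;  // plays the role of zVertexId
    private int counter = 0;

    double[] outputVariable() {
        if (cachedOutputId == null) {
            // first call: create and register the output, then remember its id
            String id = "out-" + (counter++);
            variables.put(id, new double[4]);
            cachedOutputId = id;
        }
        // later calls resolve the cached id instead of creating a new variable
        return variables.get(cachedOutputId);
    }

    public static void main(String[] args) {
        CachedOutputSketch op = new CachedOutputSketch();
        System.out.println(op.outputVariable() == op.outputVariable()); // true: same cached array
    }
}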
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class OnnxGraphMapper, method mapNodeType.
@Override
public void mapNodeType(OnnxProto3.NodeProto tfNode, ImportState<OnnxProto3.GraphProto, onnx.OnnxProto3.TypeProto.Tensor> importState) {
    // note: the parameter is named tfNode even though it holds an ONNX node,
    // a naming apparently carried over from the TensorFlow mapper
    val differentialFunction = DifferentialFunctionClassHolder.getInstance().getOpWithOnnxName(tfNode.getOpType());
    if (differentialFunction == null) {
        throw new NoOpNameFoundException("No op name found " + tfNode.getOpType());
    }
    val diff = importState.getSameDiff();
    val idx = importState.getGraph().getNodeList().indexOf(tfNode);
    val name = !tfNode.getName().isEmpty() ? tfNode.getName() : String.valueOf(idx);
    try {
        val newInstance = differentialFunction.getClass().newInstance();
        val args = new SDVariable[tfNode.getInputCount()];
        newInstance.setSameDiff(importState.getSameDiff());
        newInstance.initFromOnnx(tfNode, diff, getAttrMap(tfNode), importState.getGraph());
        importState.getSameDiff().putFunctionForId(newInstance.getOwnName(), newInstance);
        // ensure we can track node name to function instance later
        diff.setBaseNameForFunctionInstanceId(tfNode.getName(), newInstance);
        diff.addVarNameForImport(tfNode.getName());
    } catch (Exception e) {
        e.printStackTrace();
    }
}
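The mapper's core move is a name-based registry lookup followed by reflective instantiation via newInstance(). Below is a minimal sketch of that pattern under hypothetical names (OPS, ReluOp); it is not the nd4j registry itself.

import java.util.HashMap;
import java.util.Map;

public class OpRegistrySketch {
    // hypothetical registry mapping op-type names to implementing classes,
    // standing in for DifferentialFunctionClassHolder
    private static final Map<String, Class<? extends Runnable>> OPS = new HashMap<>();
    static {
        OPS.put("Relu", ReluOp.class);  // hypothetical op class
    }

    static Runnable instantiate(String onnxOpName) throws Exception {
        Class<? extends Runnable> clazz = OPS.get(onnxOpName);
        if (clazz == null) {
            // mirrors the NoOpNameFoundException thrown above
            throw new IllegalArgumentException("No op name found " + onnxOpName);
        }
        // the mapper calls Class.newInstance(), which requires a public no-arg
        // constructor; getDeclaredConstructor().newInstance() is the modern form
        return clazz.getDeclaredConstructor().newInstance();
    }

    static class ReluOp implements Runnable {
        public void run() { /* apply max(0, x) elementwise */ }
    }

    public static void main(String[] args) throws Exception {
        instantiate("Relu").run();
    }
}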
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class DifferentialFunction, method diff.
/**
 * Perform automatic differentiation
 * with respect to the input variables.
 *
 * @param i_v1 the input variables
 * @return the differentiated output with respect to each input variable
 */
public List<SDVariable> diff(List<SDVariable> i_v1) {
    List<SDVariable> vals = doDiff(i_v1);
    if (vals == null) {
        throw new IllegalStateException("Error executing diff operation: doDiff returned null for op: " + this.opName());
    }

    // args() returns the op's input variables; we accumulate one gradient per input
    val outputVars = args();
    for (int i = 0; i < vals.size(); i++) {
        SDVariable var = outputVars[i];
        SDVariable grad = var.getGradient();
        if (grad != null) {
            // a gradient already exists for this variable: accumulate into it
            SDVariable gradVar = f().add(grad, vals.get(i));
            try {
                vals.set(i, gradVar);
            } catch (UnsupportedOperationException e) {
                throw new UnsupportedOperationException("Use a mutable list when returning values from " + this.getClass().getSimpleName() + ".doDiff (e.g. Arrays.asList instead of Collections.singletonList)", e);
            }
            sameDiff.setGradientForVariableName(var.getVarName(), gradVar);
        } else {
            // first gradient contribution for this variable: register it under "<name>-grad"
            SDVariable gradVar = vals.get(i);
            sameDiff.updateVariableNameAndReference(gradVar, var.getVarName() + "-grad");
            sameDiff.setGradientForVariableName(var.getVarName(), gradVar);
            sameDiff.setForwardVariableForVarName(gradVar.getVarName(), var);
        }
    }
    return vals;
}
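The important behavior here is gradient accumulation: when a variable feeds several consumers, each consumer contributes a partial gradient, and diff sums them via f().add rather than overwriting. Below is a minimal sketch of that accumulate-or-set logic; the map-of-scalars representation is a hypothetical simplification, not the SameDiff data structure.

import java.util.HashMap;
import java.util.Map;

public class GradAccumulationSketch {
    private final Map<String, Double> gradients = new HashMap<>();

    void accumulateGradient(String varName, double partial) {
        Double existing = gradients.get(varName);
        if (existing != null) {
            // the variable feeds several ops: sum the partial gradients,
            // matching f().add(grad, vals.get(i)) above
            gradients.put(varName, existing + partial);
        } else {
            // first contribution: just register it, as in the else branch above
            gradients.put(varName, partial);
        }
    }

    public static void main(String[] args) {
        GradAccumulationSketch g = new GradAccumulationSketch();
        g.accumulateGradient("x", 0.5);
        g.accumulateGradient("x", 0.25);          // x feeds two consumers
        System.out.println(g.gradients.get("x")); // 0.75
    }
}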
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class DifferentialFunction, method getZ.
@JsonIgnore
private INDArray getZ() {
    // for in-place ops, the output array is the input array
    if (isInPlace())
        return getX();
    SDVariable output = outputVariables()[0];
    return output.getArr();
}
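getZ mirrors the in-place branch of outputVariables above: for an in-place op, the output buffer simply aliases the input. A minimal, hypothetical sketch of that aliasing on plain arrays:

public class InPlaceSketch {
    // negate elementwise; when inPlace is true the result is the input array
    // itself, just as getZ() returns getX() for in-place ops
    static double[] applyNegate(double[] x, boolean inPlace) {
        double[] z = inPlace ? x : new double[x.length];  // alias or allocate
        for (int i = 0; i < x.length; i++) {
            z[i] = -x[i];
        }
        return z;
    }

    public static void main(String[] args) {
        double[] x = {1.0, -2.0};
        System.out.println(applyNegate(x, true) == x); // true: output aliases input
    }
}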