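A custom Deeplearning4j output layer, written with the SameDiff API, that computes the soft Dice loss for binary segmentation masks (here, brain-tumor segmentation). Per example, with predictions p and labels y summed over all non-batch dimensions, the loss is 1 - 2*sum(p*y) / (sum(y^2) + sum(p^2) + epsilon), where epsilon guards against division by zero on empty masks.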
package ma.enset.brain_tumor_segmentation;

import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.samediff.SDLayerParams;
import org.deeplearning4j.nn.conf.layers.samediff.SameDiffOutputLayer;
import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.ndarray.INDArray;

import java.util.Map;

public class SameDiffDiceLossLayer extends SameDiffOutputLayer {

    // Small constant to prevent division by zero (e.g., when prediction and mask are both empty).
    private double epsilon = 1.0e-8;

    @Override
    public SDVariable defineLayer(SameDiff sameDiff, SDVariable layerInput, SDVariable labels,
                                  Map<String, SDVariable> paramTable) {
        // Soft Dice coefficient per example: 2*sum(p*y) / (sum(y^2) + sum(p^2) + eps).
        // Sum over dimensions 1, 2, 3 only, keeping dimension 0 (the batch dimension).
        SDVariable numerator = layerInput.mul(labels).sum(1, 2, 3).mul(2);
        SDVariable denominator = labels.mul(labels).sum(1, 2, 3)
                .add(layerInput.mul(layerInput).sum(1, 2, 3))
                .add(epsilon);
        SDVariable dice = numerator.div(denominator);
        // Dice loss = 1 - Dice coefficient (rsub computes 1.0 - dice).
        return dice.rsub(1.0);
    }

    @Override
    public String activationsVertexName() {
        // At inference time the layer's activations are simply its input:
        // the predicted mask passes through this loss layer unchanged.
        return "input";
    }

    @Override
    public void defineParameters(SDLayerParams params) {
        // No-op: a loss layer has no trainable parameters.
    }

    @Override
    public void initializeParameters(Map<String, INDArray> params) {
        // No-op: a loss layer has no trainable parameters.
    }

    @Override
    public InputType getOutputType(int layerIndex, InputType inputType) {
        // Activations pass through unchanged (see activationsVertexName),
        // so the output type is the same as the input type.
        return inputType;
    }
}
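
For context, here is a minimal sketch of how this layer might be wired into a DL4J model. The convolutional stack, layer sizes, learning rate, class name, and the 128x128 single-channel input are illustrative assumptions, not part of the gist:

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.learning.config.Adam;

public class DiceLossUsageExample {
    public static void main(String[] args) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Adam(1e-3))
                .list()
                // Illustrative feature extractor; padding keeps the 128x128 spatial size.
                .layer(new ConvolutionLayer.Builder(3, 3).nIn(1).nOut(16)
                        .padding(1, 1).activation(Activation.RELU).build())
                // 1x1 convolution with sigmoid to produce a per-pixel probability mask.
                .layer(new ConvolutionLayer.Builder(1, 1).nOut(1)
                        .activation(Activation.SIGMOID).build())
                // The custom Dice loss layer defined above terminates the network.
                .layer(new SameDiffDiceLossLayer())
                .setInputType(InputType.convolutional(128, 128, 1))
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        // net.fit(trainIterator); // trainIterator: a hypothetical DataSetIterator whose
        // labels have the same shape as the predicted mask, i.e. [batch, 1, 128, 128].
    }
}

Because activationsVertexName() returns the input, net.output(...) yields the sigmoid mask directly, while fit(...) trains against the Dice loss.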