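SameDiffTverskyLossLayer.java — a custom SameDiff output layer for Deeplearning4j implementing the Tversky loss, 1 - TP / (TP + alpha*FP + beta*FN), written for a brain tumor segmentation model.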
package ma.enset.brain_tumor_segmentation;

import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.samediff.SDLayerParams;
import org.deeplearning4j.nn.conf.layers.samediff.SameDiffOutputLayer;
import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.ndarray.INDArray;

import java.util.Map;
public class SameDiffTverskyLossLayer extends SameDiffOutputLayer {

    // Small constant to prevent division by zero
    double epsilon = 1.0e-8;
    // Tversky weights: alpha scales false positives, beta scales false negatives
    double alpha = 0.7;
    double beta = 1 - alpha;
    @Override
    public SDVariable defineLayer(SameDiff sameDiff, SDVariable layerInput, SDVariable labels,
                                  Map<String, SDVariable> paramTable) {
        // Per-example Tversky index: TP / (TP + alpha*FP + beta*FN + epsilon),
        // reducing over the channel and spatial dimensions (1, 2, 3)
        SDVariable truePositives = layerInput.mul(labels).sum(1, 2, 3);
        SDVariable falsePositives = layerInput.mul(labels.rsub(1.0)).sum(1, 2, 3);
        SDVariable falseNegatives = layerInput.rsub(1.0).mul(labels).sum(1, 2, 3);
        SDVariable denominator = truePositives
                .add(falsePositives.mul(alpha))
                .add(falseNegatives.mul(beta))
                .add(epsilon);
        SDVariable tverskyIndex = truePositives.div(denominator);
        // Tversky loss = 1 - Tversky index
        return tverskyIndex.rsub(1.0);
    }
    @Override
    public String activationsVertexName() {
        // The loss layer passes activations through unchanged, so the
        // activations vertex is the input variable itself
        return "input";
    }
    @Override
    public void defineParameters(SDLayerParams params) {
        // No op for loss layer (no params)
    }

    @Override
    public void initializeParameters(Map<String, INDArray> params) {
        // No op for loss layer (no params)
    }
    @Override
    public InputType getOutputType(int layerIndex, InputType inputType) {
        // Activations pass through, so the output type matches the input type
        return inputType;
    }
}
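A minimal usage sketch (not part of the original gist): the layer plugs into a standard DL4J configuration as the final layer. The convolution stack, optimizer, and input dimensions below are placeholder assumptions; only SameDiffTverskyLossLayer comes from the code above.

package ma.enset.brain_tumor_segmentation;

import org.deeplearning4j.nn.conf.ConvolutionMode;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.learning.config.Adam;

public class TverskyLossExample {
    public static void main(String[] args) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Adam(1e-3))                      // placeholder optimizer/learning rate
                .list()
                .layer(new ConvolutionLayer.Builder(3, 3)     // placeholder conv stack
                        .nIn(1).nOut(16)
                        .convolutionMode(ConvolutionMode.Same)
                        .activation(Activation.RELU)
                        .build())
                .layer(new ConvolutionLayer.Builder(1, 1)     // 1x1 conv to a single mask channel
                        .nOut(1)
                        .convolutionMode(ConvolutionMode.Same)
                        .activation(Activation.SIGMOID)       // per-pixel probabilities in [0, 1]
                        .build())
                .layer(new SameDiffTverskyLossLayer())        // custom loss layer from this gist
                .setInputType(InputType.convolutional(128, 128, 1))  // placeholder input size
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        // Train with net.fit(features, labels), where labels are binary
        // segmentation masks of shape [batch, 1, 128, 128] (NCHW)
    }
}

Since the loss layer reduces over dimensions 1, 2, 3, the labels must have the same NCHW shape as the sigmoid activations feeding into it.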