Skip to content

Instantly share code, notes, and snippets.

@ayushoriginal
Created June 24, 2019 12:42
Show Gist options
  • Save ayushoriginal/53fdb1ffeba70e62760794f65f159bba to your computer and use it in GitHub Desktop.
Save ayushoriginal/53fdb1ffeba70e62760794f65f159bba to your computer and use it in GitHub Desktop.
inference_cs
// Run the hate-speech CoreML model on the feature vector `example`
// (declared earlier in the enclosing method) and record the two class
// probabilities. Index 0 is treated as the OFFENSIVE class, index 1 as
// NOT OFFENSIVE — inferred from the comparison below; TODO confirm
// against the model's output spec.
nint[] ns_sum = new nint[] { 44, 1, 1 }; // 44 is the size of vocabulary
List<double> predicted_labels = new List<double>();
string hate_speech_var = "NOT OFFENSIVE";
double prob = 0.0;

// Allocate the model input buffer (shape 44x1x1, doubles).
MLMultiArray temp1 = new MLMultiArray(ns_sum, MLMultiArrayDataType.Double, out NSError error3);

// FIX: check the allocation error BEFORE touching temp1. The original
// code filled the array and even ran the prediction before inspecting
// error3, so a failed allocation would have been used anyway.
if (error3 != null)
{
    throw new Exception("Unexpected runtime error.");
}

// Copy the features into the MLMultiArray. The original used two loops
// and an intermediate NSNumber[] buffer; one pass is equivalent.
for (int i = 0; i < example.Length; i++)
{
    temp1.SetObject(NSNumber.FromDouble(example[i]), i);
}

// Invoke the CoreML model and fail fast on a prediction error.
var hate_coremlOutput = this.hate_model.GetPrediction(temp1, out NSError error2);
if (error2 != null)
{
    throw new Exception("Error with Hate Model during Runtime.\n");
}

// Output1 holds the per-class scores; log and record both.
var hate_prob = hate_coremlOutput.Output1;
Console.WriteLine("Hate Probability Score: ");
Console.WriteLine(hate_prob);
predicted_labels.Add(hate_prob[0].DoubleValue);
predicted_labels.Add(hate_prob[1].DoubleValue);

// Pick the winning class and remember its probability for the UI text
// built after this block.
if (hate_prob[0].DoubleValue > hate_prob[1].DoubleValue)
{
    hate_speech_var = "OFFENSIVE";
    prob = hate_prob[0].DoubleValue;
}
else
{
    prob = hate_prob[1].DoubleValue;
}
// NOTE(review): this brace closes a scope opened before the visible
// portion of this file (likely an enclosing if/try in the full method);
// it has no matching opener in this chunk.
}
// Build the UI strings from the classification result computed above.
string title_text = "TEXT IS " + hate_speech_var;
string body_text = "Probability of being " + hate_speech_var + " is " + prob.ToString();
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment