import MLCompute

// Create the MLCompute device and the tensors for a two-layer MLP:
// input -> dense1 (imageSize x dense1LayerOutputSize) -> dense2 (dense1LayerOutputSize x numberOfClasses).
private func initializeTensors() {
    device = MLCDevice(type: .cpu)!

    // Input batch, one flattened image per row.
    inputTensor = MLCTensor(descriptor: MLCTensorDescriptor(shape: [batchSize, imageSize, 1, 1], dataType: .float32)!)

    // Dense layer 1 weights and biases, Glorot-uniform initialized.
    dense1WeightsTensor = MLCTensor(descriptor: MLCTensorDescriptor(shape: [1, imageSize * dense1LayerOutputSize, 1, 1], dataType: .float32)!,
                                    randomInitializerType: .glorotUniform)
    dense1BiasesTensor = MLCTensor(descriptor: MLCTensorDescriptor(shape: [1, dense1LayerOutputSize, 1, 1], dataType: .float32)!,
                                   randomInitializerType: .glorotUniform)

    // Dense layer 2 weights and biases, Glorot-uniform initialized.
    dense2WeightsTensor = MLCTensor(descriptor: MLCTensorDescriptor(shape: [1, dense1LayerOutputSize * numberOfClasses, 1, 1], dataType: .float32)!,
                                    randomInitializerType: .glorotUniform)
    dense2BiasesTensor = MLCTensor(descriptor: MLCTensorDescriptor(shape: [1, numberOfClasses, 1, 1], dataType: .float32)!,
                                   randomInitializerType: .glorotUniform)

    // One-hot labels used as the loss target during training.
    lossLabelTensor = MLCTensor(descriptor: MLCTensorDescriptor(shape: [batchSize, numberOfClasses], dataType: .float32)!)
}
// Wire the tensors into an MLCGraph: dense1 -> ReLU -> dense2 -> softmax.
// Fully connected layers are built with a convolution descriptor whose kernel
// size carries the (input, output) dimensions of the layer.
private func buildGraph() {
    graph = MLCGraph()

    dense1 = graph.node(with: MLCFullyConnectedLayer(weights: dense1WeightsTensor,
                                                     biases: dense1BiasesTensor,
                                                     descriptor: MLCConvolutionDescriptor(kernelSizes: (height: imageSize, width: dense1LayerOutputSize),
                                                                                          inputFeatureChannelCount: imageSize,
                                                                                          outputFeatureChannelCount: dense1LayerOutputSize))!,
                        sources: [inputTensor])

    relu1 = graph.node(with: MLCActivationLayer(descriptor: MLCActivationDescriptor(type: MLCActivationType.relu)!),
                       source: dense1!)

    dense2 = graph.node(with: MLCFullyConnectedLayer(weights: dense2WeightsTensor,
                                                     biases: dense2BiasesTensor,
                                                     descriptor: MLCConvolutionDescriptor(kernelSizes: (height: dense1LayerOutputSize, width: numberOfClasses),
                                                                                          inputFeatureChannelCount: dense1LayerOutputSize,
                                                                                          outputFeatureChannelCount: numberOfClasses))!,
                        sources: [relu1!])

    outputSoftmax = graph.node(with: MLCSoftmaxLayer(operation: .softmax),
                               source: dense2!)
}
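
// A minimal sketch (not part of the original gist) of how the graph above might be
// wrapped in an MLCTrainingGraph and driven for one training step. The property
// `trainingGraph`, the tensor names "image"/"label", the parameter names
// xBatch/yBatch, and the SGD/loss settings are illustrative assumptions; the full
// project may wire these differently.
private func buildTrainingGraph() {
    // Softmax cross-entropy against the one-hot labels held in lossLabelTensor.
    let lossLayer = MLCLossLayer(descriptor: MLCLossDescriptor(type: .softmaxCrossEntropy,
                                                               reductionType: .mean))
    // Plain SGD; an MLCAdamOptimizer could be substituted here.
    let optimizer = MLCSGDOptimizer(descriptor: MLCOptimizerDescriptor(learningRate: 0.01,
                                                                       gradientRescale: 1.0,
                                                                       regularizationType: .none,
                                                                       regularizationScale: 0.0))
    trainingGraph = MLCTrainingGraph(graphObjects: [graph],
                                     lossLayer: lossLayer,
                                     optimizer: optimizer)
    trainingGraph.addInputs(["image": inputTensor],
                            lossLabels: ["label": lossLabelTensor])
    trainingGraph.compile(options: [], device: device)
}

private func trainStep(xBatch: [Float], yBatch: [Float]) {
    // The no-copy MLCTensorData initializer does not retain the bytes, so the
    // arrays are kept alive for the whole (synchronous) execute call.
    xBatch.withUnsafeBufferPointer { xPtr in
        yBatch.withUnsafeBufferPointer { yPtr in
            let xData = MLCTensorData(immutableBytesNoCopy: xPtr.baseAddress!,
                                      length: xBatch.count * MemoryLayout<Float>.size)
            let yData = MLCTensorData(immutableBytesNoCopy: yPtr.baseAddress!,
                                      length: yBatch.count * MemoryLayout<Float>.size)
            trainingGraph.execute(inputsData: ["image": xData],
                                  lossLabelsData: ["label": yData],
                                  lossLabelWeightsData: nil,
                                  batchSize: batchSize,
                                  options: [.synchronous]) { _, _, _ in
                // Called once the step has finished; loss/accuracy reporting would go here.
            }
        }
    }
}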