Skip to content

Instantly share code, notes, and snippets.

@tanmayb123
Last active February 28, 2019 21:47
Show Gist options
  • Save tanmayb123/f3631d283cb763ccc30447869efe13e8 to your computer and use it in GitHub Desktop.
/// A densely-connected layer: computes `activation(matmul(input, weight) + bias)`,
/// skipping the bias addition entirely when `bias` is `nil`.
@_fixed_layout
public struct Dense<Scalar: TensorFlowFloatingPoint>: Layer {
/// The weight matrix multiplied against the input.
public var weight: Tensor<Scalar>
/// Optional bias added after the matmul; `nil` disables the bias term
/// (and keeps it out of the layer's differentiable parameters).
public var bias: Tensor<Scalar>?
/// The element-wise, differentiable activation applied to the affine result.
public typealias Activation = @differentiable (Tensor<Scalar>) -> Tensor<Scalar>
/// Not differentiated through: the activation is a fixed function, not a parameter.
@noDerivative public let activation: Activation
/// Creates a layer from explicit parameters.
/// - Parameters:
///   - weight: The weight matrix. NOTE(review): presumably shaped
///     [inputSize, outputSize] to match `matmul(input, weight)` — confirm with callers.
///   - bias: Optional bias tensor; pass `nil` for a bias-free layer.
///   - activation: Activation applied to the affine-transformed input.
public init(
weight: Tensor<Scalar>,
bias: Tensor<Scalar>?,
activation: @escaping Activation
) {
self.weight = weight
self.bias = bias
self.activation = activation
}
/// Applies the layer to `input` and returns `activation(input × weight [+ bias])`.
/// The `Context` parameter is unused here (no train/inference-dependent behavior).
@differentiable
public func applied(to input: Tensor<Scalar>, in _: Context) -> Tensor<Scalar> {
var output = matmul(input, weight)
// Only add the bias when the layer was constructed with one.
if let bias = bias {
output += bias
}
return activation(output)
}
}
public extension Dense where Scalar.RawSignificand: FixedWidthInteger {
    /// Creates a layer with a Glorot-uniform weight drawn from `generator`.
    /// - Parameters:
    ///   - inputSize: Number of input features.
    ///   - outputSize: Number of output features.
    ///   - activation: Activation applied after the affine transform; defaults to `identity`.
    ///   - generator: Source of randomness for the weight initializer.
    ///   - useBias: When `true`, the bias is initialized to zeros; otherwise the layer has no bias.
    init<R: RandomNumberGenerator>(
        inputSize: Int,
        outputSize: Int,
        activation: @escaping Activation = identity,
        generator: inout R,
        useBias: Bool
    ) {
        let initialWeight = Tensor<Scalar>(
            glorotUniform: [Int32(inputSize), Int32(outputSize)],
            generator: &generator)
        let initialBias: Tensor<Scalar>? =
            useBias ? Tensor(zeros: [Int32(outputSize)]) : nil
        self.init(weight: initialWeight, bias: initialBias, activation: activation)
    }
    /// Convenience overload that draws randomness from the shared Philox generator.
    init(inputSize: Int, outputSize: Int, activation: @escaping Activation = identity, useBias: Bool) {
        self.init(
            inputSize: inputSize,
            outputSize: outputSize,
            activation: activation,
            generator: &PhiloxRandomNumberGenerator.global,
            useBias: useBias)
    }
}
public extension Dense {
    /// Creates a layer with a Glorot-uniform weight derived from `seed`.
    ///
    /// - Parameters:
    ///   - inputSize: Number of input features.
    ///   - outputSize: Number of output features.
    ///   - activation: Activation applied after the affine transform; defaults to `identity`.
    ///   - seed: Seed pair for the deterministic initializer; defaults to a random seed.
    ///   - useBias: When `true` (the default, preserving previous behavior), the bias is
    ///     initialized to zeros; when `false`, the layer is created without a bias term,
    ///     matching the generator-based initializers above.
    init(
        inputSize: Int,
        outputSize: Int,
        activation: @escaping Activation = identity,
        seed: (Int64, Int64) = (Int64.random(in: Int64.min..<Int64.max),
                                Int64.random(in: Int64.min..<Int64.max)),
        useBias: Bool = true
    ) {
        self.init(weight: Tensor(glorotUniform: [Int32(inputSize), Int32(outputSize)],
                                 seed: seed),
                  bias: useBias ? Tensor(zeros: [Int32(outputSize)]) : nil,
                  activation: activation)
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment