Atry/Adagrad.sc
Last active November 30, 2017
Lock the weight when updating cache
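
For context, the plug-in below implements the standard Adagrad update. Writing δ for the delta computed by earlier plug-ins, each weight accumulates a per-element cache of squared deltas and rescales the delta by its square root:

    cache ← cache + δ ⊙ δ
    δ′ = δ / (√cache + eps)

The `weight.synchronized` block is what the gist title refers to: several optimizer instances may update the same weight concurrently, and without the lock two threads could read the same old `cache` and silently drop one accumulation.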
/**
  * @note This [[Adagrad]] hyperparameter is usually used before the global [[LearningRate]],
  *       e.g. `Adagrad with FixedLearningRate`, not `FixedLearningRate with Adagrad`.
  */
trait Adagrad extends com.thoughtworks.deeplearning.plugins.INDArrayWeights {
  import org.nd4j.linalg.api.ndarray.INDArray
  import org.nd4j.linalg.factory.Nd4j
  import org.nd4j.linalg.ops.transforms.Transforms

  /** The hyperparameter `eps`, which should be configured in `Factory.newInstance()` */
  def eps: Double

  trait INDArrayWeightApi extends super.INDArrayWeightApi { this: INDArrayWeight =>

    /** The `cache` state injected into `INDArrayWeight` by this `Adagrad` plug-in */
    var cache: Option[INDArray] = None
  }

  override type INDArrayWeight <: INDArrayWeightApi with Weight

  trait INDArrayOptimizerApi extends super.INDArrayOptimizerApi { this: INDArrayOptimizer =>

    private lazy val delta0: INDArray = {
      // The original delta computed by previous plug-ins
      val superDelta = super.delta
      import org.nd4s.Implicits._
      import weight._
      // Lock the weight so that concurrent optimizers do not lose updates to `cache`
      val newCache = weight.synchronized {
        val newCache = weight.cache.getOrElse(Nd4j.zeros(superDelta.shape: _*)) + superDelta * superDelta
        weight.cache = Some(newCache)
        newCache
      }
      superDelta / (Transforms.sqrt(newCache) + eps)
    }

    /** The computation of `delta` injected into `INDArrayOptimizer` by this `Adagrad` plug-in */
    override def delta = delta0
  }

  override type INDArrayOptimizer <: INDArrayOptimizerApi with Optimizer
}
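
A minimal usage sketch, not part of the gist: it assumes feature.scala's `com.thoughtworks.feature.Factory`, and a `FixedLearningRate` plugin (referenced in the doc comment above but not defined here) that contributes a `learningRate: Double` hyperparameter. Parameter values are illustrative.

import com.thoughtworks.feature.Factory

// `Adagrad` must precede `FixedLearningRate` so that, by trait
// linearization, the Adagrad-rescaled delta is computed first and
// the global learning rate is applied to it afterwards.
val hyperparameters = Factory[Adagrad with FixedLearningRate].newInstance(
  eps = 1e-8,          // Adagrad smoothing term (illustrative value)
  learningRate = 0.01  // consumed by the assumed FixedLearningRate plugin
)

Weights created from these hyperparameters then carry the injected `cache` state and rescale their deltas as shown above.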
MIT License
Copyright (c) 2017 ThoughtWorks Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.