/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet
import org.apache.mxnet.annotation.Experimental
// scalastyle:off
abstract class NDArrayRandomAPIBase {
/**
* <pre>
* Draw random samples from an approximately log-uniform
* or Zipfian distribution without replacement.
*
* This operation takes a 2-D shape `(batch_size, num_sampled)`,
* and randomly generates *num_sampled* samples from the range of integers [0, range_max)
* for each instance in the batch.
*
* The elements in each instance are drawn without replacement from the base distribution.
* The base distribution for this operator is an approximately log-uniform or Zipfian distribution:
*
* P(class) = (log(class + 2) - log(class + 1)) / log(range_max + 1)
*
* Additionally, it returns the number of trials used to obtain `num_sampled` samples for
* each instance in the batch.
*
* Example::
*
* samples, trials = _sample_unique_zipfian(750000, shape=(4, 8192))
* unique(samples[0]) = 8192
* unique(samples[3]) = 8192
* trials[0] = 16435
*
* Defined in src/operator/random/unique_sample_op.cc:L66
* </pre>
* @param range_max The number of possible classes.
* @param shape 2-D shape of the output, where shape[0] is the batch size, and shape[1] is the number of candidates to sample for each batch.
* @return org.apache.mxnet.NDArrayFuncReturn
*/
@Experimental
def unique_zipfian[T: NDArrayOrValue : scala.reflect.runtime.universe.TypeTag] (range_max : T, shape : Option[org.apache.mxnet.Shape] = None, out : Option[NDArray] = None): org.apache.mxnet.NDArrayFuncReturn
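// A hypothetical usage sketch, not part of the generated API: it assumes a concrete
// implementation of this trait is exposed (here called `NDArray.random`), that the
// NDArrayOrValue type class admits plain numeric scalars, and that NDArrayFuncReturn
// gives positional access to its result arrays.
//   val ret     = NDArray.random.unique_zipfian(750000, shape = Some(Shape(4, 8192)))
//   val samples = ret(0)  // (4, 8192) class indices, unique within each row
//   val trials  = ret(1)  // number of draws needed to collect 8192 unique samples per row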
/**
* <pre>
* Draw random samples from a normal (Gaussian) distribution.
*
* .. note:: The existing alias ``normal`` is deprecated.
*
* Samples are distributed according to a normal distribution parametrized by *loc* (mean) and *scale* (standard deviation).
*
* Example::
*
* normal(loc=0, scale=1, shape=(2,2)) = [[ 1.89171135, -1.16881478],
* [-1.23474145, 1.55807114]]
*
* Defined in src/operator/random/sample_op.cc:L85
* </pre>
* @param mu Mean of the distribution.
* @param sigma Standard deviation of the distribution.
* @param shape Shape of the output.
* @param ctx Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
* @param dtype DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
* @return org.apache.mxnet.NDArrayFuncReturn
*/
@Experimental
def normal[T: NDArrayOrValue : scala.reflect.runtime.universe.TypeTag] (mu : Option[T] = None, sigma : Option[T] = None, shape : Option[org.apache.mxnet.Shape] = None, ctx : Option[String] = None, dtype : Option[String] = None, out : Option[NDArray] = None): org.apache.mxnet.NDArrayFuncReturn
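// Sketch only, under the same assumptions as the unique_zipfian example above:
//   val ret     = NDArray.random.normal(mu = Some(0f), sigma = Some(1f),
//                                       shape = Some(Shape(2, 2)), ctx = Some("cpu(0)"))
//   val samples = ret.head  // a (2, 2) float32 NDArray of Gaussian draws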
/**
* <pre>
* Draw random samples from a generalized negative binomial distribution.
*
* Samples are distributed according to a generalized negative binomial distribution parametrized by
* *mu* (mean) and *alpha* (dispersion). *alpha* is defined as *1/k* where *k* is the failure limit of the
* number of unsuccessful experiments (generalized to real numbers).
* Samples will always be returned as a floating point data type.
*
* Example::
*
* generalized_negative_binomial(mu=2.0, alpha=0.3, shape=(2,2)) = [[ 2., 1.],
* [ 6., 4.]]
*
* Defined in src/operator/random/sample_op.cc:L168
* </pre>
* @param mu Mean of the negative binomial distribution.
* @param alpha Alpha (dispersion) parameter of the negative binomial distribution.
* @param shape Shape of the output.
* @param ctx Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
* @param dtype DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
* @return org.apache.mxnet.NDArrayFuncReturn
*/
@Experimental
def generalized_negative_binomial[T: NDArrayOrValue : scala.reflect.runtime.universe.TypeTag] (mu : Option[T] = None, alpha : Option[T] = None, shape : Option[org.apache.mxnet.Shape] = None, ctx : Option[String] = None, dtype : Option[String] = None, out : Option[NDArray] = None): org.apache.mxnet.NDArrayFuncReturn
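// Sketch (same assumptions as above), mirroring the <pre> example: mean 2.0, dispersion 0.3:
//   val ret = NDArray.random.generalized_negative_binomial(
//     mu = Some(2.0f), alpha = Some(0.3f), shape = Some(Shape(2, 2)))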
/**
* <pre>
* Draw random samples from a Poisson distribution.
*
* Samples are distributed according to a Poisson distribution parametrized by *lambda* (rate).
* Samples will always be returned as a floating point data type.
*
* Example::
*
* poisson(lam=4, shape=(2,2)) = [[ 5., 2.],
* [ 4., 6.]]
*
* Defined in src/operator/random/sample_op.cc:L132
* </pre>
* @param lam Lambda parameter (rate) of the Poisson distribution.
* @param shape Shape of the output.
* @param ctx Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
* @param dtype DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
* @return org.apache.mxnet.NDArrayFuncReturn
*/
@Experimental
def poisson[T: NDArrayOrValue : scala.reflect.runtime.universe.TypeTag] (lam : Option[T] = None, shape : Option[org.apache.mxnet.Shape] = None, ctx : Option[String] = None, dtype : Option[String] = None, out : Option[NDArray] = None): org.apache.mxnet.NDArrayFuncReturn
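// Sketch (same assumptions as above): Poisson draws with rate 4, returned as float32:
//   val ret = NDArray.random.poisson(lam = Some(4f), shape = Some(Shape(2, 2)))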
/**
* <pre>
* Draw random samples from a uniform distribution.
*
* .. note:: The existing alias ``uniform`` is deprecated.
*
* Samples are uniformly distributed over the half-open interval *[low, high)*
* (includes *low*, but excludes *high*).
*
* Example::
*
* uniform(low=0, high=1, shape=(2,2)) = [[ 0.60276335, 0.85794562],
* [ 0.54488319, 0.84725171]]
*
* Defined in src/operator/random/sample_op.cc:L66
* </pre>
* @param low Lower bound of the distribution.
* @param high Upper bound of the distribution.
* @param shape Shape of the output.
* @param ctx Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
* @param dtype DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
* @return org.apache.mxnet.NDArrayFuncReturn
*/
@Experimental
def uniform[T: NDArrayOrValue : scala.reflect.runtime.universe.TypeTag] (low : Option[T] = None, high : Option[T] = None, shape : Option[org.apache.mxnet.Shape] = None, ctx : Option[String] = None, dtype : Option[String] = None, out : Option[NDArray] = None): org.apache.mxnet.NDArrayFuncReturn
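// Sketch (same assumptions as above): draws fall in the half-open interval [low, high):
//   val ret = NDArray.random.uniform(low = Some(0f), high = Some(1f),
//                                    shape = Some(Shape(2, 2)), dtype = Some("float32"))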
/**
* <pre>
* Draw random samples from a negative binomial distribution.
*
* Samples are distributed according to a negative binomial distribution parametrized by
* *k* (limit of unsuccessful experiments) and *p* (failure probability in each experiment).
* Samples will always be returned as a floating point data type.
*
* Example::
*
* negative_binomial(k=3, p=0.4, shape=(2,2)) = [[ 4., 7.],
* [ 2., 5.]]
*
* Defined in src/operator/random/sample_op.cc:L149
* </pre>
* @param k Limit of unsuccessful experiments.
* @param p Failure probability in each experiment.
* @param shape Shape of the output.
* @param ctx Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
* @param dtype DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
* @return org.apache.mxnet.NDArrayFuncReturn
*/
@Experimental
def negative_binomial[T: NDArrayOrValue : scala.reflect.runtime.universe.TypeTag] (k : Option[T] = None, p : Option[T] = None, shape : Option[org.apache.mxnet.Shape] = None, ctx : Option[String] = None, dtype : Option[String] = None, out : Option[NDArray] = None): org.apache.mxnet.NDArrayFuncReturn
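// Sketch (same assumptions as above): k is the limit of unsuccessful experiments,
// p the failure probability per experiment:
//   val ret = NDArray.random.negative_binomial(k = Some(3f), p = Some(0.4f),
//                                              shape = Some(Shape(2, 2)))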
/**
* <pre>
* Concurrent sampling from multiple multinomial distributions.
*
* *data* is an *n* dimensional array whose last dimension has length *k*, where
* *k* is the number of possible outcomes of each multinomial distribution. This
* operator will draw *shape* samples from each distribution. If shape is empty
* one sample will be drawn from each distribution.
*
* If *get_prob* is true, a second array containing the log likelihood of the drawn
* samples is also returned. This is typically used in reinforcement learning, where
* the reward can be provided as the head gradient for this array to estimate the
* gradient.
*
* Note that the input distribution must be normalized, i.e. *data* must sum to
* 1 along its last axis.
*
* Examples::
*
* probs = [[0, 0.1, 0.2, 0.3, 0.4], [0.4, 0.3, 0.2, 0.1, 0]]
*
* // Draw a single sample for each distribution
* sample_multinomial(probs) = [3, 0]
*
* // Draw a vector containing two samples for each distribution
* sample_multinomial(probs, shape=(2)) = [[4, 2],
* [0, 0]]
*
* // requests log likelihood
* sample_multinomial(probs, get_prob=True) = [2, 1], [0.2, 0.3]
* </pre>
* @param data Distribution probabilities. Must sum to one on the last axis.
* @param shape Shape to be sampled from each random distribution.
* @param get_prob Whether to also return the log probability of sampled result. This is usually used for differentiating through stochastic variables, e.g. in reinforcement learning.
* @param dtype DType of the output in case this can't be inferred.
* @return org.apache.mxnet.NDArrayFuncReturn
*/
@Experimental
def multinomial[T: NDArrayOrValue : scala.reflect.runtime.universe.TypeTag] (data : T, shape : Option[org.apache.mxnet.Shape] = None, get_prob : Option[Boolean] = None, dtype : Option[String] = None, out : Option[NDArray] = None): org.apache.mxnet.NDArrayFuncReturn
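// Sketch (same assumptions as above, with T = NDArray here since `data` carries the
// probabilities); NDArray.array is assumed to build the input from a flat Float array:
//   val probs   = NDArray.array(Array(0f, 0.1f, 0.2f, 0.3f, 0.4f), shape = Shape(1, 5))
//   val ret     = NDArray.random.multinomial(probs, get_prob = Some(true))
//   val samples = ret(0)  // drawn class indices
//   val logProb = ret(1)  // log likelihood of each draw, present only when get_prob = true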
/**
* <pre>
* Draw random samples from an exponential distribution.
*
* Samples are distributed according to an exponential distribution parametrized by *lambda* (rate).
*
* Example::
*
* exponential(lam=4, shape=(2,2)) = [[ 0.0097189 , 0.08999364],
* [ 0.04146638, 0.31715935]]
*
* Defined in src/operator/random/sample_op.cc:L115
* </pre>
* @param lam Lambda parameter (rate) of the exponential distribution.
* @param shape Shape of the output.
* @param ctx Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
* @param dtype DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
* @return org.apache.mxnet.NDArrayFuncReturn
*/
@Experimental
def exponential[T: NDArrayOrValue : scala.reflect.runtime.universe.TypeTag] (lam : Option[T] = None, shape : Option[org.apache.mxnet.Shape] = None, ctx : Option[String] = None, dtype : Option[String] = None, out : Option[NDArray] = None): org.apache.mxnet.NDArrayFuncReturn
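// Sketch (same assumptions as above): exponential draws with rate lambda = 4:
//   val ret = NDArray.random.exponential(lam = Some(4f), shape = Some(Shape(2, 2)))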
/**
* <pre>
* Draw random samples from a gamma distribution.
*
* Samples are distributed according to a gamma distribution parametrized by *alpha* (shape) and *beta* (scale).
*
* Example::
*
* gamma(alpha=9, beta=0.5, shape=(2,2)) = [[ 7.10486984, 3.37695289],
* [ 3.91697288, 3.65933681]]
*
* Defined in src/operator/random/sample_op.cc:L100
* </pre>
* @param alpha Alpha parameter (shape) of the gamma distribution.
* @param beta Beta parameter (scale) of the gamma distribution.
* @param shape Shape of the output.
* @param ctx Context of output, in format [cpu|gpu|cpu_pinned](n). Only used for imperative calls.
* @param dtype DType of the output in case this can't be inferred. Defaults to float32 if not defined (dtype=None).
* @return org.apache.mxnet.NDArrayFuncReturn
*/
@Experimental
def gamma[T: NDArrayOrValue : scala.reflect.runtime.universe.TypeTag] (alpha : Option[T] = None, beta : Option[T] = None, shape : Option[org.apache.mxnet.Shape] = None, ctx : Option[String] = None, dtype : Option[String] = None, out : Option[NDArray] = None): org.apache.mxnet.NDArrayFuncReturn
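// Sketch (same assumptions as above): gamma draws with shape alpha = 9 and scale beta = 0.5:
//   val ret = NDArray.random.gamma(alpha = Some(9f), beta = Some(0.5f),
//                                  shape = Some(Shape(2, 2)))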
}