diff --git a/docs/api/python/ndarray/ndarray.md b/docs/api/python/ndarray/ndarray.md
index 09564c2f2035..68f8333ada24 100644
--- a/docs/api/python/ndarray/ndarray.md
+++ b/docs/api/python/ndarray/ndarray.md
@@ -559,13 +559,13 @@ The `ndarray` package provides several classes:
 .. autosummary::
     :nosignatures:
 
-    sample_uniform
-    sample_normal
-    sample_gamma
-    sample_exponential
-    sample_poisson
-    sample_negative_binomial
-    sample_generalized_negative_binomial
+    mxnet.nd.random.uniform
+    mxnet.nd.random.normal
+    mxnet.nd.random.gamma
+    mxnet.nd.random.exponential
+    mxnet.nd.random.poisson
+    mxnet.nd.random.negative_binomial
+    mxnet.nd.random.generalized_negative_binomial
     mxnet.random.seed
 ```
@@ -580,7 +580,6 @@ The `ndarray` package provides several classes:
     argsort
     argmax
     argmin
-    argmax_channel
 ```
 
 ### Sequence operation
diff --git a/docs/api/python/symbol/symbol.md b/docs/api/python/symbol/symbol.md
index e93976d6033a..8d83086fde5e 100644
--- a/docs/api/python/symbol/symbol.md
+++ b/docs/api/python/symbol/symbol.md
@@ -558,13 +558,13 @@ Composite multiple symbols into a new one by an operator.
 .. autosummary::
     :nosignatures:
 
-    sample_uniform
-    sample_normal
-    sample_gamma
-    sample_exponential
-    sample_poisson
-    sample_negative_binomial
-    sample_generalized_negative_binomial
+    mxnet.sym.random.uniform
+    mxnet.sym.random.normal
+    mxnet.sym.random.gamma
+    mxnet.sym.random.exponential
+    mxnet.sym.random.poisson
+    mxnet.sym.random.negative_binomial
+    mxnet.sym.random.generalized_negative_binomial
     mxnet.random.seed
 ```
@@ -579,7 +579,6 @@ Composite multiple symbols into a new one by an operator.
    argsort
    argmax
    argmin
-   argmax_channel
 ```
 
 ### Sequence operation
diff --git a/python/mxnet/gluon/rnn/rnn_layer.py b/python/mxnet/gluon/rnn/rnn_layer.py
index 3a4f712fb80d..204f3c9bd507 100644
--- a/python/mxnet/gluon/rnn/rnn_layer.py
+++ b/python/mxnet/gluon/rnn/rnn_layer.py
@@ -299,11 +299,11 @@ class RNN(_RNNLayer):
     --------
     >>> layer = mx.gluon.rnn.RNN(100, 3)
     >>> layer.initialize()
-    >>> input = mx.nd.random_uniform(shape=(5, 3, 10))
+    >>> input = mx.nd.random.uniform(shape=(5, 3, 10))
     >>> # by default zeros are used as begin state
     >>> output = layer(input)
     >>> # manually specify begin state.
-    >>> h0 = mx.nd.random_uniform(shape=(3, 3, 100))
+    >>> h0 = mx.nd.random.uniform(shape=(3, 3, 100))
     >>> output, hn = layer(input, h0)
     """
     def __init__(self, hidden_size, num_layers=1, activation='relu',
@@ -399,12 +399,12 @@ class LSTM(_RNNLayer):
     --------
     >>> layer = mx.gluon.rnn.LSTM(100, 3)
     >>> layer.initialize()
-    >>> input = mx.nd.random_uniform(shape=(5, 3, 10))
+    >>> input = mx.nd.random.uniform(shape=(5, 3, 10))
     >>> # by default zeros are used as begin state
     >>> output = layer(input)
     >>> # manually specify begin state.
-    >>> h0 = mx.nd.random_uniform(shape=(3, 3, 100))
-    >>> c0 = mx.nd.random_uniform(shape=(3, 3, 100))
+    >>> h0 = mx.nd.random.uniform(shape=(3, 3, 100))
+    >>> c0 = mx.nd.random.uniform(shape=(3, 3, 100))
     >>> output, hn = layer(input, [h0, c0])
     """
     def __init__(self, hidden_size, num_layers=1, layout='TNC',
@@ -496,11 +496,11 @@ class GRU(_RNNLayer):
     --------
     >>> layer = mx.gluon.rnn.GRU(100, 3)
     >>> layer.initialize()
-    >>> input = mx.nd.random_uniform(shape=(5, 3, 10))
+    >>> input = mx.nd.random.uniform(shape=(5, 3, 10))
     >>> # by default zeros are used as begin state
     >>> output = layer(input)
     >>> # manually specify begin state.
-    >>> h0 = mx.nd.random_uniform(shape=(3, 3, 100))
+    >>> h0 = mx.nd.random.uniform(shape=(3, 3, 100))
    >>> output, hn = layer(input, h0)
    """
    def __init__(self, hidden_size, num_layers=1, layout='TNC',
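The patch above swaps the legacy flat operators (`mx.nd.random_uniform`, `sample_uniform`, ...) for the namespaced `mx.nd.random.*` / `mx.sym.random.*` calls in the docs and docstrings. A minimal sketch of the updated usage, assuming an MXNet version that ships the `mx.nd.random` namespace (the shapes below simply mirror the Gluon docstring examples):

```python
import mxnet as mx

# Seed the global RNG so the sampled tensors are reproducible.
mx.random.seed(42)

# Namespaced replacements for the old random_uniform / sample_* aliases.
data = mx.nd.random.uniform(low=0, high=1, shape=(5, 3, 10))    # sequence input, TNC layout
h0 = mx.nd.random.uniform(shape=(3, 3, 100))                    # initial hidden state
c0 = mx.nd.random.uniform(shape=(3, 3, 100))                    # initial cell state

# The Gluon RNN docstrings now use the same namespaced calls for begin states.
layer = mx.gluon.rnn.LSTM(100, 3)
layer.initialize()
output, hn = layer(data, [h0, c0])
```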