
Searched refs:dropout (Results 1 – 25 of 66) sorted by relevance


/third_party/mindspore/tests/st/ops/cpu/
test_dropout_op.py:30 self.dropout = P.Dropout()
33 return self.dropout(x)
41 dropout = Net()
42 output, mask = dropout(Tensor(x))
51 self.dropout = P.Dropout(keep_prob=0.1)
54 return self.dropout(x)
62 dropout = Net1()
63 output, mask = dropout(Tensor(x))
72 self.dropout = P.Dropout(keep_prob=1.0)
75 return self.dropout(x)
[all …]
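
These CPU tests exercise the P.Dropout primitive which, unlike nn.Dropout, returns both the dropped output and the generated mask. A minimal runnable sketch under that reading (MindSpore 1.x API with the keep_prob keyword, as at line 51 above; the Net name mirrors the test and the shapes are illustrative):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor, context
    from mindspore.ops import operations as P

    context.set_context(mode=context.GRAPH_MODE, device_target="CPU")

    class Net(nn.Cell):
        def __init__(self, keep_prob=0.5):
            super(Net, self).__init__()
            # Each element is kept with probability keep_prob
            self.dropout = P.Dropout(keep_prob=keep_prob)

        def construct(self, x):
            # Returns (output, mask): kept elements are scaled by 1/keep_prob
            return self.dropout(x)

    x = np.ones([2, 4]).astype(np.float32)
    output, mask = Net(keep_prob=0.5)(Tensor(x))
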
test_lstm_op.py:42 dropout=0.0,
65 dropout=dropout))
102 … __init__(self, batch_size, input_size, hidden_size, num_layers, has_bias, bidirectional, dropout):
109 self.lstm = StackLSTM(input_size, hidden_size, num_layers, has_bias, bidirectional, dropout)
162 dropout = 0.0
166 net = LstmNet(batch_size, input_size, hidden_size, num_layers, has_bias, bidirectional, dropout)
205 … __init__(self, batch_size, input_size, hidden_size, num_layers, has_bias, bidirectional, dropout):
213 bidirectional=bidirectional, dropout=dropout)
267 dropout = 0.0
270 dropout)
[all …]
/third_party/mindspore/mindspore/nn/layer/
lstm.py:138 dropout=0,
151 self.dropout = dropout
157 dropout=float(dropout))
169 if dropout < 0 or dropout > 1:
172 if dropout == 1:
175 self.dropout_op = nn.Dropout(float(1 - dropout))
231 pre_layer = self.dropout_op(output) if self.dropout else output
250 pre_layer = self.dropout_op(y) if self.dropout else y
381 dropout=0,
392 dropout=float(dropout))
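
nn/layer/lstm.py treats its dropout argument as a drop rate: the range is validated, an nn.Dropout is built with keep_prob = 1 - dropout, and the op is invoked only between stacked layers when the rate is non-zero (lines 231 and 250). A compressed sketch of that pattern (StackedCell is hypothetical and the error messages are paraphrased; the dropout == 1 branch is its own case in the source and is simply rejected here):

    import mindspore.nn as nn

    class StackedCell(nn.Cell):
        """Illustrative inter-layer dropout, mirroring lstm.py above."""
        def __init__(self, dropout=0):
            super(StackedCell, self).__init__()
            if dropout < 0 or dropout > 1:
                raise ValueError("dropout must be in [0, 1]")
            if dropout == 1:
                raise ValueError("keep_prob of 0 is not supported in this sketch")
            self.dropout = dropout
            # nn.Dropout takes keep_prob here, so convert the rate
            self.dropout_op = nn.Dropout(float(1 - dropout))

        def between_layers(self, output):
            # Skip the op entirely when dropout == 0
            return self.dropout_op(output) if self.dropout else output
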
rnns.py:203 batch_first=False, dropout=0.0, bidirectional=False):
208 validator.check_is_float(dropout, "dropout", self.cls_name)
213 if not 0 <= dropout < 1:
217 if dropout > 0 and num_layers == 1:
221 "num_layers={}".format(dropout, num_layers))
239 self.dropout = dropout
240 self.dropout_op = nn.Dropout(float(1 - dropout))
311 … pre_layer = self.dropout_op(output) if (self.dropout != 0 and i < self.num_layers - 1) else output
343 … pre_layer = self.dropout_op(output) if (self.dropout != 0 and i < self.num_layers - 1) else output
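
rnns.py tightens the contract: dropout must be a float in [0, 1), and a non-zero dropout with a single layer triggers a warning, since lines 311 and 343 only apply it to the outputs of all but the last layer. What that means for callers (nn.LSTM keyword names match the tests above; the warning text is paraphrased):

    import mindspore.nn as nn

    # Fine: dropout acts on the outputs of layers 0 .. num_layers-2.
    net = nn.LSTM(input_size=16, hidden_size=32, num_layers=2,
                  has_bias=True, batch_first=False,
                  bidirectional=False, dropout=0.3)

    # Warns: with a single layer there is no "between layers",
    # so the dropout value has no effect.
    net1 = nn.LSTM(input_size=16, hidden_size=32, num_layers=1, dropout=0.3)

    # Raises: dropout must satisfy 0 <= dropout < 1.
    # net2 = nn.LSTM(input_size=16, hidden_size=32, num_layers=2, dropout=1.0)
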
/third_party/mindspore/tests/st/export_and_load/
test_bgcf.py:35 dropout=0.2):
49 self.dropout = nn.Dropout(keep_prob=1 - dropout)
53 neigh_matrix = self.dropout(neigh_matrix)
63 dropout=0.2):
75 self.dropout = nn.Dropout(keep_prob=1 - dropout)
79 neigh_matrix = self.dropout(neigh_feature)
111 activation=activation, dropout=neigh_drop_rate[1])
113 self.gnew_agg_user = AttenConv(self.input_dim, self.layer_dim, dropout=neigh_drop_rate[2])
115 self.gnew_agg_item = AttenConv(self.input_dim, self.layer_dim, dropout=neigh_drop_rate[2])
122 activation=activation, dropout=neigh_drop_rate[0])
[all …]
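
The BGCF export test follows the same convention: modules take a drop rate and hand keep_prob = 1 - dropout to nn.Dropout before aggregating neighbor features. A stripped-down sketch of that pattern (NeighborAgg is hypothetical and the surrounding aggregation logic is omitted; the argument names follow the snippet):

    import mindspore.nn as nn

    class NeighborAgg(nn.Cell):
        """Illustrative only: dropout on a neighbor feature matrix."""
        def __init__(self, dropout=0.2):
            super(NeighborAgg, self).__init__()
            # keep_prob = 1 - drop rate, matching line 49 above
            self.dropout = nn.Dropout(keep_prob=1 - dropout)

        def construct(self, neigh_matrix):
            return self.dropout(neigh_matrix)
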
/third_party/mindspore/tests/st/ops/gpu/
test_lstm_op.py:32 …(self, seq_len, batch_size, input_size, hidden_size, num_layers, has_bias, bidirectional, dropout):
39 self.lstm = P.LSTM(input_size, hidden_size, num_layers, has_bias, bidirectional, dropout)
120 dropout = 0.0
126 …LstmNet(seq_len, batch_size, input_size, hidden_size, num_layers, has_bias, bidirectional, dropout)
164 …(self, seq_len, batch_size, input_size, hidden_size, num_layers, has_bias, bidirectional, dropout):
171 self.lstm = P.LSTM(input_size, hidden_size, num_layers, has_bias, bidirectional, dropout)
270 dropout = 0.0
276 …LstmNet(seq_len, batch_size, input_size, hidden_size, num_layers, has_bias, bidirectional, dropout)
321 …(self, seq_len, batch_size, input_size, hidden_size, num_layers, has_bias, bidirectional, dropout):
328 self.lstm = P.LSTM(input_size, hidden_size, num_layers, has_bias, bidirectional, dropout)
[all …]
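
The GPU tests drive the low-level P.LSTM primitive directly, where dropout is a plain float attribute on the fused kernel rather than a separate nn.Dropout cell. A construction-only sketch (the attribute order matches line 39 above; the primitive's call signature, which takes packed weights, is elided in the results and so omitted here):

    from mindspore.ops import operations as P

    # dropout=0.0 disables inter-layer dropout inside the fused kernel
    lstm = P.LSTM(input_size=10, hidden_size=12, num_layers=2,
                  has_bias=True, bidirectional=False, dropout=0.0)
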
/third_party/mindspore/mindspore/core/ops/grad/
lstm_grad.cc:54 void LSTMGrad::set_dropout(const float dropout) {
55 …CheckAndConvertUtils::CheckInRange<float>(kDropout, dropout, kIncludeBoth, {0.0, 1.0}, this->name(…
56 (void)AddAttr(kDropout, MakeValue(dropout));
80 const float dropout, const bool bidirectional, const float zoneout_cell,
86 this->set_dropout(dropout);
lstm_grad.h:36 const float dropout, const bool bidirectional = false, const float zoneout_cell = 0.0f,
46 void set_dropout(const float dropout);
/third_party/mindspore/tests/ut/python/parallel/
test_matmul_dropout.py:77 self.dropout = P.Dropout(keep_prob)
84 out, _ = self.dropout(x)
108 self.dropout = Dropout()
109 self.dropout.dropout_do_mask.shard(strategy2)
110 self.dropout.dropout_gen_mask.shard(strategy2)
115 out = self.dropout(out)
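
This parallel test shards the two primitives that make up mask-based dropout. The wrapped Dropout cell is not shown in the results; a plausible sketch of the gen-mask/do-mask composition common in MindSpore code of this era (the composition itself is an assumption, not quoted from the test):

    import mindspore.nn as nn
    import mindspore.common.dtype as mstype
    from mindspore import Tensor
    from mindspore.ops import operations as P

    class Dropout(nn.Cell):
        def __init__(self, keep_prob=0.9):
            super(Dropout, self).__init__()
            self.keep_prob = Tensor(keep_prob, mstype.float32)
            self.dropout_gen_mask = P.DropoutGenMask()
            self.dropout_do_mask = P.DropoutDoMask()
            self.get_shape = P.Shape()

        def construct(self, x):
            # Generate a random keep mask for x's shape, then apply it
            mask = self.dropout_gen_mask(self.get_shape(x), self.keep_prob)
            return self.dropout_do_mask(x, mask, self.keep_prob)

    # Each primitive can then be sharded separately, as at lines 109-110:
    # net.dropout.dropout_do_mask.shard(strategy2)
    # net.dropout.dropout_gen_mask.shard(strategy2)
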
test_batch_parallel_dropout.py:56 self.dropout = nn.Dropout()
61 out = self.dropout(out)
test_auto_parallel_matmul_drop.py:56 self.dropout = nn.Dropout()
61 out = self.dropout(out)
/third_party/mindspore/tests/st/ops/ascend/
test_gru_op.py:40 …__init__(self, input_size, hidden_size, num_layers, has_bias, batch_first, bidirectional, dropout):
43 batch_first=batch_first, bidirectional=bidirectional, dropout=dropout)
112 bidirectional=bidirectional, dropout=0.0)
122 batch_first=False, bidirectional=bidirectional, dropout=0.0)
153 bidirectional=bidirectional, dropout=0.0)
168 batch_first=False, bidirectional=bidirectional, dropout=0.0)
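
nn.GRU exposes the same dropout semantics as nn.LSTM above. A minimal hedged forward pass (keyword names match the snippet; with batch_first=False the input is (seq_len, batch, feature), and the h0 shape assumes a unidirectional net):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    net = nn.GRU(input_size=10, hidden_size=16, num_layers=2,
                 has_bias=True, batch_first=False,
                 bidirectional=False, dropout=0.0)
    x = Tensor(np.ones([5, 3, 10]).astype(np.float32))    # (seq_len, batch, input_size)
    h0 = Tensor(np.zeros([2, 3, 16]).astype(np.float32))  # (num_layers, batch, hidden_size)
    output, hn = net(x, h0)
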
test_lstm_op.py:41 … def __init__(self, input_s, hidden_s, num_layers, has_bias, batch_first, bidirectional, dropout):
44 batch_first=batch_first, bidirectional=bidirectional, dropout=dropout)
112 bidirectional=bidirectional, dropout=0.0)
120 bidirectional=bidirectional, dropout=0.0)
152 bidirectional=bidirectional, dropout=0.0)
166 bidirectional=bidirectional, dropout=0.0)
test_rnn_op.py:40 …__init__(self, input_size, hidden_size, num_layers, has_bias, batch_first, bidirectional, dropout):
43 batch_first=batch_first, bidirectional=bidirectional, dropout=dropout)
112 bidirectional=bidirectional, dropout=0.0)
122 batch_first=False, bidirectional=bidirectional, dropout=0.0)
153 batch_first=False, bidirectional=bidirectional, dropout=0.0)
168 batch_first=False, bidirectional=bidirectional, dropout=0.0)
/third_party/mindspore/mindspore/lite/src/runtime/kernel/arm/fp16_grad/
dropout_fp16_grad.cc:82 auto dropout = reinterpret_cast<DropoutGradCPUKernelFp16 *>(cdata);
83 CHECK_NULL_RETURN(dropout);
84 auto error_code = dropout->DoExecute(task_id);
/third_party/mindspore/mindspore/lite/examples/export_models/models/
mini_alexnet.py:49 self.dropout = nn.Dropout(dropout_ratio)
64 x = self.dropout(x)
67 x = self.dropout(x)
/third_party/mindspore/mindspore/lite/src/runtime/kernel/arm/fp32_grad/
dropout_grad.cc:82 auto dropout = reinterpret_cast<DropoutGradCPUKernel *>(cdata);
83 CHECK_NULL_RETURN(dropout);
84 auto error_code = dropout->Execute(task_id);
dropout.cc:92 auto dropout = reinterpret_cast<DropoutCPUKernel *>(cdata);
93 auto error_code = dropout->Execute(task_id);
/third_party/mindspore/tests/ut/python/pynative_mode/nn/
test_dropout.py:35 self.dropout = nn.Dropout(0.5)
38 return self.dropout(x)
/third_party/mindspore/mindspore/core/ops/
lstm.cc:126 void LSTM::set_dropout(const float dropout) {
127 …CheckAndConvertUtils::CheckInRange<float>(kDropout, dropout, kIncludeBoth, {0.0, 1.0}, this->name(…
128 (void)AddAttr(kDropout, MakeValue(dropout));
152 …const float dropout, const bool bidirectional, const float zoneout_cell, const float zoneout_hidde…
157 this->set_dropout(dropout);
lstm.h:45 const float dropout, const bool bidirectional = false, const float zoneout_cell = 0.0f,
72 void set_dropout(const float dropout);
/third_party/mindspore/tests/st/ops/graph_kernel/
test_dropout.py:40 dropout = Net(keep_prob)
42 output, mask = dropout(tx)
test_layernorm_stitch.py:34 self.dropout = nn.Dropout(1 - 0.1)
41 output = self.dropout(output)
test_softmax_stitch.py:34 self.dropout = nn.Dropout(1 - 0.1)
48 attention_probs = self.dropout(attention_probs)
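
In both stitch tests the positional argument is keep_prob, so nn.Dropout(1 - 0.1) keeps 90% of activations, i.e. a 0.1 drop rate:

    import mindspore.nn as nn

    # keep_prob = 1 - 0.1 = 0.9; equivalent to a 10% drop rate
    dropout = nn.Dropout(1 - 0.1)
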
/third_party/mindspore/tests/st/fl/albert/src/
model.py:192 self.dropout = nn.Dropout(1 - config.hidden_dropout_prob)
221 output = self.dropout(output)
237 self.dropout = nn.Dropout(1 - config.hidden_dropout_prob)
251 output = self.dropout(output)
425 self.dropout = nn.Dropout(1 - config.attention_probs_dropout_prob)
491 attention_probs = self.dropout(attention_probs)
826 self.dropout = nn.Dropout(1 - config.classifier_dropout_prob)
833 pooled_output = self.dropout(pooled_output)
856 self.dropout = nn.Dropout(1 - config.classifier_dropout_prob)
865 pooled_output = self.dropout(pooled_output)
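
The ALBERT model shows the three usual dropout sites in a transformer: hidden outputs (lines 192 and 237), attention probabilities (line 425), and the pooled classifier output (lines 826 and 856). A hedged sketch of the attention-probability site (AttentionProbsDropout is hypothetical; the config field name follows the snippet and the attention score computation around it is elided):

    import mindspore.nn as nn

    class AttentionProbsDropout(nn.Cell):
        """Illustrative: softmax over attention scores, then dropout."""
        def __init__(self, attention_probs_dropout_prob=0.1):
            super(AttentionProbsDropout, self).__init__()
            self.softmax = nn.Softmax(axis=-1)
            self.dropout = nn.Dropout(1 - attention_probs_dropout_prob)

        def construct(self, attention_scores):
            attention_probs = self.softmax(attention_scores)
            # Zeroing whole attention weights, as at line 491 above
            return self.dropout(attention_probs)
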
