
Searched refs:lr_power (Results 1 – 12 of 12) sorted by relevance

/third_party/mindspore/mindspore/nn/optim/
ftrl.py:29 def _tensor_run_opt_with_sparse(opt, spars_opt, push, pull, l1, l2, lr_power, learning_rate, linear, argument
46 def _tensor_run_opt(opt, spars_opt, push, pull, l1, l2, lr_power, learning_rate, linear, argument
52 success = F.depend(success, pull(push((gradient, learning_rate, l1, l2, lr_power),
55 …success = F.depend(success, opt(weight, moment, linear, gradient, learning_rate, l1, l2, lr_power))
59 def _check_param(initial_accum, lr_power, l1, l2, use_locking, prim_name=None): argument
64 validator.check_value_type("lr_power", lr_power, [float], prim_name)
65 validator.check_number("lr_power", lr_power, 0.0, Rel.LE, prim_name)
197 … def __init__(self, params, initial_accum=0.1, learning_rate=0.001, lr_power=-0.5, l1=0.0, l2=0.0, argument
202 _check_param(initial_accum, lr_power, l1, l2, use_locking, self.cls_name)
208 self.lr_power = lr_power
[additional matches in ftrl.py omitted]
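
Note: the matches above are MindSpore's FTRL optimizer wrapper. lr_power is accepted by the constructor (ftrl.py:197), checked to be a non-positive float (ftrl.py:59-65), and forwarded to the underlying dense/sparse FTRL primitives (ftrl.py:52-55). A minimal construction sketch, assuming only the signature shown above; the Dense layer is just a placeholder model:

    import mindspore.nn as nn

    # Placeholder model; any nn.Cell with trainable parameters works.
    net = nn.Dense(4, 2)

    # lr_power must be a float <= 0.0 (enforced by _check_param at ftrl.py:59-65).
    # The default -0.5 turns the accumulator power term into a square root.
    opt = nn.FTRL(net.trainable_params(),
                  initial_accum=0.1,
                  learning_rate=0.001,
                  lr_power=-0.5,
                  l1=0.0,
                  l2=0.0,
                  use_locking=False)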
/third_party/mindspore/mindspore/ccsrc/backend/kernel_compiler/cpu/
sparse_apply_ftrl_cpu_kernel.cc:36 const auto lr_power = input_params->lr_power_; in ComputeFtrl() local
51 if (lr_power == -0.5) { in ComputeFtrl()
55 y = std::pow(accum_new, -lr_power); in ComputeFtrl()
56 linear[j] += summed_grad - (y - std::pow(accum[j], -lr_power)) / lr * var[j]; in ComputeFtrl()
/third_party/mindspore/tests/st/ops/ascend/test_aicpu_ops/
test_fused_sparse_ftrl.py:28 lr_power = -0.5 variable
33 self.fused_sparse_ftrl = P.FusedSparseFtrl(lr=0.1, l1=0.0, l2=0.0, lr_power=-0.5)
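
Note: FusedSparseFtrl applies the sparse FTRL update as a single fused kernel. A usage sketch modeled on the test above; the shapes, indices, and the (var, accum, linear, grad, indices) input order are assumptions drawn from the surrounding tests rather than verified documentation:

    import numpy as np
    import mindspore.nn as nn
    import mindspore.ops.operations as P
    from mindspore import Tensor, Parameter

    class SparseFtrlNet(nn.Cell):
        def __init__(self):
            super().__init__()
            self.fused_sparse_ftrl = P.FusedSparseFtrl(lr=0.1, l1=0.0, l2=0.0, lr_power=-0.5)
            self.var = Parameter(Tensor(np.ones((3, 2)).astype(np.float32)), name="var")
            self.accum = Parameter(Tensor(np.ones((3, 2)).astype(np.float32)), name="accum")
            self.linear = Parameter(Tensor(np.ones((3, 2)).astype(np.float32)), name="linear")

        def construct(self, grad, indices):
            # Only the rows of var/accum/linear selected by indices are updated.
            return self.fused_sparse_ftrl(self.var, self.accum, self.linear, grad, indices)

    grad = Tensor(np.ones((2, 2)).astype(np.float32))
    indices = Tensor(np.array([0, 2]).astype(np.int32))
    out = SparseFtrlNet()(grad, indices)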
/third_party/mindspore/mindspore/ccsrc/backend/optimizer/ascend/mindir/
optimizer_unify_output.cc:72 VarPtr lr_power = std::make_shared<Var>(); in DefinePattern() local
74 VectorRef pattern({prim::kPrimApplyFtrl, var, accum, linear, grad, lr, l1, l2, lr_power, u}); in DefinePattern()
/third_party/mindspore/tests/st/ops/gpu/
test_sparse_apply_ftrl_op.py:29 …self.sparse_apply_ftrl = P.SparseApplyFtrl(lr=0.001, l1=0.0, l2=0.0, lr_power=-0.5, use_locking=Fa…
42 …self.sparse_apply_ftrl = P.SparseApplyFtrl(lr=0.001, l1=0.0, l2=0.0, lr_power=-0.5, use_locking=Fa…
/third_party/mindspore/tests/st/ops/cpu/
test_sparse_apply_ftrl_op.py:29 self.sparse_apply_ftrl = P.FusedSparseFtrl(lr=0.001, l1=0.0, l2=0.0, lr_power=-0.5)
/third_party/mindspore/tests/st/auto_monad/
test_effect_optimizer.py:261 def construct(self, grad, lr, l1, l2, lr_power): argument
263 grad, lr, l1, l2, lr_power)
281 lr_power = Tensor(-0.5, mstype.float32)
282 new_var, new_accum, new_linear = net(grad, lr, l1, l2, lr_power)
500 lr=0.01, l1=0.0, l2=0.0, lr_power=-0.5)
659 lr=0.01, l1=0.0, l2=0.0, lr_power=-0.5)
691 lr=0.01, l1=0.0, l2=0.0, l2_shrinkage=0.0, lr_power=-0.5)
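
Note: these tests exercise the dense and sparse FTRL primitives under auto-monad side-effect handling. A self-contained sketch mirroring the ApplyFtrl test at test_effect_optimizer.py:261-282; the initial parameter values are illustrative, and the three-output unpacking follows line 282 above:

    import numpy as np
    import mindspore.nn as nn
    import mindspore.ops.operations as P
    import mindspore.common.dtype as mstype
    from mindspore import Tensor, Parameter

    class ApplyFtrlNet(nn.Cell):
        def __init__(self):
            super().__init__()
            self.apply_ftrl = P.ApplyFtrl()
            self.var = Parameter(Tensor(np.ones((2, 2)).astype(np.float32)), name="var")
            self.accum = Parameter(Tensor(np.ones((2, 2)).astype(np.float32)), name="accum")
            self.linear = Parameter(Tensor(np.ones((2, 2)).astype(np.float32)), name="linear")

        def construct(self, grad, lr, l1, l2, lr_power):
            return self.apply_ftrl(self.var, self.accum, self.linear, grad,
                                   lr, l1, l2, lr_power)

    grad = Tensor(np.ones((2, 2)).astype(np.float32))
    lr = Tensor(0.001, mstype.float32)
    l1 = Tensor(0.0, mstype.float32)
    l2 = Tensor(0.0, mstype.float32)
    lr_power = Tensor(-0.5, mstype.float32)
    new_var, new_accum, new_linear = ApplyFtrlNet()(grad, lr, l1, l2, lr_power)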
/third_party/mindspore/mindspore/ccsrc/transform/graph_ir/op_declare/
nn_training_ops_declare.cc:183 {"lr_power", ATTR_DESC(lr_power, AnyTraits<float>())}};
200 {7, INPUT_DESC(l2)}, {8, INPUT_DESC(lr_power)}};
/third_party/mindspore/tests/ut/python/ops/
test_dynamic_shape.py:65 self.sparse_apply_ftrl = P.SparseApplyFtrl(lr=0.01, l1=0.0, l2=0.0, lr_power=-0.5)
test_ops.py:608 self.lr_power = -0.5
614 … self.apply_ftrl(self.var, self.accum, self.linear, grad, self.lr, self.l1, self.l2, self.lr_power)
621 self.sparse_apply_ftrl = P.SparseApplyFtrl(lr=0.001, l1=0.0, l2=0.0, lr_power=-0.5)
634 …arse_apply_ftrl_v2 = P.SparseApplyFtrlV2(lr=0.001, l1=0.0, l2=0.0, l2_shrinkage=0.0, lr_power=-0.5)
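
Note: SparseApplyFtrlV2 differs from SparseApplyFtrl only in the extra l2_shrinkage attribute (see nn_ops.py:7218 below). A construction sketch matching the test above:

    import mindspore.ops.operations as P

    # Same attributes as SparseApplyFtrl plus l2_shrinkage; lr_power must still be <= 0.
    sparse_apply_ftrl_v2 = P.SparseApplyFtrlV2(lr=0.001, l1=0.0, l2=0.0,
                                               l2_shrinkage=0.0, lr_power=-0.5)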
/third_party/mindspore/mindspore/ops/operations/
nn_ops.py:5207 def __init__(self, lr, l1, l2, lr_power, use_locking=False): argument
5216 validator.check_value_type("lr_power", lr_power, [float], self.name)
5220 self.lr_power = validator.check_number("lr_power", lr_power, 0, Rel.LE, self.name)
7109 def __init__(self, lr, l1, l2, lr_power, use_locking=False): argument
7114 validator.check_value_type("lr_power", lr_power, [float], self.name)
7118 self.lr_power = validator.check_number("lr_power", lr_power, 0, Rel.LE, self.name)
7218 def __init__(self, lr, l1, l2, l2_shrinkage, lr_power, use_locking=False): argument
7223 validator.check_value_type("lr_power", lr_power, [float], self.name)
7227 self.lr_power = validator.check_number("lr_power", lr_power, 0, Rel.LE, self.name)
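
Note: all three primitive definitions (nn_ops.py:5207, 7109, and 7218) validate lr_power identically: it must be a Python float and must not exceed 0. A plain-Python paraphrase of those validator calls; the error-message wording is illustrative, not MindSpore's exact text:

    def check_lr_power(lr_power, prim_name):
        # Mirrors validator.check_value_type("lr_power", lr_power, [float], ...)
        # and validator.check_number("lr_power", lr_power, 0, Rel.LE, ...).
        if not isinstance(lr_power, float):
            raise TypeError(f"For '{prim_name}', 'lr_power' must be a float, "
                            f"but got {type(lr_power).__name__}.")
        if lr_power > 0:
            raise ValueError(f"For '{prim_name}', 'lr_power' must be <= 0, "
                             f"but got {lr_power}.")
        return lr_power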
/third_party/mindspore/config/
op_info.config:56 …me": "l1", "type": "float"}, {"name": "l2", "type": "float"}, {"name": "lr_power", "type": "float"…
166 …false, "param_type": "required", "shape": "all"}, {"index": 7, "name": "lr_power", "need_compile":…
177 …, "param_type": "required", "type": "float", "value": "all"}, {"name": "lr_power", "param_type": "…
299 …, "param_type": "required", "type": "float", "value": "all"}, {"name": "lr_power", "param_type": "…
300 …, "param_type": "required", "type": "float", "value": "all"}, {"name": "lr_power", "param_type": "…