Home
last modified time | relevance | path

Searched refs:lambd (Results 1 – 6 of 6) sorted by relevance

/third_party/mindspore/mindspore/ops/_grad_experimental/
grad_nn_ops.py:54	input_grad = G.SoftShrinkGrad(self.lambd)
66 grad = G.HShrinkGrad(self.lambd)
/third_party/mindspore/mindspore/nn/layer/
activation.py:805	def __init__(self, lambd=0.5): argument
807 self.softshrink = P.SoftShrink(lambd)
851 def __init__(self, lambd=0.5): argument
853 self.hshrink = P.HShrink(lambd)
/third_party/python/Lib/
random.py:639	def expovariate(self, lambd): argument
654 return -_log(1.0 - self.random()) / lambd
/third_party/mindspore/mindspore/ops/operations/
_grad_ops.py:2244	def __init__(self, lambd=0.5): argument
2246 validator.check_value_type("lambd", lambd, [float], self.name)
2247 validator.check_number("lambd", lambd, 0, Rel.GE, self.name)
2286 def __init__(self, lambd=0.5): argument
2287 validator.check_value_type("lambd", lambd, [float], self.name)
2288 if lambd < 0.0:
2289 lambd = 0.0
2290 self.add_prim_attr('lambd', lambd)
nn_ops.py:8970	def __init__(self, lambd=0.5): argument
8972 validator.check_value_type("lambd", lambd, [float], self.name)
8973 validator.check_number("lambd", lambd, 0, Rel.GE, self.name)
9014 def __init__(self, lambd=0.5): argument
9016 validator.check_value_type('lambd', lambd, [float], self.name)
9017 if lambd < 0.0:
9018 lambd = 0.0
9019 self.add_prim_attr('lambd', lambd)
/third_party/python/Doc/library/
random.rst:306	.. function:: expovariate(lambd)
308 Exponential distribution. *lambd* is 1.0 divided by the desired
311 range from 0 to positive infinity if *lambd* is positive, and from
312 negative infinity to 0 if *lambd* is negative.