1path: "tensorflow.optimizers.experimental.AdamW" 2tf_class { 3 is_instance: "<class \'keras.optimizers.optimizer_experimental.adamw.AdamW\'>" 4 is_instance: "<class \'keras.optimizers.optimizer_experimental.optimizer.Optimizer\'>" 5 is_instance: "<class \'keras.optimizers.optimizer_experimental.optimizer._BaseOptimizer\'>" 6 is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>" 7 is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>" 8 is_instance: "<type \'object\'>" 9 member { 10 name: "iterations" 11 mtype: "<type \'property\'>" 12 } 13 member { 14 name: "learning_rate" 15 mtype: "<type \'property\'>" 16 } 17 member { 18 name: "lr" 19 mtype: "<type \'property\'>" 20 } 21 member_method { 22 name: "__init__" 23 argspec: "args=[\'self\', \'learning_rate\', \'weight_decay\', \'beta_1\', \'beta_2\', \'epsilon\', \'amsgrad\', \'clipnorm\', \'clipvalue\', \'global_clipnorm\', \'use_ema\', \'ema_momentum\', \'ema_overwrite_frequency\', \'jit_compile\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'0.001\', \'0.004\', \'0.9\', \'0.999\', \'1e-07\', \'False\', \'None\', \'None\', \'None\', \'False\', \'0.99\', \'None\', \'True\', \'AdamW\'], " 24 } 25 member_method { 26 name: "add_variable" 27 argspec: "args=[\'self\', \'shape\', \'dtype\', \'initializer\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'zeros\', \'None\'], " 28 } 29 member_method { 30 name: "add_variable_from_reference" 31 argspec: "args=[\'self\', \'model_variable\', \'variable_name\', \'shape\', \'initial_value\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " 32 } 33 member_method { 34 name: "aggregate_gradients" 35 argspec: "args=[\'self\', \'grads_and_vars\'], varargs=None, keywords=None, defaults=None" 36 } 37 member_method { 38 name: "apply_gradients" 39 argspec: "args=[\'self\', \'grads_and_vars\', \'name\', \'skip_gradients_aggregation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'False\'], " 40 } 41 member_method { 42 name: "build" 43 argspec: "args=[\'self\', \'var_list\'], varargs=None, keywords=None, defaults=None" 44 } 45 member_method { 46 name: "compute_gradients" 47 argspec: "args=[\'self\', \'loss\', \'var_list\', \'tape\'], varargs=None, keywords=None, defaults=[\'None\'], " 48 } 49 member_method { 50 name: "exclude_from_weight_decay" 51 argspec: "args=[\'self\', \'var_list\', \'var_names\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " 52 } 53 member_method { 54 name: "finalize_variable_values" 55 argspec: "args=[\'self\', \'var_list\'], varargs=None, keywords=None, defaults=None" 56 } 57 member_method { 58 name: "from_config" 59 argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None" 60 } 61 member_method { 62 name: "get_config" 63 argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" 64 } 65 member_method { 66 name: "minimize" 67 argspec: "args=[\'self\', \'loss\', \'var_list\', \'tape\'], varargs=None, keywords=None, defaults=[\'None\'], " 68 } 69 member_method { 70 name: "update_step" 71 argspec: "args=[\'self\', \'gradient\', \'variable\'], varargs=None, keywords=None, defaults=None" 72 } 73 member_method { 74 name: "variables" 75 argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" 76 } 77} 78