Graph.SparseApplyAdagradDA Method
Namespace: Emgu.TF
public Operation SparseApplyAdagradDA( Output var, Output gradient_accumulator, Output gradient_squared_accumulator, Output grad, Output indices, Output lr, Output l1, Output l2, Output global_step, bool use_locking = false, string opName = "SparseApplyAdagradDA" )
Public Function SparseApplyAdagradDA ( var As Output, gradient_accumulator As Output, gradient_squared_accumulator As Output, grad As Output, indices As Output, lr As Output, l1 As Output, l2 As Output, global_step As Output, Optional use_locking As Boolean = false, Optional opName As String = "SparseApplyAdagradDA" ) As Operation
public: Operation^ SparseApplyAdagradDA( Output^ var, Output^ gradient_accumulator, Output^ gradient_squared_accumulator, Output^ grad, Output^ indices, Output^ lr, Output^ l1, Output^ l2, Output^ global_step, bool use_locking = false, String^ opName = L"SparseApplyAdagradDA" )
member SparseApplyAdagradDA : var : Output * gradient_accumulator : Output * gradient_squared_accumulator : Output * grad : Output * indices : Output * lr : Output * l1 : Output * l2 : Output * global_step : Output * ?use_locking : bool * ?opName : string (* Defaults: let _use_locking = defaultArg use_locking false let _opName = defaultArg opName "SparseApplyAdagradDA" *) -> Operation
2021 Emgu Corporation, All Rights Reserved.