1 | // This file is MACHINE GENERATED! Do not edit. |
2 | |
3 | |
4 | #include "tensorflow/cc/ops/const_op.h" |
5 | #include "tensorflow/cc/ops/training_ops.h" |
6 | |
7 | namespace tensorflow { |
8 | namespace ops { |
9 | |
10 | ApplyAdadelta::ApplyAdadelta(const ::tensorflow::Scope& scope, |
11 | ::tensorflow::Input var, ::tensorflow::Input |
12 | accum, ::tensorflow::Input accum_update, |
13 | ::tensorflow::Input lr, ::tensorflow::Input rho, |
14 | ::tensorflow::Input epsilon, ::tensorflow::Input |
15 | grad, const ApplyAdadelta::Attrs& attrs) { |
16 | if (!scope.ok()) return; |
17 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
18 | if (!scope.ok()) return; |
19 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
20 | if (!scope.ok()) return; |
21 | auto _accum_update = ::tensorflow::ops::AsNodeOut(scope, accum_update); |
22 | if (!scope.ok()) return; |
23 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
24 | if (!scope.ok()) return; |
25 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
26 | if (!scope.ok()) return; |
27 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
28 | if (!scope.ok()) return; |
29 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
30 | if (!scope.ok()) return; |
31 | ::tensorflow::Node* ret; |
32 | const auto unique_name = scope.GetUniqueNameForOp("ApplyAdadelta" ); |
33 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyAdadelta" ) |
34 | .Input(_var) |
35 | .Input(_accum) |
36 | .Input(_accum_update) |
37 | .Input(_lr) |
38 | .Input(_rho) |
39 | .Input(_epsilon) |
40 | .Input(_grad) |
41 | .Attr("use_locking" , attrs.use_locking_) |
42 | ; |
43 | scope.UpdateBuilder(&builder); |
44 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
45 | if (!scope.ok()) return; |
46 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
47 | this->operation = Operation(ret); |
48 | this->out = Output(ret, 0); |
49 | } |
50 | |
51 | ApplyAdadelta::ApplyAdadelta(const ::tensorflow::Scope& scope, |
52 | ::tensorflow::Input var, ::tensorflow::Input |
53 | accum, ::tensorflow::Input accum_update, |
54 | ::tensorflow::Input lr, ::tensorflow::Input rho, |
55 | ::tensorflow::Input epsilon, ::tensorflow::Input |
56 | grad) |
57 | : ApplyAdadelta(scope, var, accum, accum_update, lr, rho, epsilon, grad, ApplyAdadelta::Attrs()) {} |
58 | |
59 | ApplyAdagrad::ApplyAdagrad(const ::tensorflow::Scope& scope, |
60 | ::tensorflow::Input var, ::tensorflow::Input accum, |
61 | ::tensorflow::Input lr, ::tensorflow::Input grad, |
62 | const ApplyAdagrad::Attrs& attrs) { |
63 | if (!scope.ok()) return; |
64 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
65 | if (!scope.ok()) return; |
66 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
67 | if (!scope.ok()) return; |
68 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
69 | if (!scope.ok()) return; |
70 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
71 | if (!scope.ok()) return; |
72 | ::tensorflow::Node* ret; |
73 | const auto unique_name = scope.GetUniqueNameForOp("ApplyAdagrad" ); |
74 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyAdagrad" ) |
75 | .Input(_var) |
76 | .Input(_accum) |
77 | .Input(_lr) |
78 | .Input(_grad) |
79 | .Attr("use_locking" , attrs.use_locking_) |
80 | .Attr("update_slots" , attrs.update_slots_) |
81 | ; |
82 | scope.UpdateBuilder(&builder); |
83 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
84 | if (!scope.ok()) return; |
85 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
86 | this->operation = Operation(ret); |
87 | this->out = Output(ret, 0); |
88 | } |
89 | |
90 | ApplyAdagrad::ApplyAdagrad(const ::tensorflow::Scope& scope, |
91 | ::tensorflow::Input var, ::tensorflow::Input accum, |
92 | ::tensorflow::Input lr, ::tensorflow::Input grad) |
93 | : ApplyAdagrad(scope, var, accum, lr, grad, ApplyAdagrad::Attrs()) {} |
94 | |
95 | ApplyAdagradDA::ApplyAdagradDA(const ::tensorflow::Scope& scope, |
96 | ::tensorflow::Input var, ::tensorflow::Input |
97 | gradient_accumulator, ::tensorflow::Input |
98 | gradient_squared_accumulator, |
99 | ::tensorflow::Input grad, ::tensorflow::Input |
100 | lr, ::tensorflow::Input l1, ::tensorflow::Input |
101 | l2, ::tensorflow::Input global_step, const |
102 | ApplyAdagradDA::Attrs& attrs) { |
103 | if (!scope.ok()) return; |
104 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
105 | if (!scope.ok()) return; |
106 | auto _gradient_accumulator = ::tensorflow::ops::AsNodeOut(scope, gradient_accumulator); |
107 | if (!scope.ok()) return; |
108 | auto _gradient_squared_accumulator = ::tensorflow::ops::AsNodeOut(scope, gradient_squared_accumulator); |
109 | if (!scope.ok()) return; |
110 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
111 | if (!scope.ok()) return; |
112 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
113 | if (!scope.ok()) return; |
114 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
115 | if (!scope.ok()) return; |
116 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
117 | if (!scope.ok()) return; |
118 | auto _global_step = ::tensorflow::ops::AsNodeOut(scope, global_step); |
119 | if (!scope.ok()) return; |
120 | ::tensorflow::Node* ret; |
121 | const auto unique_name = scope.GetUniqueNameForOp("ApplyAdagradDA" ); |
122 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyAdagradDA" ) |
123 | .Input(_var) |
124 | .Input(_gradient_accumulator) |
125 | .Input(_gradient_squared_accumulator) |
126 | .Input(_grad) |
127 | .Input(_lr) |
128 | .Input(_l1) |
129 | .Input(_l2) |
130 | .Input(_global_step) |
131 | .Attr("use_locking" , attrs.use_locking_) |
132 | ; |
133 | scope.UpdateBuilder(&builder); |
134 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
135 | if (!scope.ok()) return; |
136 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
137 | this->operation = Operation(ret); |
138 | this->out = Output(ret, 0); |
139 | } |
140 | |
141 | ApplyAdagradDA::ApplyAdagradDA(const ::tensorflow::Scope& scope, |
142 | ::tensorflow::Input var, ::tensorflow::Input |
143 | gradient_accumulator, ::tensorflow::Input |
144 | gradient_squared_accumulator, |
145 | ::tensorflow::Input grad, ::tensorflow::Input |
146 | lr, ::tensorflow::Input l1, ::tensorflow::Input |
147 | l2, ::tensorflow::Input global_step) |
148 | : ApplyAdagradDA(scope, var, gradient_accumulator, gradient_squared_accumulator, grad, lr, l1, l2, global_step, ApplyAdagradDA::Attrs()) {} |
149 | |
150 | ApplyAdam::ApplyAdam(const ::tensorflow::Scope& scope, ::tensorflow::Input var, |
151 | ::tensorflow::Input m, ::tensorflow::Input v, |
152 | ::tensorflow::Input beta1_power, ::tensorflow::Input |
153 | beta2_power, ::tensorflow::Input lr, ::tensorflow::Input |
154 | beta1, ::tensorflow::Input beta2, ::tensorflow::Input |
155 | epsilon, ::tensorflow::Input grad, const ApplyAdam::Attrs& |
156 | attrs) { |
157 | if (!scope.ok()) return; |
158 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
159 | if (!scope.ok()) return; |
160 | auto _m = ::tensorflow::ops::AsNodeOut(scope, m); |
161 | if (!scope.ok()) return; |
162 | auto _v = ::tensorflow::ops::AsNodeOut(scope, v); |
163 | if (!scope.ok()) return; |
164 | auto _beta1_power = ::tensorflow::ops::AsNodeOut(scope, beta1_power); |
165 | if (!scope.ok()) return; |
166 | auto _beta2_power = ::tensorflow::ops::AsNodeOut(scope, beta2_power); |
167 | if (!scope.ok()) return; |
168 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
169 | if (!scope.ok()) return; |
170 | auto _beta1 = ::tensorflow::ops::AsNodeOut(scope, beta1); |
171 | if (!scope.ok()) return; |
172 | auto _beta2 = ::tensorflow::ops::AsNodeOut(scope, beta2); |
173 | if (!scope.ok()) return; |
174 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
175 | if (!scope.ok()) return; |
176 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
177 | if (!scope.ok()) return; |
178 | ::tensorflow::Node* ret; |
179 | const auto unique_name = scope.GetUniqueNameForOp("ApplyAdam" ); |
180 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyAdam" ) |
181 | .Input(_var) |
182 | .Input(_m) |
183 | .Input(_v) |
184 | .Input(_beta1_power) |
185 | .Input(_beta2_power) |
186 | .Input(_lr) |
187 | .Input(_beta1) |
188 | .Input(_beta2) |
189 | .Input(_epsilon) |
190 | .Input(_grad) |
191 | .Attr("use_locking" , attrs.use_locking_) |
192 | .Attr("use_nesterov" , attrs.use_nesterov_) |
193 | ; |
194 | scope.UpdateBuilder(&builder); |
195 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
196 | if (!scope.ok()) return; |
197 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
198 | this->operation = Operation(ret); |
199 | this->out = Output(ret, 0); |
200 | } |
201 | |
202 | ApplyAdam::ApplyAdam(const ::tensorflow::Scope& scope, ::tensorflow::Input var, |
203 | ::tensorflow::Input m, ::tensorflow::Input v, |
204 | ::tensorflow::Input beta1_power, ::tensorflow::Input |
205 | beta2_power, ::tensorflow::Input lr, ::tensorflow::Input |
206 | beta1, ::tensorflow::Input beta2, ::tensorflow::Input |
207 | epsilon, ::tensorflow::Input grad) |
208 | : ApplyAdam(scope, var, m, v, beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad, ApplyAdam::Attrs()) {} |
209 | |
210 | ApplyAddSign::ApplyAddSign(const ::tensorflow::Scope& scope, |
211 | ::tensorflow::Input var, ::tensorflow::Input m, |
212 | ::tensorflow::Input lr, ::tensorflow::Input alpha, |
213 | ::tensorflow::Input sign_decay, ::tensorflow::Input |
214 | beta, ::tensorflow::Input grad, const |
215 | ApplyAddSign::Attrs& attrs) { |
216 | if (!scope.ok()) return; |
217 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
218 | if (!scope.ok()) return; |
219 | auto _m = ::tensorflow::ops::AsNodeOut(scope, m); |
220 | if (!scope.ok()) return; |
221 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
222 | if (!scope.ok()) return; |
223 | auto _alpha = ::tensorflow::ops::AsNodeOut(scope, alpha); |
224 | if (!scope.ok()) return; |
225 | auto _sign_decay = ::tensorflow::ops::AsNodeOut(scope, sign_decay); |
226 | if (!scope.ok()) return; |
227 | auto _beta = ::tensorflow::ops::AsNodeOut(scope, beta); |
228 | if (!scope.ok()) return; |
229 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
230 | if (!scope.ok()) return; |
231 | ::tensorflow::Node* ret; |
232 | const auto unique_name = scope.GetUniqueNameForOp("ApplyAddSign" ); |
233 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyAddSign" ) |
234 | .Input(_var) |
235 | .Input(_m) |
236 | .Input(_lr) |
237 | .Input(_alpha) |
238 | .Input(_sign_decay) |
239 | .Input(_beta) |
240 | .Input(_grad) |
241 | .Attr("use_locking" , attrs.use_locking_) |
242 | ; |
243 | scope.UpdateBuilder(&builder); |
244 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
245 | if (!scope.ok()) return; |
246 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
247 | this->operation = Operation(ret); |
248 | this->out = Output(ret, 0); |
249 | } |
250 | |
251 | ApplyAddSign::ApplyAddSign(const ::tensorflow::Scope& scope, |
252 | ::tensorflow::Input var, ::tensorflow::Input m, |
253 | ::tensorflow::Input lr, ::tensorflow::Input alpha, |
254 | ::tensorflow::Input sign_decay, ::tensorflow::Input |
255 | beta, ::tensorflow::Input grad) |
256 | : ApplyAddSign(scope, var, m, lr, alpha, sign_decay, beta, grad, ApplyAddSign::Attrs()) {} |
257 | |
258 | ApplyCenteredRMSProp::ApplyCenteredRMSProp(const ::tensorflow::Scope& scope, |
259 | ::tensorflow::Input var, |
260 | ::tensorflow::Input mg, |
261 | ::tensorflow::Input ms, |
262 | ::tensorflow::Input mom, |
263 | ::tensorflow::Input lr, |
264 | ::tensorflow::Input rho, |
265 | ::tensorflow::Input momentum, |
266 | ::tensorflow::Input epsilon, |
267 | ::tensorflow::Input grad, const |
268 | ApplyCenteredRMSProp::Attrs& attrs) { |
269 | if (!scope.ok()) return; |
270 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
271 | if (!scope.ok()) return; |
272 | auto _mg = ::tensorflow::ops::AsNodeOut(scope, mg); |
273 | if (!scope.ok()) return; |
274 | auto _ms = ::tensorflow::ops::AsNodeOut(scope, ms); |
275 | if (!scope.ok()) return; |
276 | auto _mom = ::tensorflow::ops::AsNodeOut(scope, mom); |
277 | if (!scope.ok()) return; |
278 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
279 | if (!scope.ok()) return; |
280 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
281 | if (!scope.ok()) return; |
282 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
283 | if (!scope.ok()) return; |
284 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
285 | if (!scope.ok()) return; |
286 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
287 | if (!scope.ok()) return; |
288 | ::tensorflow::Node* ret; |
289 | const auto unique_name = scope.GetUniqueNameForOp("ApplyCenteredRMSProp" ); |
290 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyCenteredRMSProp" ) |
291 | .Input(_var) |
292 | .Input(_mg) |
293 | .Input(_ms) |
294 | .Input(_mom) |
295 | .Input(_lr) |
296 | .Input(_rho) |
297 | .Input(_momentum) |
298 | .Input(_epsilon) |
299 | .Input(_grad) |
300 | .Attr("use_locking" , attrs.use_locking_) |
301 | ; |
302 | scope.UpdateBuilder(&builder); |
303 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
304 | if (!scope.ok()) return; |
305 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
306 | this->operation = Operation(ret); |
307 | this->out = Output(ret, 0); |
308 | } |
309 | |
310 | ApplyCenteredRMSProp::ApplyCenteredRMSProp(const ::tensorflow::Scope& scope, |
311 | ::tensorflow::Input var, |
312 | ::tensorflow::Input mg, |
313 | ::tensorflow::Input ms, |
314 | ::tensorflow::Input mom, |
315 | ::tensorflow::Input lr, |
316 | ::tensorflow::Input rho, |
317 | ::tensorflow::Input momentum, |
318 | ::tensorflow::Input epsilon, |
319 | ::tensorflow::Input grad) |
320 | : ApplyCenteredRMSProp(scope, var, mg, ms, mom, lr, rho, momentum, epsilon, grad, ApplyCenteredRMSProp::Attrs()) {} |
321 | |
322 | ApplyFtrl::ApplyFtrl(const ::tensorflow::Scope& scope, ::tensorflow::Input var, |
323 | ::tensorflow::Input accum, ::tensorflow::Input linear, |
324 | ::tensorflow::Input grad, ::tensorflow::Input lr, |
325 | ::tensorflow::Input l1, ::tensorflow::Input l2, |
326 | ::tensorflow::Input lr_power, const ApplyFtrl::Attrs& |
327 | attrs) { |
328 | if (!scope.ok()) return; |
329 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
330 | if (!scope.ok()) return; |
331 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
332 | if (!scope.ok()) return; |
333 | auto _linear = ::tensorflow::ops::AsNodeOut(scope, linear); |
334 | if (!scope.ok()) return; |
335 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
336 | if (!scope.ok()) return; |
337 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
338 | if (!scope.ok()) return; |
339 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
340 | if (!scope.ok()) return; |
341 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
342 | if (!scope.ok()) return; |
343 | auto _lr_power = ::tensorflow::ops::AsNodeOut(scope, lr_power); |
344 | if (!scope.ok()) return; |
345 | ::tensorflow::Node* ret; |
346 | const auto unique_name = scope.GetUniqueNameForOp("ApplyFtrl" ); |
347 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyFtrl" ) |
348 | .Input(_var) |
349 | .Input(_accum) |
350 | .Input(_linear) |
351 | .Input(_grad) |
352 | .Input(_lr) |
353 | .Input(_l1) |
354 | .Input(_l2) |
355 | .Input(_lr_power) |
356 | .Attr("use_locking" , attrs.use_locking_) |
357 | .Attr("multiply_linear_by_lr" , attrs.multiply_linear_by_lr_) |
358 | ; |
359 | scope.UpdateBuilder(&builder); |
360 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
361 | if (!scope.ok()) return; |
362 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
363 | this->operation = Operation(ret); |
364 | this->out = Output(ret, 0); |
365 | } |
366 | |
367 | ApplyFtrl::ApplyFtrl(const ::tensorflow::Scope& scope, ::tensorflow::Input var, |
368 | ::tensorflow::Input accum, ::tensorflow::Input linear, |
369 | ::tensorflow::Input grad, ::tensorflow::Input lr, |
370 | ::tensorflow::Input l1, ::tensorflow::Input l2, |
371 | ::tensorflow::Input lr_power) |
372 | : ApplyFtrl(scope, var, accum, linear, grad, lr, l1, l2, lr_power, ApplyFtrl::Attrs()) {} |
373 | |
374 | ApplyFtrlV2::ApplyFtrlV2(const ::tensorflow::Scope& scope, ::tensorflow::Input |
375 | var, ::tensorflow::Input accum, ::tensorflow::Input |
376 | linear, ::tensorflow::Input grad, ::tensorflow::Input |
377 | lr, ::tensorflow::Input l1, ::tensorflow::Input l2, |
378 | ::tensorflow::Input l2_shrinkage, ::tensorflow::Input |
379 | lr_power, const ApplyFtrlV2::Attrs& attrs) { |
380 | if (!scope.ok()) return; |
381 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
382 | if (!scope.ok()) return; |
383 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
384 | if (!scope.ok()) return; |
385 | auto _linear = ::tensorflow::ops::AsNodeOut(scope, linear); |
386 | if (!scope.ok()) return; |
387 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
388 | if (!scope.ok()) return; |
389 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
390 | if (!scope.ok()) return; |
391 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
392 | if (!scope.ok()) return; |
393 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
394 | if (!scope.ok()) return; |
395 | auto _l2_shrinkage = ::tensorflow::ops::AsNodeOut(scope, l2_shrinkage); |
396 | if (!scope.ok()) return; |
397 | auto _lr_power = ::tensorflow::ops::AsNodeOut(scope, lr_power); |
398 | if (!scope.ok()) return; |
399 | ::tensorflow::Node* ret; |
400 | const auto unique_name = scope.GetUniqueNameForOp("ApplyFtrlV2" ); |
401 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyFtrlV2" ) |
402 | .Input(_var) |
403 | .Input(_accum) |
404 | .Input(_linear) |
405 | .Input(_grad) |
406 | .Input(_lr) |
407 | .Input(_l1) |
408 | .Input(_l2) |
409 | .Input(_l2_shrinkage) |
410 | .Input(_lr_power) |
411 | .Attr("use_locking" , attrs.use_locking_) |
412 | .Attr("multiply_linear_by_lr" , attrs.multiply_linear_by_lr_) |
413 | ; |
414 | scope.UpdateBuilder(&builder); |
415 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
416 | if (!scope.ok()) return; |
417 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
418 | this->operation = Operation(ret); |
419 | this->out = Output(ret, 0); |
420 | } |
421 | |
422 | ApplyFtrlV2::ApplyFtrlV2(const ::tensorflow::Scope& scope, ::tensorflow::Input |
423 | var, ::tensorflow::Input accum, ::tensorflow::Input |
424 | linear, ::tensorflow::Input grad, ::tensorflow::Input |
425 | lr, ::tensorflow::Input l1, ::tensorflow::Input l2, |
426 | ::tensorflow::Input l2_shrinkage, ::tensorflow::Input |
427 | lr_power) |
428 | : ApplyFtrlV2(scope, var, accum, linear, grad, lr, l1, l2, l2_shrinkage, lr_power, ApplyFtrlV2::Attrs()) {} |
429 | |
430 | ApplyGradientDescent::ApplyGradientDescent(const ::tensorflow::Scope& scope, |
431 | ::tensorflow::Input var, |
432 | ::tensorflow::Input alpha, |
433 | ::tensorflow::Input delta, const |
434 | ApplyGradientDescent::Attrs& attrs) { |
435 | if (!scope.ok()) return; |
436 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
437 | if (!scope.ok()) return; |
438 | auto _alpha = ::tensorflow::ops::AsNodeOut(scope, alpha); |
439 | if (!scope.ok()) return; |
440 | auto _delta = ::tensorflow::ops::AsNodeOut(scope, delta); |
441 | if (!scope.ok()) return; |
442 | ::tensorflow::Node* ret; |
443 | const auto unique_name = scope.GetUniqueNameForOp("ApplyGradientDescent" ); |
444 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyGradientDescent" ) |
445 | .Input(_var) |
446 | .Input(_alpha) |
447 | .Input(_delta) |
448 | .Attr("use_locking" , attrs.use_locking_) |
449 | ; |
450 | scope.UpdateBuilder(&builder); |
451 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
452 | if (!scope.ok()) return; |
453 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
454 | this->operation = Operation(ret); |
455 | this->out = Output(ret, 0); |
456 | } |
457 | |
458 | ApplyGradientDescent::ApplyGradientDescent(const ::tensorflow::Scope& scope, |
459 | ::tensorflow::Input var, |
460 | ::tensorflow::Input alpha, |
461 | ::tensorflow::Input delta) |
462 | : ApplyGradientDescent(scope, var, alpha, delta, ApplyGradientDescent::Attrs()) {} |
463 | |
464 | ApplyMomentum::ApplyMomentum(const ::tensorflow::Scope& scope, |
465 | ::tensorflow::Input var, ::tensorflow::Input |
466 | accum, ::tensorflow::Input lr, ::tensorflow::Input |
467 | grad, ::tensorflow::Input momentum, const |
468 | ApplyMomentum::Attrs& attrs) { |
469 | if (!scope.ok()) return; |
470 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
471 | if (!scope.ok()) return; |
472 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
473 | if (!scope.ok()) return; |
474 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
475 | if (!scope.ok()) return; |
476 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
477 | if (!scope.ok()) return; |
478 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
479 | if (!scope.ok()) return; |
480 | ::tensorflow::Node* ret; |
481 | const auto unique_name = scope.GetUniqueNameForOp("ApplyMomentum" ); |
482 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyMomentum" ) |
483 | .Input(_var) |
484 | .Input(_accum) |
485 | .Input(_lr) |
486 | .Input(_grad) |
487 | .Input(_momentum) |
488 | .Attr("use_locking" , attrs.use_locking_) |
489 | .Attr("use_nesterov" , attrs.use_nesterov_) |
490 | ; |
491 | scope.UpdateBuilder(&builder); |
492 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
493 | if (!scope.ok()) return; |
494 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
495 | this->operation = Operation(ret); |
496 | this->out = Output(ret, 0); |
497 | } |
498 | |
499 | ApplyMomentum::ApplyMomentum(const ::tensorflow::Scope& scope, |
500 | ::tensorflow::Input var, ::tensorflow::Input |
501 | accum, ::tensorflow::Input lr, ::tensorflow::Input |
502 | grad, ::tensorflow::Input momentum) |
503 | : ApplyMomentum(scope, var, accum, lr, grad, momentum, ApplyMomentum::Attrs()) {} |
504 | |
505 | ApplyPowerSign::ApplyPowerSign(const ::tensorflow::Scope& scope, |
506 | ::tensorflow::Input var, ::tensorflow::Input m, |
507 | ::tensorflow::Input lr, ::tensorflow::Input |
508 | logbase, ::tensorflow::Input sign_decay, |
509 | ::tensorflow::Input beta, ::tensorflow::Input |
510 | grad, const ApplyPowerSign::Attrs& attrs) { |
511 | if (!scope.ok()) return; |
512 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
513 | if (!scope.ok()) return; |
514 | auto _m = ::tensorflow::ops::AsNodeOut(scope, m); |
515 | if (!scope.ok()) return; |
516 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
517 | if (!scope.ok()) return; |
518 | auto _logbase = ::tensorflow::ops::AsNodeOut(scope, logbase); |
519 | if (!scope.ok()) return; |
520 | auto _sign_decay = ::tensorflow::ops::AsNodeOut(scope, sign_decay); |
521 | if (!scope.ok()) return; |
522 | auto _beta = ::tensorflow::ops::AsNodeOut(scope, beta); |
523 | if (!scope.ok()) return; |
524 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
525 | if (!scope.ok()) return; |
526 | ::tensorflow::Node* ret; |
527 | const auto unique_name = scope.GetUniqueNameForOp("ApplyPowerSign" ); |
528 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyPowerSign" ) |
529 | .Input(_var) |
530 | .Input(_m) |
531 | .Input(_lr) |
532 | .Input(_logbase) |
533 | .Input(_sign_decay) |
534 | .Input(_beta) |
535 | .Input(_grad) |
536 | .Attr("use_locking" , attrs.use_locking_) |
537 | ; |
538 | scope.UpdateBuilder(&builder); |
539 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
540 | if (!scope.ok()) return; |
541 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
542 | this->operation = Operation(ret); |
543 | this->out = Output(ret, 0); |
544 | } |
545 | |
546 | ApplyPowerSign::ApplyPowerSign(const ::tensorflow::Scope& scope, |
547 | ::tensorflow::Input var, ::tensorflow::Input m, |
548 | ::tensorflow::Input lr, ::tensorflow::Input |
549 | logbase, ::tensorflow::Input sign_decay, |
550 | ::tensorflow::Input beta, ::tensorflow::Input |
551 | grad) |
552 | : ApplyPowerSign(scope, var, m, lr, logbase, sign_decay, beta, grad, ApplyPowerSign::Attrs()) {} |
553 | |
554 | ApplyProximalAdagrad::ApplyProximalAdagrad(const ::tensorflow::Scope& scope, |
555 | ::tensorflow::Input var, |
556 | ::tensorflow::Input accum, |
557 | ::tensorflow::Input lr, |
558 | ::tensorflow::Input l1, |
559 | ::tensorflow::Input l2, |
560 | ::tensorflow::Input grad, const |
561 | ApplyProximalAdagrad::Attrs& attrs) { |
562 | if (!scope.ok()) return; |
563 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
564 | if (!scope.ok()) return; |
565 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
566 | if (!scope.ok()) return; |
567 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
568 | if (!scope.ok()) return; |
569 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
570 | if (!scope.ok()) return; |
571 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
572 | if (!scope.ok()) return; |
573 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
574 | if (!scope.ok()) return; |
575 | ::tensorflow::Node* ret; |
576 | const auto unique_name = scope.GetUniqueNameForOp("ApplyProximalAdagrad" ); |
577 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyProximalAdagrad" ) |
578 | .Input(_var) |
579 | .Input(_accum) |
580 | .Input(_lr) |
581 | .Input(_l1) |
582 | .Input(_l2) |
583 | .Input(_grad) |
584 | .Attr("use_locking" , attrs.use_locking_) |
585 | ; |
586 | scope.UpdateBuilder(&builder); |
587 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
588 | if (!scope.ok()) return; |
589 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
590 | this->operation = Operation(ret); |
591 | this->out = Output(ret, 0); |
592 | } |
593 | |
594 | ApplyProximalAdagrad::ApplyProximalAdagrad(const ::tensorflow::Scope& scope, |
595 | ::tensorflow::Input var, |
596 | ::tensorflow::Input accum, |
597 | ::tensorflow::Input lr, |
598 | ::tensorflow::Input l1, |
599 | ::tensorflow::Input l2, |
600 | ::tensorflow::Input grad) |
601 | : ApplyProximalAdagrad(scope, var, accum, lr, l1, l2, grad, ApplyProximalAdagrad::Attrs()) {} |
602 | |
603 | ApplyProximalGradientDescent::ApplyProximalGradientDescent(const |
604 | ::tensorflow::Scope& |
605 | scope, |
606 | ::tensorflow::Input |
607 | var, |
608 | ::tensorflow::Input |
609 | alpha, |
610 | ::tensorflow::Input |
611 | l1, |
612 | ::tensorflow::Input |
613 | l2, |
614 | ::tensorflow::Input |
615 | delta, const |
616 | ApplyProximalGradientDescent::Attrs& |
617 | attrs) { |
618 | if (!scope.ok()) return; |
619 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
620 | if (!scope.ok()) return; |
621 | auto _alpha = ::tensorflow::ops::AsNodeOut(scope, alpha); |
622 | if (!scope.ok()) return; |
623 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
624 | if (!scope.ok()) return; |
625 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
626 | if (!scope.ok()) return; |
627 | auto _delta = ::tensorflow::ops::AsNodeOut(scope, delta); |
628 | if (!scope.ok()) return; |
629 | ::tensorflow::Node* ret; |
630 | const auto unique_name = scope.GetUniqueNameForOp("ApplyProximalGradientDescent" ); |
631 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyProximalGradientDescent" ) |
632 | .Input(_var) |
633 | .Input(_alpha) |
634 | .Input(_l1) |
635 | .Input(_l2) |
636 | .Input(_delta) |
637 | .Attr("use_locking" , attrs.use_locking_) |
638 | ; |
639 | scope.UpdateBuilder(&builder); |
640 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
641 | if (!scope.ok()) return; |
642 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
643 | this->operation = Operation(ret); |
644 | this->out = Output(ret, 0); |
645 | } |
646 | |
647 | ApplyProximalGradientDescent::ApplyProximalGradientDescent(const |
648 | ::tensorflow::Scope& |
649 | scope, |
650 | ::tensorflow::Input |
651 | var, |
652 | ::tensorflow::Input |
653 | alpha, |
654 | ::tensorflow::Input |
655 | l1, |
656 | ::tensorflow::Input |
657 | l2, |
658 | ::tensorflow::Input |
659 | delta) |
660 | : ApplyProximalGradientDescent(scope, var, alpha, l1, l2, delta, ApplyProximalGradientDescent::Attrs()) {} |
661 | |
662 | ApplyRMSProp::ApplyRMSProp(const ::tensorflow::Scope& scope, |
663 | ::tensorflow::Input var, ::tensorflow::Input ms, |
664 | ::tensorflow::Input mom, ::tensorflow::Input lr, |
665 | ::tensorflow::Input rho, ::tensorflow::Input |
666 | momentum, ::tensorflow::Input epsilon, |
667 | ::tensorflow::Input grad, const ApplyRMSProp::Attrs& |
668 | attrs) { |
669 | if (!scope.ok()) return; |
670 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
671 | if (!scope.ok()) return; |
672 | auto _ms = ::tensorflow::ops::AsNodeOut(scope, ms); |
673 | if (!scope.ok()) return; |
674 | auto _mom = ::tensorflow::ops::AsNodeOut(scope, mom); |
675 | if (!scope.ok()) return; |
676 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
677 | if (!scope.ok()) return; |
678 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
679 | if (!scope.ok()) return; |
680 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
681 | if (!scope.ok()) return; |
682 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
683 | if (!scope.ok()) return; |
684 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
685 | if (!scope.ok()) return; |
686 | ::tensorflow::Node* ret; |
687 | const auto unique_name = scope.GetUniqueNameForOp("ApplyRMSProp" ); |
688 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyRMSProp" ) |
689 | .Input(_var) |
690 | .Input(_ms) |
691 | .Input(_mom) |
692 | .Input(_lr) |
693 | .Input(_rho) |
694 | .Input(_momentum) |
695 | .Input(_epsilon) |
696 | .Input(_grad) |
697 | .Attr("use_locking" , attrs.use_locking_) |
698 | ; |
699 | scope.UpdateBuilder(&builder); |
700 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
701 | if (!scope.ok()) return; |
702 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
703 | this->operation = Operation(ret); |
704 | this->out = Output(ret, 0); |
705 | } |
706 | |
707 | ApplyRMSProp::ApplyRMSProp(const ::tensorflow::Scope& scope, |
708 | ::tensorflow::Input var, ::tensorflow::Input ms, |
709 | ::tensorflow::Input mom, ::tensorflow::Input lr, |
710 | ::tensorflow::Input rho, ::tensorflow::Input |
711 | momentum, ::tensorflow::Input epsilon, |
712 | ::tensorflow::Input grad) |
713 | : ApplyRMSProp(scope, var, ms, mom, lr, rho, momentum, epsilon, grad, ApplyRMSProp::Attrs()) {} |
714 | |
715 | ResourceApplyAdadelta::ResourceApplyAdadelta(const ::tensorflow::Scope& scope, |
716 | ::tensorflow::Input var, |
717 | ::tensorflow::Input accum, |
718 | ::tensorflow::Input accum_update, |
719 | ::tensorflow::Input lr, |
720 | ::tensorflow::Input rho, |
721 | ::tensorflow::Input epsilon, |
722 | ::tensorflow::Input grad, const |
723 | ResourceApplyAdadelta::Attrs& |
724 | attrs) { |
725 | if (!scope.ok()) return; |
726 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
727 | if (!scope.ok()) return; |
728 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
729 | if (!scope.ok()) return; |
730 | auto _accum_update = ::tensorflow::ops::AsNodeOut(scope, accum_update); |
731 | if (!scope.ok()) return; |
732 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
733 | if (!scope.ok()) return; |
734 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
735 | if (!scope.ok()) return; |
736 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
737 | if (!scope.ok()) return; |
738 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
739 | if (!scope.ok()) return; |
740 | ::tensorflow::Node* ret; |
741 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyAdadelta" ); |
742 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyAdadelta" ) |
743 | .Input(_var) |
744 | .Input(_accum) |
745 | .Input(_accum_update) |
746 | .Input(_lr) |
747 | .Input(_rho) |
748 | .Input(_epsilon) |
749 | .Input(_grad) |
750 | .Attr("use_locking" , attrs.use_locking_) |
751 | ; |
752 | scope.UpdateBuilder(&builder); |
753 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
754 | if (!scope.ok()) return; |
755 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
756 | this->operation = Operation(ret); |
757 | return; |
758 | } |
759 | |
760 | ResourceApplyAdadelta::ResourceApplyAdadelta(const ::tensorflow::Scope& scope, |
761 | ::tensorflow::Input var, |
762 | ::tensorflow::Input accum, |
763 | ::tensorflow::Input accum_update, |
764 | ::tensorflow::Input lr, |
765 | ::tensorflow::Input rho, |
766 | ::tensorflow::Input epsilon, |
767 | ::tensorflow::Input grad) |
768 | : ResourceApplyAdadelta(scope, var, accum, accum_update, lr, rho, epsilon, grad, ResourceApplyAdadelta::Attrs()) {} |
769 | |
770 | ResourceApplyAdagrad::ResourceApplyAdagrad(const ::tensorflow::Scope& scope, |
771 | ::tensorflow::Input var, |
772 | ::tensorflow::Input accum, |
773 | ::tensorflow::Input lr, |
774 | ::tensorflow::Input grad, const |
775 | ResourceApplyAdagrad::Attrs& attrs) { |
776 | if (!scope.ok()) return; |
777 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
778 | if (!scope.ok()) return; |
779 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
780 | if (!scope.ok()) return; |
781 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
782 | if (!scope.ok()) return; |
783 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
784 | if (!scope.ok()) return; |
785 | ::tensorflow::Node* ret; |
786 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyAdagrad" ); |
787 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyAdagrad" ) |
788 | .Input(_var) |
789 | .Input(_accum) |
790 | .Input(_lr) |
791 | .Input(_grad) |
792 | .Attr("use_locking" , attrs.use_locking_) |
793 | .Attr("update_slots" , attrs.update_slots_) |
794 | ; |
795 | scope.UpdateBuilder(&builder); |
796 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
797 | if (!scope.ok()) return; |
798 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
799 | this->operation = Operation(ret); |
800 | return; |
801 | } |
802 | |
803 | ResourceApplyAdagrad::ResourceApplyAdagrad(const ::tensorflow::Scope& scope, |
804 | ::tensorflow::Input var, |
805 | ::tensorflow::Input accum, |
806 | ::tensorflow::Input lr, |
807 | ::tensorflow::Input grad) |
808 | : ResourceApplyAdagrad(scope, var, accum, lr, grad, ResourceApplyAdagrad::Attrs()) {} |
809 | |
810 | ResourceApplyAdagradDA::ResourceApplyAdagradDA(const ::tensorflow::Scope& |
811 | scope, ::tensorflow::Input var, |
812 | ::tensorflow::Input |
813 | gradient_accumulator, |
814 | ::tensorflow::Input |
815 | gradient_squared_accumulator, |
816 | ::tensorflow::Input grad, |
817 | ::tensorflow::Input lr, |
818 | ::tensorflow::Input l1, |
819 | ::tensorflow::Input l2, |
820 | ::tensorflow::Input global_step, |
821 | const |
822 | ResourceApplyAdagradDA::Attrs& |
823 | attrs) { |
824 | if (!scope.ok()) return; |
825 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
826 | if (!scope.ok()) return; |
827 | auto _gradient_accumulator = ::tensorflow::ops::AsNodeOut(scope, gradient_accumulator); |
828 | if (!scope.ok()) return; |
829 | auto _gradient_squared_accumulator = ::tensorflow::ops::AsNodeOut(scope, gradient_squared_accumulator); |
830 | if (!scope.ok()) return; |
831 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
832 | if (!scope.ok()) return; |
833 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
834 | if (!scope.ok()) return; |
835 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
836 | if (!scope.ok()) return; |
837 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
838 | if (!scope.ok()) return; |
839 | auto _global_step = ::tensorflow::ops::AsNodeOut(scope, global_step); |
840 | if (!scope.ok()) return; |
841 | ::tensorflow::Node* ret; |
842 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyAdagradDA" ); |
843 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyAdagradDA" ) |
844 | .Input(_var) |
845 | .Input(_gradient_accumulator) |
846 | .Input(_gradient_squared_accumulator) |
847 | .Input(_grad) |
848 | .Input(_lr) |
849 | .Input(_l1) |
850 | .Input(_l2) |
851 | .Input(_global_step) |
852 | .Attr("use_locking" , attrs.use_locking_) |
853 | ; |
854 | scope.UpdateBuilder(&builder); |
855 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
856 | if (!scope.ok()) return; |
857 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
858 | this->operation = Operation(ret); |
859 | return; |
860 | } |
861 | |
862 | ResourceApplyAdagradDA::ResourceApplyAdagradDA(const ::tensorflow::Scope& |
863 | scope, ::tensorflow::Input var, |
864 | ::tensorflow::Input |
865 | gradient_accumulator, |
866 | ::tensorflow::Input |
867 | gradient_squared_accumulator, |
868 | ::tensorflow::Input grad, |
869 | ::tensorflow::Input lr, |
870 | ::tensorflow::Input l1, |
871 | ::tensorflow::Input l2, |
872 | ::tensorflow::Input global_step) |
873 | : ResourceApplyAdagradDA(scope, var, gradient_accumulator, gradient_squared_accumulator, grad, lr, l1, l2, global_step, ResourceApplyAdagradDA::Attrs()) {} |
874 | |
875 | ResourceApplyAdam::ResourceApplyAdam(const ::tensorflow::Scope& scope, |
876 | ::tensorflow::Input var, |
877 | ::tensorflow::Input m, ::tensorflow::Input |
878 | v, ::tensorflow::Input beta1_power, |
879 | ::tensorflow::Input beta2_power, |
880 | ::tensorflow::Input lr, |
881 | ::tensorflow::Input beta1, |
882 | ::tensorflow::Input beta2, |
883 | ::tensorflow::Input epsilon, |
884 | ::tensorflow::Input grad, const |
885 | ResourceApplyAdam::Attrs& attrs) { |
886 | if (!scope.ok()) return; |
887 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
888 | if (!scope.ok()) return; |
889 | auto _m = ::tensorflow::ops::AsNodeOut(scope, m); |
890 | if (!scope.ok()) return; |
891 | auto _v = ::tensorflow::ops::AsNodeOut(scope, v); |
892 | if (!scope.ok()) return; |
893 | auto _beta1_power = ::tensorflow::ops::AsNodeOut(scope, beta1_power); |
894 | if (!scope.ok()) return; |
895 | auto _beta2_power = ::tensorflow::ops::AsNodeOut(scope, beta2_power); |
896 | if (!scope.ok()) return; |
897 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
898 | if (!scope.ok()) return; |
899 | auto _beta1 = ::tensorflow::ops::AsNodeOut(scope, beta1); |
900 | if (!scope.ok()) return; |
901 | auto _beta2 = ::tensorflow::ops::AsNodeOut(scope, beta2); |
902 | if (!scope.ok()) return; |
903 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
904 | if (!scope.ok()) return; |
905 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
906 | if (!scope.ok()) return; |
907 | ::tensorflow::Node* ret; |
908 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyAdam" ); |
909 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyAdam" ) |
910 | .Input(_var) |
911 | .Input(_m) |
912 | .Input(_v) |
913 | .Input(_beta1_power) |
914 | .Input(_beta2_power) |
915 | .Input(_lr) |
916 | .Input(_beta1) |
917 | .Input(_beta2) |
918 | .Input(_epsilon) |
919 | .Input(_grad) |
920 | .Attr("use_locking" , attrs.use_locking_) |
921 | .Attr("use_nesterov" , attrs.use_nesterov_) |
922 | ; |
923 | scope.UpdateBuilder(&builder); |
924 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
925 | if (!scope.ok()) return; |
926 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
927 | this->operation = Operation(ret); |
928 | return; |
929 | } |
930 | |
931 | ResourceApplyAdam::ResourceApplyAdam(const ::tensorflow::Scope& scope, |
932 | ::tensorflow::Input var, |
933 | ::tensorflow::Input m, ::tensorflow::Input |
934 | v, ::tensorflow::Input beta1_power, |
935 | ::tensorflow::Input beta2_power, |
936 | ::tensorflow::Input lr, |
937 | ::tensorflow::Input beta1, |
938 | ::tensorflow::Input beta2, |
939 | ::tensorflow::Input epsilon, |
940 | ::tensorflow::Input grad) |
941 | : ResourceApplyAdam(scope, var, m, v, beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad, ResourceApplyAdam::Attrs()) {} |
942 | |
943 | ResourceApplyAdamWithAmsgrad::ResourceApplyAdamWithAmsgrad(const |
944 | ::tensorflow::Scope& |
945 | scope, |
946 | ::tensorflow::Input |
947 | var, |
948 | ::tensorflow::Input |
949 | m, |
950 | ::tensorflow::Input |
951 | v, |
952 | ::tensorflow::Input |
953 | vhat, |
954 | ::tensorflow::Input |
955 | beta1_power, |
956 | ::tensorflow::Input |
957 | beta2_power, |
958 | ::tensorflow::Input |
959 | lr, |
960 | ::tensorflow::Input |
961 | beta1, |
962 | ::tensorflow::Input |
963 | beta2, |
964 | ::tensorflow::Input |
965 | epsilon, |
966 | ::tensorflow::Input |
967 | grad, const |
968 | ResourceApplyAdamWithAmsgrad::Attrs& |
969 | attrs) { |
970 | if (!scope.ok()) return; |
971 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
972 | if (!scope.ok()) return; |
973 | auto _m = ::tensorflow::ops::AsNodeOut(scope, m); |
974 | if (!scope.ok()) return; |
975 | auto _v = ::tensorflow::ops::AsNodeOut(scope, v); |
976 | if (!scope.ok()) return; |
977 | auto _vhat = ::tensorflow::ops::AsNodeOut(scope, vhat); |
978 | if (!scope.ok()) return; |
979 | auto _beta1_power = ::tensorflow::ops::AsNodeOut(scope, beta1_power); |
980 | if (!scope.ok()) return; |
981 | auto _beta2_power = ::tensorflow::ops::AsNodeOut(scope, beta2_power); |
982 | if (!scope.ok()) return; |
983 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
984 | if (!scope.ok()) return; |
985 | auto _beta1 = ::tensorflow::ops::AsNodeOut(scope, beta1); |
986 | if (!scope.ok()) return; |
987 | auto _beta2 = ::tensorflow::ops::AsNodeOut(scope, beta2); |
988 | if (!scope.ok()) return; |
989 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
990 | if (!scope.ok()) return; |
991 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
992 | if (!scope.ok()) return; |
993 | ::tensorflow::Node* ret; |
994 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyAdamWithAmsgrad" ); |
995 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyAdamWithAmsgrad" ) |
996 | .Input(_var) |
997 | .Input(_m) |
998 | .Input(_v) |
999 | .Input(_vhat) |
1000 | .Input(_beta1_power) |
1001 | .Input(_beta2_power) |
1002 | .Input(_lr) |
1003 | .Input(_beta1) |
1004 | .Input(_beta2) |
1005 | .Input(_epsilon) |
1006 | .Input(_grad) |
1007 | .Attr("use_locking" , attrs.use_locking_) |
1008 | ; |
1009 | scope.UpdateBuilder(&builder); |
1010 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1011 | if (!scope.ok()) return; |
1012 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1013 | this->operation = Operation(ret); |
1014 | return; |
1015 | } |
1016 | |
1017 | ResourceApplyAdamWithAmsgrad::ResourceApplyAdamWithAmsgrad(const |
1018 | ::tensorflow::Scope& |
1019 | scope, |
1020 | ::tensorflow::Input |
1021 | var, |
1022 | ::tensorflow::Input |
1023 | m, |
1024 | ::tensorflow::Input |
1025 | v, |
1026 | ::tensorflow::Input |
1027 | vhat, |
1028 | ::tensorflow::Input |
1029 | beta1_power, |
1030 | ::tensorflow::Input |
1031 | beta2_power, |
1032 | ::tensorflow::Input |
1033 | lr, |
1034 | ::tensorflow::Input |
1035 | beta1, |
1036 | ::tensorflow::Input |
1037 | beta2, |
1038 | ::tensorflow::Input |
1039 | epsilon, |
1040 | ::tensorflow::Input |
1041 | grad) |
1042 | : ResourceApplyAdamWithAmsgrad(scope, var, m, v, vhat, beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad, ResourceApplyAdamWithAmsgrad::Attrs()) {} |
1043 | |
1044 | ResourceApplyAddSign::ResourceApplyAddSign(const ::tensorflow::Scope& scope, |
1045 | ::tensorflow::Input var, |
1046 | ::tensorflow::Input m, |
1047 | ::tensorflow::Input lr, |
1048 | ::tensorflow::Input alpha, |
1049 | ::tensorflow::Input sign_decay, |
1050 | ::tensorflow::Input beta, |
1051 | ::tensorflow::Input grad, const |
1052 | ResourceApplyAddSign::Attrs& attrs) { |
1053 | if (!scope.ok()) return; |
1054 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1055 | if (!scope.ok()) return; |
1056 | auto _m = ::tensorflow::ops::AsNodeOut(scope, m); |
1057 | if (!scope.ok()) return; |
1058 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1059 | if (!scope.ok()) return; |
1060 | auto _alpha = ::tensorflow::ops::AsNodeOut(scope, alpha); |
1061 | if (!scope.ok()) return; |
1062 | auto _sign_decay = ::tensorflow::ops::AsNodeOut(scope, sign_decay); |
1063 | if (!scope.ok()) return; |
1064 | auto _beta = ::tensorflow::ops::AsNodeOut(scope, beta); |
1065 | if (!scope.ok()) return; |
1066 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1067 | if (!scope.ok()) return; |
1068 | ::tensorflow::Node* ret; |
1069 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyAddSign" ); |
1070 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyAddSign" ) |
1071 | .Input(_var) |
1072 | .Input(_m) |
1073 | .Input(_lr) |
1074 | .Input(_alpha) |
1075 | .Input(_sign_decay) |
1076 | .Input(_beta) |
1077 | .Input(_grad) |
1078 | .Attr("use_locking" , attrs.use_locking_) |
1079 | ; |
1080 | scope.UpdateBuilder(&builder); |
1081 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1082 | if (!scope.ok()) return; |
1083 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1084 | this->operation = Operation(ret); |
1085 | return; |
1086 | } |
1087 | |
1088 | ResourceApplyAddSign::ResourceApplyAddSign(const ::tensorflow::Scope& scope, |
1089 | ::tensorflow::Input var, |
1090 | ::tensorflow::Input m, |
1091 | ::tensorflow::Input lr, |
1092 | ::tensorflow::Input alpha, |
1093 | ::tensorflow::Input sign_decay, |
1094 | ::tensorflow::Input beta, |
1095 | ::tensorflow::Input grad) |
1096 | : ResourceApplyAddSign(scope, var, m, lr, alpha, sign_decay, beta, grad, ResourceApplyAddSign::Attrs()) {} |
1097 | |
1098 | ResourceApplyCenteredRMSProp::ResourceApplyCenteredRMSProp(const |
1099 | ::tensorflow::Scope& |
1100 | scope, |
1101 | ::tensorflow::Input |
1102 | var, |
1103 | ::tensorflow::Input |
1104 | mg, |
1105 | ::tensorflow::Input |
1106 | ms, |
1107 | ::tensorflow::Input |
1108 | mom, |
1109 | ::tensorflow::Input |
1110 | lr, |
1111 | ::tensorflow::Input |
1112 | rho, |
1113 | ::tensorflow::Input |
1114 | momentum, |
1115 | ::tensorflow::Input |
1116 | epsilon, |
1117 | ::tensorflow::Input |
1118 | grad, const |
1119 | ResourceApplyCenteredRMSProp::Attrs& |
1120 | attrs) { |
1121 | if (!scope.ok()) return; |
1122 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1123 | if (!scope.ok()) return; |
1124 | auto _mg = ::tensorflow::ops::AsNodeOut(scope, mg); |
1125 | if (!scope.ok()) return; |
1126 | auto _ms = ::tensorflow::ops::AsNodeOut(scope, ms); |
1127 | if (!scope.ok()) return; |
1128 | auto _mom = ::tensorflow::ops::AsNodeOut(scope, mom); |
1129 | if (!scope.ok()) return; |
1130 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1131 | if (!scope.ok()) return; |
1132 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
1133 | if (!scope.ok()) return; |
1134 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
1135 | if (!scope.ok()) return; |
1136 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
1137 | if (!scope.ok()) return; |
1138 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1139 | if (!scope.ok()) return; |
1140 | ::tensorflow::Node* ret; |
1141 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyCenteredRMSProp" ); |
1142 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyCenteredRMSProp" ) |
1143 | .Input(_var) |
1144 | .Input(_mg) |
1145 | .Input(_ms) |
1146 | .Input(_mom) |
1147 | .Input(_lr) |
1148 | .Input(_rho) |
1149 | .Input(_momentum) |
1150 | .Input(_epsilon) |
1151 | .Input(_grad) |
1152 | .Attr("use_locking" , attrs.use_locking_) |
1153 | ; |
1154 | scope.UpdateBuilder(&builder); |
1155 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1156 | if (!scope.ok()) return; |
1157 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1158 | this->operation = Operation(ret); |
1159 | return; |
1160 | } |
1161 | |
1162 | ResourceApplyCenteredRMSProp::ResourceApplyCenteredRMSProp(const |
1163 | ::tensorflow::Scope& |
1164 | scope, |
1165 | ::tensorflow::Input |
1166 | var, |
1167 | ::tensorflow::Input |
1168 | mg, |
1169 | ::tensorflow::Input |
1170 | ms, |
1171 | ::tensorflow::Input |
1172 | mom, |
1173 | ::tensorflow::Input |
1174 | lr, |
1175 | ::tensorflow::Input |
1176 | rho, |
1177 | ::tensorflow::Input |
1178 | momentum, |
1179 | ::tensorflow::Input |
1180 | epsilon, |
1181 | ::tensorflow::Input |
1182 | grad) |
1183 | : ResourceApplyCenteredRMSProp(scope, var, mg, ms, mom, lr, rho, momentum, epsilon, grad, ResourceApplyCenteredRMSProp::Attrs()) {} |
1184 | |
1185 | ResourceApplyFtrl::ResourceApplyFtrl(const ::tensorflow::Scope& scope, |
1186 | ::tensorflow::Input var, |
1187 | ::tensorflow::Input accum, |
1188 | ::tensorflow::Input linear, |
1189 | ::tensorflow::Input grad, |
1190 | ::tensorflow::Input lr, |
1191 | ::tensorflow::Input l1, |
1192 | ::tensorflow::Input l2, |
1193 | ::tensorflow::Input lr_power, const |
1194 | ResourceApplyFtrl::Attrs& attrs) { |
1195 | if (!scope.ok()) return; |
1196 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1197 | if (!scope.ok()) return; |
1198 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
1199 | if (!scope.ok()) return; |
1200 | auto _linear = ::tensorflow::ops::AsNodeOut(scope, linear); |
1201 | if (!scope.ok()) return; |
1202 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1203 | if (!scope.ok()) return; |
1204 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1205 | if (!scope.ok()) return; |
1206 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
1207 | if (!scope.ok()) return; |
1208 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
1209 | if (!scope.ok()) return; |
1210 | auto _lr_power = ::tensorflow::ops::AsNodeOut(scope, lr_power); |
1211 | if (!scope.ok()) return; |
1212 | ::tensorflow::Node* ret; |
1213 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyFtrl" ); |
1214 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyFtrl" ) |
1215 | .Input(_var) |
1216 | .Input(_accum) |
1217 | .Input(_linear) |
1218 | .Input(_grad) |
1219 | .Input(_lr) |
1220 | .Input(_l1) |
1221 | .Input(_l2) |
1222 | .Input(_lr_power) |
1223 | .Attr("use_locking" , attrs.use_locking_) |
1224 | .Attr("multiply_linear_by_lr" , attrs.multiply_linear_by_lr_) |
1225 | ; |
1226 | scope.UpdateBuilder(&builder); |
1227 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1228 | if (!scope.ok()) return; |
1229 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1230 | this->operation = Operation(ret); |
1231 | return; |
1232 | } |
1233 | |
1234 | ResourceApplyFtrl::ResourceApplyFtrl(const ::tensorflow::Scope& scope, |
1235 | ::tensorflow::Input var, |
1236 | ::tensorflow::Input accum, |
1237 | ::tensorflow::Input linear, |
1238 | ::tensorflow::Input grad, |
1239 | ::tensorflow::Input lr, |
1240 | ::tensorflow::Input l1, |
1241 | ::tensorflow::Input l2, |
1242 | ::tensorflow::Input lr_power) |
1243 | : ResourceApplyFtrl(scope, var, accum, linear, grad, lr, l1, l2, lr_power, ResourceApplyFtrl::Attrs()) {} |
1244 | |
1245 | ResourceApplyFtrlV2::ResourceApplyFtrlV2(const ::tensorflow::Scope& scope, |
1246 | ::tensorflow::Input var, |
1247 | ::tensorflow::Input accum, |
1248 | ::tensorflow::Input linear, |
1249 | ::tensorflow::Input grad, |
1250 | ::tensorflow::Input lr, |
1251 | ::tensorflow::Input l1, |
1252 | ::tensorflow::Input l2, |
1253 | ::tensorflow::Input l2_shrinkage, |
1254 | ::tensorflow::Input lr_power, const |
1255 | ResourceApplyFtrlV2::Attrs& attrs) { |
1256 | if (!scope.ok()) return; |
1257 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1258 | if (!scope.ok()) return; |
1259 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
1260 | if (!scope.ok()) return; |
1261 | auto _linear = ::tensorflow::ops::AsNodeOut(scope, linear); |
1262 | if (!scope.ok()) return; |
1263 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1264 | if (!scope.ok()) return; |
1265 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1266 | if (!scope.ok()) return; |
1267 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
1268 | if (!scope.ok()) return; |
1269 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
1270 | if (!scope.ok()) return; |
1271 | auto _l2_shrinkage = ::tensorflow::ops::AsNodeOut(scope, l2_shrinkage); |
1272 | if (!scope.ok()) return; |
1273 | auto _lr_power = ::tensorflow::ops::AsNodeOut(scope, lr_power); |
1274 | if (!scope.ok()) return; |
1275 | ::tensorflow::Node* ret; |
1276 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyFtrlV2" ); |
1277 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyFtrlV2" ) |
1278 | .Input(_var) |
1279 | .Input(_accum) |
1280 | .Input(_linear) |
1281 | .Input(_grad) |
1282 | .Input(_lr) |
1283 | .Input(_l1) |
1284 | .Input(_l2) |
1285 | .Input(_l2_shrinkage) |
1286 | .Input(_lr_power) |
1287 | .Attr("use_locking" , attrs.use_locking_) |
1288 | .Attr("multiply_linear_by_lr" , attrs.multiply_linear_by_lr_) |
1289 | ; |
1290 | scope.UpdateBuilder(&builder); |
1291 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1292 | if (!scope.ok()) return; |
1293 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1294 | this->operation = Operation(ret); |
1295 | return; |
1296 | } |
1297 | |
1298 | ResourceApplyFtrlV2::ResourceApplyFtrlV2(const ::tensorflow::Scope& scope, |
1299 | ::tensorflow::Input var, |
1300 | ::tensorflow::Input accum, |
1301 | ::tensorflow::Input linear, |
1302 | ::tensorflow::Input grad, |
1303 | ::tensorflow::Input lr, |
1304 | ::tensorflow::Input l1, |
1305 | ::tensorflow::Input l2, |
1306 | ::tensorflow::Input l2_shrinkage, |
1307 | ::tensorflow::Input lr_power) |
1308 | : ResourceApplyFtrlV2(scope, var, accum, linear, grad, lr, l1, l2, l2_shrinkage, lr_power, ResourceApplyFtrlV2::Attrs()) {} |
1309 | |
1310 | ResourceApplyGradientDescent::ResourceApplyGradientDescent(const |
1311 | ::tensorflow::Scope& |
1312 | scope, |
1313 | ::tensorflow::Input |
1314 | var, |
1315 | ::tensorflow::Input |
1316 | alpha, |
1317 | ::tensorflow::Input |
1318 | delta, const |
1319 | ResourceApplyGradientDescent::Attrs& |
1320 | attrs) { |
1321 | if (!scope.ok()) return; |
1322 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1323 | if (!scope.ok()) return; |
1324 | auto _alpha = ::tensorflow::ops::AsNodeOut(scope, alpha); |
1325 | if (!scope.ok()) return; |
1326 | auto _delta = ::tensorflow::ops::AsNodeOut(scope, delta); |
1327 | if (!scope.ok()) return; |
1328 | ::tensorflow::Node* ret; |
1329 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyGradientDescent" ); |
1330 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyGradientDescent" ) |
1331 | .Input(_var) |
1332 | .Input(_alpha) |
1333 | .Input(_delta) |
1334 | .Attr("use_locking" , attrs.use_locking_) |
1335 | ; |
1336 | scope.UpdateBuilder(&builder); |
1337 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1338 | if (!scope.ok()) return; |
1339 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1340 | this->operation = Operation(ret); |
1341 | return; |
1342 | } |
1343 | |
1344 | ResourceApplyGradientDescent::ResourceApplyGradientDescent(const |
1345 | ::tensorflow::Scope& |
1346 | scope, |
1347 | ::tensorflow::Input |
1348 | var, |
1349 | ::tensorflow::Input |
1350 | alpha, |
1351 | ::tensorflow::Input |
1352 | delta) |
1353 | : ResourceApplyGradientDescent(scope, var, alpha, delta, ResourceApplyGradientDescent::Attrs()) {} |
1354 | |
1355 | ResourceApplyKerasMomentum::ResourceApplyKerasMomentum(const |
1356 | ::tensorflow::Scope& |
1357 | scope, |
1358 | ::tensorflow::Input var, |
1359 | ::tensorflow::Input |
1360 | accum, |
1361 | ::tensorflow::Input lr, |
1362 | ::tensorflow::Input |
1363 | grad, |
1364 | ::tensorflow::Input |
1365 | momentum, const |
1366 | ResourceApplyKerasMomentum::Attrs& |
1367 | attrs) { |
1368 | if (!scope.ok()) return; |
1369 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1370 | if (!scope.ok()) return; |
1371 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
1372 | if (!scope.ok()) return; |
1373 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1374 | if (!scope.ok()) return; |
1375 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1376 | if (!scope.ok()) return; |
1377 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
1378 | if (!scope.ok()) return; |
1379 | ::tensorflow::Node* ret; |
1380 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyKerasMomentum" ); |
1381 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyKerasMomentum" ) |
1382 | .Input(_var) |
1383 | .Input(_accum) |
1384 | .Input(_lr) |
1385 | .Input(_grad) |
1386 | .Input(_momentum) |
1387 | .Attr("use_locking" , attrs.use_locking_) |
1388 | .Attr("use_nesterov" , attrs.use_nesterov_) |
1389 | ; |
1390 | scope.UpdateBuilder(&builder); |
1391 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1392 | if (!scope.ok()) return; |
1393 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1394 | this->operation = Operation(ret); |
1395 | return; |
1396 | } |
1397 | |
1398 | ResourceApplyKerasMomentum::ResourceApplyKerasMomentum(const |
1399 | ::tensorflow::Scope& |
1400 | scope, |
1401 | ::tensorflow::Input var, |
1402 | ::tensorflow::Input |
1403 | accum, |
1404 | ::tensorflow::Input lr, |
1405 | ::tensorflow::Input |
1406 | grad, |
1407 | ::tensorflow::Input |
1408 | momentum) |
1409 | : ResourceApplyKerasMomentum(scope, var, accum, lr, grad, momentum, ResourceApplyKerasMomentum::Attrs()) {} |
1410 | |
1411 | ResourceApplyMomentum::ResourceApplyMomentum(const ::tensorflow::Scope& scope, |
1412 | ::tensorflow::Input var, |
1413 | ::tensorflow::Input accum, |
1414 | ::tensorflow::Input lr, |
1415 | ::tensorflow::Input grad, |
1416 | ::tensorflow::Input momentum, |
1417 | const |
1418 | ResourceApplyMomentum::Attrs& |
1419 | attrs) { |
1420 | if (!scope.ok()) return; |
1421 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1422 | if (!scope.ok()) return; |
1423 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
1424 | if (!scope.ok()) return; |
1425 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1426 | if (!scope.ok()) return; |
1427 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1428 | if (!scope.ok()) return; |
1429 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
1430 | if (!scope.ok()) return; |
1431 | ::tensorflow::Node* ret; |
1432 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyMomentum" ); |
1433 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyMomentum" ) |
1434 | .Input(_var) |
1435 | .Input(_accum) |
1436 | .Input(_lr) |
1437 | .Input(_grad) |
1438 | .Input(_momentum) |
1439 | .Attr("use_locking" , attrs.use_locking_) |
1440 | .Attr("use_nesterov" , attrs.use_nesterov_) |
1441 | ; |
1442 | scope.UpdateBuilder(&builder); |
1443 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1444 | if (!scope.ok()) return; |
1445 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1446 | this->operation = Operation(ret); |
1447 | return; |
1448 | } |
1449 | |
1450 | ResourceApplyMomentum::ResourceApplyMomentum(const ::tensorflow::Scope& scope, |
1451 | ::tensorflow::Input var, |
1452 | ::tensorflow::Input accum, |
1453 | ::tensorflow::Input lr, |
1454 | ::tensorflow::Input grad, |
1455 | ::tensorflow::Input momentum) |
1456 | : ResourceApplyMomentum(scope, var, accum, lr, grad, momentum, ResourceApplyMomentum::Attrs()) {} |
1457 | |
1458 | ResourceApplyPowerSign::ResourceApplyPowerSign(const ::tensorflow::Scope& |
1459 | scope, ::tensorflow::Input var, |
1460 | ::tensorflow::Input m, |
1461 | ::tensorflow::Input lr, |
1462 | ::tensorflow::Input logbase, |
1463 | ::tensorflow::Input sign_decay, |
1464 | ::tensorflow::Input beta, |
1465 | ::tensorflow::Input grad, const |
1466 | ResourceApplyPowerSign::Attrs& |
1467 | attrs) { |
1468 | if (!scope.ok()) return; |
1469 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1470 | if (!scope.ok()) return; |
1471 | auto _m = ::tensorflow::ops::AsNodeOut(scope, m); |
1472 | if (!scope.ok()) return; |
1473 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1474 | if (!scope.ok()) return; |
1475 | auto _logbase = ::tensorflow::ops::AsNodeOut(scope, logbase); |
1476 | if (!scope.ok()) return; |
1477 | auto _sign_decay = ::tensorflow::ops::AsNodeOut(scope, sign_decay); |
1478 | if (!scope.ok()) return; |
1479 | auto _beta = ::tensorflow::ops::AsNodeOut(scope, beta); |
1480 | if (!scope.ok()) return; |
1481 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1482 | if (!scope.ok()) return; |
1483 | ::tensorflow::Node* ret; |
1484 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyPowerSign" ); |
1485 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyPowerSign" ) |
1486 | .Input(_var) |
1487 | .Input(_m) |
1488 | .Input(_lr) |
1489 | .Input(_logbase) |
1490 | .Input(_sign_decay) |
1491 | .Input(_beta) |
1492 | .Input(_grad) |
1493 | .Attr("use_locking" , attrs.use_locking_) |
1494 | ; |
1495 | scope.UpdateBuilder(&builder); |
1496 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1497 | if (!scope.ok()) return; |
1498 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1499 | this->operation = Operation(ret); |
1500 | return; |
1501 | } |
1502 | |
1503 | ResourceApplyPowerSign::ResourceApplyPowerSign(const ::tensorflow::Scope& |
1504 | scope, ::tensorflow::Input var, |
1505 | ::tensorflow::Input m, |
1506 | ::tensorflow::Input lr, |
1507 | ::tensorflow::Input logbase, |
1508 | ::tensorflow::Input sign_decay, |
1509 | ::tensorflow::Input beta, |
1510 | ::tensorflow::Input grad) |
1511 | : ResourceApplyPowerSign(scope, var, m, lr, logbase, sign_decay, beta, grad, ResourceApplyPowerSign::Attrs()) {} |
1512 | |
1513 | ResourceApplyProximalAdagrad::ResourceApplyProximalAdagrad(const |
1514 | ::tensorflow::Scope& |
1515 | scope, |
1516 | ::tensorflow::Input |
1517 | var, |
1518 | ::tensorflow::Input |
1519 | accum, |
1520 | ::tensorflow::Input |
1521 | lr, |
1522 | ::tensorflow::Input |
1523 | l1, |
1524 | ::tensorflow::Input |
1525 | l2, |
1526 | ::tensorflow::Input |
1527 | grad, const |
1528 | ResourceApplyProximalAdagrad::Attrs& |
1529 | attrs) { |
1530 | if (!scope.ok()) return; |
1531 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1532 | if (!scope.ok()) return; |
1533 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
1534 | if (!scope.ok()) return; |
1535 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1536 | if (!scope.ok()) return; |
1537 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
1538 | if (!scope.ok()) return; |
1539 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
1540 | if (!scope.ok()) return; |
1541 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1542 | if (!scope.ok()) return; |
1543 | ::tensorflow::Node* ret; |
1544 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyProximalAdagrad" ); |
1545 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyProximalAdagrad" ) |
1546 | .Input(_var) |
1547 | .Input(_accum) |
1548 | .Input(_lr) |
1549 | .Input(_l1) |
1550 | .Input(_l2) |
1551 | .Input(_grad) |
1552 | .Attr("use_locking" , attrs.use_locking_) |
1553 | ; |
1554 | scope.UpdateBuilder(&builder); |
1555 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1556 | if (!scope.ok()) return; |
1557 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1558 | this->operation = Operation(ret); |
1559 | return; |
1560 | } |
1561 | |
1562 | ResourceApplyProximalAdagrad::ResourceApplyProximalAdagrad(const |
1563 | ::tensorflow::Scope& |
1564 | scope, |
1565 | ::tensorflow::Input |
1566 | var, |
1567 | ::tensorflow::Input |
1568 | accum, |
1569 | ::tensorflow::Input |
1570 | lr, |
1571 | ::tensorflow::Input |
1572 | l1, |
1573 | ::tensorflow::Input |
1574 | l2, |
1575 | ::tensorflow::Input |
1576 | grad) |
1577 | : ResourceApplyProximalAdagrad(scope, var, accum, lr, l1, l2, grad, ResourceApplyProximalAdagrad::Attrs()) {} |
1578 | |
1579 | ResourceApplyProximalGradientDescent::ResourceApplyProximalGradientDescent(const |
1580 | ::tensorflow::Scope& |
1581 | scope, |
1582 | ::tensorflow::Input |
1583 | var, |
1584 | ::tensorflow::Input |
1585 | alpha, |
1586 | ::tensorflow::Input |
1587 | l1, |
1588 | ::tensorflow::Input |
1589 | l2, |
1590 | ::tensorflow::Input |
1591 | delta, |
1592 | const |
1593 | ResourceApplyProximalGradientDescent::Attrs& |
1594 | attrs) { |
1595 | if (!scope.ok()) return; |
1596 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1597 | if (!scope.ok()) return; |
1598 | auto _alpha = ::tensorflow::ops::AsNodeOut(scope, alpha); |
1599 | if (!scope.ok()) return; |
1600 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
1601 | if (!scope.ok()) return; |
1602 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
1603 | if (!scope.ok()) return; |
1604 | auto _delta = ::tensorflow::ops::AsNodeOut(scope, delta); |
1605 | if (!scope.ok()) return; |
1606 | ::tensorflow::Node* ret; |
1607 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyProximalGradientDescent" ); |
1608 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyProximalGradientDescent" ) |
1609 | .Input(_var) |
1610 | .Input(_alpha) |
1611 | .Input(_l1) |
1612 | .Input(_l2) |
1613 | .Input(_delta) |
1614 | .Attr("use_locking" , attrs.use_locking_) |
1615 | ; |
1616 | scope.UpdateBuilder(&builder); |
1617 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1618 | if (!scope.ok()) return; |
1619 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1620 | this->operation = Operation(ret); |
1621 | return; |
1622 | } |
1623 | |
1624 | ResourceApplyProximalGradientDescent::ResourceApplyProximalGradientDescent(const |
1625 | ::tensorflow::Scope& |
1626 | scope, |
1627 | ::tensorflow::Input |
1628 | var, |
1629 | ::tensorflow::Input |
1630 | alpha, |
1631 | ::tensorflow::Input |
1632 | l1, |
1633 | ::tensorflow::Input |
1634 | l2, |
1635 | ::tensorflow::Input |
1636 | delta) |
1637 | : ResourceApplyProximalGradientDescent(scope, var, alpha, l1, l2, delta, ResourceApplyProximalGradientDescent::Attrs()) {} |
1638 | |
1639 | ResourceApplyRMSProp::ResourceApplyRMSProp(const ::tensorflow::Scope& scope, |
1640 | ::tensorflow::Input var, |
1641 | ::tensorflow::Input ms, |
1642 | ::tensorflow::Input mom, |
1643 | ::tensorflow::Input lr, |
1644 | ::tensorflow::Input rho, |
1645 | ::tensorflow::Input momentum, |
1646 | ::tensorflow::Input epsilon, |
1647 | ::tensorflow::Input grad, const |
1648 | ResourceApplyRMSProp::Attrs& attrs) { |
1649 | if (!scope.ok()) return; |
1650 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1651 | if (!scope.ok()) return; |
1652 | auto _ms = ::tensorflow::ops::AsNodeOut(scope, ms); |
1653 | if (!scope.ok()) return; |
1654 | auto _mom = ::tensorflow::ops::AsNodeOut(scope, mom); |
1655 | if (!scope.ok()) return; |
1656 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1657 | if (!scope.ok()) return; |
1658 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
1659 | if (!scope.ok()) return; |
1660 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
1661 | if (!scope.ok()) return; |
1662 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
1663 | if (!scope.ok()) return; |
1664 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1665 | if (!scope.ok()) return; |
1666 | ::tensorflow::Node* ret; |
1667 | const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyRMSProp" ); |
1668 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyRMSProp" ) |
1669 | .Input(_var) |
1670 | .Input(_ms) |
1671 | .Input(_mom) |
1672 | .Input(_lr) |
1673 | .Input(_rho) |
1674 | .Input(_momentum) |
1675 | .Input(_epsilon) |
1676 | .Input(_grad) |
1677 | .Attr("use_locking" , attrs.use_locking_) |
1678 | ; |
1679 | scope.UpdateBuilder(&builder); |
1680 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1681 | if (!scope.ok()) return; |
1682 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1683 | this->operation = Operation(ret); |
1684 | return; |
1685 | } |
1686 | |
1687 | ResourceApplyRMSProp::ResourceApplyRMSProp(const ::tensorflow::Scope& scope, |
1688 | ::tensorflow::Input var, |
1689 | ::tensorflow::Input ms, |
1690 | ::tensorflow::Input mom, |
1691 | ::tensorflow::Input lr, |
1692 | ::tensorflow::Input rho, |
1693 | ::tensorflow::Input momentum, |
1694 | ::tensorflow::Input epsilon, |
1695 | ::tensorflow::Input grad) |
1696 | : ResourceApplyRMSProp(scope, var, ms, mom, lr, rho, momentum, epsilon, grad, ResourceApplyRMSProp::Attrs()) {} |
1697 | |
1698 | ResourceSparseApplyAdadelta::ResourceSparseApplyAdadelta(const |
1699 | ::tensorflow::Scope& |
1700 | scope, |
1701 | ::tensorflow::Input |
1702 | var, |
1703 | ::tensorflow::Input |
1704 | accum, |
1705 | ::tensorflow::Input |
1706 | accum_update, |
1707 | ::tensorflow::Input |
1708 | lr, |
1709 | ::tensorflow::Input |
1710 | rho, |
1711 | ::tensorflow::Input |
1712 | epsilon, |
1713 | ::tensorflow::Input |
1714 | grad, |
1715 | ::tensorflow::Input |
1716 | indices, const |
1717 | ResourceSparseApplyAdadelta::Attrs& |
1718 | attrs) { |
1719 | if (!scope.ok()) return; |
1720 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1721 | if (!scope.ok()) return; |
1722 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
1723 | if (!scope.ok()) return; |
1724 | auto _accum_update = ::tensorflow::ops::AsNodeOut(scope, accum_update); |
1725 | if (!scope.ok()) return; |
1726 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1727 | if (!scope.ok()) return; |
1728 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
1729 | if (!scope.ok()) return; |
1730 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
1731 | if (!scope.ok()) return; |
1732 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1733 | if (!scope.ok()) return; |
1734 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
1735 | if (!scope.ok()) return; |
1736 | ::tensorflow::Node* ret; |
1737 | const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyAdadelta" ); |
1738 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyAdadelta" ) |
1739 | .Input(_var) |
1740 | .Input(_accum) |
1741 | .Input(_accum_update) |
1742 | .Input(_lr) |
1743 | .Input(_rho) |
1744 | .Input(_epsilon) |
1745 | .Input(_grad) |
1746 | .Input(_indices) |
1747 | .Attr("use_locking" , attrs.use_locking_) |
1748 | ; |
1749 | scope.UpdateBuilder(&builder); |
1750 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1751 | if (!scope.ok()) return; |
1752 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1753 | this->operation = Operation(ret); |
1754 | return; |
1755 | } |
1756 | |
1757 | ResourceSparseApplyAdadelta::ResourceSparseApplyAdadelta(const |
1758 | ::tensorflow::Scope& |
1759 | scope, |
1760 | ::tensorflow::Input |
1761 | var, |
1762 | ::tensorflow::Input |
1763 | accum, |
1764 | ::tensorflow::Input |
1765 | accum_update, |
1766 | ::tensorflow::Input |
1767 | lr, |
1768 | ::tensorflow::Input |
1769 | rho, |
1770 | ::tensorflow::Input |
1771 | epsilon, |
1772 | ::tensorflow::Input |
1773 | grad, |
1774 | ::tensorflow::Input |
1775 | indices) |
1776 | : ResourceSparseApplyAdadelta(scope, var, accum, accum_update, lr, rho, epsilon, grad, indices, ResourceSparseApplyAdadelta::Attrs()) {} |
1777 | |
1778 | ResourceSparseApplyAdagrad::ResourceSparseApplyAdagrad(const |
1779 | ::tensorflow::Scope& |
1780 | scope, |
1781 | ::tensorflow::Input var, |
1782 | ::tensorflow::Input |
1783 | accum, |
1784 | ::tensorflow::Input lr, |
1785 | ::tensorflow::Input |
1786 | grad, |
1787 | ::tensorflow::Input |
1788 | indices, const |
1789 | ResourceSparseApplyAdagrad::Attrs& |
1790 | attrs) { |
1791 | if (!scope.ok()) return; |
1792 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1793 | if (!scope.ok()) return; |
1794 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
1795 | if (!scope.ok()) return; |
1796 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1797 | if (!scope.ok()) return; |
1798 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1799 | if (!scope.ok()) return; |
1800 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
1801 | if (!scope.ok()) return; |
1802 | ::tensorflow::Node* ret; |
1803 | const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyAdagrad" ); |
1804 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyAdagrad" ) |
1805 | .Input(_var) |
1806 | .Input(_accum) |
1807 | .Input(_lr) |
1808 | .Input(_grad) |
1809 | .Input(_indices) |
1810 | .Attr("use_locking" , attrs.use_locking_) |
1811 | .Attr("update_slots" , attrs.update_slots_) |
1812 | ; |
1813 | scope.UpdateBuilder(&builder); |
1814 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1815 | if (!scope.ok()) return; |
1816 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1817 | this->operation = Operation(ret); |
1818 | return; |
1819 | } |
1820 | |
1821 | ResourceSparseApplyAdagrad::ResourceSparseApplyAdagrad(const |
1822 | ::tensorflow::Scope& |
1823 | scope, |
1824 | ::tensorflow::Input var, |
1825 | ::tensorflow::Input |
1826 | accum, |
1827 | ::tensorflow::Input lr, |
1828 | ::tensorflow::Input |
1829 | grad, |
1830 | ::tensorflow::Input |
1831 | indices) |
1832 | : ResourceSparseApplyAdagrad(scope, var, accum, lr, grad, indices, ResourceSparseApplyAdagrad::Attrs()) {} |
1833 | |
1834 | ResourceSparseApplyAdagradDA::ResourceSparseApplyAdagradDA(const |
1835 | ::tensorflow::Scope& |
1836 | scope, |
1837 | ::tensorflow::Input |
1838 | var, |
1839 | ::tensorflow::Input |
1840 | gradient_accumulator, |
1841 | ::tensorflow::Input |
1842 | gradient_squared_accumulator, |
1843 | ::tensorflow::Input |
1844 | grad, |
1845 | ::tensorflow::Input |
1846 | indices, |
1847 | ::tensorflow::Input |
1848 | lr, |
1849 | ::tensorflow::Input |
1850 | l1, |
1851 | ::tensorflow::Input |
1852 | l2, |
1853 | ::tensorflow::Input |
1854 | global_step, const |
1855 | ResourceSparseApplyAdagradDA::Attrs& |
1856 | attrs) { |
1857 | if (!scope.ok()) return; |
1858 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1859 | if (!scope.ok()) return; |
1860 | auto _gradient_accumulator = ::tensorflow::ops::AsNodeOut(scope, gradient_accumulator); |
1861 | if (!scope.ok()) return; |
1862 | auto _gradient_squared_accumulator = ::tensorflow::ops::AsNodeOut(scope, gradient_squared_accumulator); |
1863 | if (!scope.ok()) return; |
1864 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1865 | if (!scope.ok()) return; |
1866 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
1867 | if (!scope.ok()) return; |
1868 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1869 | if (!scope.ok()) return; |
1870 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
1871 | if (!scope.ok()) return; |
1872 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
1873 | if (!scope.ok()) return; |
1874 | auto _global_step = ::tensorflow::ops::AsNodeOut(scope, global_step); |
1875 | if (!scope.ok()) return; |
1876 | ::tensorflow::Node* ret; |
1877 | const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyAdagradDA" ); |
1878 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyAdagradDA" ) |
1879 | .Input(_var) |
1880 | .Input(_gradient_accumulator) |
1881 | .Input(_gradient_squared_accumulator) |
1882 | .Input(_grad) |
1883 | .Input(_indices) |
1884 | .Input(_lr) |
1885 | .Input(_l1) |
1886 | .Input(_l2) |
1887 | .Input(_global_step) |
1888 | .Attr("use_locking" , attrs.use_locking_) |
1889 | ; |
1890 | scope.UpdateBuilder(&builder); |
1891 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1892 | if (!scope.ok()) return; |
1893 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1894 | this->operation = Operation(ret); |
1895 | return; |
1896 | } |
1897 | |
1898 | ResourceSparseApplyAdagradDA::ResourceSparseApplyAdagradDA(const |
1899 | ::tensorflow::Scope& |
1900 | scope, |
1901 | ::tensorflow::Input |
1902 | var, |
1903 | ::tensorflow::Input |
1904 | gradient_accumulator, |
1905 | ::tensorflow::Input |
1906 | gradient_squared_accumulator, |
1907 | ::tensorflow::Input |
1908 | grad, |
1909 | ::tensorflow::Input |
1910 | indices, |
1911 | ::tensorflow::Input |
1912 | lr, |
1913 | ::tensorflow::Input |
1914 | l1, |
1915 | ::tensorflow::Input |
1916 | l2, |
1917 | ::tensorflow::Input |
1918 | global_step) |
1919 | : ResourceSparseApplyAdagradDA(scope, var, gradient_accumulator, gradient_squared_accumulator, grad, indices, lr, l1, l2, global_step, ResourceSparseApplyAdagradDA::Attrs()) {} |
1920 | |
1921 | ResourceSparseApplyCenteredRMSProp::ResourceSparseApplyCenteredRMSProp(const |
1922 | ::tensorflow::Scope& |
1923 | scope, |
1924 | ::tensorflow::Input |
1925 | var, |
1926 | ::tensorflow::Input |
1927 | mg, |
1928 | ::tensorflow::Input |
1929 | ms, |
1930 | ::tensorflow::Input |
1931 | mom, |
1932 | ::tensorflow::Input |
1933 | lr, |
1934 | ::tensorflow::Input |
1935 | rho, |
1936 | ::tensorflow::Input |
1937 | momentum, |
1938 | ::tensorflow::Input |
1939 | epsilon, |
1940 | ::tensorflow::Input |
1941 | grad, |
1942 | ::tensorflow::Input |
1943 | indices, |
1944 | const |
1945 | ResourceSparseApplyCenteredRMSProp::Attrs& |
1946 | attrs) { |
1947 | if (!scope.ok()) return; |
1948 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
1949 | if (!scope.ok()) return; |
1950 | auto _mg = ::tensorflow::ops::AsNodeOut(scope, mg); |
1951 | if (!scope.ok()) return; |
1952 | auto _ms = ::tensorflow::ops::AsNodeOut(scope, ms); |
1953 | if (!scope.ok()) return; |
1954 | auto _mom = ::tensorflow::ops::AsNodeOut(scope, mom); |
1955 | if (!scope.ok()) return; |
1956 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
1957 | if (!scope.ok()) return; |
1958 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
1959 | if (!scope.ok()) return; |
1960 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
1961 | if (!scope.ok()) return; |
1962 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
1963 | if (!scope.ok()) return; |
1964 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
1965 | if (!scope.ok()) return; |
1966 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
1967 | if (!scope.ok()) return; |
1968 | ::tensorflow::Node* ret; |
1969 | const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyCenteredRMSProp" ); |
1970 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyCenteredRMSProp" ) |
1971 | .Input(_var) |
1972 | .Input(_mg) |
1973 | .Input(_ms) |
1974 | .Input(_mom) |
1975 | .Input(_lr) |
1976 | .Input(_rho) |
1977 | .Input(_momentum) |
1978 | .Input(_epsilon) |
1979 | .Input(_grad) |
1980 | .Input(_indices) |
1981 | .Attr("use_locking" , attrs.use_locking_) |
1982 | ; |
1983 | scope.UpdateBuilder(&builder); |
1984 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
1985 | if (!scope.ok()) return; |
1986 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
1987 | this->operation = Operation(ret); |
1988 | return; |
1989 | } |
1990 | |
1991 | ResourceSparseApplyCenteredRMSProp::ResourceSparseApplyCenteredRMSProp(const |
1992 | ::tensorflow::Scope& |
1993 | scope, |
1994 | ::tensorflow::Input |
1995 | var, |
1996 | ::tensorflow::Input |
1997 | mg, |
1998 | ::tensorflow::Input |
1999 | ms, |
2000 | ::tensorflow::Input |
2001 | mom, |
2002 | ::tensorflow::Input |
2003 | lr, |
2004 | ::tensorflow::Input |
2005 | rho, |
2006 | ::tensorflow::Input |
2007 | momentum, |
2008 | ::tensorflow::Input |
2009 | epsilon, |
2010 | ::tensorflow::Input |
2011 | grad, |
2012 | ::tensorflow::Input |
2013 | indices) |
2014 | : ResourceSparseApplyCenteredRMSProp(scope, var, mg, ms, mom, lr, rho, momentum, epsilon, grad, indices, ResourceSparseApplyCenteredRMSProp::Attrs()) {} |
2015 | |
2016 | ResourceSparseApplyFtrl::ResourceSparseApplyFtrl(const ::tensorflow::Scope& |
2017 | scope, ::tensorflow::Input |
2018 | var, ::tensorflow::Input |
2019 | accum, ::tensorflow::Input |
2020 | linear, ::tensorflow::Input |
2021 | grad, ::tensorflow::Input |
2022 | indices, ::tensorflow::Input |
2023 | lr, ::tensorflow::Input l1, |
2024 | ::tensorflow::Input l2, |
2025 | ::tensorflow::Input lr_power, |
2026 | const |
2027 | ResourceSparseApplyFtrl::Attrs& |
2028 | attrs) { |
2029 | if (!scope.ok()) return; |
2030 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2031 | if (!scope.ok()) return; |
2032 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
2033 | if (!scope.ok()) return; |
2034 | auto _linear = ::tensorflow::ops::AsNodeOut(scope, linear); |
2035 | if (!scope.ok()) return; |
2036 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2037 | if (!scope.ok()) return; |
2038 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2039 | if (!scope.ok()) return; |
2040 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2041 | if (!scope.ok()) return; |
2042 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
2043 | if (!scope.ok()) return; |
2044 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
2045 | if (!scope.ok()) return; |
2046 | auto _lr_power = ::tensorflow::ops::AsNodeOut(scope, lr_power); |
2047 | if (!scope.ok()) return; |
2048 | ::tensorflow::Node* ret; |
2049 | const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyFtrl" ); |
2050 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyFtrl" ) |
2051 | .Input(_var) |
2052 | .Input(_accum) |
2053 | .Input(_linear) |
2054 | .Input(_grad) |
2055 | .Input(_indices) |
2056 | .Input(_lr) |
2057 | .Input(_l1) |
2058 | .Input(_l2) |
2059 | .Input(_lr_power) |
2060 | .Attr("use_locking" , attrs.use_locking_) |
2061 | .Attr("multiply_linear_by_lr" , attrs.multiply_linear_by_lr_) |
2062 | ; |
2063 | scope.UpdateBuilder(&builder); |
2064 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2065 | if (!scope.ok()) return; |
2066 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2067 | this->operation = Operation(ret); |
2068 | return; |
2069 | } |
2070 | |
2071 | ResourceSparseApplyFtrl::ResourceSparseApplyFtrl(const ::tensorflow::Scope& |
2072 | scope, ::tensorflow::Input |
2073 | var, ::tensorflow::Input |
2074 | accum, ::tensorflow::Input |
2075 | linear, ::tensorflow::Input |
2076 | grad, ::tensorflow::Input |
2077 | indices, ::tensorflow::Input |
2078 | lr, ::tensorflow::Input l1, |
2079 | ::tensorflow::Input l2, |
2080 | ::tensorflow::Input lr_power) |
2081 | : ResourceSparseApplyFtrl(scope, var, accum, linear, grad, indices, lr, l1, l2, lr_power, ResourceSparseApplyFtrl::Attrs()) {} |
2082 | |
2083 | ResourceSparseApplyFtrlV2::ResourceSparseApplyFtrlV2(const ::tensorflow::Scope& |
2084 | scope, ::tensorflow::Input |
2085 | var, ::tensorflow::Input |
2086 | accum, ::tensorflow::Input |
2087 | linear, |
2088 | ::tensorflow::Input grad, |
2089 | ::tensorflow::Input |
2090 | indices, |
2091 | ::tensorflow::Input lr, |
2092 | ::tensorflow::Input l1, |
2093 | ::tensorflow::Input l2, |
2094 | ::tensorflow::Input |
2095 | l2_shrinkage, |
2096 | ::tensorflow::Input |
2097 | lr_power, const |
2098 | ResourceSparseApplyFtrlV2::Attrs& |
2099 | attrs) { |
2100 | if (!scope.ok()) return; |
2101 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2102 | if (!scope.ok()) return; |
2103 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
2104 | if (!scope.ok()) return; |
2105 | auto _linear = ::tensorflow::ops::AsNodeOut(scope, linear); |
2106 | if (!scope.ok()) return; |
2107 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2108 | if (!scope.ok()) return; |
2109 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2110 | if (!scope.ok()) return; |
2111 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2112 | if (!scope.ok()) return; |
2113 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
2114 | if (!scope.ok()) return; |
2115 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
2116 | if (!scope.ok()) return; |
2117 | auto _l2_shrinkage = ::tensorflow::ops::AsNodeOut(scope, l2_shrinkage); |
2118 | if (!scope.ok()) return; |
2119 | auto _lr_power = ::tensorflow::ops::AsNodeOut(scope, lr_power); |
2120 | if (!scope.ok()) return; |
2121 | ::tensorflow::Node* ret; |
2122 | const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyFtrlV2" ); |
2123 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyFtrlV2" ) |
2124 | .Input(_var) |
2125 | .Input(_accum) |
2126 | .Input(_linear) |
2127 | .Input(_grad) |
2128 | .Input(_indices) |
2129 | .Input(_lr) |
2130 | .Input(_l1) |
2131 | .Input(_l2) |
2132 | .Input(_l2_shrinkage) |
2133 | .Input(_lr_power) |
2134 | .Attr("use_locking" , attrs.use_locking_) |
2135 | .Attr("multiply_linear_by_lr" , attrs.multiply_linear_by_lr_) |
2136 | ; |
2137 | scope.UpdateBuilder(&builder); |
2138 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2139 | if (!scope.ok()) return; |
2140 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2141 | this->operation = Operation(ret); |
2142 | return; |
2143 | } |
2144 | |
2145 | ResourceSparseApplyFtrlV2::ResourceSparseApplyFtrlV2(const ::tensorflow::Scope& |
2146 | scope, ::tensorflow::Input |
2147 | var, ::tensorflow::Input |
2148 | accum, ::tensorflow::Input |
2149 | linear, |
2150 | ::tensorflow::Input grad, |
2151 | ::tensorflow::Input |
2152 | indices, |
2153 | ::tensorflow::Input lr, |
2154 | ::tensorflow::Input l1, |
2155 | ::tensorflow::Input l2, |
2156 | ::tensorflow::Input |
2157 | l2_shrinkage, |
2158 | ::tensorflow::Input |
2159 | lr_power) |
2160 | : ResourceSparseApplyFtrlV2(scope, var, accum, linear, grad, indices, lr, l1, l2, l2_shrinkage, lr_power, ResourceSparseApplyFtrlV2::Attrs()) {} |
2161 | |
2162 | ResourceSparseApplyKerasMomentum::ResourceSparseApplyKerasMomentum(const |
2163 | ::tensorflow::Scope& |
2164 | scope, |
2165 | ::tensorflow::Input |
2166 | var, |
2167 | ::tensorflow::Input |
2168 | accum, |
2169 | ::tensorflow::Input |
2170 | lr, |
2171 | ::tensorflow::Input |
2172 | grad, |
2173 | ::tensorflow::Input |
2174 | indices, |
2175 | ::tensorflow::Input |
2176 | momentum, |
2177 | const |
2178 | ResourceSparseApplyKerasMomentum::Attrs& |
2179 | attrs) { |
2180 | if (!scope.ok()) return; |
2181 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2182 | if (!scope.ok()) return; |
2183 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
2184 | if (!scope.ok()) return; |
2185 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2186 | if (!scope.ok()) return; |
2187 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2188 | if (!scope.ok()) return; |
2189 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2190 | if (!scope.ok()) return; |
2191 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
2192 | if (!scope.ok()) return; |
2193 | ::tensorflow::Node* ret; |
2194 | const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyKerasMomentum" ); |
2195 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyKerasMomentum" ) |
2196 | .Input(_var) |
2197 | .Input(_accum) |
2198 | .Input(_lr) |
2199 | .Input(_grad) |
2200 | .Input(_indices) |
2201 | .Input(_momentum) |
2202 | .Attr("use_locking" , attrs.use_locking_) |
2203 | .Attr("use_nesterov" , attrs.use_nesterov_) |
2204 | ; |
2205 | scope.UpdateBuilder(&builder); |
2206 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2207 | if (!scope.ok()) return; |
2208 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2209 | this->operation = Operation(ret); |
2210 | return; |
2211 | } |
2212 | |
2213 | ResourceSparseApplyKerasMomentum::ResourceSparseApplyKerasMomentum(const |
2214 | ::tensorflow::Scope& |
2215 | scope, |
2216 | ::tensorflow::Input |
2217 | var, |
2218 | ::tensorflow::Input |
2219 | accum, |
2220 | ::tensorflow::Input |
2221 | lr, |
2222 | ::tensorflow::Input |
2223 | grad, |
2224 | ::tensorflow::Input |
2225 | indices, |
2226 | ::tensorflow::Input |
2227 | momentum) |
2228 | : ResourceSparseApplyKerasMomentum(scope, var, accum, lr, grad, indices, momentum, ResourceSparseApplyKerasMomentum::Attrs()) {} |
2229 | |
2230 | ResourceSparseApplyMomentum::ResourceSparseApplyMomentum(const |
2231 | ::tensorflow::Scope& |
2232 | scope, |
2233 | ::tensorflow::Input |
2234 | var, |
2235 | ::tensorflow::Input |
2236 | accum, |
2237 | ::tensorflow::Input |
2238 | lr, |
2239 | ::tensorflow::Input |
2240 | grad, |
2241 | ::tensorflow::Input |
2242 | indices, |
2243 | ::tensorflow::Input |
2244 | momentum, const |
2245 | ResourceSparseApplyMomentum::Attrs& |
2246 | attrs) { |
2247 | if (!scope.ok()) return; |
2248 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2249 | if (!scope.ok()) return; |
2250 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
2251 | if (!scope.ok()) return; |
2252 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2253 | if (!scope.ok()) return; |
2254 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2255 | if (!scope.ok()) return; |
2256 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2257 | if (!scope.ok()) return; |
2258 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
2259 | if (!scope.ok()) return; |
2260 | ::tensorflow::Node* ret; |
2261 | const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyMomentum" ); |
2262 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyMomentum" ) |
2263 | .Input(_var) |
2264 | .Input(_accum) |
2265 | .Input(_lr) |
2266 | .Input(_grad) |
2267 | .Input(_indices) |
2268 | .Input(_momentum) |
2269 | .Attr("use_locking" , attrs.use_locking_) |
2270 | .Attr("use_nesterov" , attrs.use_nesterov_) |
2271 | ; |
2272 | scope.UpdateBuilder(&builder); |
2273 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2274 | if (!scope.ok()) return; |
2275 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2276 | this->operation = Operation(ret); |
2277 | return; |
2278 | } |
2279 | |
2280 | ResourceSparseApplyMomentum::ResourceSparseApplyMomentum(const |
2281 | ::tensorflow::Scope& |
2282 | scope, |
2283 | ::tensorflow::Input |
2284 | var, |
2285 | ::tensorflow::Input |
2286 | accum, |
2287 | ::tensorflow::Input |
2288 | lr, |
2289 | ::tensorflow::Input |
2290 | grad, |
2291 | ::tensorflow::Input |
2292 | indices, |
2293 | ::tensorflow::Input |
2294 | momentum) |
2295 | : ResourceSparseApplyMomentum(scope, var, accum, lr, grad, indices, momentum, ResourceSparseApplyMomentum::Attrs()) {} |
2296 | |
2297 | ResourceSparseApplyProximalAdagrad::ResourceSparseApplyProximalAdagrad(const |
2298 | ::tensorflow::Scope& |
2299 | scope, |
2300 | ::tensorflow::Input |
2301 | var, |
2302 | ::tensorflow::Input |
2303 | accum, |
2304 | ::tensorflow::Input |
2305 | lr, |
2306 | ::tensorflow::Input |
2307 | l1, |
2308 | ::tensorflow::Input |
2309 | l2, |
2310 | ::tensorflow::Input |
2311 | grad, |
2312 | ::tensorflow::Input |
2313 | indices, |
2314 | const |
2315 | ResourceSparseApplyProximalAdagrad::Attrs& |
2316 | attrs) { |
2317 | if (!scope.ok()) return; |
2318 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2319 | if (!scope.ok()) return; |
2320 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
2321 | if (!scope.ok()) return; |
2322 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2323 | if (!scope.ok()) return; |
2324 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
2325 | if (!scope.ok()) return; |
2326 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
2327 | if (!scope.ok()) return; |
2328 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2329 | if (!scope.ok()) return; |
2330 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2331 | if (!scope.ok()) return; |
2332 | ::tensorflow::Node* ret; |
2333 | const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyProximalAdagrad" ); |
2334 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyProximalAdagrad" ) |
2335 | .Input(_var) |
2336 | .Input(_accum) |
2337 | .Input(_lr) |
2338 | .Input(_l1) |
2339 | .Input(_l2) |
2340 | .Input(_grad) |
2341 | .Input(_indices) |
2342 | .Attr("use_locking" , attrs.use_locking_) |
2343 | ; |
2344 | scope.UpdateBuilder(&builder); |
2345 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2346 | if (!scope.ok()) return; |
2347 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2348 | this->operation = Operation(ret); |
2349 | return; |
2350 | } |
2351 | |
2352 | ResourceSparseApplyProximalAdagrad::ResourceSparseApplyProximalAdagrad(const |
2353 | ::tensorflow::Scope& |
2354 | scope, |
2355 | ::tensorflow::Input |
2356 | var, |
2357 | ::tensorflow::Input |
2358 | accum, |
2359 | ::tensorflow::Input |
2360 | lr, |
2361 | ::tensorflow::Input |
2362 | l1, |
2363 | ::tensorflow::Input |
2364 | l2, |
2365 | ::tensorflow::Input |
2366 | grad, |
2367 | ::tensorflow::Input |
2368 | indices) |
2369 | : ResourceSparseApplyProximalAdagrad(scope, var, accum, lr, l1, l2, grad, indices, ResourceSparseApplyProximalAdagrad::Attrs()) {} |
2370 | |
2371 | ResourceSparseApplyProximalGradientDescent::ResourceSparseApplyProximalGradientDescent(const ::tensorflow::Scope& scope, ::tensorflow::Input var, ::tensorflow::Input alpha, ::tensorflow::Input l1, ::tensorflow::Input l2, ::tensorflow::Input grad, ::tensorflow::Input indices, const ResourceSparseApplyProximalGradientDescent::Attrs& |
2372 | attrs) { |
2373 | if (!scope.ok()) return; |
2374 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2375 | if (!scope.ok()) return; |
2376 | auto _alpha = ::tensorflow::ops::AsNodeOut(scope, alpha); |
2377 | if (!scope.ok()) return; |
2378 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
2379 | if (!scope.ok()) return; |
2380 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
2381 | if (!scope.ok()) return; |
2382 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2383 | if (!scope.ok()) return; |
2384 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2385 | if (!scope.ok()) return; |
2386 | ::tensorflow::Node* ret; |
2387 | const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyProximalGradientDescent" ); |
2388 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyProximalGradientDescent" ) |
2389 | .Input(_var) |
2390 | .Input(_alpha) |
2391 | .Input(_l1) |
2392 | .Input(_l2) |
2393 | .Input(_grad) |
2394 | .Input(_indices) |
2395 | .Attr("use_locking" , attrs.use_locking_) |
2396 | ; |
2397 | scope.UpdateBuilder(&builder); |
2398 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2399 | if (!scope.ok()) return; |
2400 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2401 | this->operation = Operation(ret); |
2402 | return; |
2403 | } |
2404 | |
2405 | ResourceSparseApplyProximalGradientDescent::ResourceSparseApplyProximalGradientDescent(const ::tensorflow::Scope& scope, ::tensorflow::Input var, ::tensorflow::Input alpha, ::tensorflow::Input l1, ::tensorflow::Input l2, ::tensorflow::Input grad, ::tensorflow::Input |
2406 | indices) |
2407 | : ResourceSparseApplyProximalGradientDescent(scope, var, alpha, l1, l2, grad, indices, ResourceSparseApplyProximalGradientDescent::Attrs()) {} |
2408 | |
2409 | ResourceSparseApplyRMSProp::ResourceSparseApplyRMSProp(const |
2410 | ::tensorflow::Scope& |
2411 | scope, |
2412 | ::tensorflow::Input var, |
2413 | ::tensorflow::Input ms, |
2414 | ::tensorflow::Input mom, |
2415 | ::tensorflow::Input lr, |
2416 | ::tensorflow::Input rho, |
2417 | ::tensorflow::Input |
2418 | momentum, |
2419 | ::tensorflow::Input |
2420 | epsilon, |
2421 | ::tensorflow::Input |
2422 | grad, |
2423 | ::tensorflow::Input |
2424 | indices, const |
2425 | ResourceSparseApplyRMSProp::Attrs& |
2426 | attrs) { |
2427 | if (!scope.ok()) return; |
2428 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2429 | if (!scope.ok()) return; |
2430 | auto _ms = ::tensorflow::ops::AsNodeOut(scope, ms); |
2431 | if (!scope.ok()) return; |
2432 | auto _mom = ::tensorflow::ops::AsNodeOut(scope, mom); |
2433 | if (!scope.ok()) return; |
2434 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2435 | if (!scope.ok()) return; |
2436 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
2437 | if (!scope.ok()) return; |
2438 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
2439 | if (!scope.ok()) return; |
2440 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
2441 | if (!scope.ok()) return; |
2442 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2443 | if (!scope.ok()) return; |
2444 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2445 | if (!scope.ok()) return; |
2446 | ::tensorflow::Node* ret; |
2447 | const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyRMSProp" ); |
2448 | auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyRMSProp" ) |
2449 | .Input(_var) |
2450 | .Input(_ms) |
2451 | .Input(_mom) |
2452 | .Input(_lr) |
2453 | .Input(_rho) |
2454 | .Input(_momentum) |
2455 | .Input(_epsilon) |
2456 | .Input(_grad) |
2457 | .Input(_indices) |
2458 | .Attr("use_locking" , attrs.use_locking_) |
2459 | ; |
2460 | scope.UpdateBuilder(&builder); |
2461 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2462 | if (!scope.ok()) return; |
2463 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2464 | this->operation = Operation(ret); |
2465 | return; |
2466 | } |
2467 | |
2468 | ResourceSparseApplyRMSProp::ResourceSparseApplyRMSProp(const |
2469 | ::tensorflow::Scope& |
2470 | scope, |
2471 | ::tensorflow::Input var, |
2472 | ::tensorflow::Input ms, |
2473 | ::tensorflow::Input mom, |
2474 | ::tensorflow::Input lr, |
2475 | ::tensorflow::Input rho, |
2476 | ::tensorflow::Input |
2477 | momentum, |
2478 | ::tensorflow::Input |
2479 | epsilon, |
2480 | ::tensorflow::Input |
2481 | grad, |
2482 | ::tensorflow::Input |
2483 | indices) |
2484 | : ResourceSparseApplyRMSProp(scope, var, ms, mom, lr, rho, momentum, epsilon, grad, indices, ResourceSparseApplyRMSProp::Attrs()) {} |
2485 | |
2486 | SparseApplyAdadelta::SparseApplyAdadelta(const ::tensorflow::Scope& scope, |
2487 | ::tensorflow::Input var, |
2488 | ::tensorflow::Input accum, |
2489 | ::tensorflow::Input accum_update, |
2490 | ::tensorflow::Input lr, |
2491 | ::tensorflow::Input rho, |
2492 | ::tensorflow::Input epsilon, |
2493 | ::tensorflow::Input grad, |
2494 | ::tensorflow::Input indices, const |
2495 | SparseApplyAdadelta::Attrs& attrs) { |
2496 | if (!scope.ok()) return; |
2497 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2498 | if (!scope.ok()) return; |
2499 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
2500 | if (!scope.ok()) return; |
2501 | auto _accum_update = ::tensorflow::ops::AsNodeOut(scope, accum_update); |
2502 | if (!scope.ok()) return; |
2503 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2504 | if (!scope.ok()) return; |
2505 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
2506 | if (!scope.ok()) return; |
2507 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
2508 | if (!scope.ok()) return; |
2509 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2510 | if (!scope.ok()) return; |
2511 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2512 | if (!scope.ok()) return; |
2513 | ::tensorflow::Node* ret; |
2514 | const auto unique_name = scope.GetUniqueNameForOp("SparseApplyAdadelta" ); |
2515 | auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseApplyAdadelta" ) |
2516 | .Input(_var) |
2517 | .Input(_accum) |
2518 | .Input(_accum_update) |
2519 | .Input(_lr) |
2520 | .Input(_rho) |
2521 | .Input(_epsilon) |
2522 | .Input(_grad) |
2523 | .Input(_indices) |
2524 | .Attr("use_locking" , attrs.use_locking_) |
2525 | ; |
2526 | scope.UpdateBuilder(&builder); |
2527 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2528 | if (!scope.ok()) return; |
2529 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2530 | this->operation = Operation(ret); |
2531 | this->out = Output(ret, 0); |
2532 | } |
2533 | |
2534 | SparseApplyAdadelta::SparseApplyAdadelta(const ::tensorflow::Scope& scope, |
2535 | ::tensorflow::Input var, |
2536 | ::tensorflow::Input accum, |
2537 | ::tensorflow::Input accum_update, |
2538 | ::tensorflow::Input lr, |
2539 | ::tensorflow::Input rho, |
2540 | ::tensorflow::Input epsilon, |
2541 | ::tensorflow::Input grad, |
2542 | ::tensorflow::Input indices) |
2543 | : SparseApplyAdadelta(scope, var, accum, accum_update, lr, rho, epsilon, grad, indices, SparseApplyAdadelta::Attrs()) {} |
2544 | |
2545 | SparseApplyAdagrad::SparseApplyAdagrad(const ::tensorflow::Scope& scope, |
2546 | ::tensorflow::Input var, |
2547 | ::tensorflow::Input accum, |
2548 | ::tensorflow::Input lr, |
2549 | ::tensorflow::Input grad, |
2550 | ::tensorflow::Input indices, const |
2551 | SparseApplyAdagrad::Attrs& attrs) { |
2552 | if (!scope.ok()) return; |
2553 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2554 | if (!scope.ok()) return; |
2555 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
2556 | if (!scope.ok()) return; |
2557 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2558 | if (!scope.ok()) return; |
2559 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2560 | if (!scope.ok()) return; |
2561 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2562 | if (!scope.ok()) return; |
2563 | ::tensorflow::Node* ret; |
2564 | const auto unique_name = scope.GetUniqueNameForOp("SparseApplyAdagrad" ); |
2565 | auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseApplyAdagrad" ) |
2566 | .Input(_var) |
2567 | .Input(_accum) |
2568 | .Input(_lr) |
2569 | .Input(_grad) |
2570 | .Input(_indices) |
2571 | .Attr("use_locking" , attrs.use_locking_) |
2572 | .Attr("update_slots" , attrs.update_slots_) |
2573 | ; |
2574 | scope.UpdateBuilder(&builder); |
2575 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2576 | if (!scope.ok()) return; |
2577 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2578 | this->operation = Operation(ret); |
2579 | this->out = Output(ret, 0); |
2580 | } |
2581 | |
2582 | SparseApplyAdagrad::SparseApplyAdagrad(const ::tensorflow::Scope& scope, |
2583 | ::tensorflow::Input var, |
2584 | ::tensorflow::Input accum, |
2585 | ::tensorflow::Input lr, |
2586 | ::tensorflow::Input grad, |
2587 | ::tensorflow::Input indices) |
2588 | : SparseApplyAdagrad(scope, var, accum, lr, grad, indices, SparseApplyAdagrad::Attrs()) {} |
2589 | |
2590 | SparseApplyAdagradDA::SparseApplyAdagradDA(const ::tensorflow::Scope& scope, |
2591 | ::tensorflow::Input var, |
2592 | ::tensorflow::Input |
2593 | gradient_accumulator, |
2594 | ::tensorflow::Input |
2595 | gradient_squared_accumulator, |
2596 | ::tensorflow::Input grad, |
2597 | ::tensorflow::Input indices, |
2598 | ::tensorflow::Input lr, |
2599 | ::tensorflow::Input l1, |
2600 | ::tensorflow::Input l2, |
2601 | ::tensorflow::Input global_step, |
2602 | const SparseApplyAdagradDA::Attrs& |
2603 | attrs) { |
2604 | if (!scope.ok()) return; |
2605 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2606 | if (!scope.ok()) return; |
2607 | auto _gradient_accumulator = ::tensorflow::ops::AsNodeOut(scope, gradient_accumulator); |
2608 | if (!scope.ok()) return; |
2609 | auto _gradient_squared_accumulator = ::tensorflow::ops::AsNodeOut(scope, gradient_squared_accumulator); |
2610 | if (!scope.ok()) return; |
2611 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2612 | if (!scope.ok()) return; |
2613 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2614 | if (!scope.ok()) return; |
2615 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2616 | if (!scope.ok()) return; |
2617 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
2618 | if (!scope.ok()) return; |
2619 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
2620 | if (!scope.ok()) return; |
2621 | auto _global_step = ::tensorflow::ops::AsNodeOut(scope, global_step); |
2622 | if (!scope.ok()) return; |
2623 | ::tensorflow::Node* ret; |
2624 | const auto unique_name = scope.GetUniqueNameForOp("SparseApplyAdagradDA" ); |
2625 | auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseApplyAdagradDA" ) |
2626 | .Input(_var) |
2627 | .Input(_gradient_accumulator) |
2628 | .Input(_gradient_squared_accumulator) |
2629 | .Input(_grad) |
2630 | .Input(_indices) |
2631 | .Input(_lr) |
2632 | .Input(_l1) |
2633 | .Input(_l2) |
2634 | .Input(_global_step) |
2635 | .Attr("use_locking" , attrs.use_locking_) |
2636 | ; |
2637 | scope.UpdateBuilder(&builder); |
2638 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2639 | if (!scope.ok()) return; |
2640 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2641 | this->operation = Operation(ret); |
2642 | this->out = Output(ret, 0); |
2643 | } |
2644 | |
2645 | SparseApplyAdagradDA::SparseApplyAdagradDA(const ::tensorflow::Scope& scope, |
2646 | ::tensorflow::Input var, |
2647 | ::tensorflow::Input |
2648 | gradient_accumulator, |
2649 | ::tensorflow::Input |
2650 | gradient_squared_accumulator, |
2651 | ::tensorflow::Input grad, |
2652 | ::tensorflow::Input indices, |
2653 | ::tensorflow::Input lr, |
2654 | ::tensorflow::Input l1, |
2655 | ::tensorflow::Input l2, |
2656 | ::tensorflow::Input global_step) |
2657 | : SparseApplyAdagradDA(scope, var, gradient_accumulator, gradient_squared_accumulator, grad, indices, lr, l1, l2, global_step, SparseApplyAdagradDA::Attrs()) {} |
2658 | |
2659 | SparseApplyCenteredRMSProp::SparseApplyCenteredRMSProp(const |
2660 | ::tensorflow::Scope& |
2661 | scope, |
2662 | ::tensorflow::Input var, |
2663 | ::tensorflow::Input mg, |
2664 | ::tensorflow::Input ms, |
2665 | ::tensorflow::Input mom, |
2666 | ::tensorflow::Input lr, |
2667 | ::tensorflow::Input rho, |
2668 | ::tensorflow::Input |
2669 | momentum, |
2670 | ::tensorflow::Input |
2671 | epsilon, |
2672 | ::tensorflow::Input |
2673 | grad, |
2674 | ::tensorflow::Input |
2675 | indices, const |
2676 | SparseApplyCenteredRMSProp::Attrs& |
2677 | attrs) { |
2678 | if (!scope.ok()) return; |
2679 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2680 | if (!scope.ok()) return; |
2681 | auto _mg = ::tensorflow::ops::AsNodeOut(scope, mg); |
2682 | if (!scope.ok()) return; |
2683 | auto _ms = ::tensorflow::ops::AsNodeOut(scope, ms); |
2684 | if (!scope.ok()) return; |
2685 | auto _mom = ::tensorflow::ops::AsNodeOut(scope, mom); |
2686 | if (!scope.ok()) return; |
2687 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2688 | if (!scope.ok()) return; |
2689 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
2690 | if (!scope.ok()) return; |
2691 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
2692 | if (!scope.ok()) return; |
2693 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
2694 | if (!scope.ok()) return; |
2695 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2696 | if (!scope.ok()) return; |
2697 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2698 | if (!scope.ok()) return; |
2699 | ::tensorflow::Node* ret; |
2700 | const auto unique_name = scope.GetUniqueNameForOp("SparseApplyCenteredRMSProp" ); |
2701 | auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseApplyCenteredRMSProp" ) |
2702 | .Input(_var) |
2703 | .Input(_mg) |
2704 | .Input(_ms) |
2705 | .Input(_mom) |
2706 | .Input(_lr) |
2707 | .Input(_rho) |
2708 | .Input(_momentum) |
2709 | .Input(_epsilon) |
2710 | .Input(_grad) |
2711 | .Input(_indices) |
2712 | .Attr("use_locking" , attrs.use_locking_) |
2713 | ; |
2714 | scope.UpdateBuilder(&builder); |
2715 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2716 | if (!scope.ok()) return; |
2717 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2718 | this->operation = Operation(ret); |
2719 | this->out = Output(ret, 0); |
2720 | } |
2721 | |
2722 | SparseApplyCenteredRMSProp::SparseApplyCenteredRMSProp(const |
2723 | ::tensorflow::Scope& |
2724 | scope, |
2725 | ::tensorflow::Input var, |
2726 | ::tensorflow::Input mg, |
2727 | ::tensorflow::Input ms, |
2728 | ::tensorflow::Input mom, |
2729 | ::tensorflow::Input lr, |
2730 | ::tensorflow::Input rho, |
2731 | ::tensorflow::Input |
2732 | momentum, |
2733 | ::tensorflow::Input |
2734 | epsilon, |
2735 | ::tensorflow::Input |
2736 | grad, |
2737 | ::tensorflow::Input |
2738 | indices) |
2739 | : SparseApplyCenteredRMSProp(scope, var, mg, ms, mom, lr, rho, momentum, epsilon, grad, indices, SparseApplyCenteredRMSProp::Attrs()) {} |
2740 | |
2741 | SparseApplyFtrl::SparseApplyFtrl(const ::tensorflow::Scope& scope, |
2742 | ::tensorflow::Input var, ::tensorflow::Input |
2743 | accum, ::tensorflow::Input linear, |
2744 | ::tensorflow::Input grad, ::tensorflow::Input |
2745 | indices, ::tensorflow::Input lr, |
2746 | ::tensorflow::Input l1, ::tensorflow::Input |
2747 | l2, ::tensorflow::Input lr_power, const |
2748 | SparseApplyFtrl::Attrs& attrs) { |
2749 | if (!scope.ok()) return; |
2750 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2751 | if (!scope.ok()) return; |
2752 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
2753 | if (!scope.ok()) return; |
2754 | auto _linear = ::tensorflow::ops::AsNodeOut(scope, linear); |
2755 | if (!scope.ok()) return; |
2756 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2757 | if (!scope.ok()) return; |
2758 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2759 | if (!scope.ok()) return; |
2760 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2761 | if (!scope.ok()) return; |
2762 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
2763 | if (!scope.ok()) return; |
2764 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
2765 | if (!scope.ok()) return; |
2766 | auto _lr_power = ::tensorflow::ops::AsNodeOut(scope, lr_power); |
2767 | if (!scope.ok()) return; |
2768 | ::tensorflow::Node* ret; |
2769 | const auto unique_name = scope.GetUniqueNameForOp("SparseApplyFtrl" ); |
2770 | auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseApplyFtrl" ) |
2771 | .Input(_var) |
2772 | .Input(_accum) |
2773 | .Input(_linear) |
2774 | .Input(_grad) |
2775 | .Input(_indices) |
2776 | .Input(_lr) |
2777 | .Input(_l1) |
2778 | .Input(_l2) |
2779 | .Input(_lr_power) |
2780 | .Attr("use_locking" , attrs.use_locking_) |
2781 | .Attr("multiply_linear_by_lr" , attrs.multiply_linear_by_lr_) |
2782 | ; |
2783 | scope.UpdateBuilder(&builder); |
2784 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2785 | if (!scope.ok()) return; |
2786 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2787 | this->operation = Operation(ret); |
2788 | this->out = Output(ret, 0); |
2789 | } |
2790 | |
2791 | SparseApplyFtrl::SparseApplyFtrl(const ::tensorflow::Scope& scope, |
2792 | ::tensorflow::Input var, ::tensorflow::Input |
2793 | accum, ::tensorflow::Input linear, |
2794 | ::tensorflow::Input grad, ::tensorflow::Input |
2795 | indices, ::tensorflow::Input lr, |
2796 | ::tensorflow::Input l1, ::tensorflow::Input |
2797 | l2, ::tensorflow::Input lr_power) |
2798 | : SparseApplyFtrl(scope, var, accum, linear, grad, indices, lr, l1, l2, lr_power, SparseApplyFtrl::Attrs()) {} |
2799 | |
2800 | SparseApplyFtrlV2::SparseApplyFtrlV2(const ::tensorflow::Scope& scope, |
2801 | ::tensorflow::Input var, |
2802 | ::tensorflow::Input accum, |
2803 | ::tensorflow::Input linear, |
2804 | ::tensorflow::Input grad, |
2805 | ::tensorflow::Input indices, |
2806 | ::tensorflow::Input lr, |
2807 | ::tensorflow::Input l1, |
2808 | ::tensorflow::Input l2, |
2809 | ::tensorflow::Input l2_shrinkage, |
2810 | ::tensorflow::Input lr_power, const |
2811 | SparseApplyFtrlV2::Attrs& attrs) { |
2812 | if (!scope.ok()) return; |
2813 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2814 | if (!scope.ok()) return; |
2815 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
2816 | if (!scope.ok()) return; |
2817 | auto _linear = ::tensorflow::ops::AsNodeOut(scope, linear); |
2818 | if (!scope.ok()) return; |
2819 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2820 | if (!scope.ok()) return; |
2821 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2822 | if (!scope.ok()) return; |
2823 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2824 | if (!scope.ok()) return; |
2825 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
2826 | if (!scope.ok()) return; |
2827 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
2828 | if (!scope.ok()) return; |
2829 | auto _l2_shrinkage = ::tensorflow::ops::AsNodeOut(scope, l2_shrinkage); |
2830 | if (!scope.ok()) return; |
2831 | auto _lr_power = ::tensorflow::ops::AsNodeOut(scope, lr_power); |
2832 | if (!scope.ok()) return; |
2833 | ::tensorflow::Node* ret; |
2834 | const auto unique_name = scope.GetUniqueNameForOp("SparseApplyFtrlV2" ); |
2835 | auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseApplyFtrlV2" ) |
2836 | .Input(_var) |
2837 | .Input(_accum) |
2838 | .Input(_linear) |
2839 | .Input(_grad) |
2840 | .Input(_indices) |
2841 | .Input(_lr) |
2842 | .Input(_l1) |
2843 | .Input(_l2) |
2844 | .Input(_l2_shrinkage) |
2845 | .Input(_lr_power) |
2846 | .Attr("use_locking" , attrs.use_locking_) |
2847 | .Attr("multiply_linear_by_lr" , attrs.multiply_linear_by_lr_) |
2848 | ; |
2849 | scope.UpdateBuilder(&builder); |
2850 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2851 | if (!scope.ok()) return; |
2852 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2853 | this->operation = Operation(ret); |
2854 | this->out = Output(ret, 0); |
2855 | } |
2856 | |
2857 | SparseApplyFtrlV2::SparseApplyFtrlV2(const ::tensorflow::Scope& scope, |
2858 | ::tensorflow::Input var, |
2859 | ::tensorflow::Input accum, |
2860 | ::tensorflow::Input linear, |
2861 | ::tensorflow::Input grad, |
2862 | ::tensorflow::Input indices, |
2863 | ::tensorflow::Input lr, |
2864 | ::tensorflow::Input l1, |
2865 | ::tensorflow::Input l2, |
2866 | ::tensorflow::Input l2_shrinkage, |
2867 | ::tensorflow::Input lr_power) |
2868 | : SparseApplyFtrlV2(scope, var, accum, linear, grad, indices, lr, l1, l2, l2_shrinkage, lr_power, SparseApplyFtrlV2::Attrs()) {} |
2869 | |
2870 | SparseApplyMomentum::SparseApplyMomentum(const ::tensorflow::Scope& scope, |
2871 | ::tensorflow::Input var, |
2872 | ::tensorflow::Input accum, |
2873 | ::tensorflow::Input lr, |
2874 | ::tensorflow::Input grad, |
2875 | ::tensorflow::Input indices, |
2876 | ::tensorflow::Input momentum, const |
2877 | SparseApplyMomentum::Attrs& attrs) { |
2878 | if (!scope.ok()) return; |
2879 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2880 | if (!scope.ok()) return; |
2881 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
2882 | if (!scope.ok()) return; |
2883 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2884 | if (!scope.ok()) return; |
2885 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2886 | if (!scope.ok()) return; |
2887 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2888 | if (!scope.ok()) return; |
2889 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
2890 | if (!scope.ok()) return; |
2891 | ::tensorflow::Node* ret; |
2892 | const auto unique_name = scope.GetUniqueNameForOp("SparseApplyMomentum" ); |
2893 | auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseApplyMomentum" ) |
2894 | .Input(_var) |
2895 | .Input(_accum) |
2896 | .Input(_lr) |
2897 | .Input(_grad) |
2898 | .Input(_indices) |
2899 | .Input(_momentum) |
2900 | .Attr("use_locking" , attrs.use_locking_) |
2901 | .Attr("use_nesterov" , attrs.use_nesterov_) |
2902 | ; |
2903 | scope.UpdateBuilder(&builder); |
2904 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2905 | if (!scope.ok()) return; |
2906 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2907 | this->operation = Operation(ret); |
2908 | this->out = Output(ret, 0); |
2909 | } |
2910 | |
2911 | SparseApplyMomentum::SparseApplyMomentum(const ::tensorflow::Scope& scope, |
2912 | ::tensorflow::Input var, |
2913 | ::tensorflow::Input accum, |
2914 | ::tensorflow::Input lr, |
2915 | ::tensorflow::Input grad, |
2916 | ::tensorflow::Input indices, |
2917 | ::tensorflow::Input momentum) |
2918 | : SparseApplyMomentum(scope, var, accum, lr, grad, indices, momentum, SparseApplyMomentum::Attrs()) {} |
2919 | |
2920 | SparseApplyProximalAdagrad::SparseApplyProximalAdagrad(const |
2921 | ::tensorflow::Scope& |
2922 | scope, |
2923 | ::tensorflow::Input var, |
2924 | ::tensorflow::Input |
2925 | accum, |
2926 | ::tensorflow::Input lr, |
2927 | ::tensorflow::Input l1, |
2928 | ::tensorflow::Input l2, |
2929 | ::tensorflow::Input |
2930 | grad, |
2931 | ::tensorflow::Input |
2932 | indices, const |
2933 | SparseApplyProximalAdagrad::Attrs& |
2934 | attrs) { |
2935 | if (!scope.ok()) return; |
2936 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
2937 | if (!scope.ok()) return; |
2938 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
2939 | if (!scope.ok()) return; |
2940 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
2941 | if (!scope.ok()) return; |
2942 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
2943 | if (!scope.ok()) return; |
2944 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
2945 | if (!scope.ok()) return; |
2946 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
2947 | if (!scope.ok()) return; |
2948 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
2949 | if (!scope.ok()) return; |
2950 | ::tensorflow::Node* ret; |
2951 | const auto unique_name = scope.GetUniqueNameForOp("SparseApplyProximalAdagrad" ); |
2952 | auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseApplyProximalAdagrad" ) |
2953 | .Input(_var) |
2954 | .Input(_accum) |
2955 | .Input(_lr) |
2956 | .Input(_l1) |
2957 | .Input(_l2) |
2958 | .Input(_grad) |
2959 | .Input(_indices) |
2960 | .Attr("use_locking" , attrs.use_locking_) |
2961 | ; |
2962 | scope.UpdateBuilder(&builder); |
2963 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
2964 | if (!scope.ok()) return; |
2965 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
2966 | this->operation = Operation(ret); |
2967 | this->out = Output(ret, 0); |
2968 | } |
2969 | |
2970 | SparseApplyProximalAdagrad::SparseApplyProximalAdagrad(const |
2971 | ::tensorflow::Scope& |
2972 | scope, |
2973 | ::tensorflow::Input var, |
2974 | ::tensorflow::Input |
2975 | accum, |
2976 | ::tensorflow::Input lr, |
2977 | ::tensorflow::Input l1, |
2978 | ::tensorflow::Input l2, |
2979 | ::tensorflow::Input |
2980 | grad, |
2981 | ::tensorflow::Input |
2982 | indices) |
2983 | : SparseApplyProximalAdagrad(scope, var, accum, lr, l1, l2, grad, indices, SparseApplyProximalAdagrad::Attrs()) {} |
2984 | |
2985 | SparseApplyProximalGradientDescent::SparseApplyProximalGradientDescent(const |
2986 | ::tensorflow::Scope& |
2987 | scope, |
2988 | ::tensorflow::Input |
2989 | var, |
2990 | ::tensorflow::Input |
2991 | alpha, |
2992 | ::tensorflow::Input |
2993 | l1, |
2994 | ::tensorflow::Input |
2995 | l2, |
2996 | ::tensorflow::Input |
2997 | grad, |
2998 | ::tensorflow::Input |
2999 | indices, |
3000 | const |
3001 | SparseApplyProximalGradientDescent::Attrs& |
3002 | attrs) { |
3003 | if (!scope.ok()) return; |
3004 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
3005 | if (!scope.ok()) return; |
3006 | auto _alpha = ::tensorflow::ops::AsNodeOut(scope, alpha); |
3007 | if (!scope.ok()) return; |
3008 | auto _l1 = ::tensorflow::ops::AsNodeOut(scope, l1); |
3009 | if (!scope.ok()) return; |
3010 | auto _l2 = ::tensorflow::ops::AsNodeOut(scope, l2); |
3011 | if (!scope.ok()) return; |
3012 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
3013 | if (!scope.ok()) return; |
3014 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
3015 | if (!scope.ok()) return; |
3016 | ::tensorflow::Node* ret; |
3017 | const auto unique_name = scope.GetUniqueNameForOp("SparseApplyProximalGradientDescent" ); |
3018 | auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseApplyProximalGradientDescent" ) |
3019 | .Input(_var) |
3020 | .Input(_alpha) |
3021 | .Input(_l1) |
3022 | .Input(_l2) |
3023 | .Input(_grad) |
3024 | .Input(_indices) |
3025 | .Attr("use_locking" , attrs.use_locking_) |
3026 | ; |
3027 | scope.UpdateBuilder(&builder); |
3028 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
3029 | if (!scope.ok()) return; |
3030 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
3031 | this->operation = Operation(ret); |
3032 | this->out = Output(ret, 0); |
3033 | } |
3034 | |
3035 | SparseApplyProximalGradientDescent::SparseApplyProximalGradientDescent(const |
3036 | ::tensorflow::Scope& |
3037 | scope, |
3038 | ::tensorflow::Input |
3039 | var, |
3040 | ::tensorflow::Input |
3041 | alpha, |
3042 | ::tensorflow::Input |
3043 | l1, |
3044 | ::tensorflow::Input |
3045 | l2, |
3046 | ::tensorflow::Input |
3047 | grad, |
3048 | ::tensorflow::Input |
3049 | indices) |
3050 | : SparseApplyProximalGradientDescent(scope, var, alpha, l1, l2, grad, indices, SparseApplyProximalGradientDescent::Attrs()) {} |
3051 | |
3052 | SparseApplyRMSProp::SparseApplyRMSProp(const ::tensorflow::Scope& scope, |
3053 | ::tensorflow::Input var, |
3054 | ::tensorflow::Input ms, |
3055 | ::tensorflow::Input mom, |
3056 | ::tensorflow::Input lr, |
3057 | ::tensorflow::Input rho, |
3058 | ::tensorflow::Input momentum, |
3059 | ::tensorflow::Input epsilon, |
3060 | ::tensorflow::Input grad, |
3061 | ::tensorflow::Input indices, const |
3062 | SparseApplyRMSProp::Attrs& attrs) { |
3063 | if (!scope.ok()) return; |
3064 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
3065 | if (!scope.ok()) return; |
3066 | auto _ms = ::tensorflow::ops::AsNodeOut(scope, ms); |
3067 | if (!scope.ok()) return; |
3068 | auto _mom = ::tensorflow::ops::AsNodeOut(scope, mom); |
3069 | if (!scope.ok()) return; |
3070 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
3071 | if (!scope.ok()) return; |
3072 | auto _rho = ::tensorflow::ops::AsNodeOut(scope, rho); |
3073 | if (!scope.ok()) return; |
3074 | auto _momentum = ::tensorflow::ops::AsNodeOut(scope, momentum); |
3075 | if (!scope.ok()) return; |
3076 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
3077 | if (!scope.ok()) return; |
3078 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
3079 | if (!scope.ok()) return; |
3080 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
3081 | if (!scope.ok()) return; |
3082 | ::tensorflow::Node* ret; |
3083 | const auto unique_name = scope.GetUniqueNameForOp("SparseApplyRMSProp" ); |
3084 | auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseApplyRMSProp" ) |
3085 | .Input(_var) |
3086 | .Input(_ms) |
3087 | .Input(_mom) |
3088 | .Input(_lr) |
3089 | .Input(_rho) |
3090 | .Input(_momentum) |
3091 | .Input(_epsilon) |
3092 | .Input(_grad) |
3093 | .Input(_indices) |
3094 | .Attr("use_locking" , attrs.use_locking_) |
3095 | ; |
3096 | scope.UpdateBuilder(&builder); |
3097 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
3098 | if (!scope.ok()) return; |
3099 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
3100 | this->operation = Operation(ret); |
3101 | this->out = Output(ret, 0); |
3102 | } |
3103 | |
3104 | SparseApplyRMSProp::SparseApplyRMSProp(const ::tensorflow::Scope& scope, |
3105 | ::tensorflow::Input var, |
3106 | ::tensorflow::Input ms, |
3107 | ::tensorflow::Input mom, |
3108 | ::tensorflow::Input lr, |
3109 | ::tensorflow::Input rho, |
3110 | ::tensorflow::Input momentum, |
3111 | ::tensorflow::Input epsilon, |
3112 | ::tensorflow::Input grad, |
3113 | ::tensorflow::Input indices) |
3114 | : SparseApplyRMSProp(scope, var, ms, mom, lr, rho, momentum, epsilon, grad, indices, SparseApplyRMSProp::Attrs()) {} |
3115 | |
3116 | /// @} |
3117 | |
3118 | } // namespace ops |
3119 | } // namespace tensorflow |
3120 | |