1 | // This file is MACHINE GENERATED! Do not edit. |
2 | |
3 | |
4 | #include "tensorflow/cc/ops/const_op.h" |
5 | #include "tensorflow/cc/ops/training_ops_internal.h" |
6 | |
7 | namespace tensorflow { |
8 | namespace ops { |
9 | namespace internal { |
10 | // NOTE: This namespace has internal TensorFlow details that |
11 | // are not part of TensorFlow's public API. |
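
// Every wrapper below follows the same construction pattern: convert each
// ::tensorflow::Input to a NodeOut, assemble a NodeBuilder with the op's
// inputs and attributes, let the Scope stamp in its device placement and
// control dependencies via UpdateBuilder, finalize the node into the graph,
// and run shape inference. Any failure along the way is recorded on the
// Scope's status, and the early returns short-circuit the remaining steps.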
12 | |
ApplyAdaMax::ApplyAdaMax(const ::tensorflow::Scope& scope,
                         ::tensorflow::Input var, ::tensorflow::Input m,
                         ::tensorflow::Input v, ::tensorflow::Input beta1_power,
                         ::tensorflow::Input lr, ::tensorflow::Input beta1,
                         ::tensorflow::Input beta2, ::tensorflow::Input epsilon,
                         ::tensorflow::Input grad,
                         const ApplyAdaMax::Attrs& attrs) {
20 | if (!scope.ok()) return; |
21 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
22 | if (!scope.ok()) return; |
23 | auto _m = ::tensorflow::ops::AsNodeOut(scope, m); |
24 | if (!scope.ok()) return; |
25 | auto _v = ::tensorflow::ops::AsNodeOut(scope, v); |
26 | if (!scope.ok()) return; |
27 | auto _beta1_power = ::tensorflow::ops::AsNodeOut(scope, beta1_power); |
28 | if (!scope.ok()) return; |
29 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
30 | if (!scope.ok()) return; |
31 | auto _beta1 = ::tensorflow::ops::AsNodeOut(scope, beta1); |
32 | if (!scope.ok()) return; |
33 | auto _beta2 = ::tensorflow::ops::AsNodeOut(scope, beta2); |
34 | if (!scope.ok()) return; |
35 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
36 | if (!scope.ok()) return; |
37 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
38 | if (!scope.ok()) return; |
39 | ::tensorflow::Node* ret; |
  const auto unique_name = scope.GetUniqueNameForOp("ApplyAdaMax");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyAdaMax")
42 | .Input(_var) |
43 | .Input(_m) |
44 | .Input(_v) |
45 | .Input(_beta1_power) |
46 | .Input(_lr) |
47 | .Input(_beta1) |
48 | .Input(_beta2) |
49 | .Input(_epsilon) |
50 | .Input(_grad) |
51 | .Attr("use_locking" , attrs.use_locking_) |
52 | ; |
53 | scope.UpdateBuilder(&builder); |
54 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
55 | if (!scope.ok()) return; |
56 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
57 | this->operation = Operation(ret); |
58 | this->out = Output(ret, 0); |
59 | } |
60 | |
ApplyAdaMax::ApplyAdaMax(const ::tensorflow::Scope& scope,
                         ::tensorflow::Input var, ::tensorflow::Input m,
                         ::tensorflow::Input v, ::tensorflow::Input beta1_power,
                         ::tensorflow::Input lr, ::tensorflow::Input beta1,
                         ::tensorflow::Input beta2, ::tensorflow::Input epsilon,
                         ::tensorflow::Input grad)
    : ApplyAdaMax(scope, var, m, v, beta1_power, lr, beta1, beta2, epsilon,
                  grad, ApplyAdaMax::Attrs()) {}
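
// AdaMax update computed by this op (per the ApplyAdaMax op documentation):
//   m_t <- beta1 * m + (1 - beta1) * grad
//   v_t <- max(beta2 * v, |grad|)
//   var <- var - lr / (1 - beta1_power) * m_t / (v_t + epsilon)
//
// Illustrative usage sketch, not part of the generated file; the scope,
// shapes, and hyperparameter constants are hypothetical, and the Variable
// and Const ops come from tensorflow/cc/ops/standard_ops.h:
//
//   Scope root = Scope::NewRootScope();
//   auto var = ops::Variable(root, {2}, DT_FLOAT);
//   auto m = ops::Variable(root, {2}, DT_FLOAT);
//   auto v = ops::Variable(root, {2}, DT_FLOAT);
//   auto grad = ops::Const(root, {0.1f, 0.2f});
//   internal::ApplyAdaMax apply(root, var, m, v, /*beta1_power=*/0.81f,
//                               /*lr=*/0.001f, /*beta1=*/0.9f,
//                               /*beta2=*/0.999f, /*epsilon=*/1e-7f, grad);
//   // apply.out aliases the updated var once the node has run.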
68 | |
ApplyAdagradV2::ApplyAdagradV2(const ::tensorflow::Scope& scope,
                               ::tensorflow::Input var,
                               ::tensorflow::Input accum,
                               ::tensorflow::Input lr,
                               ::tensorflow::Input epsilon,
                               ::tensorflow::Input grad,
                               const ApplyAdagradV2::Attrs& attrs) {
74 | if (!scope.ok()) return; |
75 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
76 | if (!scope.ok()) return; |
77 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
78 | if (!scope.ok()) return; |
79 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
80 | if (!scope.ok()) return; |
81 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
82 | if (!scope.ok()) return; |
83 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
84 | if (!scope.ok()) return; |
85 | ::tensorflow::Node* ret; |
  const auto unique_name = scope.GetUniqueNameForOp("ApplyAdagradV2");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "ApplyAdagradV2")
88 | .Input(_var) |
89 | .Input(_accum) |
90 | .Input(_lr) |
91 | .Input(_epsilon) |
92 | .Input(_grad) |
93 | .Attr("use_locking" , attrs.use_locking_) |
94 | .Attr("update_slots" , attrs.update_slots_) |
95 | ; |
96 | scope.UpdateBuilder(&builder); |
97 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
98 | if (!scope.ok()) return; |
99 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
100 | this->operation = Operation(ret); |
101 | this->out = Output(ret, 0); |
102 | } |
103 | |
ApplyAdagradV2::ApplyAdagradV2(const ::tensorflow::Scope& scope,
                               ::tensorflow::Input var,
                               ::tensorflow::Input accum,
                               ::tensorflow::Input lr,
                               ::tensorflow::Input epsilon,
                               ::tensorflow::Input grad)
    : ApplyAdagradV2(scope, var, accum, lr, epsilon, grad,
                     ApplyAdagradV2::Attrs()) {}
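
// AdagradV2 update computed by this op; epsilon sits outside the square
// root, which is what distinguishes it from the original ApplyAdagrad:
//   accum <- accum + grad * grad   (skipped when update_slots is false)
//   var   <- var - lr * grad / (sqrt(accum) + epsilon)
//
// Sketch of overriding the defaults through the generated Attrs struct,
// whose fluent setters mirror the attribute names:
//
//   auto attrs = ApplyAdagradV2::Attrs().UseLocking(true).UpdateSlots(false);
//   internal::ApplyAdagradV2 apply(scope, var, accum, lr, epsilon, grad,
//                                  attrs);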
110 | |
111 | ResourceApplyAdaMax::ResourceApplyAdaMax(const ::tensorflow::Scope& scope, |
112 | ::tensorflow::Input var, |
113 | ::tensorflow::Input m, |
114 | ::tensorflow::Input v, |
115 | ::tensorflow::Input beta1_power, |
116 | ::tensorflow::Input lr, |
117 | ::tensorflow::Input beta1, |
118 | ::tensorflow::Input beta2, |
119 | ::tensorflow::Input epsilon, |
                                         ::tensorflow::Input grad,
                                         const ResourceApplyAdaMax::Attrs& attrs) {
122 | if (!scope.ok()) return; |
123 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
124 | if (!scope.ok()) return; |
125 | auto _m = ::tensorflow::ops::AsNodeOut(scope, m); |
126 | if (!scope.ok()) return; |
127 | auto _v = ::tensorflow::ops::AsNodeOut(scope, v); |
128 | if (!scope.ok()) return; |
129 | auto _beta1_power = ::tensorflow::ops::AsNodeOut(scope, beta1_power); |
130 | if (!scope.ok()) return; |
131 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
132 | if (!scope.ok()) return; |
133 | auto _beta1 = ::tensorflow::ops::AsNodeOut(scope, beta1); |
134 | if (!scope.ok()) return; |
135 | auto _beta2 = ::tensorflow::ops::AsNodeOut(scope, beta2); |
136 | if (!scope.ok()) return; |
137 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
138 | if (!scope.ok()) return; |
139 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
140 | if (!scope.ok()) return; |
141 | ::tensorflow::Node* ret; |
  const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyAdaMax");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyAdaMax")
144 | .Input(_var) |
145 | .Input(_m) |
146 | .Input(_v) |
147 | .Input(_beta1_power) |
148 | .Input(_lr) |
149 | .Input(_beta1) |
150 | .Input(_beta2) |
151 | .Input(_epsilon) |
152 | .Input(_grad) |
153 | .Attr("use_locking" , attrs.use_locking_) |
154 | ; |
155 | scope.UpdateBuilder(&builder); |
156 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
157 | if (!scope.ok()) return; |
158 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
159 | this->operation = Operation(ret); |
161 | } |
162 | |
163 | ResourceApplyAdaMax::ResourceApplyAdaMax(const ::tensorflow::Scope& scope, |
164 | ::tensorflow::Input var, |
165 | ::tensorflow::Input m, |
166 | ::tensorflow::Input v, |
167 | ::tensorflow::Input beta1_power, |
168 | ::tensorflow::Input lr, |
169 | ::tensorflow::Input beta1, |
170 | ::tensorflow::Input beta2, |
171 | ::tensorflow::Input epsilon, |
172 | ::tensorflow::Input grad) |
    : ResourceApplyAdaMax(scope, var, m, v, beta1_power, lr, beta1, beta2,
                          epsilon, grad, ResourceApplyAdaMax::Attrs()) {}
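
// The resource variant performs the same AdaMax update as ApplyAdaMax above,
// but var, m, and v are DT_RESOURCE handles (e.g. from VarHandleOp) and the
// op has no output tensor, so the wrapper only records `operation`. A minimal
// sketch, assuming the handles already exist; later work is sequenced
// against the update through a control dependency:
//
//   internal::ResourceApplyAdaMax apply(scope, var_handle, m_handle, v_handle,
//                                       beta1_power, lr, beta1, beta2,
//                                       epsilon, grad);
//   auto after_update = scope.WithControlDependencies({apply.operation});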
174 | |
ResourceApplyAdagradV2::ResourceApplyAdagradV2(
    const ::tensorflow::Scope& scope, ::tensorflow::Input var,
    ::tensorflow::Input accum, ::tensorflow::Input lr,
    ::tensorflow::Input epsilon, ::tensorflow::Input grad,
    const ResourceApplyAdagradV2::Attrs& attrs) {
183 | if (!scope.ok()) return; |
184 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
185 | if (!scope.ok()) return; |
186 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
187 | if (!scope.ok()) return; |
188 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
189 | if (!scope.ok()) return; |
190 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
191 | if (!scope.ok()) return; |
192 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
193 | if (!scope.ok()) return; |
194 | ::tensorflow::Node* ret; |
  const auto unique_name = scope.GetUniqueNameForOp("ResourceApplyAdagradV2");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceApplyAdagradV2")
197 | .Input(_var) |
198 | .Input(_accum) |
199 | .Input(_lr) |
200 | .Input(_epsilon) |
201 | .Input(_grad) |
202 | .Attr("use_locking" , attrs.use_locking_) |
203 | .Attr("update_slots" , attrs.update_slots_) |
204 | ; |
205 | scope.UpdateBuilder(&builder); |
206 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
207 | if (!scope.ok()) return; |
208 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
209 | this->operation = Operation(ret); |
211 | } |
212 | |
ResourceApplyAdagradV2::ResourceApplyAdagradV2(
    const ::tensorflow::Scope& scope, ::tensorflow::Input var,
    ::tensorflow::Input accum, ::tensorflow::Input lr,
    ::tensorflow::Input epsilon, ::tensorflow::Input grad)
    : ResourceApplyAdagradV2(scope, var, accum, lr, epsilon, grad,
                             ResourceApplyAdagradV2::Attrs()) {}
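
// As with ResourceApplyAdaMax, this wrapper exposes no `out` member: the
// update is applied through the variable's resource handle rather than
// returned as a ref. A minimal sketch with hypothetical handles:
//
//   internal::ResourceApplyAdagradV2 apply(scope, var_handle, accum_handle,
//                                          /*lr=*/0.01f, /*epsilon=*/1e-7f,
//                                          grad);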
220 | |
ResourceSparseApplyAdagradV2::ResourceSparseApplyAdagradV2(
    const ::tensorflow::Scope& scope, ::tensorflow::Input var,
    ::tensorflow::Input accum, ::tensorflow::Input lr,
    ::tensorflow::Input epsilon, ::tensorflow::Input grad,
    ::tensorflow::Input indices,
    const ResourceSparseApplyAdagradV2::Attrs& attrs) {
238 | if (!scope.ok()) return; |
239 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
240 | if (!scope.ok()) return; |
241 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
242 | if (!scope.ok()) return; |
243 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
244 | if (!scope.ok()) return; |
245 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
246 | if (!scope.ok()) return; |
247 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
248 | if (!scope.ok()) return; |
249 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
250 | if (!scope.ok()) return; |
251 | ::tensorflow::Node* ret; |
  const auto unique_name = scope.GetUniqueNameForOp("ResourceSparseApplyAdagradV2");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "ResourceSparseApplyAdagradV2")
254 | .Input(_var) |
255 | .Input(_accum) |
256 | .Input(_lr) |
257 | .Input(_epsilon) |
258 | .Input(_grad) |
259 | .Input(_indices) |
260 | .Attr("use_locking" , attrs.use_locking_) |
261 | .Attr("update_slots" , attrs.update_slots_) |
262 | ; |
263 | scope.UpdateBuilder(&builder); |
264 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
265 | if (!scope.ok()) return; |
266 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
267 | this->operation = Operation(ret); |
269 | } |
270 | |
ResourceSparseApplyAdagradV2::ResourceSparseApplyAdagradV2(
    const ::tensorflow::Scope& scope, ::tensorflow::Input var,
    ::tensorflow::Input accum, ::tensorflow::Input lr,
    ::tensorflow::Input epsilon, ::tensorflow::Input grad,
    ::tensorflow::Input indices)
    : ResourceSparseApplyAdagradV2(scope, var, accum, lr, epsilon, grad,
                                   indices,
                                   ResourceSparseApplyAdagradV2::Attrs()) {}
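
// The sparse variant touches only the rows of var and accum selected by
// indices, so grad's first dimension must equal the number of indices: row
// grad[i] updates var[indices[i]]. A hedged sketch with hypothetical shapes
// (var and accum of shape [100, 8], three rows updated):
//
//   auto indices = ops::Const(scope, {0, 17, 42});
//   // grad has shape [3, 8], one row per index.
//   internal::ResourceSparseApplyAdagradV2 apply(scope, var_handle,
//                                                accum_handle, /*lr=*/0.01f,
//                                                /*epsilon=*/1e-7f, grad,
//                                                indices);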
287 | |
288 | SparseApplyAdagradV2::SparseApplyAdagradV2(const ::tensorflow::Scope& scope, |
289 | ::tensorflow::Input var, |
290 | ::tensorflow::Input accum, |
291 | ::tensorflow::Input lr, |
292 | ::tensorflow::Input epsilon, |
293 | ::tensorflow::Input grad, |
                                           ::tensorflow::Input indices,
                                           const SparseApplyAdagradV2::Attrs& attrs) {
296 | if (!scope.ok()) return; |
297 | auto _var = ::tensorflow::ops::AsNodeOut(scope, var); |
298 | if (!scope.ok()) return; |
299 | auto _accum = ::tensorflow::ops::AsNodeOut(scope, accum); |
300 | if (!scope.ok()) return; |
301 | auto _lr = ::tensorflow::ops::AsNodeOut(scope, lr); |
302 | if (!scope.ok()) return; |
303 | auto _epsilon = ::tensorflow::ops::AsNodeOut(scope, epsilon); |
304 | if (!scope.ok()) return; |
305 | auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad); |
306 | if (!scope.ok()) return; |
307 | auto _indices = ::tensorflow::ops::AsNodeOut(scope, indices); |
308 | if (!scope.ok()) return; |
309 | ::tensorflow::Node* ret; |
  const auto unique_name = scope.GetUniqueNameForOp("SparseApplyAdagradV2");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseApplyAdagradV2")
312 | .Input(_var) |
313 | .Input(_accum) |
314 | .Input(_lr) |
315 | .Input(_epsilon) |
316 | .Input(_grad) |
317 | .Input(_indices) |
318 | .Attr("use_locking" , attrs.use_locking_) |
319 | .Attr("update_slots" , attrs.update_slots_) |
320 | ; |
321 | scope.UpdateBuilder(&builder); |
322 | scope.UpdateStatus(builder.Finalize(scope.graph(), &ret)); |
323 | if (!scope.ok()) return; |
324 | scope.UpdateStatus(scope.DoShapeInference(ret)); |
325 | this->operation = Operation(ret); |
326 | this->out = Output(ret, 0); |
327 | } |
328 | |
329 | SparseApplyAdagradV2::SparseApplyAdagradV2(const ::tensorflow::Scope& scope, |
330 | ::tensorflow::Input var, |
331 | ::tensorflow::Input accum, |
332 | ::tensorflow::Input lr, |
333 | ::tensorflow::Input epsilon, |
334 | ::tensorflow::Input grad, |
335 | ::tensorflow::Input indices) |
    : SparseApplyAdagradV2(scope, var, accum, lr, epsilon, grad, indices,
                           SparseApplyAdagradV2::Attrs()) {}
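
// Ref-variable counterpart of ResourceSparseApplyAdagradV2. Per selected row
// the update is (per the op documentation):
//   accum[indices[i]] <- accum[indices[i]] + grad[i] * grad[i]
//   var[indices[i]]   <- var[indices[i]]
//                        - lr * grad[i] / (sqrt(accum[indices[i]]) + epsilon)
// Unlike the resource form, `out` is populated and aliases the updated var.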
337 | |
338 | } // namespace internal |
339 | } // namespace ops |
340 | } // namespace tensorflow |
341 | |