1// This file is MACHINE GENERATED! Do not edit.
2
3
4#include "tensorflow/cc/ops/const_op.h"
5#include "tensorflow/cc/ops/nn_ops.h"
6
7namespace tensorflow {
8namespace ops {
9
// Adds an "ApproxTopK" node to the graph owned by `scope`, wiring `input` and
// the required attr `k` plus the optional attrs. Any failure is recorded in
// `scope` and construction stops early, leaving the outputs default-built.
ApproxTopK::ApproxTopK(const ::tensorflow::Scope& scope, ::tensorflow::Input
                       input, int64 k, const ApproxTopK::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("ApproxTopK");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "ApproxTopK")
                     .Input(_input)
                     .Attr("k", k)
                     .Attr("reduction_dimension", attrs.reduction_dimension_)
                     .Attr("recall_target", attrs.recall_target_)
                     .Attr("is_max_k", attrs.is_max_k_)
                     .Attr("reduction_input_size_override", attrs.reduction_input_size_override_)
                     .Attr("aggregate_to_topk", attrs.aggregate_to_topk_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  // This op has multiple named outputs; map each output name to its index
  // range in the node so the Output members can be bound by name.
  ::tensorflow::NameRangeMap _outputs_range;
  ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
  if (!_status_.ok()) {
    scope.UpdateStatus(_status_);
    return;
  }

  this->values = Output(ret, _outputs_range["values"].first);
  this->indices = Output(ret, _outputs_range["indices"].first);
}
41
// Convenience overload: delegates to the full constructor with default
// ApproxTopK::Attrs.
ApproxTopK::ApproxTopK(const ::tensorflow::Scope& scope, ::tensorflow::Input
                       input, int64 k)
  : ApproxTopK(scope, input, k, ApproxTopK::Attrs()) {}
45
// Adds an "AvgPool" node to the graph owned by `scope`, wiring `value` and the
// required attrs `ksize`, `strides`, `padding` plus the optional `data_format`.
// Any failure is recorded in `scope` and construction stops early.
AvgPool::AvgPool(const ::tensorflow::Scope& scope, ::tensorflow::Input value,
                 const gtl::ArraySlice<int>& ksize, const gtl::ArraySlice<int>&
                 strides, StringPiece padding, const AvgPool::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _value = ::tensorflow::ops::AsNodeOut(scope, value);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("AvgPool");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "AvgPool")
                     .Input(_value)
                     .Attr("ksize", ksize)
                     .Attr("strides", strides)
                     .Attr("padding", padding)
                     .Attr("data_format", attrs.data_format_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
68
// Convenience overload: delegates to the full constructor with default
// AvgPool::Attrs.
AvgPool::AvgPool(const ::tensorflow::Scope& scope, ::tensorflow::Input value,
                 const gtl::ArraySlice<int>& ksize, const gtl::ArraySlice<int>&
                 strides, StringPiece padding)
  : AvgPool(scope, value, ksize, strides, padding, AvgPool::Attrs()) {}
73
// Adds an "AvgPool3D" node to the graph owned by `scope`, wiring `input` and
// the required attrs `ksize`, `strides`, `padding` plus the optional
// `data_format`. Any failure is recorded in `scope` and construction stops.
AvgPool3D::AvgPool3D(const ::tensorflow::Scope& scope, ::tensorflow::Input
                     input, const gtl::ArraySlice<int>& ksize, const
                     gtl::ArraySlice<int>& strides, StringPiece padding, const
                     AvgPool3D::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("AvgPool3D");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "AvgPool3D")
                     .Input(_input)
                     .Attr("ksize", ksize)
                     .Attr("strides", strides)
                     .Attr("padding", padding)
                     .Attr("data_format", attrs.data_format_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
97
// Convenience overload: delegates to the full constructor with default
// AvgPool3D::Attrs.
AvgPool3D::AvgPool3D(const ::tensorflow::Scope& scope, ::tensorflow::Input
                     input, const gtl::ArraySlice<int>& ksize, const
                     gtl::ArraySlice<int>& strides, StringPiece padding)
  : AvgPool3D(scope, input, ksize, strides, padding, AvgPool3D::Attrs()) {}
102
// Adds an "AvgPool3DGrad" node to the graph owned by `scope`, wiring
// `orig_input_shape` and `grad` plus the required attrs `ksize`, `strides`,
// `padding` and optional `data_format`. Errors are recorded in `scope`.
AvgPool3DGrad::AvgPool3DGrad(const ::tensorflow::Scope& scope,
                             ::tensorflow::Input orig_input_shape,
                             ::tensorflow::Input grad, const
                             gtl::ArraySlice<int>& ksize, const
                             gtl::ArraySlice<int>& strides, StringPiece
                             padding, const AvgPool3DGrad::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _orig_input_shape = ::tensorflow::ops::AsNodeOut(scope, orig_input_shape);
  if (!scope.ok()) return;
  auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("AvgPool3DGrad");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "AvgPool3DGrad")
                     .Input(_orig_input_shape)
                     .Input(_grad)
                     .Attr("ksize", ksize)
                     .Attr("strides", strides)
                     .Attr("padding", padding)
                     .Attr("data_format", attrs.data_format_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
131
// Convenience overload: delegates to the full constructor with default
// AvgPool3DGrad::Attrs.
AvgPool3DGrad::AvgPool3DGrad(const ::tensorflow::Scope& scope,
                             ::tensorflow::Input orig_input_shape,
                             ::tensorflow::Input grad, const
                             gtl::ArraySlice<int>& ksize, const
                             gtl::ArraySlice<int>& strides, StringPiece
                             padding)
  : AvgPool3DGrad(scope, orig_input_shape, grad, ksize, strides, padding, AvgPool3DGrad::Attrs()) {}
139
// Adds a "BiasAdd" node to the graph owned by `scope`, wiring `value` and
// `bias` plus the optional `data_format` attr. Errors are recorded in `scope`
// and construction stops early.
BiasAdd::BiasAdd(const ::tensorflow::Scope& scope, ::tensorflow::Input value,
                 ::tensorflow::Input bias, const BiasAdd::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _value = ::tensorflow::ops::AsNodeOut(scope, value);
  if (!scope.ok()) return;
  auto _bias = ::tensorflow::ops::AsNodeOut(scope, bias);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("BiasAdd");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "BiasAdd")
                     .Input(_value)
                     .Input(_bias)
                     .Attr("data_format", attrs.data_format_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
161
// Convenience overload: delegates to the full constructor with default
// BiasAdd::Attrs.
BiasAdd::BiasAdd(const ::tensorflow::Scope& scope, ::tensorflow::Input value,
                 ::tensorflow::Input bias)
  : BiasAdd(scope, value, bias, BiasAdd::Attrs()) {}
165
// Adds a "BiasAddGrad" node to the graph owned by `scope`, wiring
// `out_backprop` plus the optional `data_format` attr. Errors are recorded in
// `scope` and construction stops early.
BiasAddGrad::BiasAddGrad(const ::tensorflow::Scope& scope, ::tensorflow::Input
                         out_backprop, const BiasAddGrad::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _out_backprop = ::tensorflow::ops::AsNodeOut(scope, out_backprop);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("BiasAddGrad");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "BiasAddGrad")
                     .Input(_out_backprop)
                     .Attr("data_format", attrs.data_format_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
184
// Convenience overload: delegates to the full constructor with default
// BiasAddGrad::Attrs.
BiasAddGrad::BiasAddGrad(const ::tensorflow::Scope& scope, ::tensorflow::Input
                         out_backprop)
  : BiasAddGrad(scope, out_backprop, BiasAddGrad::Attrs()) {}
188
// Adds a "Conv2D" node to the graph owned by `scope`, wiring `input` and
// `filter`, the required attrs `strides` and `padding`, and the optional attrs
// (cudnn flag, explicit paddings, data format, dilations). Errors are recorded
// in `scope` and construction stops early.
Conv2D::Conv2D(const ::tensorflow::Scope& scope, ::tensorflow::Input input,
               ::tensorflow::Input filter, const gtl::ArraySlice<int>& strides,
               StringPiece padding, const Conv2D::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("Conv2D");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "Conv2D")
                     .Input(_input)
                     .Input(_filter)
                     .Attr("strides", strides)
                     .Attr("use_cudnn_on_gpu", attrs.use_cudnn_on_gpu_)
                     .Attr("padding", padding)
                     .Attr("explicit_paddings", attrs.explicit_paddings_)
                     .Attr("data_format", attrs.data_format_)
                     .Attr("dilations", attrs.dilations_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
216
// Convenience overload: delegates to the full constructor with default
// Conv2D::Attrs.
Conv2D::Conv2D(const ::tensorflow::Scope& scope, ::tensorflow::Input input,
               ::tensorflow::Input filter, const gtl::ArraySlice<int>& strides,
               StringPiece padding)
  : Conv2D(scope, input, filter, strides, padding, Conv2D::Attrs()) {}
221
// Adds a "Conv2DBackpropFilter" node to the graph owned by `scope`, wiring
// `input`, `filter_sizes` and `out_backprop`, the required attrs `strides` and
// `padding`, and the optional attrs. Errors are recorded in `scope`.
Conv2DBackpropFilter::Conv2DBackpropFilter(const ::tensorflow::Scope& scope,
                                           ::tensorflow::Input input,
                                           ::tensorflow::Input filter_sizes,
                                           ::tensorflow::Input out_backprop,
                                           const gtl::ArraySlice<int>& strides,
                                           StringPiece padding, const
                                           Conv2DBackpropFilter::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  auto _filter_sizes = ::tensorflow::ops::AsNodeOut(scope, filter_sizes);
  if (!scope.ok()) return;
  auto _out_backprop = ::tensorflow::ops::AsNodeOut(scope, out_backprop);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("Conv2DBackpropFilter");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "Conv2DBackpropFilter")
                     .Input(_input)
                     .Input(_filter_sizes)
                     .Input(_out_backprop)
                     .Attr("strides", strides)
                     .Attr("use_cudnn_on_gpu", attrs.use_cudnn_on_gpu_)
                     .Attr("padding", padding)
                     .Attr("explicit_paddings", attrs.explicit_paddings_)
                     .Attr("data_format", attrs.data_format_)
                     .Attr("dilations", attrs.dilations_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
256
// Convenience overload: delegates to the full constructor with default
// Conv2DBackpropFilter::Attrs.
Conv2DBackpropFilter::Conv2DBackpropFilter(const ::tensorflow::Scope& scope,
                                           ::tensorflow::Input input,
                                           ::tensorflow::Input filter_sizes,
                                           ::tensorflow::Input out_backprop,
                                           const gtl::ArraySlice<int>& strides,
                                           StringPiece padding)
  : Conv2DBackpropFilter(scope, input, filter_sizes, out_backprop, strides, padding, Conv2DBackpropFilter::Attrs()) {}
264
// Adds a "Conv2DBackpropInput" node to the graph owned by `scope`, wiring
// `input_sizes`, `filter` and `out_backprop`, the required attrs `strides` and
// `padding`, and the optional attrs. Errors are recorded in `scope`.
Conv2DBackpropInput::Conv2DBackpropInput(const ::tensorflow::Scope& scope,
                                         ::tensorflow::Input input_sizes,
                                         ::tensorflow::Input filter,
                                         ::tensorflow::Input out_backprop,
                                         const gtl::ArraySlice<int>& strides,
                                         StringPiece padding, const
                                         Conv2DBackpropInput::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input_sizes = ::tensorflow::ops::AsNodeOut(scope, input_sizes);
  if (!scope.ok()) return;
  auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
  if (!scope.ok()) return;
  auto _out_backprop = ::tensorflow::ops::AsNodeOut(scope, out_backprop);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("Conv2DBackpropInput");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "Conv2DBackpropInput")
                     .Input(_input_sizes)
                     .Input(_filter)
                     .Input(_out_backprop)
                     .Attr("strides", strides)
                     .Attr("use_cudnn_on_gpu", attrs.use_cudnn_on_gpu_)
                     .Attr("padding", padding)
                     .Attr("explicit_paddings", attrs.explicit_paddings_)
                     .Attr("data_format", attrs.data_format_)
                     .Attr("dilations", attrs.dilations_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
299
// Convenience overload: delegates to the full constructor with default
// Conv2DBackpropInput::Attrs.
Conv2DBackpropInput::Conv2DBackpropInput(const ::tensorflow::Scope& scope,
                                         ::tensorflow::Input input_sizes,
                                         ::tensorflow::Input filter,
                                         ::tensorflow::Input out_backprop,
                                         const gtl::ArraySlice<int>& strides,
                                         StringPiece padding)
  : Conv2DBackpropInput(scope, input_sizes, filter, out_backprop, strides, padding, Conv2DBackpropInput::Attrs()) {}
307
// Adds a "Conv3D" node to the graph owned by `scope`, wiring `input` and
// `filter`, the required attrs `strides` and `padding`, and the optional
// `data_format`/`dilations`. Errors are recorded in `scope`.
Conv3D::Conv3D(const ::tensorflow::Scope& scope, ::tensorflow::Input input,
               ::tensorflow::Input filter, const gtl::ArraySlice<int>& strides,
               StringPiece padding, const Conv3D::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("Conv3D");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "Conv3D")
                     .Input(_input)
                     .Input(_filter)
                     .Attr("strides", strides)
                     .Attr("padding", padding)
                     .Attr("data_format", attrs.data_format_)
                     .Attr("dilations", attrs.dilations_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
333
// Convenience overload: delegates to the full constructor with default
// Conv3D::Attrs.
Conv3D::Conv3D(const ::tensorflow::Scope& scope, ::tensorflow::Input input,
               ::tensorflow::Input filter, const gtl::ArraySlice<int>& strides,
               StringPiece padding)
  : Conv3D(scope, input, filter, strides, padding, Conv3D::Attrs()) {}
338
// Adds a "Conv3DBackpropFilterV2" node to the graph owned by `scope`, wiring
// `input`, `filter_sizes` and `out_backprop`, the required attrs `strides` and
// `padding`, and the optional `data_format`/`dilations`. Errors are recorded
// in `scope`.
Conv3DBackpropFilterV2::Conv3DBackpropFilterV2(const ::tensorflow::Scope&
                                               scope, ::tensorflow::Input
                                               input, ::tensorflow::Input
                                               filter_sizes,
                                               ::tensorflow::Input
                                               out_backprop, const
                                               gtl::ArraySlice<int>& strides,
                                               StringPiece padding, const
                                               Conv3DBackpropFilterV2::Attrs&
                                               attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  auto _filter_sizes = ::tensorflow::ops::AsNodeOut(scope, filter_sizes);
  if (!scope.ok()) return;
  auto _out_backprop = ::tensorflow::ops::AsNodeOut(scope, out_backprop);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("Conv3DBackpropFilterV2");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "Conv3DBackpropFilterV2")
                     .Input(_input)
                     .Input(_filter_sizes)
                     .Input(_out_backprop)
                     .Attr("strides", strides)
                     .Attr("padding", padding)
                     .Attr("data_format", attrs.data_format_)
                     .Attr("dilations", attrs.dilations_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
374
// Convenience overload: delegates to the full constructor with default
// Conv3DBackpropFilterV2::Attrs.
Conv3DBackpropFilterV2::Conv3DBackpropFilterV2(const ::tensorflow::Scope&
                                               scope, ::tensorflow::Input
                                               input, ::tensorflow::Input
                                               filter_sizes,
                                               ::tensorflow::Input
                                               out_backprop, const
                                               gtl::ArraySlice<int>& strides,
                                               StringPiece padding)
  : Conv3DBackpropFilterV2(scope, input, filter_sizes, out_backprop, strides, padding, Conv3DBackpropFilterV2::Attrs()) {}
384
// Adds a "Conv3DBackpropInputV2" node to the graph owned by `scope`, wiring
// `input_sizes`, `filter` and `out_backprop`, the required attrs `strides` and
// `padding`, and the optional `data_format`/`dilations`. Errors are recorded
// in `scope`.
Conv3DBackpropInputV2::Conv3DBackpropInputV2(const ::tensorflow::Scope& scope,
                                             ::tensorflow::Input input_sizes,
                                             ::tensorflow::Input filter,
                                             ::tensorflow::Input out_backprop,
                                             const gtl::ArraySlice<int>&
                                             strides, StringPiece padding,
                                             const
                                             Conv3DBackpropInputV2::Attrs&
                                             attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input_sizes = ::tensorflow::ops::AsNodeOut(scope, input_sizes);
  if (!scope.ok()) return;
  auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
  if (!scope.ok()) return;
  auto _out_backprop = ::tensorflow::ops::AsNodeOut(scope, out_backprop);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("Conv3DBackpropInputV2");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "Conv3DBackpropInputV2")
                     .Input(_input_sizes)
                     .Input(_filter)
                     .Input(_out_backprop)
                     .Attr("strides", strides)
                     .Attr("padding", padding)
                     .Attr("data_format", attrs.data_format_)
                     .Attr("dilations", attrs.dilations_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
419
// Convenience overload: delegates to the full constructor with default
// Conv3DBackpropInputV2::Attrs.
Conv3DBackpropInputV2::Conv3DBackpropInputV2(const ::tensorflow::Scope& scope,
                                             ::tensorflow::Input input_sizes,
                                             ::tensorflow::Input filter,
                                             ::tensorflow::Input out_backprop,
                                             const gtl::ArraySlice<int>&
                                             strides, StringPiece padding)
  : Conv3DBackpropInputV2(scope, input_sizes, filter, out_backprop, strides, padding, Conv3DBackpropInputV2::Attrs()) {}
427
// Adds a "DataFormatDimMap" node to the graph owned by `scope`, wiring `x`
// plus the optional `src_format`/`dst_format` attrs. Errors are recorded in
// `scope` and construction stops early.
DataFormatDimMap::DataFormatDimMap(const ::tensorflow::Scope& scope,
                                   ::tensorflow::Input x, const
                                   DataFormatDimMap::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _x = ::tensorflow::ops::AsNodeOut(scope, x);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("DataFormatDimMap");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "DataFormatDimMap")
                     .Input(_x)
                     .Attr("src_format", attrs.src_format_)
                     .Attr("dst_format", attrs.dst_format_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->y = Output(ret, 0);  // single output at index 0
}
448
// Convenience overload: delegates to the full constructor with default
// DataFormatDimMap::Attrs.
DataFormatDimMap::DataFormatDimMap(const ::tensorflow::Scope& scope,
                                   ::tensorflow::Input x)
  : DataFormatDimMap(scope, x, DataFormatDimMap::Attrs()) {}
452
// Adds a "DataFormatVecPermute" node to the graph owned by `scope`, wiring `x`
// plus the optional `src_format`/`dst_format` attrs. Errors are recorded in
// `scope` and construction stops early.
DataFormatVecPermute::DataFormatVecPermute(const ::tensorflow::Scope& scope,
                                           ::tensorflow::Input x, const
                                           DataFormatVecPermute::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _x = ::tensorflow::ops::AsNodeOut(scope, x);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("DataFormatVecPermute");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "DataFormatVecPermute")
                     .Input(_x)
                     .Attr("src_format", attrs.src_format_)
                     .Attr("dst_format", attrs.dst_format_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->y = Output(ret, 0);  // single output at index 0
}
473
// Convenience overload: delegates to the full constructor with default
// DataFormatVecPermute::Attrs.
DataFormatVecPermute::DataFormatVecPermute(const ::tensorflow::Scope& scope,
                                           ::tensorflow::Input x)
  : DataFormatVecPermute(scope, x, DataFormatVecPermute::Attrs()) {}
477
// Adds a "DepthwiseConv2dNative" node to the graph owned by `scope`, wiring
// `input` and `filter`, the required attrs `strides` and `padding`, and the
// optional attrs. Errors are recorded in `scope`.
DepthwiseConv2dNative::DepthwiseConv2dNative(const ::tensorflow::Scope& scope,
                                             ::tensorflow::Input input,
                                             ::tensorflow::Input filter, const
                                             gtl::ArraySlice<int>& strides,
                                             StringPiece padding, const
                                             DepthwiseConv2dNative::Attrs&
                                             attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("DepthwiseConv2dNative");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "DepthwiseConv2dNative")
                     .Input(_input)
                     .Input(_filter)
                     .Attr("strides", strides)
                     .Attr("padding", padding)
                     .Attr("explicit_paddings", attrs.explicit_paddings_)
                     .Attr("data_format", attrs.data_format_)
                     .Attr("dilations", attrs.dilations_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
508
// Convenience overload: delegates to the full constructor with default
// DepthwiseConv2dNative::Attrs.
DepthwiseConv2dNative::DepthwiseConv2dNative(const ::tensorflow::Scope& scope,
                                             ::tensorflow::Input input,
                                             ::tensorflow::Input filter, const
                                             gtl::ArraySlice<int>& strides,
                                             StringPiece padding)
  : DepthwiseConv2dNative(scope, input, filter, strides, padding, DepthwiseConv2dNative::Attrs()) {}
515
// Adds a "DepthwiseConv2dNativeBackpropFilter" node to the graph owned by
// `scope`, wiring `input`, `filter_sizes` and `out_backprop`, the required
// attrs `strides` and `padding`, and the optional attrs. Errors are recorded
// in `scope`.
DepthwiseConv2dNativeBackpropFilter::DepthwiseConv2dNativeBackpropFilter(const
                                     ::tensorflow::Scope& scope,
                                     ::tensorflow::Input input,
                                     ::tensorflow::Input filter_sizes,
                                     ::tensorflow::Input out_backprop,
                                     const gtl::ArraySlice<int>& strides,
                                     StringPiece padding,
                                     const
                                     DepthwiseConv2dNativeBackpropFilter::Attrs&
                                     attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  auto _filter_sizes = ::tensorflow::ops::AsNodeOut(scope, filter_sizes);
  if (!scope.ok()) return;
  auto _out_backprop = ::tensorflow::ops::AsNodeOut(scope, out_backprop);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("DepthwiseConv2dNativeBackpropFilter");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "DepthwiseConv2dNativeBackpropFilter")
                     .Input(_input)
                     .Input(_filter_sizes)
                     .Input(_out_backprop)
                     .Attr("strides", strides)
                     .Attr("padding", padding)
                     .Attr("explicit_paddings", attrs.explicit_paddings_)
                     .Attr("data_format", attrs.data_format_)
                     .Attr("dilations", attrs.dilations_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
559
// Convenience overload: delegates to the full constructor with default
// DepthwiseConv2dNativeBackpropFilter::Attrs.
DepthwiseConv2dNativeBackpropFilter::DepthwiseConv2dNativeBackpropFilter(const
                                     ::tensorflow::Scope& scope,
                                     ::tensorflow::Input input,
                                     ::tensorflow::Input filter_sizes,
                                     ::tensorflow::Input out_backprop,
                                     const gtl::ArraySlice<int>& strides,
                                     StringPiece padding)
  : DepthwiseConv2dNativeBackpropFilter(scope, input, filter_sizes, out_backprop, strides, padding, DepthwiseConv2dNativeBackpropFilter::Attrs()) {}
575
// Adds a "DepthwiseConv2dNativeBackpropInput" node to the graph owned by
// `scope`, wiring `input_sizes`, `filter` and `out_backprop`, the required
// attrs `strides` and `padding`, and the optional attrs. Errors are recorded
// in `scope`.
DepthwiseConv2dNativeBackpropInput::DepthwiseConv2dNativeBackpropInput(const
                                    ::tensorflow::Scope& scope,
                                    ::tensorflow::Input input_sizes,
                                    ::tensorflow::Input filter,
                                    ::tensorflow::Input out_backprop,
                                    const gtl::ArraySlice<int>& strides,
                                    StringPiece padding,
                                    const
                                    DepthwiseConv2dNativeBackpropInput::Attrs&
                                    attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input_sizes = ::tensorflow::ops::AsNodeOut(scope, input_sizes);
  if (!scope.ok()) return;
  auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
  if (!scope.ok()) return;
  auto _out_backprop = ::tensorflow::ops::AsNodeOut(scope, out_backprop);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("DepthwiseConv2dNativeBackpropInput");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "DepthwiseConv2dNativeBackpropInput")
                     .Input(_input_sizes)
                     .Input(_filter)
                     .Input(_out_backprop)
                     .Attr("strides", strides)
                     .Attr("padding", padding)
                     .Attr("explicit_paddings", attrs.explicit_paddings_)
                     .Attr("data_format", attrs.data_format_)
                     .Attr("dilations", attrs.dilations_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
619
// Convenience overload: delegates to the full constructor with default
// DepthwiseConv2dNativeBackpropInput::Attrs.
DepthwiseConv2dNativeBackpropInput::DepthwiseConv2dNativeBackpropInput(const
                                    ::tensorflow::Scope& scope,
                                    ::tensorflow::Input input_sizes,
                                    ::tensorflow::Input filter,
                                    ::tensorflow::Input out_backprop,
                                    const gtl::ArraySlice<int>& strides,
                                    StringPiece padding)
  : DepthwiseConv2dNativeBackpropInput(scope, input_sizes, filter, out_backprop, strides, padding, DepthwiseConv2dNativeBackpropInput::Attrs()) {}
635
// Adds a "Dilation2D" node to the graph owned by `scope`, wiring `input` and
// `filter` plus the required attrs `strides`, `rates`, `padding` (this op has
// no optional Attrs struct). Errors are recorded in `scope`.
Dilation2D::Dilation2D(const ::tensorflow::Scope& scope, ::tensorflow::Input
                       input, ::tensorflow::Input filter, const
                       gtl::ArraySlice<int>& strides, const
                       gtl::ArraySlice<int>& rates, StringPiece padding) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("Dilation2D");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "Dilation2D")
                     .Input(_input)
                     .Input(_filter)
                     .Attr("strides", strides)
                     .Attr("rates", rates)
                     .Attr("padding", padding)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);  // single output at index 0
}
661
// Adds a "Dilation2DBackpropFilter" node to the graph owned by `scope`, wiring
// `input`, `filter` and `out_backprop` plus the required attrs `strides`,
// `rates`, `padding`. Errors are recorded in `scope`.
Dilation2DBackpropFilter::Dilation2DBackpropFilter(const ::tensorflow::Scope&
                                                   scope, ::tensorflow::Input
                                                   input, ::tensorflow::Input
                                                   filter, ::tensorflow::Input
                                                   out_backprop, const
                                                   gtl::ArraySlice<int>&
                                                   strides, const
                                                   gtl::ArraySlice<int>& rates,
                                                   StringPiece padding) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
  if (!scope.ok()) return;
  auto _out_backprop = ::tensorflow::ops::AsNodeOut(scope, out_backprop);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("Dilation2DBackpropFilter");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "Dilation2DBackpropFilter")
                     .Input(_input)
                     .Input(_filter)
                     .Input(_out_backprop)
                     .Attr("strides", strides)
                     .Attr("rates", rates)
                     .Attr("padding", padding)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->filter_backprop = Output(ret, 0);  // single output at index 0
}
695
// Adds a "Dilation2DBackpropInput" node to the graph owned by `scope`, wiring
// `input`, `filter` and `out_backprop` plus the required attrs `strides`,
// `rates`, `padding`. Errors are recorded in `scope`.
Dilation2DBackpropInput::Dilation2DBackpropInput(const ::tensorflow::Scope&
                                                 scope, ::tensorflow::Input
                                                 input, ::tensorflow::Input
                                                 filter, ::tensorflow::Input
                                                 out_backprop, const
                                                 gtl::ArraySlice<int>& strides,
                                                 const gtl::ArraySlice<int>&
                                                 rates, StringPiece padding) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
  if (!scope.ok()) return;
  auto _out_backprop = ::tensorflow::ops::AsNodeOut(scope, out_backprop);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("Dilation2DBackpropInput");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "Dilation2DBackpropInput")
                     .Input(_input)
                     .Input(_filter)
                     .Input(_out_backprop)
                     .Attr("strides", strides)
                     .Attr("rates", rates)
                     .Attr("padding", padding)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->in_backprop = Output(ret, 0);  // single output at index 0
}
728
// Adds an "Elu" node to the graph owned by `scope`, wiring `features` (no
// attrs). Errors are recorded in `scope` and construction stops early.
Elu::Elu(const ::tensorflow::Scope& scope, ::tensorflow::Input features) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _features = ::tensorflow::ops::AsNodeOut(scope, features);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("Elu");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "Elu")
                     .Input(_features)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->activations = Output(ret, 0);  // single output at index 0
}
745
// Adds a "FractionalAvgPool" node to the graph owned by `scope`, wiring
// `value`, the required attr `pooling_ratio`, and the optional attrs
// (pseudo_random, overlapping, deterministic, seed, seed2). Errors are
// recorded in `scope` and construction stops early.
FractionalAvgPool::FractionalAvgPool(const ::tensorflow::Scope& scope,
                                     ::tensorflow::Input value, const
                                     gtl::ArraySlice<float>& pooling_ratio,
                                     const FractionalAvgPool::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _value = ::tensorflow::ops::AsNodeOut(scope, value);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("FractionalAvgPool");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "FractionalAvgPool")
                     .Input(_value)
                     .Attr("pooling_ratio", pooling_ratio)
                     .Attr("pseudo_random", attrs.pseudo_random_)
                     .Attr("overlapping", attrs.overlapping_)
                     .Attr("deterministic", attrs.deterministic_)
                     .Attr("seed", attrs.seed_)
                     .Attr("seed2", attrs.seed2_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  // This op has multiple named outputs; map each output name to its index
  // range in the node so the Output members can be bound by name.
  ::tensorflow::NameRangeMap _outputs_range;
  ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
  if (!_status_.ok()) {
    scope.UpdateStatus(_status_);
    return;
  }

  this->output = Output(ret, _outputs_range["output"].first);
  this->row_pooling_sequence = Output(ret, _outputs_range["row_pooling_sequence"].first);
  this->col_pooling_sequence = Output(ret, _outputs_range["col_pooling_sequence"].first);
}
780
// Convenience overload: delegates to the full constructor with default
// FractionalAvgPool::Attrs.
FractionalAvgPool::FractionalAvgPool(const ::tensorflow::Scope& scope,
                                     ::tensorflow::Input value, const
                                     gtl::ArraySlice<float>& pooling_ratio)
  : FractionalAvgPool(scope, value, pooling_ratio, FractionalAvgPool::Attrs()) {}
785
// Adds a "FractionalMaxPool" node to the graph owned by `scope`, wiring
// `value`, the required attr `pooling_ratio`, and the optional attrs
// (pseudo_random, overlapping, deterministic, seed, seed2). Errors are
// recorded in `scope` and construction stops early.
FractionalMaxPool::FractionalMaxPool(const ::tensorflow::Scope& scope,
                                     ::tensorflow::Input value, const
                                     gtl::ArraySlice<float>& pooling_ratio,
                                     const FractionalMaxPool::Attrs& attrs) {
  if (!scope.ok()) return;  // scope already carries an error: do nothing
  auto _value = ::tensorflow::ops::AsNodeOut(scope, value);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("FractionalMaxPool");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "FractionalMaxPool")
                     .Input(_value)
                     .Attr("pooling_ratio", pooling_ratio)
                     .Attr("pseudo_random", attrs.pseudo_random_)
                     .Attr("overlapping", attrs.overlapping_)
                     .Attr("deterministic", attrs.deterministic_)
                     .Attr("seed", attrs.seed_)
                     .Attr("seed2", attrs.seed2_)
  ;
  // Let the scope adjust the builder, then finalize the node into the graph.
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  // This op has multiple named outputs; map each output name to its index
  // range in the node so the Output members can be bound by name.
  ::tensorflow::NameRangeMap _outputs_range;
  ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
  if (!_status_.ok()) {
    scope.UpdateStatus(_status_);
    return;
  }

  this->output = Output(ret, _outputs_range["output"].first);
  this->row_pooling_sequence = Output(ret, _outputs_range["row_pooling_sequence"].first);
  this->col_pooling_sequence = Output(ret, _outputs_range["col_pooling_sequence"].first);
}
820
// Convenience overload: delegates to the full constructor with default
// FractionalMaxPool::Attrs.
FractionalMaxPool::FractionalMaxPool(const ::tensorflow::Scope& scope,
                                     ::tensorflow::Input value, const
                                     gtl::ArraySlice<float>& pooling_ratio)
  : FractionalMaxPool(scope, value, pooling_ratio, FractionalMaxPool::Attrs()) {}
825
// Adds a "FusedBatchNorm" node to the graph held by `scope` over inputs
// `x`, `scale`, `offset`, `mean`, and `variance`; `attrs` supplies the
// optional op attributes. Errors are recorded in `scope`.
FusedBatchNorm::FusedBatchNorm(const ::tensorflow::Scope& scope,
                               ::tensorflow::Input x, ::tensorflow::Input
                               scale, ::tensorflow::Input offset,
                               ::tensorflow::Input mean, ::tensorflow::Input
                               variance, const FusedBatchNorm::Attrs& attrs) {
  if (!scope.ok()) return;
  auto _x = ::tensorflow::ops::AsNodeOut(scope, x);
  if (!scope.ok()) return;
  auto _scale = ::tensorflow::ops::AsNodeOut(scope, scale);
  if (!scope.ok()) return;
  auto _offset = ::tensorflow::ops::AsNodeOut(scope, offset);
  if (!scope.ok()) return;
  auto _mean = ::tensorflow::ops::AsNodeOut(scope, mean);
  if (!scope.ok()) return;
  auto _variance = ::tensorflow::ops::AsNodeOut(scope, variance);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("FusedBatchNorm");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "FusedBatchNorm")
             .Input(_x)
             .Input(_scale)
             .Input(_offset)
             .Input(_mean)
             .Input(_variance)
             .Attr("epsilon", attrs.epsilon_)
             .Attr("exponential_avg_factor", attrs.exponential_avg_factor_)
             .Attr("data_format", attrs.data_format_)
             .Attr("is_training", attrs.is_training_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  // Map each named op output to its output-slot index on the finalized node.
  ::tensorflow::NameRangeMap _outputs_range;
  ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
  if (!_status_.ok()) {
    scope.UpdateStatus(_status_);
    return;
  }

  this->y = Output(ret, _outputs_range["y"].first);
  this->batch_mean = Output(ret, _outputs_range["batch_mean"].first);
  this->batch_variance = Output(ret, _outputs_range["batch_variance"].first);
  this->reserve_space_1 = Output(ret, _outputs_range["reserve_space_1"].first);
  this->reserve_space_2 = Output(ret, _outputs_range["reserve_space_2"].first);
}
873
// Convenience overload: builds a FusedBatchNorm op with default attributes.
FusedBatchNorm::FusedBatchNorm(const ::tensorflow::Scope& scope,
                               ::tensorflow::Input x, ::tensorflow::Input
                               scale, ::tensorflow::Input offset,
                               ::tensorflow::Input mean, ::tensorflow::Input
                               variance)
  : FusedBatchNorm(scope, x, scale, offset, mean, variance, FusedBatchNorm::Attrs()) {}
880
// Adds a "FusedBatchNormGrad" node to the graph held by `scope`, computing
// gradients from `y_backprop` with the forward inputs `x`, `scale` and the
// forward pass's `reserve_space_1`/`reserve_space_2`. Errors are recorded
// in `scope`.
FusedBatchNormGrad::FusedBatchNormGrad(const ::tensorflow::Scope& scope,
                                       ::tensorflow::Input y_backprop,
                                       ::tensorflow::Input x,
                                       ::tensorflow::Input scale,
                                       ::tensorflow::Input reserve_space_1,
                                       ::tensorflow::Input reserve_space_2,
                                       const FusedBatchNormGrad::Attrs& attrs) {
  if (!scope.ok()) return;
  auto _y_backprop = ::tensorflow::ops::AsNodeOut(scope, y_backprop);
  if (!scope.ok()) return;
  auto _x = ::tensorflow::ops::AsNodeOut(scope, x);
  if (!scope.ok()) return;
  auto _scale = ::tensorflow::ops::AsNodeOut(scope, scale);
  if (!scope.ok()) return;
  auto _reserve_space_1 = ::tensorflow::ops::AsNodeOut(scope, reserve_space_1);
  if (!scope.ok()) return;
  auto _reserve_space_2 = ::tensorflow::ops::AsNodeOut(scope, reserve_space_2);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("FusedBatchNormGrad");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "FusedBatchNormGrad")
             .Input(_y_backprop)
             .Input(_x)
             .Input(_scale)
             .Input(_reserve_space_1)
             .Input(_reserve_space_2)
             .Attr("epsilon", attrs.epsilon_)
             .Attr("data_format", attrs.data_format_)
             .Attr("is_training", attrs.is_training_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  // Map each named op output to its output-slot index on the finalized node.
  ::tensorflow::NameRangeMap _outputs_range;
  ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
  if (!_status_.ok()) {
    scope.UpdateStatus(_status_);
    return;
  }

  this->x_backprop = Output(ret, _outputs_range["x_backprop"].first);
  this->scale_backprop = Output(ret, _outputs_range["scale_backprop"].first);
  this->offset_backprop = Output(ret, _outputs_range["offset_backprop"].first);
  this->reserve_space_3 = Output(ret, _outputs_range["reserve_space_3"].first);
  this->reserve_space_4 = Output(ret, _outputs_range["reserve_space_4"].first);
}
929
// Convenience overload: builds a FusedBatchNormGrad op with default attributes.
FusedBatchNormGrad::FusedBatchNormGrad(const ::tensorflow::Scope& scope,
                                       ::tensorflow::Input y_backprop,
                                       ::tensorflow::Input x,
                                       ::tensorflow::Input scale,
                                       ::tensorflow::Input reserve_space_1,
                                       ::tensorflow::Input reserve_space_2)
  : FusedBatchNormGrad(scope, y_backprop, x, scale, reserve_space_1, reserve_space_2, FusedBatchNormGrad::Attrs()) {}
937
// Adds a "FusedBatchNormGradV2" node to the graph held by `scope`. Same
// wiring as FusedBatchNormGrad but builds the V2 op. Errors are recorded
// in `scope`.
FusedBatchNormGradV2::FusedBatchNormGradV2(const ::tensorflow::Scope& scope,
                                           ::tensorflow::Input y_backprop,
                                           ::tensorflow::Input x,
                                           ::tensorflow::Input scale,
                                           ::tensorflow::Input reserve_space_1,
                                           ::tensorflow::Input reserve_space_2,
                                           const FusedBatchNormGradV2::Attrs&
                                           attrs) {
  if (!scope.ok()) return;
  auto _y_backprop = ::tensorflow::ops::AsNodeOut(scope, y_backprop);
  if (!scope.ok()) return;
  auto _x = ::tensorflow::ops::AsNodeOut(scope, x);
  if (!scope.ok()) return;
  auto _scale = ::tensorflow::ops::AsNodeOut(scope, scale);
  if (!scope.ok()) return;
  auto _reserve_space_1 = ::tensorflow::ops::AsNodeOut(scope, reserve_space_1);
  if (!scope.ok()) return;
  auto _reserve_space_2 = ::tensorflow::ops::AsNodeOut(scope, reserve_space_2);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("FusedBatchNormGradV2");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "FusedBatchNormGradV2")
             .Input(_y_backprop)
             .Input(_x)
             .Input(_scale)
             .Input(_reserve_space_1)
             .Input(_reserve_space_2)
             .Attr("epsilon", attrs.epsilon_)
             .Attr("data_format", attrs.data_format_)
             .Attr("is_training", attrs.is_training_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  // Map each named op output to its output-slot index on the finalized node.
  ::tensorflow::NameRangeMap _outputs_range;
  ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
  if (!_status_.ok()) {
    scope.UpdateStatus(_status_);
    return;
  }

  this->x_backprop = Output(ret, _outputs_range["x_backprop"].first);
  this->scale_backprop = Output(ret, _outputs_range["scale_backprop"].first);
  this->offset_backprop = Output(ret, _outputs_range["offset_backprop"].first);
  this->reserve_space_3 = Output(ret, _outputs_range["reserve_space_3"].first);
  this->reserve_space_4 = Output(ret, _outputs_range["reserve_space_4"].first);
}
987
// Convenience overload: builds a FusedBatchNormGradV2 op with default
// attributes.
FusedBatchNormGradV2::FusedBatchNormGradV2(const ::tensorflow::Scope& scope,
                                           ::tensorflow::Input y_backprop,
                                           ::tensorflow::Input x,
                                           ::tensorflow::Input scale,
                                           ::tensorflow::Input reserve_space_1,
                                           ::tensorflow::Input reserve_space_2)
  : FusedBatchNormGradV2(scope, y_backprop, x, scale, reserve_space_1, reserve_space_2, FusedBatchNormGradV2::Attrs()) {}
995
// Adds a "FusedBatchNormGradV3" node to the graph held by `scope`. The V3
// op takes an extra `reserve_space_3` input, so its reserve-space outputs
// shift to reserve_space_4/reserve_space_5. Errors are recorded in `scope`.
FusedBatchNormGradV3::FusedBatchNormGradV3(const ::tensorflow::Scope& scope,
                                           ::tensorflow::Input y_backprop,
                                           ::tensorflow::Input x,
                                           ::tensorflow::Input scale,
                                           ::tensorflow::Input reserve_space_1,
                                           ::tensorflow::Input reserve_space_2,
                                           ::tensorflow::Input reserve_space_3,
                                           const FusedBatchNormGradV3::Attrs&
                                           attrs) {
  if (!scope.ok()) return;
  auto _y_backprop = ::tensorflow::ops::AsNodeOut(scope, y_backprop);
  if (!scope.ok()) return;
  auto _x = ::tensorflow::ops::AsNodeOut(scope, x);
  if (!scope.ok()) return;
  auto _scale = ::tensorflow::ops::AsNodeOut(scope, scale);
  if (!scope.ok()) return;
  auto _reserve_space_1 = ::tensorflow::ops::AsNodeOut(scope, reserve_space_1);
  if (!scope.ok()) return;
  auto _reserve_space_2 = ::tensorflow::ops::AsNodeOut(scope, reserve_space_2);
  if (!scope.ok()) return;
  auto _reserve_space_3 = ::tensorflow::ops::AsNodeOut(scope, reserve_space_3);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("FusedBatchNormGradV3");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "FusedBatchNormGradV3")
             .Input(_y_backprop)
             .Input(_x)
             .Input(_scale)
             .Input(_reserve_space_1)
             .Input(_reserve_space_2)
             .Input(_reserve_space_3)
             .Attr("epsilon", attrs.epsilon_)
             .Attr("data_format", attrs.data_format_)
             .Attr("is_training", attrs.is_training_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  // Map each named op output to its output-slot index on the finalized node.
  ::tensorflow::NameRangeMap _outputs_range;
  ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
  if (!_status_.ok()) {
    scope.UpdateStatus(_status_);
    return;
  }

  this->x_backprop = Output(ret, _outputs_range["x_backprop"].first);
  this->scale_backprop = Output(ret, _outputs_range["scale_backprop"].first);
  this->offset_backprop = Output(ret, _outputs_range["offset_backprop"].first);
  this->reserve_space_4 = Output(ret, _outputs_range["reserve_space_4"].first);
  this->reserve_space_5 = Output(ret, _outputs_range["reserve_space_5"].first);
}
1049
// Convenience overload: builds a FusedBatchNormGradV3 op with default
// attributes.
FusedBatchNormGradV3::FusedBatchNormGradV3(const ::tensorflow::Scope& scope,
                                           ::tensorflow::Input y_backprop,
                                           ::tensorflow::Input x,
                                           ::tensorflow::Input scale,
                                           ::tensorflow::Input reserve_space_1,
                                           ::tensorflow::Input reserve_space_2,
                                           ::tensorflow::Input reserve_space_3)
  : FusedBatchNormGradV3(scope, y_backprop, x, scale, reserve_space_1, reserve_space_2, reserve_space_3, FusedBatchNormGradV3::Attrs()) {}
1058
// Adds a "FusedBatchNormV2" node to the graph held by `scope`. Same wiring
// as FusedBatchNorm but builds the V2 op. Errors are recorded in `scope`.
FusedBatchNormV2::FusedBatchNormV2(const ::tensorflow::Scope& scope,
                                   ::tensorflow::Input x, ::tensorflow::Input
                                   scale, ::tensorflow::Input offset,
                                   ::tensorflow::Input mean,
                                   ::tensorflow::Input variance, const
                                   FusedBatchNormV2::Attrs& attrs) {
  if (!scope.ok()) return;
  auto _x = ::tensorflow::ops::AsNodeOut(scope, x);
  if (!scope.ok()) return;
  auto _scale = ::tensorflow::ops::AsNodeOut(scope, scale);
  if (!scope.ok()) return;
  auto _offset = ::tensorflow::ops::AsNodeOut(scope, offset);
  if (!scope.ok()) return;
  auto _mean = ::tensorflow::ops::AsNodeOut(scope, mean);
  if (!scope.ok()) return;
  auto _variance = ::tensorflow::ops::AsNodeOut(scope, variance);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("FusedBatchNormV2");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "FusedBatchNormV2")
             .Input(_x)
             .Input(_scale)
             .Input(_offset)
             .Input(_mean)
             .Input(_variance)
             .Attr("epsilon", attrs.epsilon_)
             .Attr("exponential_avg_factor", attrs.exponential_avg_factor_)
             .Attr("data_format", attrs.data_format_)
             .Attr("is_training", attrs.is_training_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  // Map each named op output to its output-slot index on the finalized node.
  ::tensorflow::NameRangeMap _outputs_range;
  ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
  if (!_status_.ok()) {
    scope.UpdateStatus(_status_);
    return;
  }

  this->y = Output(ret, _outputs_range["y"].first);
  this->batch_mean = Output(ret, _outputs_range["batch_mean"].first);
  this->batch_variance = Output(ret, _outputs_range["batch_variance"].first);
  this->reserve_space_1 = Output(ret, _outputs_range["reserve_space_1"].first);
  this->reserve_space_2 = Output(ret, _outputs_range["reserve_space_2"].first);
}
1107
// Convenience overload: builds a FusedBatchNormV2 op with default attributes.
FusedBatchNormV2::FusedBatchNormV2(const ::tensorflow::Scope& scope,
                                   ::tensorflow::Input x, ::tensorflow::Input
                                   scale, ::tensorflow::Input offset,
                                   ::tensorflow::Input mean,
                                   ::tensorflow::Input variance)
  : FusedBatchNormV2(scope, x, scale, offset, mean, variance, FusedBatchNormV2::Attrs()) {}
1114
// Adds a "FusedBatchNormV3" node to the graph held by `scope`. Like V2 but
// the op exposes an additional `reserve_space_3` output. Errors are
// recorded in `scope`.
FusedBatchNormV3::FusedBatchNormV3(const ::tensorflow::Scope& scope,
                                   ::tensorflow::Input x, ::tensorflow::Input
                                   scale, ::tensorflow::Input offset,
                                   ::tensorflow::Input mean,
                                   ::tensorflow::Input variance, const
                                   FusedBatchNormV3::Attrs& attrs) {
  if (!scope.ok()) return;
  auto _x = ::tensorflow::ops::AsNodeOut(scope, x);
  if (!scope.ok()) return;
  auto _scale = ::tensorflow::ops::AsNodeOut(scope, scale);
  if (!scope.ok()) return;
  auto _offset = ::tensorflow::ops::AsNodeOut(scope, offset);
  if (!scope.ok()) return;
  auto _mean = ::tensorflow::ops::AsNodeOut(scope, mean);
  if (!scope.ok()) return;
  auto _variance = ::tensorflow::ops::AsNodeOut(scope, variance);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("FusedBatchNormV3");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "FusedBatchNormV3")
             .Input(_x)
             .Input(_scale)
             .Input(_offset)
             .Input(_mean)
             .Input(_variance)
             .Attr("epsilon", attrs.epsilon_)
             .Attr("exponential_avg_factor", attrs.exponential_avg_factor_)
             .Attr("data_format", attrs.data_format_)
             .Attr("is_training", attrs.is_training_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  // Map each named op output to its output-slot index on the finalized node.
  ::tensorflow::NameRangeMap _outputs_range;
  ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
  if (!_status_.ok()) {
    scope.UpdateStatus(_status_);
    return;
  }

  this->y = Output(ret, _outputs_range["y"].first);
  this->batch_mean = Output(ret, _outputs_range["batch_mean"].first);
  this->batch_variance = Output(ret, _outputs_range["batch_variance"].first);
  this->reserve_space_1 = Output(ret, _outputs_range["reserve_space_1"].first);
  this->reserve_space_2 = Output(ret, _outputs_range["reserve_space_2"].first);
  this->reserve_space_3 = Output(ret, _outputs_range["reserve_space_3"].first);
}
1164
// Convenience overload: builds a FusedBatchNormV3 op with default attributes.
FusedBatchNormV3::FusedBatchNormV3(const ::tensorflow::Scope& scope,
                                   ::tensorflow::Input x, ::tensorflow::Input
                                   scale, ::tensorflow::Input offset,
                                   ::tensorflow::Input mean,
                                   ::tensorflow::Input variance)
  : FusedBatchNormV3(scope, x, scale, offset, mean, variance, FusedBatchNormV3::Attrs()) {}
1171
// Adds a "FusedPadConv2D" node to the graph held by `scope`: `input` is
// padded per `paddings`/`mode`, then convolved with `filter` using
// `strides` and `padding`. The single result is exposed as `output`.
// Errors are recorded in `scope`.
FusedPadConv2D::FusedPadConv2D(const ::tensorflow::Scope& scope,
                               ::tensorflow::Input input, ::tensorflow::Input
                               paddings, ::tensorflow::Input filter,
                               StringPiece mode, const gtl::ArraySlice<int>&
                               strides, StringPiece padding) {
  if (!scope.ok()) return;
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  auto _paddings = ::tensorflow::ops::AsNodeOut(scope, paddings);
  if (!scope.ok()) return;
  auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("FusedPadConv2D");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "FusedPadConv2D")
             .Input(_input)
             .Input(_paddings)
             .Input(_filter)
             .Attr("mode", mode)
             .Attr("strides", strides)
             .Attr("padding", padding)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);
}
1201
// Adds a "FusedResizeAndPadConv2D" node to the graph held by `scope`:
// `input` is resized to `size`, padded per `paddings`/`mode`, then
// convolved with `filter`. The single result is exposed as `output`.
// Errors are recorded in `scope`.
FusedResizeAndPadConv2D::FusedResizeAndPadConv2D(const ::tensorflow::Scope&
                                                 scope, ::tensorflow::Input
                                                 input, ::tensorflow::Input
                                                 size, ::tensorflow::Input
                                                 paddings, ::tensorflow::Input
                                                 filter, StringPiece mode,
                                                 const gtl::ArraySlice<int>&
                                                 strides, StringPiece padding,
                                                 const
                                                 FusedResizeAndPadConv2D::Attrs&
                                                 attrs) {
  if (!scope.ok()) return;
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  auto _size = ::tensorflow::ops::AsNodeOut(scope, size);
  if (!scope.ok()) return;
  auto _paddings = ::tensorflow::ops::AsNodeOut(scope, paddings);
  if (!scope.ok()) return;
  auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("FusedResizeAndPadConv2D");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "FusedResizeAndPadConv2D")
             .Input(_input)
             .Input(_size)
             .Input(_paddings)
             .Input(_filter)
             .Attr("resize_align_corners", attrs.resize_align_corners_)
             .Attr("mode", mode)
             .Attr("strides", strides)
             .Attr("padding", padding)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);
}
1241
// Convenience overload: builds a FusedResizeAndPadConv2D op with default
// attributes.
FusedResizeAndPadConv2D::FusedResizeAndPadConv2D(const ::tensorflow::Scope&
                                                 scope, ::tensorflow::Input
                                                 input, ::tensorflow::Input
                                                 size, ::tensorflow::Input
                                                 paddings, ::tensorflow::Input
                                                 filter, StringPiece mode,
                                                 const gtl::ArraySlice<int>&
                                                 strides, StringPiece padding)
  : FusedResizeAndPadConv2D(scope, input, size, paddings, filter, mode, strides, padding, FusedResizeAndPadConv2D::Attrs()) {}
1251
// Adds an "InTopK" node to the graph held by `scope`, checking whether
// `targets` are in the top `k` `predictions`; the boolean result is
// exposed as `precision`. Errors are recorded in `scope`.
InTopK::InTopK(const ::tensorflow::Scope& scope, ::tensorflow::Input
               predictions, ::tensorflow::Input targets, int64 k) {
  if (!scope.ok()) return;
  auto _predictions = ::tensorflow::ops::AsNodeOut(scope, predictions);
  if (!scope.ok()) return;
  auto _targets = ::tensorflow::ops::AsNodeOut(scope, targets);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("InTopK");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "InTopK")
             .Input(_predictions)
             .Input(_targets)
             .Attr("k", k)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->precision = Output(ret, 0);
}
1273
// Adds an "InTopKV2" node to the graph held by `scope`. Unlike InTopK,
// `k` is a graph input rather than a compile-time attribute. The result
// is exposed as `precision`. Errors are recorded in `scope`.
InTopKV2::InTopKV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
                   predictions, ::tensorflow::Input targets,
                   ::tensorflow::Input k) {
  if (!scope.ok()) return;
  auto _predictions = ::tensorflow::ops::AsNodeOut(scope, predictions);
  if (!scope.ok()) return;
  auto _targets = ::tensorflow::ops::AsNodeOut(scope, targets);
  if (!scope.ok()) return;
  auto _k = ::tensorflow::ops::AsNodeOut(scope, k);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("InTopKV2");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "InTopKV2")
             .Input(_predictions)
             .Input(_targets)
             .Input(_k)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->precision = Output(ret, 0);
}
1298
// Adds an "L2Loss" node over `t` to the graph held by `scope`; the scalar
// result is exposed as `output`. Errors are recorded in `scope`.
L2Loss::L2Loss(const ::tensorflow::Scope& scope, ::tensorflow::Input t) {
  if (!scope.ok()) return;
  auto _t = ::tensorflow::ops::AsNodeOut(scope, t);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("L2Loss");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "L2Loss")
             .Input(_t)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);
}
1315
// Adds an "LRN" (local response normalization) node over `input` to the
// graph held by `scope`; `attrs` supplies depth_radius/bias/alpha/beta.
// The result is exposed as `output`. Errors are recorded in `scope`.
LRN::LRN(const ::tensorflow::Scope& scope, ::tensorflow::Input input, const
         LRN::Attrs& attrs) {
  if (!scope.ok()) return;
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("LRN");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "LRN")
             .Input(_input)
             .Attr("depth_radius", attrs.depth_radius_)
             .Attr("bias", attrs.bias_)
             .Attr("alpha", attrs.alpha_)
             .Attr("beta", attrs.beta_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);
}
1337
// Convenience overload: builds an LRN op with default attributes.
LRN::LRN(const ::tensorflow::Scope& scope, ::tensorflow::Input input)
  : LRN(scope, input, LRN::Attrs()) {}
1340
// Adds a "LogSoftmax" node over `logits` to the graph held by `scope`;
// the result is exposed as `logsoftmax`. Errors are recorded in `scope`.
LogSoftmax::LogSoftmax(const ::tensorflow::Scope& scope, ::tensorflow::Input
                       logits) {
  if (!scope.ok()) return;
  auto _logits = ::tensorflow::ops::AsNodeOut(scope, logits);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("LogSoftmax");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "LogSoftmax")
             .Input(_logits)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->logsoftmax = Output(ret, 0);
}
1358
// Adds a "MaxPool" node over `input` to the graph held by `scope` with the
// given window `ksize`, `strides`, and `padding` algorithm; `attrs`
// supplies explicit_paddings/data_format. The result is exposed as
// `output`. Errors are recorded in `scope`.
MaxPool::MaxPool(const ::tensorflow::Scope& scope, ::tensorflow::Input input,
                 const gtl::ArraySlice<int>& ksize, const gtl::ArraySlice<int>&
                 strides, StringPiece padding, const MaxPool::Attrs& attrs) {
  if (!scope.ok()) return;
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("MaxPool");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "MaxPool")
             .Input(_input)
             .Attr("ksize", ksize)
             .Attr("strides", strides)
             .Attr("padding", padding)
             .Attr("explicit_paddings", attrs.explicit_paddings_)
             .Attr("data_format", attrs.data_format_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);
}
1382
// Convenience overload: builds a MaxPool op with default attributes.
MaxPool::MaxPool(const ::tensorflow::Scope& scope, ::tensorflow::Input input,
                 const gtl::ArraySlice<int>& ksize, const gtl::ArraySlice<int>&
                 strides, StringPiece padding)
  : MaxPool(scope, input, ksize, strides, padding, MaxPool::Attrs()) {}
1387
// Adds a "MaxPool3D" node over `input` to the graph held by `scope` with
// the given `ksize`, `strides`, and `padding`; `attrs` supplies
// data_format. The result is exposed as `output`. Errors are recorded in
// `scope`.
MaxPool3D::MaxPool3D(const ::tensorflow::Scope& scope, ::tensorflow::Input
                     input, const gtl::ArraySlice<int>& ksize, const
                     gtl::ArraySlice<int>& strides, StringPiece padding, const
                     MaxPool3D::Attrs& attrs) {
  if (!scope.ok()) return;
  auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("MaxPool3D");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "MaxPool3D")
             .Input(_input)
             .Attr("ksize", ksize)
             .Attr("strides", strides)
             .Attr("padding", padding)
             .Attr("data_format", attrs.data_format_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);
}
1411
// Convenience overload: builds a MaxPool3D op with default attributes.
MaxPool3D::MaxPool3D(const ::tensorflow::Scope& scope, ::tensorflow::Input
                     input, const gtl::ArraySlice<int>& ksize, const
                     gtl::ArraySlice<int>& strides, StringPiece padding)
  : MaxPool3D(scope, input, ksize, strides, padding, MaxPool3D::Attrs()) {}
1416
// Adds a "MaxPool3DGrad" node to the graph held by `scope`, computing the
// gradient of MaxPool3D from `orig_input`, `orig_output`, and `grad` with
// the same `ksize`/`strides`/`padding` as the forward op. The result is
// exposed as `output`. Errors are recorded in `scope`.
MaxPool3DGrad::MaxPool3DGrad(const ::tensorflow::Scope& scope,
                             ::tensorflow::Input orig_input,
                             ::tensorflow::Input orig_output,
                             ::tensorflow::Input grad, const
                             gtl::ArraySlice<int>& ksize, const
                             gtl::ArraySlice<int>& strides, StringPiece
                             padding, const MaxPool3DGrad::Attrs& attrs) {
  if (!scope.ok()) return;
  auto _orig_input = ::tensorflow::ops::AsNodeOut(scope, orig_input);
  if (!scope.ok()) return;
  auto _orig_output = ::tensorflow::ops::AsNodeOut(scope, orig_output);
  if (!scope.ok()) return;
  auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("MaxPool3DGrad");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "MaxPool3DGrad")
             .Input(_orig_input)
             .Input(_orig_output)
             .Input(_grad)
             .Attr("ksize", ksize)
             .Attr("strides", strides)
             .Attr("padding", padding)
             .Attr("data_format", attrs.data_format_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);
}
1449
// Convenience overload: builds a MaxPool3DGrad op with default attributes.
MaxPool3DGrad::MaxPool3DGrad(const ::tensorflow::Scope& scope,
                             ::tensorflow::Input orig_input,
                             ::tensorflow::Input orig_output,
                             ::tensorflow::Input grad, const
                             gtl::ArraySlice<int>& ksize, const
                             gtl::ArraySlice<int>& strides, StringPiece
                             padding)
  : MaxPool3DGrad(scope, orig_input, orig_output, grad, ksize, strides, padding, MaxPool3DGrad::Attrs()) {}
1458
// Adds a "MaxPool3DGradGrad" node to the graph held by `scope` (the
// second-order gradient of MaxPool3D), wired the same way as
// MaxPool3DGrad. The result is exposed as `output`. Errors are recorded
// in `scope`.
MaxPool3DGradGrad::MaxPool3DGradGrad(const ::tensorflow::Scope& scope,
                                     ::tensorflow::Input orig_input,
                                     ::tensorflow::Input orig_output,
                                     ::tensorflow::Input grad, const
                                     gtl::ArraySlice<int>& ksize, const
                                     gtl::ArraySlice<int>& strides, StringPiece
                                     padding, const MaxPool3DGradGrad::Attrs&
                                     attrs) {
  if (!scope.ok()) return;
  auto _orig_input = ::tensorflow::ops::AsNodeOut(scope, orig_input);
  if (!scope.ok()) return;
  auto _orig_output = ::tensorflow::ops::AsNodeOut(scope, orig_output);
  if (!scope.ok()) return;
  auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("MaxPool3DGradGrad");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "MaxPool3DGradGrad")
             .Input(_orig_input)
             .Input(_orig_output)
             .Input(_grad)
             .Attr("ksize", ksize)
             .Attr("strides", strides)
             .Attr("padding", padding)
             .Attr("data_format", attrs.data_format_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);
}
1492
// Convenience overload: builds a MaxPool3DGradGrad op with default
// attributes.
MaxPool3DGradGrad::MaxPool3DGradGrad(const ::tensorflow::Scope& scope,
                                     ::tensorflow::Input orig_input,
                                     ::tensorflow::Input orig_output,
                                     ::tensorflow::Input grad, const
                                     gtl::ArraySlice<int>& ksize, const
                                     gtl::ArraySlice<int>& strides, StringPiece
                                     padding)
  : MaxPool3DGradGrad(scope, orig_input, orig_output, grad, ksize, strides, padding, MaxPool3DGradGrad::Attrs()) {}
1501
// Adds a "MaxPoolGradGrad" node to the graph held by `scope` (the
// second-order gradient of MaxPool) from `orig_input`, `orig_output`, and
// `grad` with attribute-valued `ksize`/`strides`. The result is exposed as
// `output`. Errors are recorded in `scope`.
MaxPoolGradGrad::MaxPoolGradGrad(const ::tensorflow::Scope& scope,
                                 ::tensorflow::Input orig_input,
                                 ::tensorflow::Input orig_output,
                                 ::tensorflow::Input grad, const
                                 gtl::ArraySlice<int>& ksize, const
                                 gtl::ArraySlice<int>& strides, StringPiece
                                 padding, const MaxPoolGradGrad::Attrs& attrs) {
  if (!scope.ok()) return;
  auto _orig_input = ::tensorflow::ops::AsNodeOut(scope, orig_input);
  if (!scope.ok()) return;
  auto _orig_output = ::tensorflow::ops::AsNodeOut(scope, orig_output);
  if (!scope.ok()) return;
  auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("MaxPoolGradGrad");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "MaxPoolGradGrad")
             .Input(_orig_input)
             .Input(_orig_output)
             .Input(_grad)
             .Attr("ksize", ksize)
             .Attr("strides", strides)
             .Attr("padding", padding)
             .Attr("data_format", attrs.data_format_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);
}
1534
// Convenience overload: builds a MaxPoolGradGrad op with default attributes.
MaxPoolGradGrad::MaxPoolGradGrad(const ::tensorflow::Scope& scope,
                                 ::tensorflow::Input orig_input,
                                 ::tensorflow::Input orig_output,
                                 ::tensorflow::Input grad, const
                                 gtl::ArraySlice<int>& ksize, const
                                 gtl::ArraySlice<int>& strides, StringPiece
                                 padding)
  : MaxPoolGradGrad(scope, orig_input, orig_output, grad, ksize, strides, padding, MaxPoolGradGrad::Attrs()) {}
1543
// Adds a "MaxPoolGradGradV2" node to the graph held by `scope`. Unlike
// MaxPoolGradGrad, `ksize` and `strides` are graph inputs rather than
// attributes. The result is exposed as `output`. Errors are recorded in
// `scope`.
MaxPoolGradGradV2::MaxPoolGradGradV2(const ::tensorflow::Scope& scope,
                                     ::tensorflow::Input orig_input,
                                     ::tensorflow::Input orig_output,
                                     ::tensorflow::Input grad,
                                     ::tensorflow::Input ksize,
                                     ::tensorflow::Input strides, StringPiece
                                     padding, const MaxPoolGradGradV2::Attrs&
                                     attrs) {
  if (!scope.ok()) return;
  auto _orig_input = ::tensorflow::ops::AsNodeOut(scope, orig_input);
  if (!scope.ok()) return;
  auto _orig_output = ::tensorflow::ops::AsNodeOut(scope, orig_output);
  if (!scope.ok()) return;
  auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad);
  if (!scope.ok()) return;
  auto _ksize = ::tensorflow::ops::AsNodeOut(scope, ksize);
  if (!scope.ok()) return;
  auto _strides = ::tensorflow::ops::AsNodeOut(scope, strides);
  if (!scope.ok()) return;
  ::tensorflow::Node* ret;
  const auto unique_name = scope.GetUniqueNameForOp("MaxPoolGradGradV2");
  auto builder = ::tensorflow::NodeBuilder(unique_name, "MaxPoolGradGradV2")
             .Input(_orig_input)
             .Input(_orig_output)
             .Input(_grad)
             .Input(_ksize)
             .Input(_strides)
             .Attr("padding", padding)
             .Attr("data_format", attrs.data_format_)
  ;
  scope.UpdateBuilder(&builder);
  scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
  if (!scope.ok()) return;
  scope.UpdateStatus(scope.DoShapeInference(ret));
  this->operation = Operation(ret);
  this->output = Output(ret, 0);
}
1581
// Convenience overload: builds a MaxPoolGradGradV2 op with default
// attributes.
MaxPoolGradGradV2::MaxPoolGradGradV2(const ::tensorflow::Scope& scope,
                                     ::tensorflow::Input orig_input,
                                     ::tensorflow::Input orig_output,
                                     ::tensorflow::Input grad,
                                     ::tensorflow::Input ksize,
                                     ::tensorflow::Input strides, StringPiece
                                     padding)
  : MaxPoolGradGradV2(scope, orig_input, orig_output, grad, ksize, strides, padding, MaxPoolGradGradV2::Attrs()) {}
1590
1591MaxPoolGradGradWithArgmax::MaxPoolGradGradWithArgmax(const ::tensorflow::Scope&
1592 scope, ::tensorflow::Input
1593 input, ::tensorflow::Input
1594 grad, ::tensorflow::Input
1595 argmax, const
1596 gtl::ArraySlice<int>&
1597 ksize, const
1598 gtl::ArraySlice<int>&
1599 strides, StringPiece
1600 padding, const
1601 MaxPoolGradGradWithArgmax::Attrs&
1602 attrs) {
1603 if (!scope.ok()) return;
1604 auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
1605 if (!scope.ok()) return;
1606 auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad);
1607 if (!scope.ok()) return;
1608 auto _argmax = ::tensorflow::ops::AsNodeOut(scope, argmax);
1609 if (!scope.ok()) return;
1610 ::tensorflow::Node* ret;
1611 const auto unique_name = scope.GetUniqueNameForOp("MaxPoolGradGradWithArgmax");
1612 auto builder = ::tensorflow::NodeBuilder(unique_name, "MaxPoolGradGradWithArgmax")
1613 .Input(_input)
1614 .Input(_grad)
1615 .Input(_argmax)
1616 .Attr("ksize", ksize)
1617 .Attr("strides", strides)
1618 .Attr("padding", padding)
1619 .Attr("include_batch_in_index", attrs.include_batch_in_index_)
1620 ;
1621 scope.UpdateBuilder(&builder);
1622 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
1623 if (!scope.ok()) return;
1624 scope.UpdateStatus(scope.DoShapeInference(ret));
1625 this->operation = Operation(ret);
1626 this->output = Output(ret, 0);
1627}
1628
1629MaxPoolGradGradWithArgmax::MaxPoolGradGradWithArgmax(const ::tensorflow::Scope&
1630 scope, ::tensorflow::Input
1631 input, ::tensorflow::Input
1632 grad, ::tensorflow::Input
1633 argmax, const
1634 gtl::ArraySlice<int>&
1635 ksize, const
1636 gtl::ArraySlice<int>&
1637 strides, StringPiece
1638 padding)
1639 : MaxPoolGradGradWithArgmax(scope, input, grad, argmax, ksize, strides, padding, MaxPoolGradGradWithArgmax::Attrs()) {}
1640
1641MaxPoolGradV2::MaxPoolGradV2(const ::tensorflow::Scope& scope,
1642 ::tensorflow::Input orig_input,
1643 ::tensorflow::Input orig_output,
1644 ::tensorflow::Input grad, ::tensorflow::Input
1645 ksize, ::tensorflow::Input strides, StringPiece
1646 padding, const MaxPoolGradV2::Attrs& attrs) {
1647 if (!scope.ok()) return;
1648 auto _orig_input = ::tensorflow::ops::AsNodeOut(scope, orig_input);
1649 if (!scope.ok()) return;
1650 auto _orig_output = ::tensorflow::ops::AsNodeOut(scope, orig_output);
1651 if (!scope.ok()) return;
1652 auto _grad = ::tensorflow::ops::AsNodeOut(scope, grad);
1653 if (!scope.ok()) return;
1654 auto _ksize = ::tensorflow::ops::AsNodeOut(scope, ksize);
1655 if (!scope.ok()) return;
1656 auto _strides = ::tensorflow::ops::AsNodeOut(scope, strides);
1657 if (!scope.ok()) return;
1658 ::tensorflow::Node* ret;
1659 const auto unique_name = scope.GetUniqueNameForOp("MaxPoolGradV2");
1660 auto builder = ::tensorflow::NodeBuilder(unique_name, "MaxPoolGradV2")
1661 .Input(_orig_input)
1662 .Input(_orig_output)
1663 .Input(_grad)
1664 .Input(_ksize)
1665 .Input(_strides)
1666 .Attr("padding", padding)
1667 .Attr("data_format", attrs.data_format_)
1668 ;
1669 scope.UpdateBuilder(&builder);
1670 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
1671 if (!scope.ok()) return;
1672 scope.UpdateStatus(scope.DoShapeInference(ret));
1673 this->operation = Operation(ret);
1674 this->output = Output(ret, 0);
1675}
1676
1677MaxPoolGradV2::MaxPoolGradV2(const ::tensorflow::Scope& scope,
1678 ::tensorflow::Input orig_input,
1679 ::tensorflow::Input orig_output,
1680 ::tensorflow::Input grad, ::tensorflow::Input
1681 ksize, ::tensorflow::Input strides, StringPiece
1682 padding)
1683 : MaxPoolGradV2(scope, orig_input, orig_output, grad, ksize, strides, padding, MaxPoolGradV2::Attrs()) {}
1684
1685MaxPoolV2::MaxPoolV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
1686 input, ::tensorflow::Input ksize, ::tensorflow::Input
1687 strides, StringPiece padding, const MaxPoolV2::Attrs&
1688 attrs) {
1689 if (!scope.ok()) return;
1690 auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
1691 if (!scope.ok()) return;
1692 auto _ksize = ::tensorflow::ops::AsNodeOut(scope, ksize);
1693 if (!scope.ok()) return;
1694 auto _strides = ::tensorflow::ops::AsNodeOut(scope, strides);
1695 if (!scope.ok()) return;
1696 ::tensorflow::Node* ret;
1697 const auto unique_name = scope.GetUniqueNameForOp("MaxPoolV2");
1698 auto builder = ::tensorflow::NodeBuilder(unique_name, "MaxPoolV2")
1699 .Input(_input)
1700 .Input(_ksize)
1701 .Input(_strides)
1702 .Attr("padding", padding)
1703 .Attr("data_format", attrs.data_format_)
1704 ;
1705 scope.UpdateBuilder(&builder);
1706 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
1707 if (!scope.ok()) return;
1708 scope.UpdateStatus(scope.DoShapeInference(ret));
1709 this->operation = Operation(ret);
1710 this->output = Output(ret, 0);
1711}
1712
1713MaxPoolV2::MaxPoolV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
1714 input, ::tensorflow::Input ksize, ::tensorflow::Input
1715 strides, StringPiece padding)
1716 : MaxPoolV2(scope, input, ksize, strides, padding, MaxPoolV2::Attrs()) {}
1717
1718MaxPoolWithArgmax::MaxPoolWithArgmax(const ::tensorflow::Scope& scope,
1719 ::tensorflow::Input input, const
1720 gtl::ArraySlice<int>& ksize, const
1721 gtl::ArraySlice<int>& strides, StringPiece
1722 padding, const MaxPoolWithArgmax::Attrs&
1723 attrs) {
1724 if (!scope.ok()) return;
1725 auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
1726 if (!scope.ok()) return;
1727 ::tensorflow::Node* ret;
1728 const auto unique_name = scope.GetUniqueNameForOp("MaxPoolWithArgmax");
1729 auto builder = ::tensorflow::NodeBuilder(unique_name, "MaxPoolWithArgmax")
1730 .Input(_input)
1731 .Attr("ksize", ksize)
1732 .Attr("strides", strides)
1733 .Attr("Targmax", attrs.Targmax_)
1734 .Attr("padding", padding)
1735 .Attr("include_batch_in_index", attrs.include_batch_in_index_)
1736 ;
1737 scope.UpdateBuilder(&builder);
1738 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
1739 if (!scope.ok()) return;
1740 scope.UpdateStatus(scope.DoShapeInference(ret));
1741 this->operation = Operation(ret);
1742 ::tensorflow::NameRangeMap _outputs_range;
1743 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
1744 if (!_status_.ok()) {
1745 scope.UpdateStatus(_status_);
1746 return;
1747 }
1748
1749 this->output = Output(ret, _outputs_range["output"].first);
1750 this->argmax = Output(ret, _outputs_range["argmax"].first);
1751}
1752
1753MaxPoolWithArgmax::MaxPoolWithArgmax(const ::tensorflow::Scope& scope,
1754 ::tensorflow::Input input, const
1755 gtl::ArraySlice<int>& ksize, const
1756 gtl::ArraySlice<int>& strides, StringPiece
1757 padding)
1758 : MaxPoolWithArgmax(scope, input, ksize, strides, padding, MaxPoolWithArgmax::Attrs()) {}
1759
1760NthElement::NthElement(const ::tensorflow::Scope& scope, ::tensorflow::Input
1761 input, ::tensorflow::Input n, const NthElement::Attrs&
1762 attrs) {
1763 if (!scope.ok()) return;
1764 auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
1765 if (!scope.ok()) return;
1766 auto _n = ::tensorflow::ops::AsNodeOut(scope, n);
1767 if (!scope.ok()) return;
1768 ::tensorflow::Node* ret;
1769 const auto unique_name = scope.GetUniqueNameForOp("NthElement");
1770 auto builder = ::tensorflow::NodeBuilder(unique_name, "NthElement")
1771 .Input(_input)
1772 .Input(_n)
1773 .Attr("reverse", attrs.reverse_)
1774 ;
1775 scope.UpdateBuilder(&builder);
1776 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
1777 if (!scope.ok()) return;
1778 scope.UpdateStatus(scope.DoShapeInference(ret));
1779 this->operation = Operation(ret);
1780 this->values = Output(ret, 0);
1781}
1782
1783NthElement::NthElement(const ::tensorflow::Scope& scope, ::tensorflow::Input
1784 input, ::tensorflow::Input n)
1785 : NthElement(scope, input, n, NthElement::Attrs()) {}
1786
1787QuantizedAvgPool::QuantizedAvgPool(const ::tensorflow::Scope& scope,
1788 ::tensorflow::Input input,
1789 ::tensorflow::Input min_input,
1790 ::tensorflow::Input max_input, const
1791 gtl::ArraySlice<int>& ksize, const
1792 gtl::ArraySlice<int>& strides, StringPiece
1793 padding) {
1794 if (!scope.ok()) return;
1795 auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
1796 if (!scope.ok()) return;
1797 auto _min_input = ::tensorflow::ops::AsNodeOut(scope, min_input);
1798 if (!scope.ok()) return;
1799 auto _max_input = ::tensorflow::ops::AsNodeOut(scope, max_input);
1800 if (!scope.ok()) return;
1801 ::tensorflow::Node* ret;
1802 const auto unique_name = scope.GetUniqueNameForOp("QuantizedAvgPool");
1803 auto builder = ::tensorflow::NodeBuilder(unique_name, "QuantizedAvgPool")
1804 .Input(_input)
1805 .Input(_min_input)
1806 .Input(_max_input)
1807 .Attr("ksize", ksize)
1808 .Attr("strides", strides)
1809 .Attr("padding", padding)
1810 ;
1811 scope.UpdateBuilder(&builder);
1812 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
1813 if (!scope.ok()) return;
1814 scope.UpdateStatus(scope.DoShapeInference(ret));
1815 this->operation = Operation(ret);
1816 ::tensorflow::NameRangeMap _outputs_range;
1817 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
1818 if (!_status_.ok()) {
1819 scope.UpdateStatus(_status_);
1820 return;
1821 }
1822
1823 this->output = Output(ret, _outputs_range["output"].first);
1824 this->min_output = Output(ret, _outputs_range["min_output"].first);
1825 this->max_output = Output(ret, _outputs_range["max_output"].first);
1826}
1827
1828QuantizedBatchNormWithGlobalNormalization::QuantizedBatchNormWithGlobalNormalization(const ::tensorflow::Scope& scope, ::tensorflow::Input t, ::tensorflow::Input t_min, ::tensorflow::Input t_max, ::tensorflow::Input m, ::tensorflow::Input m_min, ::tensorflow::Input m_max, ::tensorflow::Input v, ::tensorflow::Input v_min, ::tensorflow::Input v_max, ::tensorflow::Input beta, ::tensorflow::Input beta_min, ::tensorflow::Input beta_max, ::tensorflow::Input gamma, ::tensorflow::Input gamma_min, ::tensorflow::Input gamma_max, DataType out_type, float variance_epsilon, bool
1829 scale_after_normalization) {
1830 if (!scope.ok()) return;
1831 auto _t = ::tensorflow::ops::AsNodeOut(scope, t);
1832 if (!scope.ok()) return;
1833 auto _t_min = ::tensorflow::ops::AsNodeOut(scope, t_min);
1834 if (!scope.ok()) return;
1835 auto _t_max = ::tensorflow::ops::AsNodeOut(scope, t_max);
1836 if (!scope.ok()) return;
1837 auto _m = ::tensorflow::ops::AsNodeOut(scope, m);
1838 if (!scope.ok()) return;
1839 auto _m_min = ::tensorflow::ops::AsNodeOut(scope, m_min);
1840 if (!scope.ok()) return;
1841 auto _m_max = ::tensorflow::ops::AsNodeOut(scope, m_max);
1842 if (!scope.ok()) return;
1843 auto _v = ::tensorflow::ops::AsNodeOut(scope, v);
1844 if (!scope.ok()) return;
1845 auto _v_min = ::tensorflow::ops::AsNodeOut(scope, v_min);
1846 if (!scope.ok()) return;
1847 auto _v_max = ::tensorflow::ops::AsNodeOut(scope, v_max);
1848 if (!scope.ok()) return;
1849 auto _beta = ::tensorflow::ops::AsNodeOut(scope, beta);
1850 if (!scope.ok()) return;
1851 auto _beta_min = ::tensorflow::ops::AsNodeOut(scope, beta_min);
1852 if (!scope.ok()) return;
1853 auto _beta_max = ::tensorflow::ops::AsNodeOut(scope, beta_max);
1854 if (!scope.ok()) return;
1855 auto _gamma = ::tensorflow::ops::AsNodeOut(scope, gamma);
1856 if (!scope.ok()) return;
1857 auto _gamma_min = ::tensorflow::ops::AsNodeOut(scope, gamma_min);
1858 if (!scope.ok()) return;
1859 auto _gamma_max = ::tensorflow::ops::AsNodeOut(scope, gamma_max);
1860 if (!scope.ok()) return;
1861 ::tensorflow::Node* ret;
1862 const auto unique_name = scope.GetUniqueNameForOp("QuantizedBatchNormWithGlobalNormalization");
1863 auto builder = ::tensorflow::NodeBuilder(unique_name, "QuantizedBatchNormWithGlobalNormalization")
1864 .Input(_t)
1865 .Input(_t_min)
1866 .Input(_t_max)
1867 .Input(_m)
1868 .Input(_m_min)
1869 .Input(_m_max)
1870 .Input(_v)
1871 .Input(_v_min)
1872 .Input(_v_max)
1873 .Input(_beta)
1874 .Input(_beta_min)
1875 .Input(_beta_max)
1876 .Input(_gamma)
1877 .Input(_gamma_min)
1878 .Input(_gamma_max)
1879 .Attr("out_type", out_type)
1880 .Attr("variance_epsilon", variance_epsilon)
1881 .Attr("scale_after_normalization", scale_after_normalization)
1882 ;
1883 scope.UpdateBuilder(&builder);
1884 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
1885 if (!scope.ok()) return;
1886 scope.UpdateStatus(scope.DoShapeInference(ret));
1887 this->operation = Operation(ret);
1888 ::tensorflow::NameRangeMap _outputs_range;
1889 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
1890 if (!_status_.ok()) {
1891 scope.UpdateStatus(_status_);
1892 return;
1893 }
1894
1895 this->result = Output(ret, _outputs_range["result"].first);
1896 this->result_min = Output(ret, _outputs_range["result_min"].first);
1897 this->result_max = Output(ret, _outputs_range["result_max"].first);
1898}
1899
1900QuantizedBiasAdd::QuantizedBiasAdd(const ::tensorflow::Scope& scope,
1901 ::tensorflow::Input input,
1902 ::tensorflow::Input bias,
1903 ::tensorflow::Input min_input,
1904 ::tensorflow::Input max_input,
1905 ::tensorflow::Input min_bias,
1906 ::tensorflow::Input max_bias, DataType
1907 out_type) {
1908 if (!scope.ok()) return;
1909 auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
1910 if (!scope.ok()) return;
1911 auto _bias = ::tensorflow::ops::AsNodeOut(scope, bias);
1912 if (!scope.ok()) return;
1913 auto _min_input = ::tensorflow::ops::AsNodeOut(scope, min_input);
1914 if (!scope.ok()) return;
1915 auto _max_input = ::tensorflow::ops::AsNodeOut(scope, max_input);
1916 if (!scope.ok()) return;
1917 auto _min_bias = ::tensorflow::ops::AsNodeOut(scope, min_bias);
1918 if (!scope.ok()) return;
1919 auto _max_bias = ::tensorflow::ops::AsNodeOut(scope, max_bias);
1920 if (!scope.ok()) return;
1921 ::tensorflow::Node* ret;
1922 const auto unique_name = scope.GetUniqueNameForOp("QuantizedBiasAdd");
1923 auto builder = ::tensorflow::NodeBuilder(unique_name, "QuantizedBiasAdd")
1924 .Input(_input)
1925 .Input(_bias)
1926 .Input(_min_input)
1927 .Input(_max_input)
1928 .Input(_min_bias)
1929 .Input(_max_bias)
1930 .Attr("out_type", out_type)
1931 ;
1932 scope.UpdateBuilder(&builder);
1933 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
1934 if (!scope.ok()) return;
1935 scope.UpdateStatus(scope.DoShapeInference(ret));
1936 this->operation = Operation(ret);
1937 ::tensorflow::NameRangeMap _outputs_range;
1938 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
1939 if (!_status_.ok()) {
1940 scope.UpdateStatus(_status_);
1941 return;
1942 }
1943
1944 this->output = Output(ret, _outputs_range["output"].first);
1945 this->min_out = Output(ret, _outputs_range["min_out"].first);
1946 this->max_out = Output(ret, _outputs_range["max_out"].first);
1947}
1948
1949QuantizedConv2D::QuantizedConv2D(const ::tensorflow::Scope& scope,
1950 ::tensorflow::Input input, ::tensorflow::Input
1951 filter, ::tensorflow::Input min_input,
1952 ::tensorflow::Input max_input,
1953 ::tensorflow::Input min_filter,
1954 ::tensorflow::Input max_filter, const
1955 gtl::ArraySlice<int>& strides, StringPiece
1956 padding, const QuantizedConv2D::Attrs& attrs) {
1957 if (!scope.ok()) return;
1958 auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
1959 if (!scope.ok()) return;
1960 auto _filter = ::tensorflow::ops::AsNodeOut(scope, filter);
1961 if (!scope.ok()) return;
1962 auto _min_input = ::tensorflow::ops::AsNodeOut(scope, min_input);
1963 if (!scope.ok()) return;
1964 auto _max_input = ::tensorflow::ops::AsNodeOut(scope, max_input);
1965 if (!scope.ok()) return;
1966 auto _min_filter = ::tensorflow::ops::AsNodeOut(scope, min_filter);
1967 if (!scope.ok()) return;
1968 auto _max_filter = ::tensorflow::ops::AsNodeOut(scope, max_filter);
1969 if (!scope.ok()) return;
1970 ::tensorflow::Node* ret;
1971 const auto unique_name = scope.GetUniqueNameForOp("QuantizedConv2D");
1972 auto builder = ::tensorflow::NodeBuilder(unique_name, "QuantizedConv2D")
1973 .Input(_input)
1974 .Input(_filter)
1975 .Input(_min_input)
1976 .Input(_max_input)
1977 .Input(_min_filter)
1978 .Input(_max_filter)
1979 .Attr("out_type", attrs.out_type_)
1980 .Attr("strides", strides)
1981 .Attr("padding", padding)
1982 .Attr("dilations", attrs.dilations_)
1983 ;
1984 scope.UpdateBuilder(&builder);
1985 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
1986 if (!scope.ok()) return;
1987 scope.UpdateStatus(scope.DoShapeInference(ret));
1988 this->operation = Operation(ret);
1989 ::tensorflow::NameRangeMap _outputs_range;
1990 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
1991 if (!_status_.ok()) {
1992 scope.UpdateStatus(_status_);
1993 return;
1994 }
1995
1996 this->output = Output(ret, _outputs_range["output"].first);
1997 this->min_output = Output(ret, _outputs_range["min_output"].first);
1998 this->max_output = Output(ret, _outputs_range["max_output"].first);
1999}
2000
2001QuantizedConv2D::QuantizedConv2D(const ::tensorflow::Scope& scope,
2002 ::tensorflow::Input input, ::tensorflow::Input
2003 filter, ::tensorflow::Input min_input,
2004 ::tensorflow::Input max_input,
2005 ::tensorflow::Input min_filter,
2006 ::tensorflow::Input max_filter, const
2007 gtl::ArraySlice<int>& strides, StringPiece
2008 padding)
2009 : QuantizedConv2D(scope, input, filter, min_input, max_input, min_filter, max_filter, strides, padding, QuantizedConv2D::Attrs()) {}
2010
2011QuantizedMaxPool::QuantizedMaxPool(const ::tensorflow::Scope& scope,
2012 ::tensorflow::Input input,
2013 ::tensorflow::Input min_input,
2014 ::tensorflow::Input max_input, const
2015 gtl::ArraySlice<int>& ksize, const
2016 gtl::ArraySlice<int>& strides, StringPiece
2017 padding) {
2018 if (!scope.ok()) return;
2019 auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
2020 if (!scope.ok()) return;
2021 auto _min_input = ::tensorflow::ops::AsNodeOut(scope, min_input);
2022 if (!scope.ok()) return;
2023 auto _max_input = ::tensorflow::ops::AsNodeOut(scope, max_input);
2024 if (!scope.ok()) return;
2025 ::tensorflow::Node* ret;
2026 const auto unique_name = scope.GetUniqueNameForOp("QuantizedMaxPool");
2027 auto builder = ::tensorflow::NodeBuilder(unique_name, "QuantizedMaxPool")
2028 .Input(_input)
2029 .Input(_min_input)
2030 .Input(_max_input)
2031 .Attr("ksize", ksize)
2032 .Attr("strides", strides)
2033 .Attr("padding", padding)
2034 ;
2035 scope.UpdateBuilder(&builder);
2036 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2037 if (!scope.ok()) return;
2038 scope.UpdateStatus(scope.DoShapeInference(ret));
2039 this->operation = Operation(ret);
2040 ::tensorflow::NameRangeMap _outputs_range;
2041 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
2042 if (!_status_.ok()) {
2043 scope.UpdateStatus(_status_);
2044 return;
2045 }
2046
2047 this->output = Output(ret, _outputs_range["output"].first);
2048 this->min_output = Output(ret, _outputs_range["min_output"].first);
2049 this->max_output = Output(ret, _outputs_range["max_output"].first);
2050}
2051
2052QuantizedRelu::QuantizedRelu(const ::tensorflow::Scope& scope,
2053 ::tensorflow::Input features, ::tensorflow::Input
2054 min_features, ::tensorflow::Input max_features,
2055 const QuantizedRelu::Attrs& attrs) {
2056 if (!scope.ok()) return;
2057 auto _features = ::tensorflow::ops::AsNodeOut(scope, features);
2058 if (!scope.ok()) return;
2059 auto _min_features = ::tensorflow::ops::AsNodeOut(scope, min_features);
2060 if (!scope.ok()) return;
2061 auto _max_features = ::tensorflow::ops::AsNodeOut(scope, max_features);
2062 if (!scope.ok()) return;
2063 ::tensorflow::Node* ret;
2064 const auto unique_name = scope.GetUniqueNameForOp("QuantizedRelu");
2065 auto builder = ::tensorflow::NodeBuilder(unique_name, "QuantizedRelu")
2066 .Input(_features)
2067 .Input(_min_features)
2068 .Input(_max_features)
2069 .Attr("out_type", attrs.out_type_)
2070 ;
2071 scope.UpdateBuilder(&builder);
2072 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2073 if (!scope.ok()) return;
2074 scope.UpdateStatus(scope.DoShapeInference(ret));
2075 this->operation = Operation(ret);
2076 ::tensorflow::NameRangeMap _outputs_range;
2077 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
2078 if (!_status_.ok()) {
2079 scope.UpdateStatus(_status_);
2080 return;
2081 }
2082
2083 this->activations = Output(ret, _outputs_range["activations"].first);
2084 this->min_activations = Output(ret, _outputs_range["min_activations"].first);
2085 this->max_activations = Output(ret, _outputs_range["max_activations"].first);
2086}
2087
2088QuantizedRelu::QuantizedRelu(const ::tensorflow::Scope& scope,
2089 ::tensorflow::Input features, ::tensorflow::Input
2090 min_features, ::tensorflow::Input max_features)
2091 : QuantizedRelu(scope, features, min_features, max_features, QuantizedRelu::Attrs()) {}
2092
2093QuantizedRelu6::QuantizedRelu6(const ::tensorflow::Scope& scope,
2094 ::tensorflow::Input features,
2095 ::tensorflow::Input min_features,
2096 ::tensorflow::Input max_features, const
2097 QuantizedRelu6::Attrs& attrs) {
2098 if (!scope.ok()) return;
2099 auto _features = ::tensorflow::ops::AsNodeOut(scope, features);
2100 if (!scope.ok()) return;
2101 auto _min_features = ::tensorflow::ops::AsNodeOut(scope, min_features);
2102 if (!scope.ok()) return;
2103 auto _max_features = ::tensorflow::ops::AsNodeOut(scope, max_features);
2104 if (!scope.ok()) return;
2105 ::tensorflow::Node* ret;
2106 const auto unique_name = scope.GetUniqueNameForOp("QuantizedRelu6");
2107 auto builder = ::tensorflow::NodeBuilder(unique_name, "QuantizedRelu6")
2108 .Input(_features)
2109 .Input(_min_features)
2110 .Input(_max_features)
2111 .Attr("out_type", attrs.out_type_)
2112 ;
2113 scope.UpdateBuilder(&builder);
2114 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2115 if (!scope.ok()) return;
2116 scope.UpdateStatus(scope.DoShapeInference(ret));
2117 this->operation = Operation(ret);
2118 ::tensorflow::NameRangeMap _outputs_range;
2119 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
2120 if (!_status_.ok()) {
2121 scope.UpdateStatus(_status_);
2122 return;
2123 }
2124
2125 this->activations = Output(ret, _outputs_range["activations"].first);
2126 this->min_activations = Output(ret, _outputs_range["min_activations"].first);
2127 this->max_activations = Output(ret, _outputs_range["max_activations"].first);
2128}
2129
2130QuantizedRelu6::QuantizedRelu6(const ::tensorflow::Scope& scope,
2131 ::tensorflow::Input features,
2132 ::tensorflow::Input min_features,
2133 ::tensorflow::Input max_features)
2134 : QuantizedRelu6(scope, features, min_features, max_features, QuantizedRelu6::Attrs()) {}
2135
2136QuantizedReluX::QuantizedReluX(const ::tensorflow::Scope& scope,
2137 ::tensorflow::Input features,
2138 ::tensorflow::Input max_value,
2139 ::tensorflow::Input min_features,
2140 ::tensorflow::Input max_features, const
2141 QuantizedReluX::Attrs& attrs) {
2142 if (!scope.ok()) return;
2143 auto _features = ::tensorflow::ops::AsNodeOut(scope, features);
2144 if (!scope.ok()) return;
2145 auto _max_value = ::tensorflow::ops::AsNodeOut(scope, max_value);
2146 if (!scope.ok()) return;
2147 auto _min_features = ::tensorflow::ops::AsNodeOut(scope, min_features);
2148 if (!scope.ok()) return;
2149 auto _max_features = ::tensorflow::ops::AsNodeOut(scope, max_features);
2150 if (!scope.ok()) return;
2151 ::tensorflow::Node* ret;
2152 const auto unique_name = scope.GetUniqueNameForOp("QuantizedReluX");
2153 auto builder = ::tensorflow::NodeBuilder(unique_name, "QuantizedReluX")
2154 .Input(_features)
2155 .Input(_max_value)
2156 .Input(_min_features)
2157 .Input(_max_features)
2158 .Attr("out_type", attrs.out_type_)
2159 ;
2160 scope.UpdateBuilder(&builder);
2161 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2162 if (!scope.ok()) return;
2163 scope.UpdateStatus(scope.DoShapeInference(ret));
2164 this->operation = Operation(ret);
2165 ::tensorflow::NameRangeMap _outputs_range;
2166 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
2167 if (!_status_.ok()) {
2168 scope.UpdateStatus(_status_);
2169 return;
2170 }
2171
2172 this->activations = Output(ret, _outputs_range["activations"].first);
2173 this->min_activations = Output(ret, _outputs_range["min_activations"].first);
2174 this->max_activations = Output(ret, _outputs_range["max_activations"].first);
2175}
2176
2177QuantizedReluX::QuantizedReluX(const ::tensorflow::Scope& scope,
2178 ::tensorflow::Input features,
2179 ::tensorflow::Input max_value,
2180 ::tensorflow::Input min_features,
2181 ::tensorflow::Input max_features)
2182 : QuantizedReluX(scope, features, max_value, min_features, max_features, QuantizedReluX::Attrs()) {}
2183
2184Relu::Relu(const ::tensorflow::Scope& scope, ::tensorflow::Input features) {
2185 if (!scope.ok()) return;
2186 auto _features = ::tensorflow::ops::AsNodeOut(scope, features);
2187 if (!scope.ok()) return;
2188 ::tensorflow::Node* ret;
2189 const auto unique_name = scope.GetUniqueNameForOp("Relu");
2190 auto builder = ::tensorflow::NodeBuilder(unique_name, "Relu")
2191 .Input(_features)
2192 ;
2193 scope.UpdateBuilder(&builder);
2194 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2195 if (!scope.ok()) return;
2196 scope.UpdateStatus(scope.DoShapeInference(ret));
2197 this->operation = Operation(ret);
2198 this->activations = Output(ret, 0);
2199}
2200
2201Relu6::Relu6(const ::tensorflow::Scope& scope, ::tensorflow::Input features) {
2202 if (!scope.ok()) return;
2203 auto _features = ::tensorflow::ops::AsNodeOut(scope, features);
2204 if (!scope.ok()) return;
2205 ::tensorflow::Node* ret;
2206 const auto unique_name = scope.GetUniqueNameForOp("Relu6");
2207 auto builder = ::tensorflow::NodeBuilder(unique_name, "Relu6")
2208 .Input(_features)
2209 ;
2210 scope.UpdateBuilder(&builder);
2211 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2212 if (!scope.ok()) return;
2213 scope.UpdateStatus(scope.DoShapeInference(ret));
2214 this->operation = Operation(ret);
2215 this->activations = Output(ret, 0);
2216}
2217
2218Selu::Selu(const ::tensorflow::Scope& scope, ::tensorflow::Input features) {
2219 if (!scope.ok()) return;
2220 auto _features = ::tensorflow::ops::AsNodeOut(scope, features);
2221 if (!scope.ok()) return;
2222 ::tensorflow::Node* ret;
2223 const auto unique_name = scope.GetUniqueNameForOp("Selu");
2224 auto builder = ::tensorflow::NodeBuilder(unique_name, "Selu")
2225 .Input(_features)
2226 ;
2227 scope.UpdateBuilder(&builder);
2228 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2229 if (!scope.ok()) return;
2230 scope.UpdateStatus(scope.DoShapeInference(ret));
2231 this->operation = Operation(ret);
2232 this->activations = Output(ret, 0);
2233}
2234
2235Softmax::Softmax(const ::tensorflow::Scope& scope, ::tensorflow::Input logits) {
2236 if (!scope.ok()) return;
2237 auto _logits = ::tensorflow::ops::AsNodeOut(scope, logits);
2238 if (!scope.ok()) return;
2239 ::tensorflow::Node* ret;
2240 const auto unique_name = scope.GetUniqueNameForOp("Softmax");
2241 auto builder = ::tensorflow::NodeBuilder(unique_name, "Softmax")
2242 .Input(_logits)
2243 ;
2244 scope.UpdateBuilder(&builder);
2245 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2246 if (!scope.ok()) return;
2247 scope.UpdateStatus(scope.DoShapeInference(ret));
2248 this->operation = Operation(ret);
2249 this->softmax = Output(ret, 0);
2250}
2251
2252SoftmaxCrossEntropyWithLogits::SoftmaxCrossEntropyWithLogits(const
2253 ::tensorflow::Scope&
2254 scope,
2255 ::tensorflow::Input
2256 features,
2257 ::tensorflow::Input
2258 labels) {
2259 if (!scope.ok()) return;
2260 auto _features = ::tensorflow::ops::AsNodeOut(scope, features);
2261 if (!scope.ok()) return;
2262 auto _labels = ::tensorflow::ops::AsNodeOut(scope, labels);
2263 if (!scope.ok()) return;
2264 ::tensorflow::Node* ret;
2265 const auto unique_name = scope.GetUniqueNameForOp("SoftmaxCrossEntropyWithLogits");
2266 auto builder = ::tensorflow::NodeBuilder(unique_name, "SoftmaxCrossEntropyWithLogits")
2267 .Input(_features)
2268 .Input(_labels)
2269 ;
2270 scope.UpdateBuilder(&builder);
2271 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2272 if (!scope.ok()) return;
2273 scope.UpdateStatus(scope.DoShapeInference(ret));
2274 this->operation = Operation(ret);
2275 ::tensorflow::NameRangeMap _outputs_range;
2276 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
2277 if (!_status_.ok()) {
2278 scope.UpdateStatus(_status_);
2279 return;
2280 }
2281
2282 this->loss = Output(ret, _outputs_range["loss"].first);
2283 this->backprop = Output(ret, _outputs_range["backprop"].first);
2284}
2285
2286Softplus::Softplus(const ::tensorflow::Scope& scope, ::tensorflow::Input
2287 features) {
2288 if (!scope.ok()) return;
2289 auto _features = ::tensorflow::ops::AsNodeOut(scope, features);
2290 if (!scope.ok()) return;
2291 ::tensorflow::Node* ret;
2292 const auto unique_name = scope.GetUniqueNameForOp("Softplus");
2293 auto builder = ::tensorflow::NodeBuilder(unique_name, "Softplus")
2294 .Input(_features)
2295 ;
2296 scope.UpdateBuilder(&builder);
2297 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2298 if (!scope.ok()) return;
2299 scope.UpdateStatus(scope.DoShapeInference(ret));
2300 this->operation = Operation(ret);
2301 this->activations = Output(ret, 0);
2302}
2303
2304Softsign::Softsign(const ::tensorflow::Scope& scope, ::tensorflow::Input
2305 features) {
2306 if (!scope.ok()) return;
2307 auto _features = ::tensorflow::ops::AsNodeOut(scope, features);
2308 if (!scope.ok()) return;
2309 ::tensorflow::Node* ret;
2310 const auto unique_name = scope.GetUniqueNameForOp("Softsign");
2311 auto builder = ::tensorflow::NodeBuilder(unique_name, "Softsign")
2312 .Input(_features)
2313 ;
2314 scope.UpdateBuilder(&builder);
2315 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2316 if (!scope.ok()) return;
2317 scope.UpdateStatus(scope.DoShapeInference(ret));
2318 this->operation = Operation(ret);
2319 this->activations = Output(ret, 0);
2320}
2321
2322SparseSoftmaxCrossEntropyWithLogits::SparseSoftmaxCrossEntropyWithLogits(const
2323 ::tensorflow::Scope&
2324 scope,
2325 ::tensorflow::Input
2326 features,
2327 ::tensorflow::Input
2328 labels) {
2329 if (!scope.ok()) return;
2330 auto _features = ::tensorflow::ops::AsNodeOut(scope, features);
2331 if (!scope.ok()) return;
2332 auto _labels = ::tensorflow::ops::AsNodeOut(scope, labels);
2333 if (!scope.ok()) return;
2334 ::tensorflow::Node* ret;
2335 const auto unique_name = scope.GetUniqueNameForOp("SparseSoftmaxCrossEntropyWithLogits");
2336 auto builder = ::tensorflow::NodeBuilder(unique_name, "SparseSoftmaxCrossEntropyWithLogits")
2337 .Input(_features)
2338 .Input(_labels)
2339 ;
2340 scope.UpdateBuilder(&builder);
2341 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2342 if (!scope.ok()) return;
2343 scope.UpdateStatus(scope.DoShapeInference(ret));
2344 this->operation = Operation(ret);
2345 ::tensorflow::NameRangeMap _outputs_range;
2346 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
2347 if (!_status_.ok()) {
2348 scope.UpdateStatus(_status_);
2349 return;
2350 }
2351
2352 this->loss = Output(ret, _outputs_range["loss"].first);
2353 this->backprop = Output(ret, _outputs_range["backprop"].first);
2354}
2355
2356TopK::TopK(const ::tensorflow::Scope& scope, ::tensorflow::Input input,
2357 ::tensorflow::Input k, const TopK::Attrs& attrs) {
2358 if (!scope.ok()) return;
2359 auto _input = ::tensorflow::ops::AsNodeOut(scope, input);
2360 if (!scope.ok()) return;
2361 auto _k = ::tensorflow::ops::AsNodeOut(scope, k);
2362 if (!scope.ok()) return;
2363 ::tensorflow::Node* ret;
2364 const auto unique_name = scope.GetUniqueNameForOp("TopK");
2365 auto builder = ::tensorflow::NodeBuilder(unique_name, "TopKV2")
2366 .Input(_input)
2367 .Input(_k)
2368 .Attr("sorted", attrs.sorted_)
2369 ;
2370 scope.UpdateBuilder(&builder);
2371 scope.UpdateStatus(builder.Finalize(scope.graph(), &ret));
2372 if (!scope.ok()) return;
2373 scope.UpdateStatus(scope.DoShapeInference(ret));
2374 this->operation = Operation(ret);
2375 ::tensorflow::NameRangeMap _outputs_range;
2376 ::tensorflow::Status _status_ = ::tensorflow::NameRangesForNode(*ret, ret->op_def(), nullptr, &_outputs_range);
2377 if (!_status_.ok()) {
2378 scope.UpdateStatus(_status_);
2379 return;
2380 }
2381
2382 this->values = Output(ret, _outputs_range["values"].first);
2383 this->indices = Output(ret, _outputs_range["indices"].first);
2384}
2385
2386TopK::TopK(const ::tensorflow::Scope& scope, ::tensorflow::Input input,
2387 ::tensorflow::Input k)
2388 : TopK(scope, input, k, TopK::Attrs()) {}
2389
2390/// @}
2391
2392} // namespace ops
2393} // namespace tensorflow
2394