1// This file is MACHINE GENERATED! Do not edit.
2
3#ifndef TENSORFLOW_CC_OPS_EXPERIMENTAL_DATASET_OPS_INTERNAL_H_
4#define TENSORFLOW_CC_OPS_EXPERIMENTAL_DATASET_OPS_INTERNAL_H_
5
6// This file is MACHINE GENERATED! Do not edit.
7
8#include "tensorflow/cc/framework/ops.h"
9#include "tensorflow/cc/framework/scope.h"
10#include "tensorflow/core/framework/tensor.h"
11#include "tensorflow/core/framework/tensor_shape.h"
12#include "tensorflow/core/framework/types.h"
13#include "tensorflow/core/lib/gtl/array_slice.h"
14
15namespace tensorflow {
16namespace ops {
17namespace internal {
18// NOTE: This namespace has internal TensorFlow details that
19// are not part of TensorFlow's public API.
20
21/// @defgroup experimental_dataset_ops_internal Experimental Dataset Ops Internal
22/// @{
23
24/// TODO: add doc.
25///
26/// Args:
27/// * scope: A Scope object
28///
29/// Returns:
30/// * `Output`: The handle tensor.
31class AssertCardinalityDataset {
32 public:
33 AssertCardinalityDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
34 input_dataset, ::tensorflow::Input cardinality, const
35 DataTypeSlice& output_types, const
36 gtl::ArraySlice<PartialTensorShape>& output_shapes);
37 operator ::tensorflow::Output() const { return handle; }
38 operator ::tensorflow::Input() const { return handle; }
39 ::tensorflow::Node* node() const { return handle.node(); }
40
41 Operation operation;
42 ::tensorflow::Output handle;
43};
44
45/// A transformation that asserts which transformations happen next.
46///
47/// This transformation checks whether the camel-case names (i.e. "FlatMap", not
48/// "flat_map") of the transformations following this transformation match the list
49/// of names in the `transformations` argument. If there is a mismatch, the
50/// transformation raises an exception.
51///
52/// The check occurs when iterating over the contents of the dataset, which
53/// means that the check happens *after* any static optimizations are applied
54/// to the dataset graph.
55///
56/// Args:
57/// * scope: A Scope object
58/// * input_dataset: A variant tensor representing the input dataset.
59/// `AssertNextDataset` passes through the outputs of its input dataset.
60/// * transformations: A `tf.string` vector `tf.Tensor` identifying the transformations that are
61/// expected to happen next.
62///
63/// Returns:
64/// * `Output`: The handle tensor.
65class AssertNextDataset {
66 public:
67 AssertNextDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
68 input_dataset, ::tensorflow::Input transformations, const
69 DataTypeSlice& output_types, const
70 gtl::ArraySlice<PartialTensorShape>& output_shapes);
71 operator ::tensorflow::Output() const { return handle; }
72 operator ::tensorflow::Input() const { return handle; }
73 ::tensorflow::Node* node() const { return handle.node(); }
74
75 Operation operation;
76 ::tensorflow::Output handle;
77};
78
79/// A transformation that asserts which transformations happened previously.
80///
81/// This transformation checks the names and, optionally, the attribute name-value
82/// pairs in the `transformations` argument against those of the transformations
83/// that preceded this transformation. If there is a mismatch, the transformation
84/// raises an exception.
85///
86/// The check occurs when iterating over the contents of the dataset, which
87/// means that the check happens *after* any static optimizations are applied
88/// to the dataset graph.
89///
90/// Args:
91/// * scope: A Scope object
92/// * input_dataset: A variant tensor representing the input dataset.
93/// `AssertPrevDataset` passes through the outputs of its input dataset.
94/// * transformations: A `tf.string` vector `tf.Tensor` identifying the transformations, with optional
95/// attribute name-value pairs, that are expected to have happened previously.
96///
97/// Returns:
98/// * `Output`: The handle tensor.
99class AssertPrevDataset {
100 public:
101 AssertPrevDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
102 input_dataset, ::tensorflow::Input transformations, const
103 DataTypeSlice& output_types, const
104 gtl::ArraySlice<PartialTensorShape>& output_shapes);
105 operator ::tensorflow::Output() const { return handle; }
106 operator ::tensorflow::Input() const { return handle; }
107 ::tensorflow::Node* node() const { return handle.node(); }
108
109 Operation operation;
110 ::tensorflow::Output handle;
111};
112
113/// Creates a dataset that shards the input dataset.
114///
115/// Creates a dataset that shards the input dataset by num_workers, returning a
116/// sharded dataset for the index-th worker. This attempts to automatically shard
117/// a dataset by examining the Dataset graph and inserting a shard op before the
118/// inputs to a reader Dataset (e.g. CSVDataset, TFRecordDataset).
119///
120/// This dataset will throw a NotFound error if we cannot shard the dataset
121/// automatically.
122///
123/// Args:
124/// * scope: A Scope object
125/// * input_dataset: A variant tensor representing the input dataset.
126/// * num_workers: A scalar representing the number of workers to distribute this dataset across.
127/// * index: A scalar representing the index of the current worker out of num_workers.
128///
129/// Returns:
130/// * `Output`: The handle tensor.
131class AutoShardDataset {
132 public:
133 /// Optional attribute setters for AutoShardDataset
134 struct Attrs {
135 /// Defaults to 0
136 TF_MUST_USE_RESULT Attrs AutoShardPolicy(int64 x) {
137 Attrs ret = *this;
138 ret.auto_shard_policy_ = x;
139 return ret;
140 }
141
142 /// Defaults to 0
143 TF_MUST_USE_RESULT Attrs NumReplicas(int64 x) {
144 Attrs ret = *this;
145 ret.num_replicas_ = x;
146 return ret;
147 }
148
149 int64 auto_shard_policy_ = 0;
150 int64 num_replicas_ = 0;
151 };
152 AutoShardDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
153 input_dataset, ::tensorflow::Input num_workers,
154 ::tensorflow::Input index, const DataTypeSlice& output_types,
155 const gtl::ArraySlice<PartialTensorShape>& output_shapes);
156 AutoShardDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
157 input_dataset, ::tensorflow::Input num_workers,
158 ::tensorflow::Input index, const DataTypeSlice& output_types,
159 const gtl::ArraySlice<PartialTensorShape>& output_shapes,
160 const AutoShardDataset::Attrs& attrs);
161 operator ::tensorflow::Output() const { return handle; }
162 operator ::tensorflow::Input() const { return handle; }
163 ::tensorflow::Node* node() const { return handle.node(); }
164
165 static Attrs AutoShardPolicy(int64 x) {
166 return Attrs().AutoShardPolicy(x);
167 }
168 static Attrs NumReplicas(int64 x) {
169 return Attrs().NumReplicas(x);
170 }
171
172 Operation operation;
173 ::tensorflow::Output handle;
174};
175
176/// Records the bytes size of each element of `input_dataset` in a StatsAggregator.
177///
178/// Args:
179/// * scope: A Scope object
180///
181/// Returns:
182/// * `Output`: The handle tensor.
183class BytesProducedStatsDataset {
184 public:
185 BytesProducedStatsDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
186 input_dataset, ::tensorflow::Input tag, const
187 DataTypeSlice& output_types, const
188 gtl::ArraySlice<PartialTensorShape>& output_shapes);
189 operator ::tensorflow::Output() const { return handle; }
190 operator ::tensorflow::Input() const { return handle; }
191 ::tensorflow::Node* node() const { return handle.node(); }
192
193 Operation operation;
194 ::tensorflow::Output handle;
195};
196
197/// TODO: add doc.
198///
199/// Args:
200/// * scope: A Scope object
201///
202/// Returns:
203/// * `Output`: The handle tensor.
204class CSVDataset {
205 public:
206 CSVDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input filenames,
207 ::tensorflow::Input compression_type, ::tensorflow::Input
208 buffer_size, ::tensorflow::Input header, ::tensorflow::Input
209 field_delim, ::tensorflow::Input use_quote_delim,
210 ::tensorflow::Input na_value, ::tensorflow::Input select_cols,
211 ::tensorflow::InputList record_defaults, const
212 gtl::ArraySlice<PartialTensorShape>& output_shapes);
213 operator ::tensorflow::Output() const { return handle; }
214 operator ::tensorflow::Input() const { return handle; }
215 ::tensorflow::Node* node() const { return handle.node(); }
216
217 Operation operation;
218 ::tensorflow::Output handle;
219};
220
221/// TODO: add doc.
222///
223/// Args:
224/// * scope: A Scope object
225///
226/// Returns:
227/// * `Output`: The handle tensor.
228class CSVDatasetV2 {
229 public:
230 CSVDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input filenames,
231 ::tensorflow::Input compression_type, ::tensorflow::Input
232 buffer_size, ::tensorflow::Input header, ::tensorflow::Input
233 field_delim, ::tensorflow::Input use_quote_delim,
234 ::tensorflow::Input na_value, ::tensorflow::Input select_cols,
235 ::tensorflow::InputList record_defaults, ::tensorflow::Input
236 exclude_cols, const gtl::ArraySlice<PartialTensorShape>&
237 output_shapes);
238 operator ::tensorflow::Output() const { return handle; }
239 operator ::tensorflow::Input() const { return handle; }
240 ::tensorflow::Node* node() const { return handle.node(); }
241
242 Operation operation;
243 ::tensorflow::Output handle;
244};
245
246/// TODO: add doc.
247///
248/// Args:
249/// * scope: A Scope object
250///
251/// Returns:
252/// * `Output`: The handle tensor.
253class ChooseFastestBranchDataset {
254 public:
255 ChooseFastestBranchDataset(const ::tensorflow::Scope& scope,
256 ::tensorflow::Input input_dataset,
257 ::tensorflow::Input ratio_numerator,
258 ::tensorflow::Input ratio_denominator,
259 ::tensorflow::InputList other_arguments, int64
260 num_elements_per_branch, const
261 gtl::ArraySlice<NameAttrList>& branches, const
262 gtl::ArraySlice<int>& other_arguments_lengths, const
263 DataTypeSlice& output_types, const
264 gtl::ArraySlice<PartialTensorShape>& output_shapes);
265 operator ::tensorflow::Output() const { return handle; }
266 operator ::tensorflow::Input() const { return handle; }
267 ::tensorflow::Node* node() const { return handle.node(); }
268
269 Operation operation;
270 ::tensorflow::Output handle;
271};
272
273/// TODO: add doc.
274///
275/// Args:
276/// * scope: A Scope object
277///
278/// Returns:
279/// * `Output`: The handle tensor.
280class ChooseFastestDataset {
281 public:
282 ChooseFastestDataset(const ::tensorflow::Scope& scope, ::tensorflow::InputList
283 input_datasets, int64 num_experiments, const
284 DataTypeSlice& output_types, const
285 gtl::ArraySlice<PartialTensorShape>& output_shapes);
286 operator ::tensorflow::Output() const { return handle; }
287 operator ::tensorflow::Input() const { return handle; }
288 ::tensorflow::Node* node() const { return handle.node(); }
289
290 Operation operation;
291 ::tensorflow::Output handle;
292};
293
294/// Compresses a dataset element.
295///
296/// Args:
297/// * scope: A Scope object
298///
299/// Returns:
300/// * `Output`: The compressed tensor.
301class CompressElement {
302 public:
303 CompressElement(const ::tensorflow::Scope& scope, ::tensorflow::InputList
304 components);
305 operator ::tensorflow::Output() const { return compressed; }
306 operator ::tensorflow::Input() const { return compressed; }
307 ::tensorflow::Node* node() const { return compressed.node(); }
308
309 Operation operation;
310 ::tensorflow::Output compressed;
311};
312
313/// Computes the static batch size of a dataset sans partial batches.
314///
315/// Args:
316/// * scope: A Scope object
317///
318/// Returns:
319/// * `Output`: The batch_size tensor.
320class ComputeBatchSize {
321 public:
322 ComputeBatchSize(const ::tensorflow::Scope& scope, ::tensorflow::Input
323 input_dataset);
324 operator ::tensorflow::Output() const { return batch_size; }
325 operator ::tensorflow::Input() const { return batch_size; }
326 ::tensorflow::Node* node() const { return batch_size.node(); }
327
328 Operation operation;
329 ::tensorflow::Output batch_size;
330};
331
332/// Creates a dataset that reads data from the tf.data service.
333///
334/// Args:
335/// * scope: A Scope object
336///
337/// Returns:
338/// * `Output`: The handle tensor.
339class DataServiceDataset {
340 public:
341 /// Optional attribute setters for DataServiceDataset
342 struct Attrs {
343 /// Defaults to -1
344 TF_MUST_USE_RESULT Attrs TaskRefreshIntervalHintMs(int64 x) {
345 Attrs ret = *this;
346 ret.task_refresh_interval_hint_ms_ = x;
347 return ret;
348 }
349
350 /// Defaults to ""
351 TF_MUST_USE_RESULT Attrs DataTransferProtocol(StringPiece x) {
352 Attrs ret = *this;
353 ret.data_transfer_protocol_ = x;
354 return ret;
355 }
356
357 /// Defaults to "AUTO"
358 TF_MUST_USE_RESULT Attrs TargetWorkers(StringPiece x) {
359 Attrs ret = *this;
360 ret.target_workers_ = x;
361 return ret;
362 }
363
364 /// Defaults to ""
365 TF_MUST_USE_RESULT Attrs CrossTrainerCacheOptions(StringPiece x) {
366 Attrs ret = *this;
367 ret.cross_trainer_cache_options_ = x;
368 return ret;
369 }
370
371 int64 task_refresh_interval_hint_ms_ = -1;
372 StringPiece data_transfer_protocol_ = "";
373 StringPiece target_workers_ = "AUTO";
374 StringPiece cross_trainer_cache_options_ = "";
375 };
376 DataServiceDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
377 dataset_id, ::tensorflow::Input processing_mode,
378 ::tensorflow::Input address, ::tensorflow::Input protocol,
379 ::tensorflow::Input job_name, ::tensorflow::Input
380 max_outstanding_requests, ::tensorflow::Input
381 iteration_counter, const DataTypeSlice& output_types, const
382 gtl::ArraySlice<PartialTensorShape>& output_shapes);
383 DataServiceDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
384 dataset_id, ::tensorflow::Input processing_mode,
385 ::tensorflow::Input address, ::tensorflow::Input protocol,
386 ::tensorflow::Input job_name, ::tensorflow::Input
387 max_outstanding_requests, ::tensorflow::Input
388 iteration_counter, const DataTypeSlice& output_types, const
389 gtl::ArraySlice<PartialTensorShape>& output_shapes, const
390 DataServiceDataset::Attrs& attrs);
391 operator ::tensorflow::Output() const { return handle; }
392 operator ::tensorflow::Input() const { return handle; }
393 ::tensorflow::Node* node() const { return handle.node(); }
394
395 static Attrs TaskRefreshIntervalHintMs(int64 x) {
396 return Attrs().TaskRefreshIntervalHintMs(x);
397 }
398 static Attrs DataTransferProtocol(StringPiece x) {
399 return Attrs().DataTransferProtocol(x);
400 }
401 static Attrs TargetWorkers(StringPiece x) {
402 return Attrs().TargetWorkers(x);
403 }
404 static Attrs CrossTrainerCacheOptions(StringPiece x) {
405 return Attrs().CrossTrainerCacheOptions(x);
406 }
407
408 Operation operation;
409 ::tensorflow::Output handle;
410};
411
412/// Creates a dataset that reads data from the tf.data service.
413///
414/// Args:
415/// * scope: A Scope object
416///
417/// Returns:
418/// * `Output`: The handle tensor.
419class DataServiceDatasetV2 {
420 public:
421 /// Optional attribute setters for DataServiceDatasetV2
422 struct Attrs {
423 /// Defaults to -1
424 TF_MUST_USE_RESULT Attrs TaskRefreshIntervalHintMs(int64 x) {
425 Attrs ret = *this;
426 ret.task_refresh_interval_hint_ms_ = x;
427 return ret;
428 }
429
430 /// Defaults to ""
431 TF_MUST_USE_RESULT Attrs DataTransferProtocol(StringPiece x) {
432 Attrs ret = *this;
433 ret.data_transfer_protocol_ = x;
434 return ret;
435 }
436
437 /// Defaults to "AUTO"
438 TF_MUST_USE_RESULT Attrs TargetWorkers(StringPiece x) {
439 Attrs ret = *this;
440 ret.target_workers_ = x;
441 return ret;
442 }
443
444 /// Defaults to ""
445 TF_MUST_USE_RESULT Attrs CrossTrainerCacheOptions(StringPiece x) {
446 Attrs ret = *this;
447 ret.cross_trainer_cache_options_ = x;
448 return ret;
449 }
450
451 int64 task_refresh_interval_hint_ms_ = -1;
452 StringPiece data_transfer_protocol_ = "";
453 StringPiece target_workers_ = "AUTO";
454 StringPiece cross_trainer_cache_options_ = "";
455 };
456 DataServiceDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
457 dataset_id, ::tensorflow::Input processing_mode,
458 ::tensorflow::Input address, ::tensorflow::Input protocol,
459 ::tensorflow::Input job_name, ::tensorflow::Input
460 consumer_index, ::tensorflow::Input num_consumers,
461 ::tensorflow::Input max_outstanding_requests,
462 ::tensorflow::Input iteration_counter, const
463 DataTypeSlice& output_types, const
464 gtl::ArraySlice<PartialTensorShape>& output_shapes);
465 DataServiceDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
466 dataset_id, ::tensorflow::Input processing_mode,
467 ::tensorflow::Input address, ::tensorflow::Input protocol,
468 ::tensorflow::Input job_name, ::tensorflow::Input
469 consumer_index, ::tensorflow::Input num_consumers,
470 ::tensorflow::Input max_outstanding_requests,
471 ::tensorflow::Input iteration_counter, const
472 DataTypeSlice& output_types, const
473 gtl::ArraySlice<PartialTensorShape>& output_shapes, const
474 DataServiceDatasetV2::Attrs& attrs);
475 operator ::tensorflow::Output() const { return handle; }
476 operator ::tensorflow::Input() const { return handle; }
477 ::tensorflow::Node* node() const { return handle.node(); }
478
479 static Attrs TaskRefreshIntervalHintMs(int64 x) {
480 return Attrs().TaskRefreshIntervalHintMs(x);
481 }
482 static Attrs DataTransferProtocol(StringPiece x) {
483 return Attrs().DataTransferProtocol(x);
484 }
485 static Attrs TargetWorkers(StringPiece x) {
486 return Attrs().TargetWorkers(x);
487 }
488 static Attrs CrossTrainerCacheOptions(StringPiece x) {
489 return Attrs().CrossTrainerCacheOptions(x);
490 }
491
492 Operation operation;
493 ::tensorflow::Output handle;
494};
495
496/// Creates a dataset that reads data from the tf.data service.
497///
498/// Args:
499/// * scope: A Scope object
500///
501/// Returns:
502/// * `Output`: The handle tensor.
503class DataServiceDatasetV3 {
504 public:
505 /// Optional attribute setters for DataServiceDatasetV3
506 struct Attrs {
507 /// Defaults to -1
508 TF_MUST_USE_RESULT Attrs TaskRefreshIntervalHintMs(int64 x) {
509 Attrs ret = *this;
510 ret.task_refresh_interval_hint_ms_ = x;
511 return ret;
512 }
513
514 /// Defaults to ""
515 TF_MUST_USE_RESULT Attrs DataTransferProtocol(StringPiece x) {
516 Attrs ret = *this;
517 ret.data_transfer_protocol_ = x;
518 return ret;
519 }
520
521 /// Defaults to "AUTO"
522 TF_MUST_USE_RESULT Attrs TargetWorkers(StringPiece x) {
523 Attrs ret = *this;
524 ret.target_workers_ = x;
525 return ret;
526 }
527
528 /// Defaults to false
529 TF_MUST_USE_RESULT Attrs Uncompress(bool x) {
530 Attrs ret = *this;
531 ret.uncompress_ = x;
532 return ret;
533 }
534
535 /// Defaults to ""
536 TF_MUST_USE_RESULT Attrs CrossTrainerCacheOptions(StringPiece x) {
537 Attrs ret = *this;
538 ret.cross_trainer_cache_options_ = x;
539 return ret;
540 }
541
542 int64 task_refresh_interval_hint_ms_ = -1;
543 StringPiece data_transfer_protocol_ = "";
544 StringPiece target_workers_ = "AUTO";
545 bool uncompress_ = false;
546 StringPiece cross_trainer_cache_options_ = "";
547 };
548 DataServiceDatasetV3(const ::tensorflow::Scope& scope, ::tensorflow::Input
549 dataset_id, ::tensorflow::Input processing_mode,
550 ::tensorflow::Input address, ::tensorflow::Input protocol,
551 ::tensorflow::Input job_name, ::tensorflow::Input
552 consumer_index, ::tensorflow::Input num_consumers,
553 ::tensorflow::Input max_outstanding_requests,
554 ::tensorflow::Input iteration_counter, const
555 DataTypeSlice& output_types, const
556 gtl::ArraySlice<PartialTensorShape>& output_shapes, const
557 NameAttrList& uncompress_fn);
558 DataServiceDatasetV3(const ::tensorflow::Scope& scope, ::tensorflow::Input
559 dataset_id, ::tensorflow::Input processing_mode,
560 ::tensorflow::Input address, ::tensorflow::Input protocol,
561 ::tensorflow::Input job_name, ::tensorflow::Input
562 consumer_index, ::tensorflow::Input num_consumers,
563 ::tensorflow::Input max_outstanding_requests,
564 ::tensorflow::Input iteration_counter, const
565 DataTypeSlice& output_types, const
566 gtl::ArraySlice<PartialTensorShape>& output_shapes, const
567 NameAttrList& uncompress_fn, const
568 DataServiceDatasetV3::Attrs& attrs);
569 operator ::tensorflow::Output() const { return handle; }
570 operator ::tensorflow::Input() const { return handle; }
571 ::tensorflow::Node* node() const { return handle.node(); }
572
573 static Attrs TaskRefreshIntervalHintMs(int64 x) {
574 return Attrs().TaskRefreshIntervalHintMs(x);
575 }
576 static Attrs DataTransferProtocol(StringPiece x) {
577 return Attrs().DataTransferProtocol(x);
578 }
579 static Attrs TargetWorkers(StringPiece x) {
580 return Attrs().TargetWorkers(x);
581 }
582 static Attrs Uncompress(bool x) {
583 return Attrs().Uncompress(x);
584 }
585 static Attrs CrossTrainerCacheOptions(StringPiece x) {
586 return Attrs().CrossTrainerCacheOptions(x);
587 }
588
589 Operation operation;
590 ::tensorflow::Output handle;
591};
592
593/// Creates a dataset that reads data from the tf.data service.
594///
595/// Args:
596/// * scope: A Scope object
597///
598/// Returns:
599/// * `Output`: The handle tensor.
600class DataServiceDatasetV4 {
601 public:
602 /// Optional attribute setters for DataServiceDatasetV4
603 struct Attrs {
604 /// Defaults to -1
605 TF_MUST_USE_RESULT Attrs TaskRefreshIntervalHintMs(int64 x) {
606 Attrs ret = *this;
607 ret.task_refresh_interval_hint_ms_ = x;
608 return ret;
609 }
610
611 /// Defaults to ""
612 TF_MUST_USE_RESULT Attrs DataTransferProtocol(StringPiece x) {
613 Attrs ret = *this;
614 ret.data_transfer_protocol_ = x;
615 return ret;
616 }
617
618 /// Defaults to "AUTO"
619 TF_MUST_USE_RESULT Attrs TargetWorkers(StringPiece x) {
620 Attrs ret = *this;
621 ret.target_workers_ = x;
622 return ret;
623 }
624
625 /// Defaults to false
626 TF_MUST_USE_RESULT Attrs Uncompress(bool x) {
627 Attrs ret = *this;
628 ret.uncompress_ = x;
629 return ret;
630 }
631
632 /// Defaults to ""
633 TF_MUST_USE_RESULT Attrs CrossTrainerCacheOptions(StringPiece x) {
634 Attrs ret = *this;
635 ret.cross_trainer_cache_options_ = x;
636 return ret;
637 }
638
639 int64 task_refresh_interval_hint_ms_ = -1;
640 StringPiece data_transfer_protocol_ = "";
641 StringPiece target_workers_ = "AUTO";
642 bool uncompress_ = false;
643 StringPiece cross_trainer_cache_options_ = "";
644 };
645 DataServiceDatasetV4(const ::tensorflow::Scope& scope, ::tensorflow::Input
646 dataset_id, ::tensorflow::Input processing_mode,
647 ::tensorflow::Input address, ::tensorflow::Input protocol,
648 ::tensorflow::Input job_name, ::tensorflow::Input
649 consumer_index, ::tensorflow::Input num_consumers,
650 ::tensorflow::Input max_outstanding_requests,
651 ::tensorflow::Input iteration_counter, const
652 DataTypeSlice& output_types, const
653 gtl::ArraySlice<PartialTensorShape>& output_shapes, const
654 NameAttrList& uncompress_fn);
655 DataServiceDatasetV4(const ::tensorflow::Scope& scope, ::tensorflow::Input
656 dataset_id, ::tensorflow::Input processing_mode,
657 ::tensorflow::Input address, ::tensorflow::Input protocol,
658 ::tensorflow::Input job_name, ::tensorflow::Input
659 consumer_index, ::tensorflow::Input num_consumers,
660 ::tensorflow::Input max_outstanding_requests,
661 ::tensorflow::Input iteration_counter, const
662 DataTypeSlice& output_types, const
663 gtl::ArraySlice<PartialTensorShape>& output_shapes, const
664 NameAttrList& uncompress_fn, const
665 DataServiceDatasetV4::Attrs& attrs);
666 operator ::tensorflow::Output() const { return handle; }
667 operator ::tensorflow::Input() const { return handle; }
668 ::tensorflow::Node* node() const { return handle.node(); }
669
670 static Attrs TaskRefreshIntervalHintMs(int64 x) {
671 return Attrs().TaskRefreshIntervalHintMs(x);
672 }
673 static Attrs DataTransferProtocol(StringPiece x) {
674 return Attrs().DataTransferProtocol(x);
675 }
676 static Attrs TargetWorkers(StringPiece x) {
677 return Attrs().TargetWorkers(x);
678 }
679 static Attrs Uncompress(bool x) {
680 return Attrs().Uncompress(x);
681 }
682 static Attrs CrossTrainerCacheOptions(StringPiece x) {
683 return Attrs().CrossTrainerCacheOptions(x);
684 }
685
686 Operation operation;
687 ::tensorflow::Output handle;
688};
689
690/// Creates a dataset from the given `graph_def`.
691///
692/// Creates a dataset from the provided `graph_def`.
693///
694/// Args:
695/// * scope: A Scope object
696/// * graph_def: The graph representation of the dataset (as serialized GraphDef).
697///
698/// Returns:
699/// * `Output`: A variant tensor representing the dataset.
700class DatasetFromGraph {
701 public:
702 DatasetFromGraph(const ::tensorflow::Scope& scope, ::tensorflow::Input
703 graph_def);
704 operator ::tensorflow::Output() const { return handle; }
705 operator ::tensorflow::Input() const { return handle; }
706 ::tensorflow::Node* node() const { return handle.node(); }
707
708 Operation operation;
709 ::tensorflow::Output handle;
710};
711
712/// Writes the given dataset to the given file using the TFRecord format.
713///
714/// Args:
715/// * scope: A Scope object
716/// * input_dataset: A variant tensor representing the dataset to write.
717/// * filename: A scalar string tensor representing the filename to use.
718/// * compression_type: A scalar string tensor containing either (i) the empty string (no
719/// compression), (ii) "ZLIB", or (iii) "GZIP".
720///
721/// Returns:
722/// * the created `Operation`
723class DatasetToTFRecord {
724 public:
725 DatasetToTFRecord(const ::tensorflow::Scope& scope, ::tensorflow::Input
726 input_dataset, ::tensorflow::Input filename,
727 ::tensorflow::Input compression_type);
728 operator ::tensorflow::Operation() const { return operation; }
729
730 Operation operation;
731};
732
733/// Creates a dataset that batches input elements into a SparseTensor.
734///
735/// Args:
736/// * scope: A Scope object
737/// * input_dataset: A handle to an input dataset. Must have a single component.
738/// * batch_size: A scalar representing the number of elements to accumulate in a
739/// batch.
740/// * row_shape: A vector representing the dense shape of each row in the produced
741/// SparseTensor. The shape may be partially specified, using `-1` to indicate
742/// that a particular dimension should use the maximum size of all batch elements.
743///
744/// Returns:
745/// * `Output`: The handle tensor.
746class DenseToSparseBatchDataset {
747 public:
748 DenseToSparseBatchDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
749 input_dataset, ::tensorflow::Input batch_size,
750 ::tensorflow::Input row_shape, const DataTypeSlice&
751 output_types, const
752 gtl::ArraySlice<PartialTensorShape>& output_shapes);
753 operator ::tensorflow::Output() const { return handle; }
754 operator ::tensorflow::Input() const { return handle; }
755 ::tensorflow::Node* node() const { return handle.node(); }
756
757 Operation operation;
758 ::tensorflow::Output handle;
759};
760
761/// A substitute for `InterleaveDataset` on a fixed list of `N` datasets.
762///
763/// Args:
764/// * scope: A Scope object
765/// * selector_input_dataset: A dataset of scalar `DT_INT64` elements that determines which of the
766/// `N` data inputs should produce the next output element.
767/// * data_input_datasets: `N` datasets with the same type that will be interleaved according to
768/// the values of `selector_input_dataset`.
769///
770/// Returns:
771/// * `Output`: The handle tensor.
772class DirectedInterleaveDataset {
773 public:
774 /// Optional attribute setters for DirectedInterleaveDataset
775 struct Attrs {
776 /// Defaults to false
777 TF_MUST_USE_RESULT Attrs StopOnEmptyDataset(bool x) {
778 Attrs ret = *this;
779 ret.stop_on_empty_dataset_ = x;
780 return ret;
781 }
782
783 bool stop_on_empty_dataset_ = false;
784 };
785 DirectedInterleaveDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
786 selector_input_dataset, ::tensorflow::InputList
787 data_input_datasets, const DataTypeSlice&
788 output_types, const
789 gtl::ArraySlice<PartialTensorShape>& output_shapes);
790 DirectedInterleaveDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
791 selector_input_dataset, ::tensorflow::InputList
792 data_input_datasets, const DataTypeSlice&
793 output_types, const
794 gtl::ArraySlice<PartialTensorShape>& output_shapes,
795 const DirectedInterleaveDataset::Attrs& attrs);
796 operator ::tensorflow::Output() const { return handle; }
797 operator ::tensorflow::Input() const { return handle; }
798 ::tensorflow::Node* node() const { return handle.node(); }
799
800 static Attrs StopOnEmptyDataset(bool x) {
801 return Attrs().StopOnEmptyDataset(x);
802 }
803
804 Operation operation;
805 ::tensorflow::Output handle;
806};
807
808/// TODO: add doc.
809///
810/// Args:
811/// * scope: A Scope object
812///
813/// Returns:
814/// * `Output`: The handle tensor.
815class DummyIterationCounter {
816 public:
817 DummyIterationCounter(const ::tensorflow::Scope& scope);
818 operator ::tensorflow::Output() const { return handle; }
819 operator ::tensorflow::Input() const { return handle; }
820 ::tensorflow::Node* node() const { return handle.node(); }
821
822 Operation operation;
823 ::tensorflow::Output handle;
824};
825
826/// TODO: add doc.
827///
828/// Args:
829/// * scope: A Scope object
830///
831/// Returns:
832/// * `Output`: The handle tensor.
833class ExperimentalAssertNextDataset {
834 public:
835 ExperimentalAssertNextDataset(const ::tensorflow::Scope& scope,
836 ::tensorflow::Input input_dataset,
837 ::tensorflow::Input transformations, const
838 DataTypeSlice& output_types, const
839 gtl::ArraySlice<PartialTensorShape>&
840 output_shapes);
841 operator ::tensorflow::Output() const { return handle; }
842 operator ::tensorflow::Input() const { return handle; }
843 ::tensorflow::Node* node() const { return handle.node(); }
844
845 Operation operation;
846 ::tensorflow::Output handle;
847};
848
849/// Creates a dataset that shards the input dataset.
850///
851/// Creates a dataset that shards the input dataset by num_workers, returning a
852/// sharded dataset for the index-th worker. This attempts to automatically shard
853/// a dataset by examining the Dataset graph and inserting a shard op before the
854/// inputs to a reader Dataset (e.g. CSVDataset, TFRecordDataset).
855///
856/// This dataset will throw a NotFound error if we cannot shard the dataset
857/// automatically.
858///
859/// Args:
860/// * scope: A Scope object
861/// * input_dataset: A variant tensor representing the input dataset.
862/// * num_workers: A scalar representing the number of workers to distribute this dataset across.
863/// * index: A scalar representing the index of the current worker out of num_workers.
864///
865/// Returns:
866/// * `Output`: The handle tensor.
class ExperimentalAutoShardDataset {
 public:
  /// Optional attribute setters for ExperimentalAutoShardDataset
  struct Attrs {
    /// Defaults to 0
    // Returns a copy of *this with the auto_shard_policy attr set to x.
    TF_MUST_USE_RESULT Attrs AutoShardPolicy(int64 x) {
      Attrs ret = *this;
      ret.auto_shard_policy_ = x;
      return ret;
    }

    int64 auto_shard_policy_ = 0;
  };
  // Creates the op with default attrs.
  ExperimentalAutoShardDataset(const ::tensorflow::Scope& scope,
                             ::tensorflow::Input input_dataset,
                             ::tensorflow::Input num_workers,
                             ::tensorflow::Input index, const DataTypeSlice&
                             output_types, const
                             gtl::ArraySlice<PartialTensorShape>&
                             output_shapes);
  // Same as above, with explicitly supplied optional attrs.
  ExperimentalAutoShardDataset(const ::tensorflow::Scope& scope,
                             ::tensorflow::Input input_dataset,
                             ::tensorflow::Input num_workers,
                             ::tensorflow::Input index, const DataTypeSlice&
                             output_types, const
                             gtl::ArraySlice<PartialTensorShape>&
                             output_shapes, const
                             ExperimentalAutoShardDataset::Attrs& attrs);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // Shorthand for Attrs().AutoShardPolicy(x).
  static Attrs AutoShardPolicy(int64 x) {
    return Attrs().AutoShardPolicy(x);
  }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
906
907/// Records the bytes size of each element of `input_dataset` in a StatsAggregator.
908///
909/// Args:
910/// * scope: A Scope object
911///
912/// Returns:
913/// * `Output`: The handle tensor.
class ExperimentalBytesProducedStatsDataset {
 public:
  // Creates the op.
  ExperimentalBytesProducedStatsDataset(const ::tensorflow::Scope& scope,
                                      ::tensorflow::Input input_dataset,
                                      ::tensorflow::Input tag, const
                                      DataTypeSlice& output_types, const
                                      gtl::ArraySlice<PartialTensorShape>&
                                      output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
929
930/// TODO: add doc.
931///
932/// Args:
933/// * scope: A Scope object
934///
935/// Returns:
936/// * `Output`: The handle tensor.
class ExperimentalCSVDataset {
 public:
  // Wrapper for the ExperimentalCSVDataset op (presumably builds a dataset
  // from CSV files — confirm against the op registration; header doc is TODO).
  ExperimentalCSVDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                       filenames, ::tensorflow::Input compression_type,
                       ::tensorflow::Input buffer_size, ::tensorflow::Input
                       header, ::tensorflow::Input field_delim,
                       ::tensorflow::Input use_quote_delim, ::tensorflow::Input
                       na_value, ::tensorflow::Input select_cols,
                       ::tensorflow::InputList record_defaults, const
                       gtl::ArraySlice<PartialTensorShape>& output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
954
955/// TODO: add doc.
956///
957/// Args:
958/// * scope: A Scope object
959///
960/// Returns:
961/// * `Output`: The handle tensor.
class ExperimentalChooseFastestDataset {
 public:
  // Creates the op. Note `num_experiments` is a compile-time attr (plain
  // int64), not a graph input.
  ExperimentalChooseFastestDataset(const ::tensorflow::Scope& scope,
                                 ::tensorflow::InputList input_datasets, int64
                                 num_experiments, const DataTypeSlice&
                                 output_types, const
                                 gtl::ArraySlice<PartialTensorShape>&
                                 output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
977
/// Returns the cardinality of `input_dataset`.
///
982/// Args:
983/// * scope: A Scope object
984/// * input_dataset: A variant tensor representing the dataset to return cardinality for.
985///
986/// Returns:
987/// * `Output`: The cardinality of `input_dataset`. Named constants are used to represent
988/// infinite and unknown cardinality.
class ExperimentalDatasetCardinality {
 public:
  // Creates the op.
  ExperimentalDatasetCardinality(const ::tensorflow::Scope& scope,
                               ::tensorflow::Input input_dataset);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return cardinality; }
  operator ::tensorflow::Input() const { return cardinality; }
  // Graph node that produces `cardinality`.
  ::tensorflow::Node* node() const { return cardinality.node(); }

  // The underlying operation.
  Operation operation;
  // The cardinality of `input_dataset`; named constants represent infinite
  // and unknown cardinality.
  ::tensorflow::Output cardinality;
};
1000
1001/// Writes the given dataset to the given file using the TFRecord format.
1002///
1003/// Args:
1004/// * scope: A Scope object
1005/// * input_dataset: A variant tensor representing the dataset to write.
1006/// * filename: A scalar string tensor representing the filename to use.
1007/// * compression_type: A scalar string tensor containing either (i) the empty string (no
1008/// compression), (ii) "ZLIB", or (iii) "GZIP".
1009///
1010/// Returns:
1011/// * the created `Operation`
class ExperimentalDatasetToTFRecord {
 public:
  // Creates the op. This op has no tensor output; it is used for its side
  // effect of writing `input_dataset` to `filename`.
  ExperimentalDatasetToTFRecord(const ::tensorflow::Scope& scope,
                              ::tensorflow::Input input_dataset,
                              ::tensorflow::Input filename, ::tensorflow::Input
                              compression_type);
  // Allow the wrapper to be used wherever an Operation is expected
  // (e.g. as a control dependency).
  operator ::tensorflow::Operation() const { return operation; }

  // The underlying operation.
  Operation operation;
};
1022
1023/// Creates a dataset that batches input elements into a SparseTensor.
1024///
1025/// Args:
1026/// * scope: A Scope object
1027/// * input_dataset: A handle to an input dataset. Must have a single component.
1028/// * batch_size: A scalar representing the number of elements to accumulate in a
1029/// batch.
1030/// * row_shape: A vector representing the dense shape of each row in the produced
1031/// SparseTensor. The shape may be partially specified, using `-1` to indicate
1032/// that a particular dimension should use the maximum size of all batch elements.
1033///
1034/// Returns:
1035/// * `Output`: The handle tensor.
class ExperimentalDenseToSparseBatchDataset {
 public:
  // Creates the op.
  ExperimentalDenseToSparseBatchDataset(const ::tensorflow::Scope& scope,
                                      ::tensorflow::Input input_dataset,
                                      ::tensorflow::Input batch_size,
                                      ::tensorflow::Input row_shape, const
                                      DataTypeSlice& output_types, const
                                      gtl::ArraySlice<PartialTensorShape>&
                                      output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1052
1053/// A substitute for `InterleaveDataset` on a fixed list of `N` datasets.
1054///
1055/// Args:
1056/// * scope: A Scope object
1057/// * selector_input_dataset: A dataset of scalar `DT_INT64` elements that determines which of the
1058/// `N` data inputs should produce the next output element.
1059/// * data_input_datasets: `N` datasets with the same type that will be interleaved according to
1060/// the values of `selector_input_dataset`.
1061///
1062/// Returns:
1063/// * `Output`: The handle tensor.
class ExperimentalDirectedInterleaveDataset {
 public:
  // Creates the op.
  ExperimentalDirectedInterleaveDataset(const ::tensorflow::Scope& scope,
                                      ::tensorflow::Input
                                      selector_input_dataset,
                                      ::tensorflow::InputList
                                      data_input_datasets, const DataTypeSlice&
                                      output_types, const
                                      gtl::ArraySlice<PartialTensorShape>&
                                      output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1081
/// Creates a dataset that computes a group-by on `input_dataset`.
///
1086/// Args:
1087/// * scope: A Scope object
1088/// * input_dataset: A variant tensor representing the input dataset.
1089/// * key_func_other_arguments: A list of tensors, typically values that were captured when
1090/// building a closure for `key_func`.
1091/// * init_func_other_arguments: A list of tensors, typically values that were captured when
1092/// building a closure for `init_func`.
1093/// * reduce_func_other_arguments: A list of tensors, typically values that were captured when
1094/// building a closure for `reduce_func`.
1095/// * finalize_func_other_arguments: A list of tensors, typically values that were captured when
1096/// building a closure for `finalize_func`.
1097/// * key_func: A function mapping an element of `input_dataset`, concatenated
1098/// with `key_func_other_arguments` to a scalar value of type DT_INT64.
1099/// * init_func: A function mapping a key of type DT_INT64, concatenated with
1100/// `init_func_other_arguments` to the initial reducer state.
1101/// * reduce_func: A function mapping the current reducer state and an element of `input_dataset`,
1102/// concatenated with `reduce_func_other_arguments` to a new reducer state.
1103/// * finalize_func: A function mapping the final reducer state to an output element.
1104///
1105/// Returns:
1106/// * `Output`: The handle tensor.
class ExperimentalGroupByReducerDataset {
 public:
  // Creates the op. The four NameAttrList arguments name functions in the
  // graph's function library (key/init/reduce/finalize of the reducer).
  ExperimentalGroupByReducerDataset(const ::tensorflow::Scope& scope,
                                  ::tensorflow::Input input_dataset,
                                  ::tensorflow::InputList
                                  key_func_other_arguments,
                                  ::tensorflow::InputList
                                  init_func_other_arguments,
                                  ::tensorflow::InputList
                                  reduce_func_other_arguments,
                                  ::tensorflow::InputList
                                  finalize_func_other_arguments, const
                                  NameAttrList& key_func, const NameAttrList&
                                  init_func, const NameAttrList& reduce_func,
                                  const NameAttrList& finalize_func, const
                                  DataTypeSlice& output_types, const
                                  gtl::ArraySlice<PartialTensorShape>&
                                  output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1132
1133/// Creates a dataset that computes a windowed group-by on `input_dataset`.
1134///
1135/// // TODO(mrry): Support non-int64 keys.
1136///
1137/// Args:
1138/// * scope: A Scope object
1139/// * key_func: A function mapping an element of `input_dataset`, concatenated
1140/// with `key_func_other_arguments` to a scalar value of type DT_INT64.
1141///
1142/// Returns:
1143/// * `Output`: The handle tensor.
class ExperimentalGroupByWindowDataset {
 public:
  // Creates the op. The NameAttrList arguments name functions in the graph's
  // function library (key/reduce/window-size of the windowed group-by).
  ExperimentalGroupByWindowDataset(const ::tensorflow::Scope& scope,
                                 ::tensorflow::Input input_dataset,
                                 ::tensorflow::InputList
                                 key_func_other_arguments,
                                 ::tensorflow::InputList
                                 reduce_func_other_arguments,
                                 ::tensorflow::InputList
                                 window_size_func_other_arguments, const
                                 NameAttrList& key_func, const NameAttrList&
                                 reduce_func, const NameAttrList&
                                 window_size_func, const DataTypeSlice&
                                 output_types, const
                                 gtl::ArraySlice<PartialTensorShape>&
                                 output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1167
1168/// Creates a dataset that contains the elements of `input_dataset` ignoring errors.
1169///
1170/// Args:
1171/// * scope: A Scope object
1172///
1173/// Returns:
1174/// * `Output`: The handle tensor.
class ExperimentalIgnoreErrorsDataset {
 public:
  /// Optional attribute setters for ExperimentalIgnoreErrorsDataset
  struct Attrs {
    /// Defaults to false
    // Returns a copy of *this with the log_warning attr set to x.
    TF_MUST_USE_RESULT Attrs LogWarning(bool x) {
      Attrs ret = *this;
      ret.log_warning_ = x;
      return ret;
    }

    bool log_warning_ = false;
  };
  // Creates the op with default attrs.
  ExperimentalIgnoreErrorsDataset(const ::tensorflow::Scope& scope,
                                ::tensorflow::Input input_dataset, const
                                DataTypeSlice& output_types, const
                                gtl::ArraySlice<PartialTensorShape>&
                                output_shapes);
  // Same as above, with explicitly supplied optional attrs.
  ExperimentalIgnoreErrorsDataset(const ::tensorflow::Scope& scope,
                                ::tensorflow::Input input_dataset, const
                                DataTypeSlice& output_types, const
                                gtl::ArraySlice<PartialTensorShape>&
                                output_shapes, const
                                ExperimentalIgnoreErrorsDataset::Attrs& attrs);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // Shorthand for Attrs().LogWarning(x).
  static Attrs LogWarning(bool x) {
    return Attrs().LogWarning(x);
  }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1210
1211/// Returns the name of the device on which `resource` has been placed.
1212///
1213/// Args:
1214/// * scope: A Scope object
1215///
1216/// Returns:
1217/// * `Output`: The device tensor.
class ExperimentalIteratorGetDevice {
 public:
  // Creates the op.
  ExperimentalIteratorGetDevice(const ::tensorflow::Scope& scope,
                              ::tensorflow::Input resource);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return device; }
  operator ::tensorflow::Input() const { return device; }
  // Graph node that produces `device`.
  ::tensorflow::Node* node() const { return device.node(); }

  // The underlying operation.
  Operation operation;
  // The name of the device on which `resource` has been placed.
  ::tensorflow::Output device;
};
1229
1230/// TODO: add doc.
1231///
1232/// Args:
1233/// * scope: A Scope object
1234///
1235/// Returns:
1236/// * `Output`: The handle tensor.
class ExperimentalLMDBDataset {
 public:
  // Wrapper for the ExperimentalLMDBDataset op (presumably builds a dataset
  // from LMDB files — confirm against the op registration; header doc is TODO).
  ExperimentalLMDBDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                        filenames, const DataTypeSlice& output_types, const
                        gtl::ArraySlice<PartialTensorShape>& output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1249
1250/// Records the latency of producing `input_dataset` elements in a StatsAggregator.
1251///
1252/// Args:
1253/// * scope: A Scope object
1254///
1255/// Returns:
1256/// * `Output`: The handle tensor.
class ExperimentalLatencyStatsDataset {
 public:
  // Creates the op.
  ExperimentalLatencyStatsDataset(const ::tensorflow::Scope& scope,
                                ::tensorflow::Input input_dataset,
                                ::tensorflow::Input tag, const DataTypeSlice&
                                output_types, const
                                gtl::ArraySlice<PartialTensorShape>&
                                output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1272
1273/// Creates a dataset that fuses mapping with batching.
1274///
1275/// Creates a dataset that applies `f` to the outputs of `input_dataset` and then
1276/// batches `batch_size` of them.
1277///
1278/// Unlike a "MapDataset", which applies `f` sequentially, this dataset invokes up
1279/// to `batch_size * num_parallel_batches` copies of `f` in parallel.
1280///
1281/// Args:
1282/// * scope: A Scope object
1283/// * input_dataset: A variant tensor representing the input dataset.
1284/// * other_arguments: A list of tensors, typically values that were captured when building a closure
1285/// for `f`.
1286/// * batch_size: A scalar representing the number of elements to accumulate in a
1287/// batch. It determines the number of concurrent invocations of `f` that process
1288/// elements from `input_dataset` in parallel.
1289/// * num_parallel_calls: A scalar representing the maximum number of parallel invocations of the `map_fn`
1290/// function. Applying the `map_fn` on consecutive input elements in parallel has
1291/// the potential to improve input pipeline throughput.
1292/// * drop_remainder: A scalar representing whether the last batch should be dropped in case its size
1293/// is smaller than desired.
1294/// * f: A function to apply to the outputs of `input_dataset`.
1295///
1296/// Returns:
1297/// * `Output`: The handle tensor.
class ExperimentalMapAndBatchDataset {
 public:
  /// Optional attribute setters for ExperimentalMapAndBatchDataset
  struct Attrs {
    /// Defaults to false
    // Returns a copy of *this with the preserve_cardinality attr set to x.
    TF_MUST_USE_RESULT Attrs PreserveCardinality(bool x) {
      Attrs ret = *this;
      ret.preserve_cardinality_ = x;
      return ret;
    }

    bool preserve_cardinality_ = false;
  };
  // Creates the op with default attrs. `f` names a function in the graph's
  // function library.
  ExperimentalMapAndBatchDataset(const ::tensorflow::Scope& scope,
                               ::tensorflow::Input input_dataset,
                               ::tensorflow::InputList other_arguments,
                               ::tensorflow::Input batch_size,
                               ::tensorflow::Input num_parallel_calls,
                               ::tensorflow::Input drop_remainder, const
                               NameAttrList& f, const DataTypeSlice&
                               output_types, const
                               gtl::ArraySlice<PartialTensorShape>&
                               output_shapes);
  // Same as above, with explicitly supplied optional attrs.
  ExperimentalMapAndBatchDataset(const ::tensorflow::Scope& scope,
                               ::tensorflow::Input input_dataset,
                               ::tensorflow::InputList other_arguments,
                               ::tensorflow::Input batch_size,
                               ::tensorflow::Input num_parallel_calls,
                               ::tensorflow::Input drop_remainder, const
                               NameAttrList& f, const DataTypeSlice&
                               output_types, const
                               gtl::ArraySlice<PartialTensorShape>&
                               output_shapes, const
                               ExperimentalMapAndBatchDataset::Attrs& attrs);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // Shorthand for Attrs().PreserveCardinality(x).
  static Attrs PreserveCardinality(bool x) {
    return Attrs().PreserveCardinality(x);
  }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1343
1344/// Creates a dataset that applies `f` to the outputs of `input_dataset`.
1345///
1346/// Args:
1347/// * scope: A Scope object
1348///
1349/// Returns:
1350/// * `Output`: The handle tensor.
class ExperimentalMapDataset {
 public:
  /// Optional attribute setters for ExperimentalMapDataset
  struct Attrs {
    /// Defaults to true
    // Returns a copy of *this with the use_inter_op_parallelism attr set to x.
    TF_MUST_USE_RESULT Attrs UseInterOpParallelism(bool x) {
      Attrs ret = *this;
      ret.use_inter_op_parallelism_ = x;
      return ret;
    }

    /// Defaults to false
    // Returns a copy of *this with the preserve_cardinality attr set to x.
    TF_MUST_USE_RESULT Attrs PreserveCardinality(bool x) {
      Attrs ret = *this;
      ret.preserve_cardinality_ = x;
      return ret;
    }

    bool use_inter_op_parallelism_ = true;
    bool preserve_cardinality_ = false;
  };
  // Creates the op with default attrs. `f` names a function in the graph's
  // function library.
  ExperimentalMapDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                       input_dataset, ::tensorflow::InputList other_arguments,
                       const NameAttrList& f, const DataTypeSlice&
                       output_types, const gtl::ArraySlice<PartialTensorShape>&
                       output_shapes);
  // Same as above, with explicitly supplied optional attrs.
  ExperimentalMapDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                       input_dataset, ::tensorflow::InputList other_arguments,
                       const NameAttrList& f, const DataTypeSlice&
                       output_types, const gtl::ArraySlice<PartialTensorShape>&
                       output_shapes, const ExperimentalMapDataset::Attrs&
                       attrs);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // Shorthand for Attrs().UseInterOpParallelism(x).
  static Attrs UseInterOpParallelism(bool x) {
    return Attrs().UseInterOpParallelism(x);
  }
  // Shorthand for Attrs().PreserveCardinality(x).
  static Attrs PreserveCardinality(bool x) {
    return Attrs().PreserveCardinality(x);
  }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1397
1398/// TODO: add doc.
1399///
1400/// Args:
1401/// * scope: A Scope object
1402///
1403/// Returns:
1404/// * `Output`: The handle tensor.
class ExperimentalMatchingFilesDataset {
 public:
  // Wrapper for the ExperimentalMatchingFilesDataset op (presumably yields
  // filenames matching `patterns` — confirm against the op registration;
  // header doc is TODO).
  ExperimentalMatchingFilesDataset(const ::tensorflow::Scope& scope,
                                 ::tensorflow::Input patterns);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1416
1417/// Creates a dataset that overrides the maximum intra-op parallelism.
1418///
1419/// Args:
1420/// * scope: A Scope object
1421/// * max_intra_op_parallelism: Identifies the maximum intra-op parallelism to use.
1422///
1423/// Returns:
1424/// * `Output`: The handle tensor.
class ExperimentalMaxIntraOpParallelismDataset {
 public:
  // Creates the op.
  ExperimentalMaxIntraOpParallelismDataset(const ::tensorflow::Scope& scope,
                                         ::tensorflow::Input input_dataset,
                                         ::tensorflow::Input
                                         max_intra_op_parallelism, const
                                         DataTypeSlice& output_types, const
                                         gtl::ArraySlice<PartialTensorShape>&
                                         output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1441
1442/// TODO: add doc.
1443///
1444/// Args:
1445/// * scope: A Scope object
1446///
1447/// Returns:
1448/// * `Output`: The handle tensor.
class ExperimentalNonSerializableDataset {
 public:
  // Creates the op.
  ExperimentalNonSerializableDataset(const ::tensorflow::Scope& scope,
                                   ::tensorflow::Input input_dataset, const
                                   DataTypeSlice& output_types, const
                                   gtl::ArraySlice<PartialTensorShape>&
                                   output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1463
1464/// Creates a dataset that applies `f` to the outputs of `input_dataset`.
1465///
1466/// The resulting dataset is similar to the `InterleaveDataset`, with the exception
1467/// that if retrieving the next value from a dataset would cause the requester to
1468/// block, it will skip that input dataset. This dataset is especially useful
/// when loading data from variable-latency datastores (e.g. HDFS, GCS), as it
1470/// allows the training step to proceed so long as some data is available.
1471///
1472/// !! WARNING !! This dataset is not deterministic!
1473///
1474/// Args:
1475/// * scope: A Scope object
1476/// * f: A function mapping elements of `input_dataset`, concatenated with
1477/// `other_arguments`, to a Dataset variant that contains elements matching
1478/// `output_types` and `output_shapes`.
1479///
1480/// Returns:
1481/// * `Output`: The handle tensor.
class ExperimentalParallelInterleaveDataset {
 public:
  // Creates the op. `f` names a function in the graph's function library;
  // note this op is not deterministic (see class comment).
  ExperimentalParallelInterleaveDataset(const ::tensorflow::Scope& scope,
                                      ::tensorflow::Input input_dataset,
                                      ::tensorflow::InputList other_arguments,
                                      ::tensorflow::Input cycle_length,
                                      ::tensorflow::Input block_length,
                                      ::tensorflow::Input sloppy,
                                      ::tensorflow::Input
                                      buffer_output_elements,
                                      ::tensorflow::Input
                                      prefetch_input_elements, const
                                      NameAttrList& f, const DataTypeSlice&
                                      output_types, const
                                      gtl::ArraySlice<PartialTensorShape>&
                                      output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1505
1506/// Transforms `input_dataset` containing `Example` protos as vectors of DT_STRING into a dataset of `Tensor` or `SparseTensor` objects representing the parsed features.
1507///
1508/// Args:
1509/// * scope: A Scope object
1510/// * dense_defaults: A dict mapping string keys to `Tensor`s.
1511/// The keys of the dict must match the dense_keys of the feature.
1512/// * sparse_keys: A list of string keys in the examples features.
1513/// The results for these keys will be returned as `SparseTensor` objects.
1514/// * dense_keys: A list of Ndense string Tensors (scalars).
1515/// The keys expected in the Examples features associated with dense values.
1516/// * sparse_types: A list of `DTypes` of the same length as `sparse_keys`.
1517/// Only `tf.float32` (`FloatList`), `tf.int64` (`Int64List`),
1518/// and `tf.string` (`BytesList`) are supported.
1519/// * dense_shapes: List of tuples with the same length as `dense_keys`.
1520/// The shape of the data for each dense feature referenced by `dense_keys`.
1521/// Required for any input tensors identified by `dense_keys`. Must be
1522/// either fully defined, or may contain an unknown first dimension.
1523/// An unknown first dimension means the feature is treated as having
1524/// a variable number of blocks, and the output shape along this dimension
1525/// is considered unknown at graph build time. Padding is applied for
1526/// minibatch elements smaller than the maximum number of blocks for the
1527/// given feature along this dimension.
1528/// * output_types: The type list for the return values.
1529/// * output_shapes: The list of shapes being produced.
1530///
1531/// Returns:
1532/// * `Output`: The handle tensor.
class ExperimentalParseExampleDataset {
 public:
  /// Optional attribute setters for ExperimentalParseExampleDataset
  struct Attrs {
    /// Defaults to false
    // Returns a copy of *this with the sloppy attr set to x.
    TF_MUST_USE_RESULT Attrs Sloppy(bool x) {
      Attrs ret = *this;
      ret.sloppy_ = x;
      return ret;
    }

    bool sloppy_ = false;
  };
  // Creates the op with default attrs.
  ExperimentalParseExampleDataset(const ::tensorflow::Scope& scope,
                                ::tensorflow::Input input_dataset,
                                ::tensorflow::Input num_parallel_calls,
                                ::tensorflow::InputList dense_defaults, const
                                gtl::ArraySlice<::tensorflow::tstring>&
                                sparse_keys, const
                                gtl::ArraySlice<::tensorflow::tstring>&
                                dense_keys, const DataTypeSlice& sparse_types,
                                const gtl::ArraySlice<PartialTensorShape>&
                                dense_shapes, const DataTypeSlice&
                                output_types, const
                                gtl::ArraySlice<PartialTensorShape>&
                                output_shapes);
  // Same as above, with explicitly supplied optional attrs.
  ExperimentalParseExampleDataset(const ::tensorflow::Scope& scope,
                                ::tensorflow::Input input_dataset,
                                ::tensorflow::Input num_parallel_calls,
                                ::tensorflow::InputList dense_defaults, const
                                gtl::ArraySlice<::tensorflow::tstring>&
                                sparse_keys, const
                                gtl::ArraySlice<::tensorflow::tstring>&
                                dense_keys, const DataTypeSlice& sparse_types,
                                const gtl::ArraySlice<PartialTensorShape>&
                                dense_shapes, const DataTypeSlice&
                                output_types, const
                                gtl::ArraySlice<PartialTensorShape>&
                                output_shapes, const
                                ExperimentalParseExampleDataset::Attrs& attrs);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // Shorthand for Attrs().Sloppy(x).
  static Attrs Sloppy(bool x) {
    return Attrs().Sloppy(x);
  }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1584
1585/// Creates a dataset that uses a custom thread pool to compute `input_dataset`.
1586///
1587/// Args:
1588/// * scope: A Scope object
1589/// * num_threads: Identifies the number of threads to use for the private threadpool.
1590///
1591/// Returns:
1592/// * `Output`: The handle tensor.
class ExperimentalPrivateThreadPoolDataset {
 public:
  // Creates the op.
  ExperimentalPrivateThreadPoolDataset(const ::tensorflow::Scope& scope,
                                     ::tensorflow::Input input_dataset,
                                     ::tensorflow::Input num_threads, const
                                     DataTypeSlice& output_types, const
                                     gtl::ArraySlice<PartialTensorShape>&
                                     output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1608
1609/// Creates a Dataset that returns pseudorandom numbers.
1610///
1611/// Args:
1612/// * scope: A Scope object
1613/// * seed: A scalar seed for the random number generator. If either seed or
1614/// seed2 is set to be non-zero, the random number generator is seeded
1615/// by the given seed. Otherwise, a random seed is used.
1616/// * seed2: A second scalar seed to avoid seed collision.
1617///
1618/// Returns:
1619/// * `Output`: The handle tensor.
class ExperimentalRandomDataset {
 public:
  // Creates the op; see the class comment for the seed/seed2 contract.
  ExperimentalRandomDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                          seed, ::tensorflow::Input seed2, const DataTypeSlice&
                          output_types, const
                          gtl::ArraySlice<PartialTensorShape>& output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1633
1634/// Creates a dataset that changes the batch size.
1635///
1636/// Creates a dataset that changes the batch size of the dataset to current batch
1637/// size // num_replicas.
1638///
1639/// Args:
1640/// * scope: A Scope object
1641/// * input_dataset: A variant tensor representing the input dataset.
1642/// * num_replicas: A scalar representing the number of replicas to distribute this batch across. As
1643/// a result of this transformation the current batch size would end up being
1644/// divided by this parameter.
1645///
1646/// Returns:
1647/// * `Output`: The handle tensor.
class ExperimentalRebatchDataset {
 public:
  /// Optional attribute setters for ExperimentalRebatchDataset
  struct Attrs {
    /// Defaults to true
    // Returns a copy of *this with the use_fallback attr set to x.
    TF_MUST_USE_RESULT Attrs UseFallback(bool x) {
      Attrs ret = *this;
      ret.use_fallback_ = x;
      return ret;
    }

    bool use_fallback_ = true;
  };
  // Creates the op with default attrs.
  ExperimentalRebatchDataset(const ::tensorflow::Scope& scope,
                           ::tensorflow::Input input_dataset,
                           ::tensorflow::Input num_replicas, const
                           DataTypeSlice& output_types, const
                           gtl::ArraySlice<PartialTensorShape>& output_shapes);
  // Same as above, with explicitly supplied optional attrs.
  ExperimentalRebatchDataset(const ::tensorflow::Scope& scope,
                           ::tensorflow::Input input_dataset,
                           ::tensorflow::Input num_replicas, const
                           DataTypeSlice& output_types, const
                           gtl::ArraySlice<PartialTensorShape>& output_shapes,
                           const ExperimentalRebatchDataset::Attrs& attrs);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // Shorthand for Attrs().UseFallback(x).
  static Attrs UseFallback(bool x) {
    return Attrs().UseFallback(x);
  }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1683
1684/// Creates a dataset successively reduces `f` over the elements of `input_dataset`.
1685///
1686/// Args:
1687/// * scope: A Scope object
1688///
1689/// Returns:
1690/// * `Output`: The handle tensor.
class ExperimentalScanDataset {
 public:
  /// Optional attribute setters for ExperimentalScanDataset
  struct Attrs {
    /// Defaults to false
    // Returns a copy of *this with the preserve_cardinality attr set to x.
    TF_MUST_USE_RESULT Attrs PreserveCardinality(bool x) {
      Attrs ret = *this;
      ret.preserve_cardinality_ = x;
      return ret;
    }

    bool preserve_cardinality_ = false;
  };
  // Creates the op with default attrs. `f` names a function in the graph's
  // function library.
  ExperimentalScanDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                        input_dataset, ::tensorflow::InputList initial_state,
                        ::tensorflow::InputList other_arguments, const
                        NameAttrList& f, const DataTypeSlice& output_types,
                        const gtl::ArraySlice<PartialTensorShape>&
                        output_shapes);
  // Same as above, with explicitly supplied optional attrs.
  ExperimentalScanDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                        input_dataset, ::tensorflow::InputList initial_state,
                        ::tensorflow::InputList other_arguments, const
                        NameAttrList& f, const DataTypeSlice& output_types,
                        const gtl::ArraySlice<PartialTensorShape>&
                        output_shapes, const ExperimentalScanDataset::Attrs&
                        attrs);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // Shorthand for Attrs().PreserveCardinality(x).
  static Attrs PreserveCardinality(bool x) {
    return Attrs().PreserveCardinality(x);
  }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1728
1729/// TODO: add doc.
1730///
1731/// Args:
1732/// * scope: A Scope object
1733///
1734/// Returns:
1735/// * `Output`: The handle tensor.
class ExperimentalSetStatsAggregatorDataset {
 public:
  // Creates the op.
  ExperimentalSetStatsAggregatorDataset(const ::tensorflow::Scope& scope,
                                      ::tensorflow::Input input_dataset,
                                      ::tensorflow::Input stats_aggregator,
                                      ::tensorflow::Input tag,
                                      ::tensorflow::Input counter_prefix, const
                                      DataTypeSlice& output_types, const
                                      gtl::ArraySlice<PartialTensorShape>&
                                      output_shapes);
  // Allow the op's output to be used directly as an Output, or as the Input
  // of a downstream op.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  // Graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  // The underlying operation.
  Operation operation;
  // The output handle tensor.
  ::tensorflow::Output handle;
};
1753
1754/// TODO: add doc.
1755///
1756/// Args:
1757/// * scope: A Scope object
1758///
1759/// Returns:
1760/// * `Output`: The handle tensor.
class ExperimentalSleepDataset {
 public:
  /// Builds the op in `scope` from the given inputs and output signature attrs.
  ExperimentalSleepDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                         input_dataset, ::tensorflow::Input sleep_microseconds,
                         const DataTypeSlice& output_types, const
                         gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
1774
1775/// Creates a dataset that passes a sliding window over `input_dataset`.
1776///
1777/// Args:
1778/// * scope: A Scope object
1779/// * window_size: A scalar representing the number of elements in the
1780/// sliding window.
1781/// * window_shift: A scalar representing the steps moving the sliding window
1782/// forward in one iteration. It must be positive.
1783/// * window_stride: A scalar representing the stride of the input elements of the sliding window.
1784/// It must be positive.
1785///
1786/// Returns:
1787/// * `Output`: The handle tensor.
class ExperimentalSlidingWindowDataset {
 public:
  /// Builds the op in `scope` from the given inputs and output signature attrs.
  ExperimentalSlidingWindowDataset(const ::tensorflow::Scope& scope,
                                 ::tensorflow::Input input_dataset,
                                 ::tensorflow::Input window_size,
                                 ::tensorflow::Input window_shift,
                                 ::tensorflow::Input window_stride, const
                                 DataTypeSlice& output_types, const
                                 gtl::ArraySlice<PartialTensorShape>&
                                 output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
1805
1806/// Creates a dataset that executes a SQL query and emits rows of the result set.
1807///
1808/// Args:
1809/// * scope: A Scope object
1810/// * driver_name: The database type. Currently, the only supported type is 'sqlite'.
1811/// * data_source_name: A connection string to connect to the database.
1812/// * query: A SQL query to execute.
1813///
1814/// Returns:
1815/// * `Output`: The handle tensor.
class ExperimentalSqlDataset {
 public:
  /// Builds the op in `scope` from the given inputs and output signature attrs.
  ExperimentalSqlDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                       driver_name, ::tensorflow::Input data_source_name,
                       ::tensorflow::Input query, const DataTypeSlice&
                       output_types, const gtl::ArraySlice<PartialTensorShape>&
                       output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
1830
1831/// Creates a statistics manager resource.
1832///
1833/// Args:
1834/// * scope: A Scope object
1835///
1836/// Returns:
1837/// * `Output`: The handle tensor.
class ExperimentalStatsAggregatorHandle {
 public:
  /// Optional attribute setters for ExperimentalStatsAggregatorHandle
  struct Attrs {
    // Each setter returns a modified copy of *this (builder pattern),
    // so calls can be chained; TF_MUST_USE_RESULT guards against
    // accidentally discarding the returned copy.

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Container(StringPiece x) {
      Attrs ret = *this;
      ret.container_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs SharedName(StringPiece x) {
      Attrs ret = *this;
      ret.shared_name_ = x;
      return ret;
    }

    StringPiece container_ = "";
    StringPiece shared_name_ = "";
  };
  /// Builds the op in `scope` with default attributes.
  ExperimentalStatsAggregatorHandle(const ::tensorflow::Scope& scope);
  /// Builds the op in `scope` with explicit optional attributes.
  ExperimentalStatsAggregatorHandle(const ::tensorflow::Scope& scope, const
                                  ExperimentalStatsAggregatorHandle::Attrs&
                                  attrs);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Shorthands that start an Attrs chain with a single attribute set.
  static Attrs Container(StringPiece x) {
    return Attrs().Container(x);
  }
  static Attrs SharedName(StringPiece x) {
    return Attrs().SharedName(x);
  }

  /// The underlying operation.
  Operation operation;
  /// The output resource handle tensor.
  ::tensorflow::Output handle;
};
1877
1878/// Produces a summary of any statistics recorded by the given statistics manager.
1879///
1880/// Args:
1881/// * scope: A Scope object
1882///
1883/// Returns:
1884/// * `Output`: The summary tensor.
class ExperimentalStatsAggregatorSummary {
 public:
  /// Builds the op in `scope` from the given iterator input.
  ExperimentalStatsAggregatorSummary(const ::tensorflow::Scope& scope,
                                   ::tensorflow::Input iterator);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return summary; }
  operator ::tensorflow::Input() const { return summary; }
  /// The graph node that produces `summary`.
  ::tensorflow::Node* node() const { return summary.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output summary tensor.
  ::tensorflow::Output summary;
};
1896
/// Creates a dataset that stops iteration when `predicate` is false.
1898///
1899/// The `predicate` function must return a scalar boolean and accept the
1900/// following arguments:
1901///
1902/// * One tensor for each component of an element of `input_dataset`.
1903/// * One tensor for each value in `other_arguments`.
1904///
1905/// Args:
1906/// * scope: A Scope object
1907/// * other_arguments: A list of tensors, typically values that were captured when
1908/// building a closure for `predicate`.
1909/// * predicate: A function returning a scalar boolean.
1910///
1911/// Returns:
1912/// * `Output`: The handle tensor.
class ExperimentalTakeWhileDataset {
 public:
  /// Builds the op in `scope` from the given inputs, predicate function and
  /// output signature attrs.
  ExperimentalTakeWhileDataset(const ::tensorflow::Scope& scope,
                             ::tensorflow::Input input_dataset,
                             ::tensorflow::InputList other_arguments, const
                             NameAttrList& predicate, const DataTypeSlice&
                             output_types, const
                             gtl::ArraySlice<PartialTensorShape>&
                             output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
1929
1930/// Creates a dataset that uses a custom thread pool to compute `input_dataset`.
1931///
1932/// Args:
1933/// * scope: A Scope object
1934/// * thread_pool: A resource produced by the ThreadPoolHandle op.
1935///
1936/// Returns:
1937/// * `Output`: The handle tensor.
class ExperimentalThreadPoolDataset {
 public:
  /// Builds the op in `scope` from the given inputs and output signature attrs.
  ExperimentalThreadPoolDataset(const ::tensorflow::Scope& scope,
                              ::tensorflow::Input input_dataset,
                              ::tensorflow::Input thread_pool, const
                              DataTypeSlice& output_types, const
                              gtl::ArraySlice<PartialTensorShape>&
                              output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
1953
1954/// Creates a dataset that uses a custom thread pool to compute `input_dataset`.
1955///
1956/// Args:
1957/// * scope: A Scope object
1958/// * num_threads: The number of threads in the thread pool.
/// * display_name: A human-readable name for the threads that may be visible in some
/// visualizations of the threadpool.
1962///
1963/// Optional attributes (see `Attrs`):
1964/// * max_intra_op_parallelism: The maximum degree of parallelism to use within operations that execute on this
1965/// threadpool.
1966///
1967/// Returns:
1968/// * `Output`: A resource that can be consumed by one or more ExperimentalThreadPoolDataset
1969/// ops.
class ExperimentalThreadPoolHandle {
 public:
  /// Optional attribute setters for ExperimentalThreadPoolHandle
  struct Attrs {
    // Each setter returns a modified copy of *this (builder pattern),
    // so calls can be chained; TF_MUST_USE_RESULT guards against
    // accidentally discarding the returned copy.

    /// The maximum degree of parallelism to use within operations that execute on this
    /// threadpool.
    ///
    /// Defaults to 1
    TF_MUST_USE_RESULT Attrs MaxIntraOpParallelism(int64 x) {
      Attrs ret = *this;
      ret.max_intra_op_parallelism_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Container(StringPiece x) {
      Attrs ret = *this;
      ret.container_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs SharedName(StringPiece x) {
      Attrs ret = *this;
      ret.shared_name_ = x;
      return ret;
    }

    int64 max_intra_op_parallelism_ = 1;
    StringPiece container_ = "";
    StringPiece shared_name_ = "";
  };
  /// Builds the op in `scope` with default attributes.
  ExperimentalThreadPoolHandle(const ::tensorflow::Scope& scope, int64
                             num_threads, StringPiece display_name);
  /// Builds the op in `scope` with explicit optional attributes.
  ExperimentalThreadPoolHandle(const ::tensorflow::Scope& scope, int64
                             num_threads, StringPiece display_name, const
                             ExperimentalThreadPoolHandle::Attrs& attrs);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Shorthands that start an Attrs chain with a single attribute set.
  static Attrs MaxIntraOpParallelism(int64 x) {
    return Attrs().MaxIntraOpParallelism(x);
  }
  static Attrs Container(StringPiece x) {
    return Attrs().Container(x);
  }
  static Attrs SharedName(StringPiece x) {
    return Attrs().SharedName(x);
  }

  /// The underlying operation.
  Operation operation;
  /// The output resource handle tensor.
  ::tensorflow::Output handle;
};
2024
2025/// A dataset that splits the elements of its input into multiple elements.
2026///
2027/// Args:
2028/// * scope: A Scope object
2029///
2030/// Returns:
2031/// * `Output`: The handle tensor.
class ExperimentalUnbatchDataset {
 public:
  /// Builds the op in `scope` from the given inputs and output signature attrs.
  ExperimentalUnbatchDataset(const ::tensorflow::Scope& scope,
                           ::tensorflow::Input input_dataset, const
                           DataTypeSlice& output_types, const
                           gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2045
2046/// Creates a dataset that contains the unique elements of `input_dataset`.
2047///
2048/// Args:
2049/// * scope: A Scope object
2050///
2051/// Returns:
2052/// * `Output`: The handle tensor.
class ExperimentalUniqueDataset {
 public:
  /// Builds the op in `scope` from the given inputs and output signature attrs.
  ExperimentalUniqueDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                          input_dataset, const DataTypeSlice& output_types,
                          const gtl::ArraySlice<PartialTensorShape>&
                          output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2066
2067/// Gets the element at the specified index in a dataset.
2068///
2069/// Args:
2070/// * scope: A Scope object
2071///
2072/// Returns:
2073/// * `OutputList`: The components tensor.
class GetElementAtIndex {
 public:
  /// Builds the op in `scope` from the given inputs and output signature attrs.
  GetElementAtIndex(const ::tensorflow::Scope& scope, ::tensorflow::Input
                  dataset, ::tensorflow::Input index, const DataTypeSlice&
                  output_types, const gtl::ArraySlice<PartialTensorShape>&
                  output_shapes);
  /// Accesses the index-th output component tensor.
  ::tensorflow::Output operator[](size_t index) const { return components[index]; }


  /// The underlying operation.
  Operation operation;
  /// The list of output component tensors.
  ::tensorflow::OutputList components;
};
2086
2087/// Creates a dataset that computes a group-by on `input_dataset`.
2088///
2089/// Creates a dataset that computes a group-by on `input_dataset`.
2090///
2091/// Args:
2092/// * scope: A Scope object
2093/// * input_dataset: A variant tensor representing the input dataset.
2094/// * key_func_other_arguments: A list of tensors, typically values that were captured when
2095/// building a closure for `key_func`.
2096/// * init_func_other_arguments: A list of tensors, typically values that were captured when
2097/// building a closure for `init_func`.
2098/// * reduce_func_other_arguments: A list of tensors, typically values that were captured when
2099/// building a closure for `reduce_func`.
2100/// * finalize_func_other_arguments: A list of tensors, typically values that were captured when
2101/// building a closure for `finalize_func`.
2102/// * key_func: A function mapping an element of `input_dataset`, concatenated
2103/// with `key_func_other_arguments` to a scalar value of type DT_INT64.
2104/// * init_func: A function mapping a key of type DT_INT64, concatenated with
2105/// `init_func_other_arguments` to the initial reducer state.
2106/// * reduce_func: A function mapping the current reducer state and an element of `input_dataset`,
2107/// concatenated with `reduce_func_other_arguments` to a new reducer state.
2108/// * finalize_func: A function mapping the final reducer state to an output element.
2109///
2110/// Returns:
2111/// * `Output`: The handle tensor.
class GroupByReducerDataset {
 public:
  /// Builds the op in `scope` from the given inputs, reducer functions and
  /// output signature attrs.
  GroupByReducerDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                      input_dataset, ::tensorflow::InputList
                      key_func_other_arguments, ::tensorflow::InputList
                      init_func_other_arguments, ::tensorflow::InputList
                      reduce_func_other_arguments, ::tensorflow::InputList
                      finalize_func_other_arguments, const NameAttrList&
                      key_func, const NameAttrList& init_func, const
                      NameAttrList& reduce_func, const NameAttrList&
                      finalize_func, const DataTypeSlice& output_types, const
                      gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2131
2132/// Creates a dataset that computes a windowed group-by on `input_dataset`.
2133///
2134/// // TODO(mrry): Support non-int64 keys.
2135///
2136/// Args:
2137/// * scope: A Scope object
2138/// * key_func: A function mapping an element of `input_dataset`, concatenated
2139/// with `key_func_other_arguments` to a scalar value of type DT_INT64.
2140///
2141/// Returns:
2142/// * `Output`: The handle tensor.
class GroupByWindowDataset {
 public:
  /// Optional attribute setters for GroupByWindowDataset
  struct Attrs {
    // The setter returns a modified copy of *this (builder pattern);
    // TF_MUST_USE_RESULT guards against discarding the returned copy.

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
      Attrs ret = *this;
      ret.metadata_ = x;
      return ret;
    }

    StringPiece metadata_ = "";
  };
  /// Builds the op in `scope` with default attributes.
  GroupByWindowDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                     input_dataset, ::tensorflow::InputList
                     key_func_other_arguments, ::tensorflow::InputList
                     reduce_func_other_arguments, ::tensorflow::InputList
                     window_size_func_other_arguments, const NameAttrList&
                     key_func, const NameAttrList& reduce_func, const
                     NameAttrList& window_size_func, const DataTypeSlice&
                     output_types, const gtl::ArraySlice<PartialTensorShape>&
                     output_shapes);
  /// Builds the op in `scope` with explicit optional attributes.
  GroupByWindowDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                     input_dataset, ::tensorflow::InputList
                     key_func_other_arguments, ::tensorflow::InputList
                     reduce_func_other_arguments, ::tensorflow::InputList
                     window_size_func_other_arguments, const NameAttrList&
                     key_func, const NameAttrList& reduce_func, const
                     NameAttrList& window_size_func, const DataTypeSlice&
                     output_types, const gtl::ArraySlice<PartialTensorShape>&
                     output_shapes, const GroupByWindowDataset::Attrs& attrs);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Shorthand that starts an Attrs chain with the metadata attribute set.
  static Attrs Metadata(StringPiece x) {
    return Attrs().Metadata(x);
  }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2185
2186/// Creates a dataset that contains the elements of `input_dataset` ignoring errors.
2187///
2188/// Args:
2189/// * scope: A Scope object
2190///
2191/// Returns:
2192/// * `Output`: The handle tensor.
class IgnoreErrorsDataset {
 public:
  /// Optional attribute setters for IgnoreErrorsDataset
  struct Attrs {
    // The setter returns a modified copy of *this (builder pattern);
    // TF_MUST_USE_RESULT guards against discarding the returned copy.

    /// Defaults to false
    TF_MUST_USE_RESULT Attrs LogWarning(bool x) {
      Attrs ret = *this;
      ret.log_warning_ = x;
      return ret;
    }

    bool log_warning_ = false;
  };
  /// Builds the op in `scope` with default attributes.
  IgnoreErrorsDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                    input_dataset, const DataTypeSlice& output_types, const
                    gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Builds the op in `scope` with explicit optional attributes.
  IgnoreErrorsDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                    input_dataset, const DataTypeSlice& output_types, const
                    gtl::ArraySlice<PartialTensorShape>& output_shapes, const
                    IgnoreErrorsDataset::Attrs& attrs);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Shorthand that starts an Attrs chain with the log_warning attribute set.
  static Attrs LogWarning(bool x) {
    return Attrs().LogWarning(x);
  }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2224
2225/// TODO: add doc.
2226///
2227/// Args:
2228/// * scope: A Scope object
2229///
2230/// Returns:
2231/// * the created `Operation`
class InitializeTableFromDataset {
 public:
  /// Builds the op in `scope` from the given inputs.
  InitializeTableFromDataset(const ::tensorflow::Scope& scope,
                           ::tensorflow::Input table_handle,
                           ::tensorflow::Input dataset);
  /// This op has no outputs; it converts to the created Operation
  /// (e.g. for use as a control dependency).
  operator ::tensorflow::Operation() const { return operation; }

  /// The underlying operation.
  Operation operation;
};
2241
2242/// Returns the name of the device on which `resource` has been placed.
2243///
2244/// Args:
2245/// * scope: A Scope object
2246///
2247/// Returns:
2248/// * `Output`: The device tensor.
class IteratorGetDevice {
 public:
  /// Builds the op in `scope` from the given resource input.
  IteratorGetDevice(const ::tensorflow::Scope& scope, ::tensorflow::Input
                  resource);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return device; }
  operator ::tensorflow::Input() const { return device; }
  /// The graph node that produces `device`.
  ::tensorflow::Node* node() const { return device.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output device-name tensor.
  ::tensorflow::Output device;
};
2260
2261/// Creates a dataset that emits the key-value pairs in one or more LMDB files.
2262///
2263/// The Lightning Memory-Mapped Database Manager, or LMDB, is an embedded binary
2264/// key-value database. This dataset can read the contents of LMDB database files,
2265/// the names of which generally have the `.mdb` suffix.
2266///
2267/// Each output element consists of a key-value pair represented as a pair of
2268/// scalar string `Tensor`s, where the first `Tensor` contains the key and the
2269/// second `Tensor` contains the value.
2270///
2271/// LMDB uses different file formats on big- and little-endian machines.
2272/// `LMDBDataset` can only read files in the format of the host machine.
2273///
2274/// Args:
2275/// * scope: A Scope object
2276/// * filenames: A scalar or a vector containing the name(s) of the binary file(s) to be
2277/// read.
2278///
2279/// Returns:
2280/// * `Output`: The handle tensor.
class LMDBDataset {
 public:
  /// Builds the op in `scope` from the given inputs and output signature attrs.
  LMDBDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input filenames,
            const DataTypeSlice& output_types, const
            gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2293
2294/// Records the latency of producing `input_dataset` elements in a StatsAggregator.
2295///
2296/// Args:
2297/// * scope: A Scope object
2298///
2299/// Returns:
2300/// * `Output`: The handle tensor.
class LatencyStatsDataset {
 public:
  /// Builds the op in `scope` from the given inputs and output signature attrs.
  LatencyStatsDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                    input_dataset, ::tensorflow::Input tag, const
                    DataTypeSlice& output_types, const
                    gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2314
2315/// Creates a dataset that applies `f` to the outputs of `input_dataset`.
2316///
2317/// The resulting dataset is similar to the `InterleaveDataset`, with the exception
2318/// that if retrieving the next value from a dataset would cause the requester to
2319/// block, it will skip that input dataset. This dataset is especially useful
2320/// when loading data from a variable-latency datastores (e.g. HDFS, GCS), as it
2321/// allows the training step to proceed so long as some data is available.
2322///
2323/// !! WARNING !! This dataset is not deterministic!
2324///
2325/// Args:
2326/// * scope: A Scope object
2327/// * f: A function mapping elements of `input_dataset`, concatenated with
2328/// `other_arguments`, to a Dataset variant that contains elements matching
2329/// `output_types` and `output_shapes`.
2330///
2331/// Returns:
2332/// * `Output`: The handle tensor.
class LegacyParallelInterleaveDatasetV2 {
 public:
  /// Optional attribute setters for LegacyParallelInterleaveDatasetV2
  struct Attrs {
    // Each setter returns a modified copy of *this (builder pattern);
    // TF_MUST_USE_RESULT guards against discarding the returned copy.

    /// Defaults to "default"
    TF_MUST_USE_RESULT Attrs Deterministic(StringPiece x) {
      Attrs ret = *this;
      ret.deterministic_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
      Attrs ret = *this;
      ret.metadata_ = x;
      return ret;
    }

    StringPiece deterministic_ = "default";
    StringPiece metadata_ = "";
  };
  /// Builds the op in `scope` with default attributes.
  LegacyParallelInterleaveDatasetV2(const ::tensorflow::Scope& scope,
                                  ::tensorflow::Input input_dataset,
                                  ::tensorflow::InputList other_arguments,
                                  ::tensorflow::Input cycle_length,
                                  ::tensorflow::Input block_length,
                                  ::tensorflow::Input buffer_output_elements,
                                  ::tensorflow::Input prefetch_input_elements,
                                  const NameAttrList& f, const DataTypeSlice&
                                  output_types, const
                                  gtl::ArraySlice<PartialTensorShape>&
                                  output_shapes);
  /// Builds the op in `scope` with explicit optional attributes.
  LegacyParallelInterleaveDatasetV2(const ::tensorflow::Scope& scope,
                                  ::tensorflow::Input input_dataset,
                                  ::tensorflow::InputList other_arguments,
                                  ::tensorflow::Input cycle_length,
                                  ::tensorflow::Input block_length,
                                  ::tensorflow::Input buffer_output_elements,
                                  ::tensorflow::Input prefetch_input_elements,
                                  const NameAttrList& f, const DataTypeSlice&
                                  output_types, const
                                  gtl::ArraySlice<PartialTensorShape>&
                                  output_shapes, const
                                  LegacyParallelInterleaveDatasetV2::Attrs&
                                  attrs);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Shorthands that start an Attrs chain with a single attribute set.
  static Attrs Deterministic(StringPiece x) {
    return Attrs().Deterministic(x);
  }
  static Attrs Metadata(StringPiece x) {
    return Attrs().Metadata(x);
  }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2392
2393/// Creates a dataset that emits each of `tensors` once.
2394///
2395/// Args:
2396/// * scope: A Scope object
2397///
2398/// Returns:
2399/// * `Output`: The handle tensor.
class ListDataset {
 public:
  /// Optional attribute setters for ListDataset
  struct Attrs {
    // The setter returns a modified copy of *this (builder pattern);
    // TF_MUST_USE_RESULT guards against discarding the returned copy.

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
      Attrs ret = *this;
      ret.metadata_ = x;
      return ret;
    }

    StringPiece metadata_ = "";
  };
  /// Builds the op in `scope` with default attributes.
  ListDataset(const ::tensorflow::Scope& scope, ::tensorflow::InputList tensors,
            const DataTypeSlice& output_types, const
            gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Builds the op in `scope` with explicit optional attributes.
  ListDataset(const ::tensorflow::Scope& scope, ::tensorflow::InputList tensors,
            const DataTypeSlice& output_types, const
            gtl::ArraySlice<PartialTensorShape>& output_shapes, const
            ListDataset::Attrs& attrs);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Shorthand that starts an Attrs chain with the metadata attribute set.
  static Attrs Metadata(StringPiece x) {
    return Attrs().Metadata(x);
  }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2431
2432/// TODO: add doc.
2433///
2434/// Args:
2435/// * scope: A Scope object
2436///
2437/// Returns:
2438/// * `Output`: The handle tensor.
class LoadDataset {
 public:
  /// Optional attribute setters for LoadDataset
  struct Attrs {
    // The setter returns a modified copy of *this (builder pattern);
    // TF_MUST_USE_RESULT guards against discarding the returned copy.

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Compression(StringPiece x) {
      Attrs ret = *this;
      ret.compression_ = x;
      return ret;
    }

    StringPiece compression_ = "";
  };
  /// Builds the op in `scope` with default attributes.
  LoadDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input path,
            ::tensorflow::InputList reader_func_other_args, const
            DataTypeSlice& output_types, const
            gtl::ArraySlice<PartialTensorShape>& output_shapes, const
            NameAttrList& reader_func);
  /// Builds the op in `scope` with explicit optional attributes.
  LoadDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input path,
            ::tensorflow::InputList reader_func_other_args, const
            DataTypeSlice& output_types, const
            gtl::ArraySlice<PartialTensorShape>& output_shapes, const
            NameAttrList& reader_func, const LoadDataset::Attrs& attrs);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Shorthand that starts an Attrs chain with the compression attribute set.
  static Attrs Compression(StringPiece x) {
    return Attrs().Compression(x);
  }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2473
2474/// Creates a dataset that fuses mapping with batching.
2475///
2476/// Creates a dataset that applies `f` to the outputs of `input_dataset` and then
2477/// batches `batch_size` of them.
2478///
2479/// Unlike a "MapDataset", which applies `f` sequentially, this dataset invokes up
2480/// to `batch_size * num_parallel_batches` copies of `f` in parallel.
2481///
2482/// Args:
2483/// * scope: A Scope object
2484/// * input_dataset: A variant tensor representing the input dataset.
2485/// * other_arguments: A list of tensors, typically values that were captured when building a closure
2486/// for `f`.
2487/// * batch_size: A scalar representing the number of elements to accumulate in a
2488/// batch. It determines the number of concurrent invocations of `f` that process
2489/// elements from `input_dataset` in parallel.
2490/// * num_parallel_calls: A scalar representing the maximum number of parallel invocations of the `map_fn`
2491/// function. Applying the `map_fn` on consecutive input elements in parallel has
2492/// the potential to improve input pipeline throughput.
2493/// * drop_remainder: A scalar representing whether the last batch should be dropped in case its size
2494/// is smaller than desired.
2495/// * f: A function to apply to the outputs of `input_dataset`.
2496///
2497/// Returns:
2498/// * `Output`: The handle tensor.
class MapAndBatchDataset {
 public:
  /// Optional attribute setters for MapAndBatchDataset
  struct Attrs {
    // Each setter returns a modified copy of *this (builder pattern);
    // TF_MUST_USE_RESULT guards against discarding the returned copy.

    /// Defaults to false
    TF_MUST_USE_RESULT Attrs PreserveCardinality(bool x) {
      Attrs ret = *this;
      ret.preserve_cardinality_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
      Attrs ret = *this;
      ret.metadata_ = x;
      return ret;
    }

    bool preserve_cardinality_ = false;
    StringPiece metadata_ = "";
  };
  /// Builds the op in `scope` with default attributes.
  MapAndBatchDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                   input_dataset, ::tensorflow::InputList other_arguments,
                   ::tensorflow::Input batch_size, ::tensorflow::Input
                   num_parallel_calls, ::tensorflow::Input drop_remainder,
                   const NameAttrList& f, const DataTypeSlice& output_types,
                   const gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Builds the op in `scope` with explicit optional attributes.
  MapAndBatchDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                   input_dataset, ::tensorflow::InputList other_arguments,
                   ::tensorflow::Input batch_size, ::tensorflow::Input
                   num_parallel_calls, ::tensorflow::Input drop_remainder,
                   const NameAttrList& f, const DataTypeSlice& output_types,
                   const gtl::ArraySlice<PartialTensorShape>& output_shapes,
                   const MapAndBatchDataset::Attrs& attrs);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Shorthands that start an Attrs chain with a single attribute set.
  static Attrs PreserveCardinality(bool x) {
    return Attrs().PreserveCardinality(x);
  }
  static Attrs Metadata(StringPiece x) {
    return Attrs().Metadata(x);
  }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2547
2548/// TODO: add doc.
2549///
2550/// Args:
2551/// * scope: A Scope object
2552///
2553/// Returns:
2554/// * `Output`: The handle tensor.
class MatchingFilesDataset {
 public:
  /// Builds the op in `scope` from the given patterns input.
  MatchingFilesDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                     patterns);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2566
2567/// Creates a dataset that overrides the maximum intra-op parallelism.
2568///
2569/// Args:
2570/// * scope: A Scope object
2571/// * max_intra_op_parallelism: Identifies the maximum intra-op parallelism to use.
2572///
2573/// Returns:
2574/// * `Output`: The handle tensor.
class MaxIntraOpParallelismDataset {
 public:
  /// Builds the op in `scope` from the given inputs and output signature attrs.
  MaxIntraOpParallelismDataset(const ::tensorflow::Scope& scope,
                             ::tensorflow::Input input_dataset,
                             ::tensorflow::Input max_intra_op_parallelism,
                             const DataTypeSlice& output_types, const
                             gtl::ArraySlice<PartialTensorShape>&
                             output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2590
2591/// TODO: add doc.
2592///
2593/// Args:
2594/// * scope: A Scope object
2595///
2596/// Returns:
2597/// * `Output`: The handle tensor.
class NonSerializableDataset {
 public:
  /// Builds the op in `scope` from the given inputs and output signature attrs.
  NonSerializableDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                       input_dataset, const DataTypeSlice& output_types, const
                       gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Implicit conversions so the result can be used directly as a tensor.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// The graph node that produces `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The underlying operation.
  Operation operation;
  /// The output dataset handle tensor.
  ::tensorflow::Output handle;
};
2610
2611/// Creates a dataset that applies `f` to the outputs of `input_dataset`.
2612///
2613/// The resulting dataset is similar to the `InterleaveDataset`, with the exception
2614/// that if retrieving the next value from a dataset would cause the requester to
2615/// block, it will skip that input dataset. This dataset is especially useful
2616/// when loading data from a variable-latency datastores (e.g. HDFS, GCS), as it
2617/// allows the training step to proceed so long as some data is available.
2618///
2619/// !! WARNING !! If the `sloppy` parameter is set to `True`, the operation of this
2620/// dataset will not be deterministic!
2621///
2622/// This dataset has been superseded by `ParallelInterleaveDatasetV2`. New code
2623/// should use `ParallelInterleaveDatasetV2`.
2624///
2625/// The Python API `tf.data.experimental.parallel_interleave` creates instances of
2626/// this op. `tf.data.experimental.parallel_interleave` is a deprecated API.
2627///
2628/// Args:
2629/// * scope: A Scope object
2630/// * input_dataset: Dataset that produces a stream of arguments for the function `f`.
2631/// * other_arguments: Additional arguments to pass to `f` beyond those produced by `input_dataset`.
2632/// Evaluated once when the dataset is instantiated.
2633/// * cycle_length: Number of datasets (each created by applying `f` to the elements of
2634/// `input_dataset`) among which the `ParallelInterleaveDataset` will cycle in a
2635/// round-robin fashion.
2636/// * block_length: Number of elements at a time to produce from each interleaved invocation of a
2637/// dataset returned by `f`.
2638/// * sloppy: If `True`, return elements as they become available, even if that means returning
2639/// these elements in a non-deterministic order. Sloppy operation may result in better
2640/// performance in the presence of stragglers, but the dataset will still block if
2641/// all of its open streams are blocked.
2642/// If `False`, always return elements in a deterministic order.
2643/// * buffer_output_elements: The number of elements each iterator being interleaved should buffer (similar
2644/// to the `.prefetch()` transformation for each interleaved iterator).
2645/// * prefetch_input_elements: Determines the number of iterators to prefetch, allowing buffers to warm up and
2646/// data to be pre-fetched without blocking the main thread.
2647/// * f: A function mapping elements of `input_dataset`, concatenated with
2648/// `other_arguments`, to a Dataset variant that contains elements matching
2649/// `output_types` and `output_shapes`.
2650///
2651/// Returns:
2652/// * `Output`: The handle tensor.
2653class ParallelInterleaveDataset {
2654 public:
2655 /// Optional attribute setters for ParallelInterleaveDataset
2656 struct Attrs {
2657 /// Defaults to ""
2658 TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
2659 Attrs ret = *this;
2660 ret.metadata_ = x;
2661 return ret;
2662 }
2663
2664 StringPiece metadata_ = "";
2665 };
2666 ParallelInterleaveDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
2667 input_dataset, ::tensorflow::InputList
2668 other_arguments, ::tensorflow::Input cycle_length,
2669 ::tensorflow::Input block_length, ::tensorflow::Input
2670 sloppy, ::tensorflow::Input buffer_output_elements,
2671 ::tensorflow::Input prefetch_input_elements, const
2672 NameAttrList& f, const DataTypeSlice& output_types,
2673 const gtl::ArraySlice<PartialTensorShape>&
2674 output_shapes);
2675 ParallelInterleaveDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
2676 input_dataset, ::tensorflow::InputList
2677 other_arguments, ::tensorflow::Input cycle_length,
2678 ::tensorflow::Input block_length, ::tensorflow::Input
2679 sloppy, ::tensorflow::Input buffer_output_elements,
2680 ::tensorflow::Input prefetch_input_elements, const
2681 NameAttrList& f, const DataTypeSlice& output_types,
2682 const gtl::ArraySlice<PartialTensorShape>&
2683 output_shapes, const
2684 ParallelInterleaveDataset::Attrs& attrs);
2685 operator ::tensorflow::Output() const { return handle; }
2686 operator ::tensorflow::Input() const { return handle; }
2687 ::tensorflow::Node* node() const { return handle.node(); }
2688
2689 static Attrs Metadata(StringPiece x) {
2690 return Attrs().Metadata(x);
2691 }
2692
2693 Operation operation;
2694 ::tensorflow::Output handle;
2695};
2696
2697/// Transforms `input_dataset` containing `Example` protos as vectors of DT_STRING into a dataset of `Tensor` or `SparseTensor` objects representing the parsed features.
2698///
2699/// Args:
2700/// * scope: A Scope object
2701/// * dense_defaults: A dict mapping string keys to `Tensor`s.
2702/// The keys of the dict must match the dense_keys of the feature.
2703/// * sparse_keys: A list of string keys in the examples features.
2704/// The results for these keys will be returned as `SparseTensor` objects.
2705/// * dense_keys: A list of Ndense string Tensors (scalars).
2706/// The keys expected in the Examples features associated with dense values.
2707/// * sparse_types: A list of `DTypes` of the same length as `sparse_keys`.
2708/// Only `tf.float32` (`FloatList`), `tf.int64` (`Int64List`),
2709/// and `tf.string` (`BytesList`) are supported.
2710/// * dense_shapes: List of tuples with the same length as `dense_keys`.
2711/// The shape of the data for each dense feature referenced by `dense_keys`.
2712/// Required for any input tensors identified by `dense_keys`. Must be
2713/// either fully defined, or may contain an unknown first dimension.
2714/// An unknown first dimension means the feature is treated as having
2715/// a variable number of blocks, and the output shape along this dimension
2716/// is considered unknown at graph build time. Padding is applied for
2717/// minibatch elements smaller than the maximum number of blocks for the
2718/// given feature along this dimension.
2719/// * output_types: The type list for the return values.
2720/// * output_shapes: The list of shapes being produced.
2721///
2722/// Returns:
2723/// * `Output`: The handle tensor.
2724class ParseExampleDataset {
2725 public:
2726 /// Optional attribute setters for ParseExampleDataset
2727 struct Attrs {
2728 /// Defaults to false
2729 TF_MUST_USE_RESULT Attrs Sloppy(bool x) {
2730 Attrs ret = *this;
2731 ret.sloppy_ = x;
2732 return ret;
2733 }
2734
2735 /// Defaults to []
2736 TF_MUST_USE_RESULT Attrs RaggedKeys(const gtl::ArraySlice<::tensorflow::tstring>& x) {
2737 Attrs ret = *this;
2738 ret.ragged_keys_ = x;
2739 return ret;
2740 }
2741
2742 /// Defaults to []
2743 TF_MUST_USE_RESULT Attrs RaggedValueTypes(const DataTypeSlice& x) {
2744 Attrs ret = *this;
2745 ret.ragged_value_types_ = x;
2746 return ret;
2747 }
2748
2749 /// Defaults to []
2750 TF_MUST_USE_RESULT Attrs RaggedSplitTypes(const DataTypeSlice& x) {
2751 Attrs ret = *this;
2752 ret.ragged_split_types_ = x;
2753 return ret;
2754 }
2755
2756 bool sloppy_ = false;
2757 gtl::ArraySlice<::tensorflow::tstring> ragged_keys_ = {};
2758 DataTypeSlice ragged_value_types_ = {};
2759 DataTypeSlice ragged_split_types_ = {};
2760 };
2761 ParseExampleDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
2762 input_dataset, ::tensorflow::Input num_parallel_calls,
2763 ::tensorflow::InputList dense_defaults, const
2764 gtl::ArraySlice<::tensorflow::tstring>& sparse_keys, const
2765 gtl::ArraySlice<::tensorflow::tstring>& dense_keys, const
2766 DataTypeSlice& sparse_types, const
2767 gtl::ArraySlice<PartialTensorShape>& dense_shapes, const
2768 DataTypeSlice& output_types, const
2769 gtl::ArraySlice<PartialTensorShape>& output_shapes);
2770 ParseExampleDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
2771 input_dataset, ::tensorflow::Input num_parallel_calls,
2772 ::tensorflow::InputList dense_defaults, const
2773 gtl::ArraySlice<::tensorflow::tstring>& sparse_keys, const
2774 gtl::ArraySlice<::tensorflow::tstring>& dense_keys, const
2775 DataTypeSlice& sparse_types, const
2776 gtl::ArraySlice<PartialTensorShape>& dense_shapes, const
2777 DataTypeSlice& output_types, const
2778 gtl::ArraySlice<PartialTensorShape>& output_shapes, const
2779 ParseExampleDataset::Attrs& attrs);
2780 operator ::tensorflow::Output() const { return handle; }
2781 operator ::tensorflow::Input() const { return handle; }
2782 ::tensorflow::Node* node() const { return handle.node(); }
2783
2784 static Attrs Sloppy(bool x) {
2785 return Attrs().Sloppy(x);
2786 }
2787 static Attrs RaggedKeys(const gtl::ArraySlice<::tensorflow::tstring>& x) {
2788 return Attrs().RaggedKeys(x);
2789 }
2790 static Attrs RaggedValueTypes(const DataTypeSlice& x) {
2791 return Attrs().RaggedValueTypes(x);
2792 }
2793 static Attrs RaggedSplitTypes(const DataTypeSlice& x) {
2794 return Attrs().RaggedSplitTypes(x);
2795 }
2796
2797 Operation operation;
2798 ::tensorflow::Output handle;
2799};
2800
2801/// Transforms `input_dataset` containing `Example` protos as vectors of DT_STRING into a dataset of `Tensor` or `SparseTensor` objects representing the parsed features.
2802///
2803/// Args:
2804/// * scope: A Scope object
2805/// * dense_defaults: A dict mapping string keys to `Tensor`s.
2806/// The keys of the dict must match the dense_keys of the feature.
2807/// * sparse_keys: A list of string keys in the examples features.
2808/// The results for these keys will be returned as `SparseTensor` objects.
2809/// * dense_keys: A list of Ndense string Tensors (scalars).
2810/// The keys expected in the Examples features associated with dense values.
2811/// * sparse_types: A list of `DTypes` of the same length as `sparse_keys`.
2812/// Only `tf.float32` (`FloatList`), `tf.int64` (`Int64List`),
2813/// and `tf.string` (`BytesList`) are supported.
2814/// * dense_shapes: List of tuples with the same length as `dense_keys`.
2815/// The shape of the data for each dense feature referenced by `dense_keys`.
2816/// Required for any input tensors identified by `dense_keys`. Must be
2817/// either fully defined, or may contain an unknown first dimension.
2818/// An unknown first dimension means the feature is treated as having
2819/// a variable number of blocks, and the output shape along this dimension
2820/// is considered unknown at graph build time. Padding is applied for
2821/// minibatch elements smaller than the maximum number of blocks for the
2822/// given feature along this dimension.
2823/// * output_types: The type list for the return values.
2824/// * output_shapes: The list of shapes being produced.
2825///
2826/// Optional attributes (see `Attrs`):
2827/// * deterministic: A string indicating the op-level determinism to use. Deterministic controls
2828/// whether the dataset is allowed to return elements out of order if the next
2829/// element to be returned isn't available, but a later element is. Options are
2830/// "true", "false", and "default". "default" indicates that determinism should be
2831/// decided by the `experimental_deterministic` parameter of `tf.data.Options`.
2832///
2833/// Returns:
2834/// * `Output`: The handle tensor.
2835class ParseExampleDatasetV2 {
2836 public:
2837 /// Optional attribute setters for ParseExampleDatasetV2
2838 struct Attrs {
2839 /// A string indicating the op-level determinism to use. Deterministic controls
2840 /// whether the dataset is allowed to return elements out of order if the next
2841 /// element to be returned isn't available, but a later element is. Options are
2842 /// "true", "false", and "default". "default" indicates that determinism should be
2843 /// decided by the `experimental_deterministic` parameter of `tf.data.Options`.
2844 ///
2845 /// Defaults to "default"
2846 TF_MUST_USE_RESULT Attrs Deterministic(StringPiece x) {
2847 Attrs ret = *this;
2848 ret.deterministic_ = x;
2849 return ret;
2850 }
2851
2852 /// Defaults to []
2853 TF_MUST_USE_RESULT Attrs RaggedKeys(const gtl::ArraySlice<::tensorflow::tstring>& x) {
2854 Attrs ret = *this;
2855 ret.ragged_keys_ = x;
2856 return ret;
2857 }
2858
2859 /// Defaults to []
2860 TF_MUST_USE_RESULT Attrs RaggedValueTypes(const DataTypeSlice& x) {
2861 Attrs ret = *this;
2862 ret.ragged_value_types_ = x;
2863 return ret;
2864 }
2865
2866 /// Defaults to []
2867 TF_MUST_USE_RESULT Attrs RaggedSplitTypes(const DataTypeSlice& x) {
2868 Attrs ret = *this;
2869 ret.ragged_split_types_ = x;
2870 return ret;
2871 }
2872
2873 StringPiece deterministic_ = "default";
2874 gtl::ArraySlice<::tensorflow::tstring> ragged_keys_ = {};
2875 DataTypeSlice ragged_value_types_ = {};
2876 DataTypeSlice ragged_split_types_ = {};
2877 };
2878 ParseExampleDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
2879 input_dataset, ::tensorflow::Input num_parallel_calls,
2880 ::tensorflow::InputList dense_defaults, const
2881 gtl::ArraySlice<::tensorflow::tstring>& sparse_keys,
2882 const gtl::ArraySlice<::tensorflow::tstring>& dense_keys,
2883 const DataTypeSlice& sparse_types, const
2884 gtl::ArraySlice<PartialTensorShape>& dense_shapes, const
2885 DataTypeSlice& output_types, const
2886 gtl::ArraySlice<PartialTensorShape>& output_shapes);
2887 ParseExampleDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
2888 input_dataset, ::tensorflow::Input num_parallel_calls,
2889 ::tensorflow::InputList dense_defaults, const
2890 gtl::ArraySlice<::tensorflow::tstring>& sparse_keys,
2891 const gtl::ArraySlice<::tensorflow::tstring>& dense_keys,
2892 const DataTypeSlice& sparse_types, const
2893 gtl::ArraySlice<PartialTensorShape>& dense_shapes, const
2894 DataTypeSlice& output_types, const
2895 gtl::ArraySlice<PartialTensorShape>& output_shapes, const
2896 ParseExampleDatasetV2::Attrs& attrs);
2897 operator ::tensorflow::Output() const { return handle; }
2898 operator ::tensorflow::Input() const { return handle; }
2899 ::tensorflow::Node* node() const { return handle.node(); }
2900
2901 static Attrs Deterministic(StringPiece x) {
2902 return Attrs().Deterministic(x);
2903 }
2904 static Attrs RaggedKeys(const gtl::ArraySlice<::tensorflow::tstring>& x) {
2905 return Attrs().RaggedKeys(x);
2906 }
2907 static Attrs RaggedValueTypes(const DataTypeSlice& x) {
2908 return Attrs().RaggedValueTypes(x);
2909 }
2910 static Attrs RaggedSplitTypes(const DataTypeSlice& x) {
2911 return Attrs().RaggedSplitTypes(x);
2912 }
2913
2914 Operation operation;
2915 ::tensorflow::Output handle;
2916};
2917
2918/// Creates a dataset that uses a custom thread pool to compute `input_dataset`.
2919///
2920/// Args:
2921/// * scope: A Scope object
2922/// * num_threads: Identifies the number of threads to use for the private threadpool.
2923///
2924/// Returns:
2925/// * `Output`: The handle tensor.
2926class PrivateThreadPoolDataset {
2927 public:
2928 PrivateThreadPoolDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
2929 input_dataset, ::tensorflow::Input num_threads, const
2930 DataTypeSlice& output_types, const
2931 gtl::ArraySlice<PartialTensorShape>& output_shapes);
2932 operator ::tensorflow::Output() const { return handle; }
2933 operator ::tensorflow::Input() const { return handle; }
2934 ::tensorflow::Node* node() const { return handle.node(); }
2935
2936 Operation operation;
2937 ::tensorflow::Output handle;
2938};
2939
2940/// Creates a Dataset that returns pseudorandom numbers.
2941///
2942/// Creates a Dataset that returns a stream of uniformly distributed
2943/// pseudorandom 64-bit signed integers.
2944///
2945/// In the TensorFlow Python API, you can instantiate this dataset via the
2946/// class `tf.data.experimental.RandomDataset`.
2947///
2948/// Instances of this dataset are also created as a result of the
2949/// `hoist_random_uniform` static optimization. Whether this optimization is
2950/// performed is determined by the `experimental_optimization.hoist_random_uniform`
2951/// option of `tf.data.Options`.
2952///
2953/// Args:
2954/// * scope: A Scope object
2955/// * seed: A scalar seed for the random number generator. If either seed or
2956/// seed2 is set to be non-zero, the random number generator is seeded
2957/// by the given seed. Otherwise, a random seed is used.
2958/// * seed2: A second scalar seed to avoid seed collision.
2959///
2960/// Returns:
2961/// * `Output`: The handle tensor.
2962class RandomDataset {
2963 public:
2964 /// Optional attribute setters for RandomDataset
2965 struct Attrs {
2966 /// Defaults to ""
2967 TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
2968 Attrs ret = *this;
2969 ret.metadata_ = x;
2970 return ret;
2971 }
2972
2973 StringPiece metadata_ = "";
2974 };
2975 RandomDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input seed,
2976 ::tensorflow::Input seed2, const DataTypeSlice& output_types,
2977 const gtl::ArraySlice<PartialTensorShape>& output_shapes);
2978 RandomDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input seed,
2979 ::tensorflow::Input seed2, const DataTypeSlice& output_types,
2980 const gtl::ArraySlice<PartialTensorShape>& output_shapes, const
2981 RandomDataset::Attrs& attrs);
2982 operator ::tensorflow::Output() const { return handle; }
2983 operator ::tensorflow::Input() const { return handle; }
2984 ::tensorflow::Node* node() const { return handle.node(); }
2985
2986 static Attrs Metadata(StringPiece x) {
2987 return Attrs().Metadata(x);
2988 }
2989
2990 Operation operation;
2991 ::tensorflow::Output handle;
2992};
2993
2994/// Creates a dataset that changes the batch size.
2995///
/// Creates a dataset that changes the batch size of the dataset to current batch
/// size // num_replicas.
2998///
2999/// Args:
3000/// * scope: A Scope object
3001/// * input_dataset: A variant tensor representing the input dataset.
3002/// * num_replicas: A scalar representing the number of replicas to distribute this batch across. As
3003/// a result of this transformation the current batch size would end up being
3004/// divided by this parameter.
3005///
3006/// Returns:
3007/// * `Output`: The handle tensor.
3008class RebatchDataset {
3009 public:
3010 /// Optional attribute setters for RebatchDataset
3011 struct Attrs {
3012 /// Defaults to true
3013 TF_MUST_USE_RESULT Attrs UseFallback(bool x) {
3014 Attrs ret = *this;
3015 ret.use_fallback_ = x;
3016 return ret;
3017 }
3018
3019 bool use_fallback_ = true;
3020 };
3021 RebatchDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
3022 input_dataset, ::tensorflow::Input num_replicas, const
3023 DataTypeSlice& output_types, const
3024 gtl::ArraySlice<PartialTensorShape>& output_shapes);
3025 RebatchDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
3026 input_dataset, ::tensorflow::Input num_replicas, const
3027 DataTypeSlice& output_types, const
3028 gtl::ArraySlice<PartialTensorShape>& output_shapes, const
3029 RebatchDataset::Attrs& attrs);
3030 operator ::tensorflow::Output() const { return handle; }
3031 operator ::tensorflow::Input() const { return handle; }
3032 ::tensorflow::Node* node() const { return handle.node(); }
3033
3034 static Attrs UseFallback(bool x) {
3035 return Attrs().UseFallback(x);
3036 }
3037
3038 Operation operation;
3039 ::tensorflow::Output handle;
3040};
3041
3042/// Creates a dataset that changes the batch size.
3043///
3044/// Creates a dataset that rebatches elements from `input_dataset` into new batch
3045/// sizes.
3046///
3047/// Args:
3048/// * scope: A Scope object
3049/// * input_dataset: A variant tensor representing the input dataset.
3050/// * batch_sizes: A vector of integers representing the size of batches to produce. These values
3051/// are cycled through in order.
3052///
3053/// Returns:
3054/// * `Output`: The handle tensor.
3055class RebatchDatasetV2 {
3056 public:
3057 RebatchDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
3058 input_dataset, ::tensorflow::Input batch_sizes,
3059 ::tensorflow::Input drop_remainder, const DataTypeSlice&
3060 output_types, const gtl::ArraySlice<PartialTensorShape>&
3061 output_shapes);
3062 operator ::tensorflow::Output() const { return handle; }
3063 operator ::tensorflow::Input() const { return handle; }
3064 ::tensorflow::Node* node() const { return handle.node(); }
3065
3066 Operation operation;
3067 ::tensorflow::Output handle;
3068};
3069
3070/// Registers a dataset with the tf.data service.
3071///
3072/// Args:
3073/// * scope: A Scope object
3074///
3075/// Returns:
3076/// * `Output`: The dataset_id tensor.
3077class RegisterDataset {
3078 public:
3079 /// Optional attribute setters for RegisterDataset
3080 struct Attrs {
3081 /// Defaults to ""
3082 TF_MUST_USE_RESULT Attrs ElementSpec(StringPiece x) {
3083 Attrs ret = *this;
3084 ret.element_spec_ = x;
3085 return ret;
3086 }
3087
3088 /// Defaults to ""
3089 TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
3090 Attrs ret = *this;
3091 ret.metadata_ = x;
3092 return ret;
3093 }
3094
3095 StringPiece element_spec_ = "";
3096 StringPiece metadata_ = "";
3097 };
3098 RegisterDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input dataset,
3099 ::tensorflow::Input address, ::tensorflow::Input protocol,
3100 int64 external_state_policy);
3101 RegisterDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input dataset,
3102 ::tensorflow::Input address, ::tensorflow::Input protocol,
3103 int64 external_state_policy, const RegisterDataset::Attrs&
3104 attrs);
3105 operator ::tensorflow::Output() const { return dataset_id; }
3106 operator ::tensorflow::Input() const { return dataset_id; }
3107 ::tensorflow::Node* node() const { return dataset_id.node(); }
3108
3109 static Attrs ElementSpec(StringPiece x) {
3110 return Attrs().ElementSpec(x);
3111 }
3112 static Attrs Metadata(StringPiece x) {
3113 return Attrs().Metadata(x);
3114 }
3115
3116 Operation operation;
3117 ::tensorflow::Output dataset_id;
3118};
3119
3120/// Registers a dataset with the tf.data service.
3121///
3122/// Args:
3123/// * scope: A Scope object
3124///
3125/// Returns:
3126/// * `Output`: The dataset_id tensor.
3127class RegisterDatasetV2 {
3128 public:
3129 /// Optional attribute setters for RegisterDatasetV2
3130 struct Attrs {
3131 /// Defaults to ""
3132 TF_MUST_USE_RESULT Attrs ElementSpec(StringPiece x) {
3133 Attrs ret = *this;
3134 ret.element_spec_ = x;
3135 return ret;
3136 }
3137
3138 /// Defaults to ""
3139 TF_MUST_USE_RESULT Attrs RequestedDatasetId(StringPiece x) {
3140 Attrs ret = *this;
3141 ret.requested_dataset_id_ = x;
3142 return ret;
3143 }
3144
3145 /// Defaults to ""
3146 TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
3147 Attrs ret = *this;
3148 ret.metadata_ = x;
3149 return ret;
3150 }
3151
3152 StringPiece element_spec_ = "";
3153 StringPiece requested_dataset_id_ = "";
3154 StringPiece metadata_ = "";
3155 };
3156 RegisterDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
3157 dataset, ::tensorflow::Input address, ::tensorflow::Input
3158 protocol, int64 external_state_policy);
3159 RegisterDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
3160 dataset, ::tensorflow::Input address, ::tensorflow::Input
3161 protocol, int64 external_state_policy, const
3162 RegisterDatasetV2::Attrs& attrs);
3163 operator ::tensorflow::Output() const { return dataset_id; }
3164 operator ::tensorflow::Input() const { return dataset_id; }
3165 ::tensorflow::Node* node() const { return dataset_id.node(); }
3166
3167 static Attrs ElementSpec(StringPiece x) {
3168 return Attrs().ElementSpec(x);
3169 }
3170 static Attrs RequestedDatasetId(StringPiece x) {
3171 return Attrs().RequestedDatasetId(x);
3172 }
3173 static Attrs Metadata(StringPiece x) {
3174 return Attrs().Metadata(x);
3175 }
3176
3177 Operation operation;
3178 ::tensorflow::Output dataset_id;
3179};
3180
3181/// Creates a dataset that takes a Bernoulli sample of the contents of another dataset.
3182///
3183/// There is no transformation in the `tf.data` Python API for creating this dataset.
3184/// Instead, it is created as a result of the `filter_with_random_uniform_fusion`
3185/// static optimization. Whether this optimization is performed is determined by the
3186/// `experimental_optimization.filter_with_random_uniform_fusion` option of
3187/// `tf.data.Options`.
3188///
3189/// Args:
3190/// * scope: A Scope object
3191/// * rate: A scalar representing the sample rate. Each element of `input_dataset` is
3192/// retained with this probability, independent of all other elements.
3193/// * seed: A scalar representing seed of random number generator.
3194/// * seed2: A scalar representing seed2 of random number generator.
3195///
3196/// Returns:
3197/// * `Output`: The handle tensor.
3198class SamplingDataset {
3199 public:
3200 SamplingDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
3201 input_dataset, ::tensorflow::Input rate, ::tensorflow::Input
3202 seed, ::tensorflow::Input seed2, const DataTypeSlice&
3203 output_types, const gtl::ArraySlice<PartialTensorShape>&
3204 output_shapes);
3205 operator ::tensorflow::Output() const { return handle; }
3206 operator ::tensorflow::Input() const { return handle; }
3207 ::tensorflow::Node* node() const { return handle.node(); }
3208
3209 Operation operation;
3210 ::tensorflow::Output handle;
3211};
3212
3213/// TODO: add doc.
3214///
3215/// Args:
3216/// * scope: A Scope object
3217///
3218/// Returns:
3219/// * the created `Operation`
3220class SaveDataset {
3221 public:
3222 /// Optional attribute setters for SaveDataset
3223 struct Attrs {
3224 /// Defaults to ""
3225 TF_MUST_USE_RESULT Attrs Compression(StringPiece x) {
3226 Attrs ret = *this;
3227 ret.compression_ = x;
3228 return ret;
3229 }
3230
3231 /// Defaults to true
3232 TF_MUST_USE_RESULT Attrs UseShardFunc(bool x) {
3233 Attrs ret = *this;
3234 ret.use_shard_func_ = x;
3235 return ret;
3236 }
3237
3238 StringPiece compression_ = "";
3239 bool use_shard_func_ = true;
3240 };
3241 SaveDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
3242 input_dataset, ::tensorflow::Input path, ::tensorflow::InputList
3243 shard_func_other_args, const NameAttrList& shard_func);
3244 SaveDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
3245 input_dataset, ::tensorflow::Input path, ::tensorflow::InputList
3246 shard_func_other_args, const NameAttrList& shard_func, const
3247 SaveDataset::Attrs& attrs);
3248 operator ::tensorflow::Operation() const { return operation; }
3249
3250 static Attrs Compression(StringPiece x) {
3251 return Attrs().Compression(x);
3252 }
3253 static Attrs UseShardFunc(bool x) {
3254 return Attrs().UseShardFunc(x);
3255 }
3256
3257 Operation operation;
3258};
3259
3260/// TODO: add doc.
3261///
3262/// Args:
3263/// * scope: A Scope object
3264///
3265/// Returns:
3266/// * `Output`: The handle tensor.
3267class SaveDatasetV2 {
3268 public:
3269 /// Optional attribute setters for SaveDatasetV2
3270 struct Attrs {
3271 /// Defaults to ""
3272 TF_MUST_USE_RESULT Attrs Compression(StringPiece x) {
3273 Attrs ret = *this;
3274 ret.compression_ = x;
3275 return ret;
3276 }
3277
3278 /// Defaults to true
3279 TF_MUST_USE_RESULT Attrs UseShardFunc(bool x) {
3280 Attrs ret = *this;
3281 ret.use_shard_func_ = x;
3282 return ret;
3283 }
3284
3285 StringPiece compression_ = "";
3286 bool use_shard_func_ = true;
3287 };
3288 SaveDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
3289 input_dataset, ::tensorflow::Input path, ::tensorflow::InputList
3290 shard_func_other_args, const NameAttrList& shard_func, const
3291 DataTypeSlice& output_types, const
3292 gtl::ArraySlice<PartialTensorShape>& output_shapes);
3293 SaveDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
3294 input_dataset, ::tensorflow::Input path, ::tensorflow::InputList
3295 shard_func_other_args, const NameAttrList& shard_func, const
3296 DataTypeSlice& output_types, const
3297 gtl::ArraySlice<PartialTensorShape>& output_shapes, const
3298 SaveDatasetV2::Attrs& attrs);
3299 operator ::tensorflow::Output() const { return handle; }
3300 operator ::tensorflow::Input() const { return handle; }
3301 ::tensorflow::Node* node() const { return handle.node(); }
3302
3303 static Attrs Compression(StringPiece x) {
3304 return Attrs().Compression(x);
3305 }
3306 static Attrs UseShardFunc(bool x) {
3307 return Attrs().UseShardFunc(x);
3308 }
3309
3310 Operation operation;
3311 ::tensorflow::Output handle;
3312};
3313
/// Creates a dataset that successively reduces `f` over the elements of `input_dataset`.
3315///
3316/// Args:
3317/// * scope: A Scope object
3318///
3319/// Returns:
3320/// * `Output`: The handle tensor.
3321class ScanDataset {
3322 public:
3323 /// Optional attribute setters for ScanDataset
3324 struct Attrs {
3325 /// Defaults to false
3326 TF_MUST_USE_RESULT Attrs PreserveCardinality(bool x) {
3327 Attrs ret = *this;
3328 ret.preserve_cardinality_ = x;
3329 return ret;
3330 }
3331
3332 /// Defaults to true
3333 TF_MUST_USE_RESULT Attrs UseDefaultDevice(bool x) {
3334 Attrs ret = *this;
3335 ret.use_default_device_ = x;
3336 return ret;
3337 }
3338
3339 /// Defaults to ""
3340 TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
3341 Attrs ret = *this;
3342 ret.metadata_ = x;
3343 return ret;
3344 }
3345
3346 bool preserve_cardinality_ = false;
3347 bool use_default_device_ = true;
3348 StringPiece metadata_ = "";
3349 };
3350 ScanDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
3351 input_dataset, ::tensorflow::InputList initial_state,
3352 ::tensorflow::InputList other_arguments, const NameAttrList& f,
3353 const DataTypeSlice& output_types, const
3354 gtl::ArraySlice<PartialTensorShape>& output_shapes);
3355 ScanDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
3356 input_dataset, ::tensorflow::InputList initial_state,
3357 ::tensorflow::InputList other_arguments, const NameAttrList& f,
3358 const DataTypeSlice& output_types, const
3359 gtl::ArraySlice<PartialTensorShape>& output_shapes, const
3360 ScanDataset::Attrs& attrs);
3361 operator ::tensorflow::Output() const { return handle; }
3362 operator ::tensorflow::Input() const { return handle; }
3363 ::tensorflow::Node* node() const { return handle.node(); }
3364
3365 static Attrs PreserveCardinality(bool x) {
3366 return Attrs().PreserveCardinality(x);
3367 }
3368 static Attrs UseDefaultDevice(bool x) {
3369 return Attrs().UseDefaultDevice(x);
3370 }
3371 static Attrs Metadata(StringPiece x) {
3372 return Attrs().Metadata(x);
3373 }
3374
3375 Operation operation;
3376 ::tensorflow::Output handle;
3377};
3378
3379/// TODO: add doc.
3380///
3381/// Args:
3382/// * scope: A Scope object
3383///
3384/// Returns:
3385/// * `Output`: The handle tensor.
3386class SetStatsAggregatorDataset {
3387 public:
3388 SetStatsAggregatorDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
3389 input_dataset, ::tensorflow::Input stats_aggregator,
3390 ::tensorflow::Input tag, ::tensorflow::Input
3391 counter_prefix, const DataTypeSlice& output_types,
3392 const gtl::ArraySlice<PartialTensorShape>&
3393 output_shapes);
3394 operator ::tensorflow::Output() const { return handle; }
3395 operator ::tensorflow::Input() const { return handle; }
3396 ::tensorflow::Node* node() const { return handle.node(); }
3397
3398 Operation operation;
3399 ::tensorflow::Output handle;
3400};
3401
3402/// TODO: add doc.
3403///
3404/// Args:
3405/// * scope: A Scope object
3406///
3407/// Returns:
3408/// * `Output`: The handle tensor.
3409class SleepDataset {
3410 public:
3411 SleepDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
3412 input_dataset, ::tensorflow::Input sleep_microseconds, const
3413 DataTypeSlice& output_types, const
3414 gtl::ArraySlice<PartialTensorShape>& output_shapes);
3415 operator ::tensorflow::Output() const { return handle; }
3416 operator ::tensorflow::Input() const { return handle; }
3417 ::tensorflow::Node* node() const { return handle.node(); }
3418
3419 Operation operation;
3420 ::tensorflow::Output handle;
3421};
3422
3423/// Creates a dataset that passes a sliding window over `input_dataset`.
3424///
3425/// Args:
3426/// * scope: A Scope object
3427/// * window_size: A scalar representing the number of elements in the
3428/// sliding window.
3429/// * window_shift: A scalar representing the steps moving the sliding window
3430/// forward in one iteration. It must be positive.
3431/// * window_stride: A scalar representing the stride of the input elements of the sliding window.
3432/// It must be positive.
3433///
3434/// Returns:
3435/// * `Output`: The handle tensor.
class SlidingWindowDataset {
 public:
  /// Optional attribute setters for SlidingWindowDataset
  struct Attrs {
    /// Defaults to true
    TF_MUST_USE_RESULT Attrs DropRemainder(bool x) {
      // Copy-on-write style: returns a modified copy of *this.
      Attrs ret = *this;
      ret.drop_remainder_ = x;
      return ret;
    }

    // Stored attribute value (default documented on the setter above).
    bool drop_remainder_ = true;
  };
  /// Builds the op with default attributes. See the class-level doc comment
  /// for the meaning of `window_size`, `window_shift` and `window_stride`.
  SlidingWindowDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                     input_dataset, ::tensorflow::Input window_size,
                     ::tensorflow::Input window_shift, ::tensorflow::Input
                     window_stride, const DataTypeSlice& output_types, const
                     gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// As above, additionally applying the optional attributes in `attrs`.
  SlidingWindowDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                     input_dataset, ::tensorflow::Input window_size,
                     ::tensorflow::Input window_shift, ::tensorflow::Input
                     window_stride, const DataTypeSlice& output_types, const
                     gtl::ArraySlice<PartialTensorShape>& output_shapes, const
                     SlidingWindowDataset::Attrs& attrs);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Convenience builder: returns a default Attrs with one field set.
  static Attrs DropRemainder(bool x) {
    return Attrs().DropRemainder(x);
  }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the dataset handle tensor.
  ::tensorflow::Output handle;
};
3471
3472/// Creates a dataset that will write to / read from a snapshot.
3473///
3474/// This dataset attempts to determine whether a valid snapshot exists at the
/// `path`, and reads from the snapshot in lieu of using `input_dataset`.
3476/// If not, it will run the preprocessing pipeline as usual, and write out a
3477/// snapshot of the data processed for future use.
3478///
3479/// Args:
3480/// * scope: A Scope object
3481/// * input_dataset: A variant tensor representing the input dataset.
3482/// * path: The path we should write snapshots to / read snapshots from.
3483///
3484/// Returns:
3485/// * `Output`: The handle tensor.
class SnapshotDataset {
 public:
  /// Optional attribute setters for SnapshotDataset
  struct Attrs {
    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Compression(StringPiece x) {
      // Copy-on-write style: each setter returns a modified copy of *this.
      Attrs ret = *this;
      ret.compression_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs ReaderPathPrefix(StringPiece x) {
      Attrs ret = *this;
      ret.reader_path_prefix_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs WriterPathPrefix(StringPiece x) {
      Attrs ret = *this;
      ret.writer_path_prefix_ = x;
      return ret;
    }

    /// Defaults to 10737418240
    TF_MUST_USE_RESULT Attrs ShardSizeBytes(int64 x) {
      Attrs ret = *this;
      ret.shard_size_bytes_ = x;
      return ret;
    }

    /// Defaults to 86400
    TF_MUST_USE_RESULT Attrs PendingSnapshotExpirySeconds(int64 x) {
      Attrs ret = *this;
      ret.pending_snapshot_expiry_seconds_ = x;
      return ret;
    }

    /// Defaults to 1
    TF_MUST_USE_RESULT Attrs NumReaderThreads(int64 x) {
      Attrs ret = *this;
      ret.num_reader_threads_ = x;
      return ret;
    }

    /// Defaults to 1
    TF_MUST_USE_RESULT Attrs ReaderBufferSize(int64 x) {
      Attrs ret = *this;
      ret.reader_buffer_size_ = x;
      return ret;
    }

    /// Defaults to 1
    TF_MUST_USE_RESULT Attrs NumWriterThreads(int64 x) {
      Attrs ret = *this;
      ret.num_writer_threads_ = x;
      return ret;
    }

    /// Defaults to 1
    TF_MUST_USE_RESULT Attrs WriterBufferSize(int64 x) {
      Attrs ret = *this;
      ret.writer_buffer_size_ = x;
      return ret;
    }

    /// Defaults to false
    TF_MUST_USE_RESULT Attrs ShuffleOnRead(bool x) {
      Attrs ret = *this;
      ret.shuffle_on_read_ = x;
      return ret;
    }

    /// Defaults to 0
    TF_MUST_USE_RESULT Attrs Seed(int64 x) {
      Attrs ret = *this;
      ret.seed_ = x;
      return ret;
    }

    /// Defaults to 0
    TF_MUST_USE_RESULT Attrs Seed2(int64 x) {
      Attrs ret = *this;
      ret.seed2_ = x;
      return ret;
    }

    /// Defaults to "auto"
    TF_MUST_USE_RESULT Attrs Mode(StringPiece x) {
      Attrs ret = *this;
      ret.mode_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs SnapshotName(StringPiece x) {
      Attrs ret = *this;
      ret.snapshot_name_ = x;
      return ret;
    }

    // Stored attribute values (defaults documented on the setters above).
    StringPiece compression_ = "";
    StringPiece reader_path_prefix_ = "";
    StringPiece writer_path_prefix_ = "";
    int64 shard_size_bytes_ = 10737418240;
    int64 pending_snapshot_expiry_seconds_ = 86400;
    int64 num_reader_threads_ = 1;
    int64 reader_buffer_size_ = 1;
    int64 num_writer_threads_ = 1;
    int64 writer_buffer_size_ = 1;
    bool shuffle_on_read_ = false;
    int64 seed_ = 0;
    int64 seed2_ = 0;
    StringPiece mode_ = "auto";
    StringPiece snapshot_name_ = "";
  };
  /// Builds the op with default attributes; see the class-level doc comment
  /// for the meaning of `input_dataset` and `path`.
  SnapshotDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                input_dataset, ::tensorflow::Input path, const DataTypeSlice&
                output_types, const gtl::ArraySlice<PartialTensorShape>&
                output_shapes);
  /// As above, additionally applying the optional attributes in `attrs`.
  SnapshotDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                input_dataset, ::tensorflow::Input path, const DataTypeSlice&
                output_types, const gtl::ArraySlice<PartialTensorShape>&
                output_shapes, const SnapshotDataset::Attrs& attrs);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Convenience builders: each returns a default Attrs with one field set.
  static Attrs Compression(StringPiece x) {
    return Attrs().Compression(x);
  }
  static Attrs ReaderPathPrefix(StringPiece x) {
    return Attrs().ReaderPathPrefix(x);
  }
  static Attrs WriterPathPrefix(StringPiece x) {
    return Attrs().WriterPathPrefix(x);
  }
  static Attrs ShardSizeBytes(int64 x) {
    return Attrs().ShardSizeBytes(x);
  }
  static Attrs PendingSnapshotExpirySeconds(int64 x) {
    return Attrs().PendingSnapshotExpirySeconds(x);
  }
  static Attrs NumReaderThreads(int64 x) {
    return Attrs().NumReaderThreads(x);
  }
  static Attrs ReaderBufferSize(int64 x) {
    return Attrs().ReaderBufferSize(x);
  }
  static Attrs NumWriterThreads(int64 x) {
    return Attrs().NumWriterThreads(x);
  }
  static Attrs WriterBufferSize(int64 x) {
    return Attrs().WriterBufferSize(x);
  }
  static Attrs ShuffleOnRead(bool x) {
    return Attrs().ShuffleOnRead(x);
  }
  static Attrs Seed(int64 x) {
    return Attrs().Seed(x);
  }
  static Attrs Seed2(int64 x) {
    return Attrs().Seed2(x);
  }
  static Attrs Mode(StringPiece x) {
    return Attrs().Mode(x);
  }
  static Attrs SnapshotName(StringPiece x) {
    return Attrs().SnapshotName(x);
  }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the dataset handle tensor.
  ::tensorflow::Output handle;
};
3661
3662/// TODO: add doc.
3663///
3664/// Args:
3665/// * scope: A Scope object
3666///
3667/// Returns:
3668/// * `Output`: The handle tensor.
class SnapshotDatasetReader {
 public:
  /// Optional attribute setters for SnapshotDatasetReader
  struct Attrs {
    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Compression(StringPiece x) {
      // Copy-on-write style: returns a modified copy of *this.
      Attrs ret = *this;
      ret.compression_ = x;
      return ret;
    }

    // Stored attribute value (default documented on the setter above).
    StringPiece compression_ = "";
  };
  /// Builds the op with default attributes, reading from `shard_dir` starting
  /// at `start_index`; `version` selects the snapshot format version.
  /// NOTE(review): exact semantics of `start_index`/`version` are not
  /// documented on this op — confirm against the kernel if they matter.
  SnapshotDatasetReader(const ::tensorflow::Scope& scope, ::tensorflow::Input
                      shard_dir, ::tensorflow::Input start_index, const
                      DataTypeSlice& output_types, const
                      gtl::ArraySlice<PartialTensorShape>& output_shapes, int64
                      version);
  /// As above, additionally applying the optional attributes in `attrs`.
  SnapshotDatasetReader(const ::tensorflow::Scope& scope, ::tensorflow::Input
                      shard_dir, ::tensorflow::Input start_index, const
                      DataTypeSlice& output_types, const
                      gtl::ArraySlice<PartialTensorShape>& output_shapes, int64
                      version, const SnapshotDatasetReader::Attrs& attrs);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Convenience builder: returns a default Attrs with one field set.
  static Attrs Compression(StringPiece x) {
    return Attrs().Compression(x);
  }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the dataset handle tensor.
  ::tensorflow::Output handle;
};
3703
3704/// Creates a dataset that will write to / read from a snapshot.
3705///
3706/// This dataset attempts to determine whether a valid snapshot exists at the
/// `path`, and reads from the snapshot in lieu of using `input_dataset`.
3708/// If not, it will run the preprocessing pipeline as usual, and write out a
3709/// snapshot of the data processed for future use.
3710///
3711/// Args:
3712/// * scope: A Scope object
3713/// * input_dataset: A variant tensor representing the input dataset.
3714/// * path: The path we should write snapshots to / read snapshots from.
3715/// * reader_func: Optional. A function to control how to read data from snapshot shards.
3716/// * shard_func: Optional. A function to control how to shard data when writing a snapshot.
3717///
3718/// Optional attributes (see `Attrs`):
3719/// * compression: The type of compression to be applied to the saved snapshot files.
3720///
3721/// Returns:
3722/// * `Output`: The handle tensor.
class SnapshotDatasetV2 {
 public:
  /// Optional attribute setters for SnapshotDatasetV2
  struct Attrs {
    /// The type of compression to be applied to the saved snapshot files.
    ///
    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Compression(StringPiece x) {
      // Copy-on-write style: each setter returns a modified copy of *this.
      Attrs ret = *this;
      ret.compression_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs ReaderPrefix(StringPiece x) {
      Attrs ret = *this;
      ret.reader_prefix_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs WriterPrefix(StringPiece x) {
      Attrs ret = *this;
      ret.writer_prefix_ = x;
      return ret;
    }

    /// Defaults to false
    TF_MUST_USE_RESULT Attrs HashValid(bool x) {
      Attrs ret = *this;
      ret.hash_valid_ = x;
      return ret;
    }

    /// Defaults to 0
    TF_MUST_USE_RESULT Attrs Hash(int64 x) {
      Attrs ret = *this;
      ret.hash_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
      Attrs ret = *this;
      ret.metadata_ = x;
      return ret;
    }

    // Stored attribute values (defaults documented on the setters above).
    StringPiece compression_ = "";
    StringPiece reader_prefix_ = "";
    StringPiece writer_prefix_ = "";
    bool hash_valid_ = false;
    int64 hash_ = 0;
    StringPiece metadata_ = "";
  };
  /// Builds the op with default attributes. `reader_func`/`shard_func` name
  /// the functions described in the class-level doc comment; the two
  /// InputList arguments carry their captured arguments.
  SnapshotDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
                  input_dataset, ::tensorflow::Input path,
                  ::tensorflow::InputList reader_func_other_args,
                  ::tensorflow::InputList shard_func_other_args, const
                  DataTypeSlice& output_types, const
                  gtl::ArraySlice<PartialTensorShape>& output_shapes, const
                  NameAttrList& reader_func, const NameAttrList& shard_func);
  /// As above, additionally applying the optional attributes in `attrs`.
  SnapshotDatasetV2(const ::tensorflow::Scope& scope, ::tensorflow::Input
                  input_dataset, ::tensorflow::Input path,
                  ::tensorflow::InputList reader_func_other_args,
                  ::tensorflow::InputList shard_func_other_args, const
                  DataTypeSlice& output_types, const
                  gtl::ArraySlice<PartialTensorShape>& output_shapes, const
                  NameAttrList& reader_func, const NameAttrList& shard_func,
                  const SnapshotDatasetV2::Attrs& attrs);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Convenience builders: each returns a default Attrs with one field set.
  static Attrs Compression(StringPiece x) {
    return Attrs().Compression(x);
  }
  static Attrs ReaderPrefix(StringPiece x) {
    return Attrs().ReaderPrefix(x);
  }
  static Attrs WriterPrefix(StringPiece x) {
    return Attrs().WriterPrefix(x);
  }
  static Attrs HashValid(bool x) {
    return Attrs().HashValid(x);
  }
  static Attrs Hash(int64 x) {
    return Attrs().Hash(x);
  }
  static Attrs Metadata(StringPiece x) {
    return Attrs().Metadata(x);
  }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the dataset handle tensor.
  ::tensorflow::Output handle;
};
3819
3820/// TODO: add doc.
3821///
3822/// Args:
3823/// * scope: A Scope object
3824///
3825/// Returns:
3826/// * `Output`: The handle tensor.
class SnapshotNestedDatasetReader {
 public:
  /// Builds a "SnapshotNestedDatasetReader" op over the list of `inputs`;
  /// `output_types`/`output_shapes` declare the expected element structure.
  SnapshotNestedDatasetReader(const ::tensorflow::Scope& scope,
                            ::tensorflow::InputList inputs, const
                            DataTypeSlice& output_types, const
                            gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the dataset handle tensor.
  ::tensorflow::Output handle;
};
3840
3841/// Creates a dataset that executes a SQL query and emits rows of the result set.
3842///
3843/// Args:
3844/// * scope: A Scope object
3845/// * driver_name: The database type. Currently, the only supported type is 'sqlite'.
3846/// * data_source_name: A connection string to connect to the database.
3847/// * query: A SQL query to execute.
3848///
3849/// Returns:
3850/// * `Output`: The handle tensor.
class SqlDataset {
 public:
  /// Builds a "SqlDataset" op; see the class-level doc comment for the
  /// meaning of `driver_name`, `data_source_name` and `query`.
  SqlDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input driver_name,
           ::tensorflow::Input data_source_name, ::tensorflow::Input query,
           const DataTypeSlice& output_types, const
           gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the dataset handle tensor.
  ::tensorflow::Output handle;
};
3864
3865/// Creates a statistics manager resource.
3866///
3867/// Args:
3868/// * scope: A Scope object
3869///
3870/// Returns:
3871/// * `Output`: The handle tensor.
class StatsAggregatorHandle {
 public:
  /// Optional attribute setters for StatsAggregatorHandle
  struct Attrs {
    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Container(StringPiece x) {
      // Copy-on-write style: each setter returns a modified copy of *this.
      Attrs ret = *this;
      ret.container_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs SharedName(StringPiece x) {
      Attrs ret = *this;
      ret.shared_name_ = x;
      return ret;
    }

    // Stored attribute values (defaults documented on the setters above).
    StringPiece container_ = "";
    StringPiece shared_name_ = "";
  };
  /// Builds the op with default attributes.
  StatsAggregatorHandle(const ::tensorflow::Scope& scope);
  /// As above, additionally applying the optional attributes in `attrs`.
  StatsAggregatorHandle(const ::tensorflow::Scope& scope, const
                      StatsAggregatorHandle::Attrs& attrs);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Convenience builders: each returns a default Attrs with one field set.
  static Attrs Container(StringPiece x) {
    return Attrs().Container(x);
  }
  static Attrs SharedName(StringPiece x) {
    return Attrs().SharedName(x);
  }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the resource handle tensor.
  ::tensorflow::Output handle;
};
3910
3911/// TODO: add doc.
3912///
3913/// Args:
3914/// * scope: A Scope object
3915///
3916/// Returns:
3917/// * `Output`: The handle tensor.
class StatsAggregatorHandleV2 {
 public:
  /// Optional attribute setters for StatsAggregatorHandleV2
  struct Attrs {
    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Container(StringPiece x) {
      // Copy-on-write style: each setter returns a modified copy of *this.
      Attrs ret = *this;
      ret.container_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs SharedName(StringPiece x) {
      Attrs ret = *this;
      ret.shared_name_ = x;
      return ret;
    }

    // Stored attribute values (defaults documented on the setters above).
    StringPiece container_ = "";
    StringPiece shared_name_ = "";
  };
  /// Builds the op with default attributes.
  StatsAggregatorHandleV2(const ::tensorflow::Scope& scope);
  /// As above, additionally applying the optional attributes in `attrs`.
  StatsAggregatorHandleV2(const ::tensorflow::Scope& scope, const
                        StatsAggregatorHandleV2::Attrs& attrs);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Convenience builders: each returns a default Attrs with one field set.
  static Attrs Container(StringPiece x) {
    return Attrs().Container(x);
  }
  static Attrs SharedName(StringPiece x) {
    return Attrs().SharedName(x);
  }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the resource handle tensor.
  ::tensorflow::Output handle;
};
3956
3957/// Set a summary_writer_interface to record statistics using given stats_aggregator.
3958///
3959/// Args:
3960/// * scope: A Scope object
3961///
3962/// Returns:
3963/// * the created `Operation`
class StatsAggregatorSetSummaryWriter {
 public:
  /// Builds the op, wiring `summary` (a summary writer resource, per the
  /// class-level doc comment) into `stats_aggregator`.
  StatsAggregatorSetSummaryWriter(const ::tensorflow::Scope& scope,
                                ::tensorflow::Input stats_aggregator,
                                ::tensorflow::Input summary);
  /// This op has no outputs; it converts only to the created Operation.
  operator ::tensorflow::Operation() const { return operation; }

  /// The operation added to the graph.
  Operation operation;
};
3973
3974/// Produces a summary of any statistics recorded by the given statistics manager.
3975///
3976/// Args:
3977/// * scope: A Scope object
3978///
3979/// Returns:
3980/// * `Output`: The summary tensor.
class StatsAggregatorSummary {
 public:
  /// Builds the op over the `iterator` input. NOTE(review): despite the
  /// name, the generated signature calls this input `iterator` — it appears
  /// to take the statistics manager resource; confirm against the op def.
  StatsAggregatorSummary(const ::tensorflow::Scope& scope, ::tensorflow::Input
                       iterator);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return summary; }
  operator ::tensorflow::Input() const { return summary; }
  /// Graph node that produced `summary`.
  ::tensorflow::Node* node() const { return summary.node(); }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the serialized summary tensor.
  ::tensorflow::Output summary;
};
3992
/// Creates a dataset that stops iteration when `predicate` is false.
3994///
3995/// The `predicate` function must return a scalar boolean and accept the
3996/// following arguments:
3997///
3998/// * One tensor for each component of an element of `input_dataset`.
3999/// * One tensor for each value in `other_arguments`.
4000///
4001/// Args:
4002/// * scope: A Scope object
4003/// * other_arguments: A list of tensors, typically values that were captured when
4004/// building a closure for `predicate`.
4005/// * predicate: A function returning a scalar boolean.
4006///
4007/// Returns:
4008/// * `Output`: The handle tensor.
class TakeWhileDataset {
 public:
  /// Optional attribute setters for TakeWhileDataset
  struct Attrs {
    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
      // Copy-on-write style: returns a modified copy of *this.
      Attrs ret = *this;
      ret.metadata_ = x;
      return ret;
    }

    // Stored attribute value (default documented on the setter above).
    StringPiece metadata_ = "";
  };
  /// Builds the op with default attributes; see the class-level doc comment
  /// for the contract of `predicate` and `other_arguments`.
  TakeWhileDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                 input_dataset, ::tensorflow::InputList other_arguments, const
                 NameAttrList& predicate, const DataTypeSlice& output_types,
                 const gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// As above, additionally applying the optional attributes in `attrs`.
  TakeWhileDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                 input_dataset, ::tensorflow::InputList other_arguments, const
                 NameAttrList& predicate, const DataTypeSlice& output_types,
                 const gtl::ArraySlice<PartialTensorShape>& output_shapes,
                 const TakeWhileDataset::Attrs& attrs);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Convenience builder: returns a default Attrs with one field set.
  static Attrs Metadata(StringPiece x) {
    return Attrs().Metadata(x);
  }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the dataset handle tensor.
  ::tensorflow::Output handle;
};
4042
4043/// Creates a dataset that uses a custom thread pool to compute `input_dataset`.
4044///
4045/// Args:
4046/// * scope: A Scope object
4047/// * thread_pool: A resource produced by the ThreadPoolHandle op.
4048///
4049/// Returns:
4050/// * `Output`: The handle tensor.
class ThreadPoolDataset {
 public:
  /// Builds the op; `thread_pool` is a resource produced by the
  /// ThreadPoolHandle op (see the class-level doc comment).
  ThreadPoolDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
                  input_dataset, ::tensorflow::Input thread_pool, const
                  DataTypeSlice& output_types, const
                  gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the dataset handle tensor.
  ::tensorflow::Output handle;
};
4064
4065/// Creates a dataset that uses a custom thread pool to compute `input_dataset`.
4066///
4067/// Args:
4068/// * scope: A Scope object
4069/// * num_threads: The number of threads in the thread pool.
/// * display_name: A human-readable name for the threads that may be visible in some
/// visualizations.
4073///
4074/// Optional attributes (see `Attrs`):
4075/// * max_intra_op_parallelism: The maximum degree of parallelism to use within operations that execute on this
4076/// threadpool.
4077///
4078/// Returns:
4079/// * `Output`: A resource that can be consumed by one or more ExperimentalThreadPoolDataset
4080/// ops.
class ThreadPoolHandle {
 public:
  /// Optional attribute setters for ThreadPoolHandle
  struct Attrs {
    /// The maximum degree of parallelism to use within operations that execute on this
    /// threadpool.
    ///
    /// Defaults to 1
    TF_MUST_USE_RESULT Attrs MaxIntraOpParallelism(int64 x) {
      // Copy-on-write style: each setter returns a modified copy of *this.
      Attrs ret = *this;
      ret.max_intra_op_parallelism_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Container(StringPiece x) {
      Attrs ret = *this;
      ret.container_ = x;
      return ret;
    }

    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs SharedName(StringPiece x) {
      Attrs ret = *this;
      ret.shared_name_ = x;
      return ret;
    }

    // Stored attribute values (defaults documented on the setters above).
    int64 max_intra_op_parallelism_ = 1;
    StringPiece container_ = "";
    StringPiece shared_name_ = "";
  };
  /// Builds the op with default attributes; see the class-level doc comment
  /// for the meaning of `num_threads` and `display_name`.
  ThreadPoolHandle(const ::tensorflow::Scope& scope, int64 num_threads,
                 StringPiece display_name);
  /// As above, additionally applying the optional attributes in `attrs`.
  ThreadPoolHandle(const ::tensorflow::Scope& scope, int64 num_threads,
                 StringPiece display_name, const ThreadPoolHandle::Attrs&
                 attrs);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Convenience builders: each returns a default Attrs with one field set.
  static Attrs MaxIntraOpParallelism(int64 x) {
    return Attrs().MaxIntraOpParallelism(x);
  }
  static Attrs Container(StringPiece x) {
    return Attrs().Container(x);
  }
  static Attrs SharedName(StringPiece x) {
    return Attrs().SharedName(x);
  }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the thread-pool resource handle tensor.
  ::tensorflow::Output handle;
};
4135
4136/// A dataset that splits the elements of its input into multiple elements.
4137///
4138/// Args:
4139/// * scope: A Scope object
4140///
4141/// Returns:
4142/// * `Output`: The handle tensor.
class UnbatchDataset {
 public:
  /// Optional attribute setters for UnbatchDataset
  struct Attrs {
    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
      // Copy-on-write style: returns a modified copy of *this.
      Attrs ret = *this;
      ret.metadata_ = x;
      return ret;
    }

    // Stored attribute value (default documented on the setter above).
    StringPiece metadata_ = "";
  };
  /// Builds the op with default attributes over `input_dataset`.
  UnbatchDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
               input_dataset, const DataTypeSlice& output_types, const
               gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// As above, additionally applying the optional attributes in `attrs`.
  UnbatchDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
               input_dataset, const DataTypeSlice& output_types, const
               gtl::ArraySlice<PartialTensorShape>& output_shapes, const
               UnbatchDataset::Attrs& attrs);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Convenience builder: returns a default Attrs with one field set.
  static Attrs Metadata(StringPiece x) {
    return Attrs().Metadata(x);
  }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the dataset handle tensor.
  ::tensorflow::Output handle;
};
4174
4175/// Uncompresses a compressed dataset element.
4176///
4177/// Args:
4178/// * scope: A Scope object
4179///
4180/// Returns:
4181/// * `OutputList`: The components tensor.
class UncompressElement {
 public:
  /// Builds the op over `compressed`; `output_types`/`output_shapes` declare
  /// the expected structure of the uncompressed components.
  UncompressElement(const ::tensorflow::Scope& scope, ::tensorflow::Input
                  compressed, const DataTypeSlice& output_types, const
                  gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// Index into the op's list of output components.
  ::tensorflow::Output operator[](size_t index) const { return components[index]; }


  /// The operation added to the graph.
  Operation operation;
  /// The op's outputs: one tensor per element component.
  ::tensorflow::OutputList components;
};
4193
4194/// Creates a dataset that contains the unique elements of `input_dataset`.
4195///
4196/// Args:
4197/// * scope: A Scope object
4198///
4199/// Returns:
4200/// * `Output`: The handle tensor.
class UniqueDataset {
 public:
  /// Optional attribute setters for UniqueDataset
  struct Attrs {
    /// Defaults to ""
    TF_MUST_USE_RESULT Attrs Metadata(StringPiece x) {
      // Copy-on-write style: returns a modified copy of *this.
      Attrs ret = *this;
      ret.metadata_ = x;
      return ret;
    }

    // Stored attribute value (default documented on the setter above).
    StringPiece metadata_ = "";
  };
  /// Builds the op with default attributes over `input_dataset`.
  UniqueDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
              input_dataset, const DataTypeSlice& output_types, const
              gtl::ArraySlice<PartialTensorShape>& output_shapes);
  /// As above, additionally applying the optional attributes in `attrs`.
  UniqueDataset(const ::tensorflow::Scope& scope, ::tensorflow::Input
              input_dataset, const DataTypeSlice& output_types, const
              gtl::ArraySlice<PartialTensorShape>& output_shapes, const
              UniqueDataset::Attrs& attrs);
  /// Implicit conversions so the result can be passed directly to other ops.
  operator ::tensorflow::Output() const { return handle; }
  operator ::tensorflow::Input() const { return handle; }
  /// Graph node that produced `handle`.
  ::tensorflow::Node* node() const { return handle.node(); }

  /// Convenience builder: returns a default Attrs with one field set.
  static Attrs Metadata(StringPiece x) {
    return Attrs().Metadata(x);
  }

  /// The operation added to the graph.
  Operation operation;
  /// The op's single output: the dataset handle tensor.
  ::tensorflow::Output handle;
};
4232
4233} // namespace internal
4234} // namespace ops
4235} // namespace tensorflow
4236
4237#endif // TENSORFLOW_CC_OPS_EXPERIMENTAL_DATASET_OPS_INTERNAL_H_
4238