1// Generated by the protocol buffer compiler. DO NOT EDIT!
2// source: tensorflow/core/protobuf/tensor_bundle.proto
3
4#ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto
5#define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto
6
7#include <limits>
8#include <string>
9
10#include <google/protobuf/port_def.inc>
11#if PROTOBUF_VERSION < 3009000
12#error This file was generated by a newer version of protoc which is
13#error incompatible with your Protocol Buffer headers. Please update
14#error your headers.
15#endif
16#if 3009002 < PROTOBUF_MIN_PROTOC_VERSION
17#error This file was generated by an older version of protoc which is
18#error incompatible with your Protocol Buffer headers. Please
19#error regenerate this file with a newer version of protoc.
20#endif
21
22#include <google/protobuf/port_undef.inc>
23#include <google/protobuf/io/coded_stream.h>
24#include <google/protobuf/arena.h>
25#include <google/protobuf/arenastring.h>
26#include <google/protobuf/generated_message_table_driven.h>
27#include <google/protobuf/generated_message_util.h>
28#include <google/protobuf/inlined_string_field.h>
29#include <google/protobuf/metadata.h>
30#include <google/protobuf/generated_message_reflection.h>
31#include <google/protobuf/message.h>
32#include <google/protobuf/repeated_field.h> // IWYU pragma: export
33#include <google/protobuf/extension_set.h> // IWYU pragma: export
34#include <google/protobuf/generated_enum_reflection.h>
35#include <google/protobuf/unknown_field_set.h>
36#include "tensorflow/core/framework/tensor_shape.pb.h"
37#include "tensorflow/core/framework/tensor_slice.pb.h"
38#include "tensorflow/core/framework/types.pb.h"
39#include "tensorflow/core/framework/versions.pb.h"
40// @@protoc_insertion_point(includes)
41#include <google/protobuf/port_def.inc>
42#define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto
43PROTOBUF_NAMESPACE_OPEN
44namespace internal {
45class AnyMetadata;
46} // namespace internal
47PROTOBUF_NAMESPACE_CLOSE
48
49// Internal implementation detail -- do not use these members.
// Table-driven parse/serialize metadata for the messages declared in this
// file. The arrays are declared here and defined in the generated .pb.cc.
struct TableStruct_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto {
  static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTableField entries[]
    PROTOBUF_SECTION_VARIABLE(protodesc_cold);
  static const ::PROTOBUF_NAMESPACE_ID::internal::AuxillaryParseTableField aux[]
    PROTOBUF_SECTION_VARIABLE(protodesc_cold);
  // One ParseTable per message type in tensor_bundle.proto
  // (BundleHeaderProto and BundleEntryProto).
  static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTable schema[2]
    PROTOBUF_SECTION_VARIABLE(protodesc_cold);
  static const ::PROTOBUF_NAMESPACE_ID::internal::FieldMetadata field_metadata[];
  static const ::PROTOBUF_NAMESPACE_ID::internal::SerializationTable serialization_table[];
  // Field offset table used by reflection; indexed per message.
  static const ::PROTOBUF_NAMESPACE_ID::uint32 offsets[];
};
61extern const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto;
62namespace tensorflow {
63class BundleEntryProto;
64class BundleEntryProtoDefaultTypeInternal;
65extern BundleEntryProtoDefaultTypeInternal _BundleEntryProto_default_instance_;
66class BundleHeaderProto;
67class BundleHeaderProtoDefaultTypeInternal;
68extern BundleHeaderProtoDefaultTypeInternal _BundleHeaderProto_default_instance_;
69} // namespace tensorflow
70PROTOBUF_NAMESPACE_OPEN
71template<> ::tensorflow::BundleEntryProto* Arena::CreateMaybeMessage<::tensorflow::BundleEntryProto>(Arena*);
72template<> ::tensorflow::BundleHeaderProto* Arena::CreateMaybeMessage<::tensorflow::BundleHeaderProto>(Arena*);
73PROTOBUF_NAMESPACE_CLOSE
74namespace tensorflow {
75
// Generated C++ enum for proto enum tensorflow.BundleHeaderProto.Endianness:
// the byte order used when the bundle was written.
enum BundleHeaderProto_Endianness : int {
  BundleHeaderProto_Endianness_LITTLE = 0,
  BundleHeaderProto_Endianness_BIG = 1,
  // Sentinels that force the underlying type to span the full int32 range so
  // unknown wire values can be stored; never valid enum values themselves.
  BundleHeaderProto_Endianness_BundleHeaderProto_Endianness_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(),
  BundleHeaderProto_Endianness_BundleHeaderProto_Endianness_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max()
};
// Returns true if |value| is a declared enum value (defined in the .pb.cc).
bool BundleHeaderProto_Endianness_IsValid(int value);
// Smallest/largest declared values and the size of a dense lookup array.
constexpr BundleHeaderProto_Endianness BundleHeaderProto_Endianness_Endianness_MIN = BundleHeaderProto_Endianness_LITTLE;
constexpr BundleHeaderProto_Endianness BundleHeaderProto_Endianness_Endianness_MAX = BundleHeaderProto_Endianness_BIG;
constexpr int BundleHeaderProto_Endianness_Endianness_ARRAYSIZE = BundleHeaderProto_Endianness_Endianness_MAX + 1;
86
// Reflection descriptor for the Endianness enum (defined in the .pb.cc).
const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* BundleHeaderProto_Endianness_descriptor();
// Returns the proto name of |enum_t_value| (e.g. "LITTLE") via reflection.
// Accepts the enum type or any integral type; anything else fails to compile.
template<typename T>
inline const std::string& BundleHeaderProto_Endianness_Name(T enum_t_value) {
  static_assert(::std::is_same<T, BundleHeaderProto_Endianness>::value ||
    ::std::is_integral<T>::value,
    "Incorrect type passed to function BundleHeaderProto_Endianness_Name.");
  return ::PROTOBUF_NAMESPACE_ID::internal::NameOfEnum(
    BundleHeaderProto_Endianness_descriptor(), enum_t_value);
}
// Parses an enum value from its proto name; returns false (leaving *value
// untouched) when |name| does not match any declared value.
inline bool BundleHeaderProto_Endianness_Parse(
    const std::string& name, BundleHeaderProto_Endianness* value) {
  return ::PROTOBUF_NAMESPACE_ID::internal::ParseNamedEnum<BundleHeaderProto_Endianness>(
    BundleHeaderProto_Endianness_descriptor(), name, value);
}
101// ===================================================================
102
// Generated message class for tensorflow.BundleHeaderProto.
// Fields (see accessors below): num_shards (int32, field 1),
// endianness (enum, field 2), version (VersionDef submessage, field 3).
class BundleHeaderProto :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.BundleHeaderProto) */ {
 public:
  BundleHeaderProto();
  virtual ~BundleHeaderProto();

  BundleHeaderProto(const BundleHeaderProto& from);
  BundleHeaderProto(BundleHeaderProto&& from) noexcept
    : BundleHeaderProto() {
    *this = ::std::move(from);
  }

  inline BundleHeaderProto& operator=(const BundleHeaderProto& from) {
    CopyFrom(from);
    return *this;
  }
  inline BundleHeaderProto& operator=(BundleHeaderProto&& from) noexcept {
    // Moving is only a cheap swap when both objects live on the same arena
    // (or both on the heap); otherwise fall back to a deep copy.
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const BundleHeaderProto& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  // Immutable singleton used for unset-submessage reads; never delete/mutate.
  static inline const BundleHeaderProto* internal_default_instance() {
    return reinterpret_cast<const BundleHeaderProto*>(
               &_BundleHeaderProto_default_instance_);
  }
  // Position of this message in the file's descriptor/metadata tables.
  static constexpr int kIndexInFileMessages =
    0;

  friend void swap(BundleHeaderProto& a, BundleHeaderProto& b) {
    a.Swap(&b);
  }
  // Value swap; across different arenas this degrades to copy-based
  // GenericSwap because raw pointer exchange would mix arena ownership.
  inline void Swap(BundleHeaderProto* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Pointer-level swap; caller must guarantee both objects share an arena
  // (only DCHECK'd in debug builds).
  void UnsafeArenaSwap(BundleHeaderProto* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline BundleHeaderProto* New() const final {
    return CreateMaybeMessage<BundleHeaderProto>(nullptr);
  }

  BundleHeaderProto* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<BundleHeaderProto>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const BundleHeaderProto& from);
  void MergeFrom(const BundleHeaderProto& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  // Size computed by the last ByteSizeLong()/serialization pass.
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(BundleHeaderProto* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.BundleHeaderProto";
  }
  protected:
  explicit BundleHeaderProto(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    // Lazily materializes descriptors for the whole file on first use.
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // Aliases re-exporting the file-scope Endianness enum and its helpers
  // under the nested name BundleHeaderProto::Endianness.
  typedef BundleHeaderProto_Endianness Endianness;
  static constexpr Endianness LITTLE =
    BundleHeaderProto_Endianness_LITTLE;
  static constexpr Endianness BIG =
    BundleHeaderProto_Endianness_BIG;
  static inline bool Endianness_IsValid(int value) {
    return BundleHeaderProto_Endianness_IsValid(value);
  }
  static constexpr Endianness Endianness_MIN =
    BundleHeaderProto_Endianness_Endianness_MIN;
  static constexpr Endianness Endianness_MAX =
    BundleHeaderProto_Endianness_Endianness_MAX;
  static constexpr int Endianness_ARRAYSIZE =
    BundleHeaderProto_Endianness_Endianness_ARRAYSIZE;
  static inline const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor*
  Endianness_descriptor() {
    return BundleHeaderProto_Endianness_descriptor();
  }
  template<typename T>
  static inline const std::string& Endianness_Name(T enum_t_value) {
    static_assert(::std::is_same<T, Endianness>::value ||
      ::std::is_integral<T>::value,
      "Incorrect type passed to function Endianness_Name.");
    return BundleHeaderProto_Endianness_Name(enum_t_value);
  }
  static inline bool Endianness_Parse(const std::string& name,
      Endianness* value) {
    return BundleHeaderProto_Endianness_Parse(name, value);
  }

  // accessors -------------------------------------------------------

  enum : int {
    kVersionFieldNumber = 3,
    kNumShardsFieldNumber = 1,
    kEndiannessFieldNumber = 2,
  };
  // .tensorflow.VersionDef version = 3;
  bool has_version() const;
  void clear_version();
  const ::tensorflow::VersionDef& version() const;
  ::tensorflow::VersionDef* release_version();
  ::tensorflow::VersionDef* mutable_version();
  void set_allocated_version(::tensorflow::VersionDef* version);
  // unsafe_arena_* variants skip cross-arena ownership fixups; only safe
  // when caller and submessage share the same arena discipline.
  void unsafe_arena_set_allocated_version(
      ::tensorflow::VersionDef* version);
  ::tensorflow::VersionDef* unsafe_arena_release_version();

  // int32 num_shards = 1;
  void clear_num_shards();
  ::PROTOBUF_NAMESPACE_ID::int32 num_shards() const;
  void set_num_shards(::PROTOBUF_NAMESPACE_ID::int32 value);

  // .tensorflow.BundleHeaderProto.Endianness endianness = 2;
  void clear_endianness();
  ::tensorflow::BundleHeaderProto_Endianness endianness() const;
  void set_endianness(::tensorflow::BundleHeaderProto_Endianness value);

  // @@protoc_insertion_point(class_scope:tensorflow.BundleHeaderProto)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // nullptr while the version submessage is unset.
  ::tensorflow::VersionDef* version_;
  ::PROTOBUF_NAMESPACE_ID::int32 num_shards_;
  // Stored as raw int so out-of-range wire values round-trip.
  int endianness_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto;
};
305// -------------------------------------------------------------------
306
// Generated message class for tensorflow.BundleEntryProto.
// Fields (see accessors below): dtype (field 1), shape (field 2),
// shard_id/offset/size (fields 3-5), crc32c checksum (field 6), and
// repeated slices (field 7).
class BundleEntryProto :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.BundleEntryProto) */ {
 public:
  BundleEntryProto();
  virtual ~BundleEntryProto();

  BundleEntryProto(const BundleEntryProto& from);
  BundleEntryProto(BundleEntryProto&& from) noexcept
    : BundleEntryProto() {
    *this = ::std::move(from);
  }

  inline BundleEntryProto& operator=(const BundleEntryProto& from) {
    CopyFrom(from);
    return *this;
  }
  inline BundleEntryProto& operator=(BundleEntryProto&& from) noexcept {
    // Cheap swap only within the same arena; deep copy otherwise.
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const BundleEntryProto& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  // Immutable singleton used for unset-submessage reads; never delete/mutate.
  static inline const BundleEntryProto* internal_default_instance() {
    return reinterpret_cast<const BundleEntryProto*>(
               &_BundleEntryProto_default_instance_);
  }
  // Position of this message in the file's descriptor/metadata tables.
  static constexpr int kIndexInFileMessages =
    1;

  friend void swap(BundleEntryProto& a, BundleEntryProto& b) {
    a.Swap(&b);
  }
  // Value swap; across arenas degrades to copy-based GenericSwap.
  inline void Swap(BundleEntryProto* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Pointer-level swap; both objects must share an arena (DCHECK'd only).
  void UnsafeArenaSwap(BundleEntryProto* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline BundleEntryProto* New() const final {
    return CreateMaybeMessage<BundleEntryProto>(nullptr);
  }

  BundleEntryProto* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<BundleEntryProto>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const BundleEntryProto& from);
  void MergeFrom(const BundleEntryProto& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  // Size computed by the last ByteSizeLong()/serialization pass.
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(BundleEntryProto* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.BundleEntryProto";
  }
  protected:
  explicit BundleEntryProto(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    // Lazily materializes descriptors for the whole file on first use.
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kSlicesFieldNumber = 7,
    kShapeFieldNumber = 2,
    kDtypeFieldNumber = 1,
    kShardIdFieldNumber = 3,
    kOffsetFieldNumber = 4,
    kSizeFieldNumber = 5,
    kCrc32CFieldNumber = 6,
  };
  // repeated .tensorflow.TensorSliceProto slices = 7;
  int slices_size() const;
  void clear_slices();
  ::tensorflow::TensorSliceProto* mutable_slices(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorSliceProto >*
      mutable_slices();
  const ::tensorflow::TensorSliceProto& slices(int index) const;
  ::tensorflow::TensorSliceProto* add_slices();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorSliceProto >&
      slices() const;

  // .tensorflow.TensorShapeProto shape = 2;
  bool has_shape() const;
  void clear_shape();
  const ::tensorflow::TensorShapeProto& shape() const;
  ::tensorflow::TensorShapeProto* release_shape();
  ::tensorflow::TensorShapeProto* mutable_shape();
  void set_allocated_shape(::tensorflow::TensorShapeProto* shape);
  // unsafe_arena_* variants skip cross-arena ownership fixups; only safe
  // when caller and submessage share the same arena discipline.
  void unsafe_arena_set_allocated_shape(
      ::tensorflow::TensorShapeProto* shape);
  ::tensorflow::TensorShapeProto* unsafe_arena_release_shape();

  // .tensorflow.DataType dtype = 1;
  void clear_dtype();
  ::tensorflow::DataType dtype() const;
  void set_dtype(::tensorflow::DataType value);

  // int32 shard_id = 3;
  void clear_shard_id();
  ::PROTOBUF_NAMESPACE_ID::int32 shard_id() const;
  void set_shard_id(::PROTOBUF_NAMESPACE_ID::int32 value);

  // int64 offset = 4;
  void clear_offset();
  ::PROTOBUF_NAMESPACE_ID::int64 offset() const;
  void set_offset(::PROTOBUF_NAMESPACE_ID::int64 value);

  // int64 size = 5;
  void clear_size();
  ::PROTOBUF_NAMESPACE_ID::int64 size() const;
  void set_size(::PROTOBUF_NAMESPACE_ID::int64 value);

  // fixed32 crc32c = 6;
  void clear_crc32c();
  ::PROTOBUF_NAMESPACE_ID::uint32 crc32c() const;
  void set_crc32c(::PROTOBUF_NAMESPACE_ID::uint32 value);

  // @@protoc_insertion_point(class_scope:tensorflow.BundleEntryProto)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorSliceProto > slices_;
  // nullptr while the shape submessage is unset.
  ::tensorflow::TensorShapeProto* shape_;
  // Stored as raw int so out-of-range wire values round-trip.
  int dtype_;
  ::PROTOBUF_NAMESPACE_ID::int32 shard_id_;
  ::PROTOBUF_NAMESPACE_ID::int64 offset_;
  ::PROTOBUF_NAMESPACE_ID::int64 size_;
  ::PROTOBUF_NAMESPACE_ID::uint32 crc32c_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto;
};
513// ===================================================================
514
515
516// ===================================================================
517
518#ifdef __GNUC__
519 #pragma GCC diagnostic push
520 #pragma GCC diagnostic ignored "-Wstrict-aliasing"
521#endif // __GNUC__
522// BundleHeaderProto
523
524// int32 num_shards = 1;
525inline void BundleHeaderProto::clear_num_shards() {
526 num_shards_ = 0;
527}
528inline ::PROTOBUF_NAMESPACE_ID::int32 BundleHeaderProto::num_shards() const {
529 // @@protoc_insertion_point(field_get:tensorflow.BundleHeaderProto.num_shards)
530 return num_shards_;
531}
532inline void BundleHeaderProto::set_num_shards(::PROTOBUF_NAMESPACE_ID::int32 value) {
533
534 num_shards_ = value;
535 // @@protoc_insertion_point(field_set:tensorflow.BundleHeaderProto.num_shards)
536}
537
// .tensorflow.BundleHeaderProto.Endianness endianness = 2;
// Backed by a plain int member so unrecognized wire values survive a
// parse/serialize round trip; the getter casts to the enum type.
inline void BundleHeaderProto::clear_endianness() {
  endianness_ = 0;
}
inline ::tensorflow::BundleHeaderProto_Endianness BundleHeaderProto::endianness() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleHeaderProto.endianness)
  return static_cast< ::tensorflow::BundleHeaderProto_Endianness >(endianness_);
}
inline void BundleHeaderProto::set_endianness(::tensorflow::BundleHeaderProto_Endianness value) {
  
  endianness_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.BundleHeaderProto.endianness)
}
551
// .tensorflow.VersionDef version = 3;
inline bool BundleHeaderProto::has_version() const {
  // The shared default instance never reports submessages as present.
  return this != internal_default_instance() && version_ != nullptr;
}
inline const ::tensorflow::VersionDef& BundleHeaderProto::version() const {
  const ::tensorflow::VersionDef* p = version_;
  // @@protoc_insertion_point(field_get:tensorflow.BundleHeaderProto.version)
  // Unset field: return the immutable default VersionDef instance.
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::VersionDef*>(
      &::tensorflow::_VersionDef_default_instance_);
}
// Releases ownership of the submessage to the caller (field becomes unset).
// If this message lives on an arena, the caller instead receives a fresh
// heap copy, since arena memory cannot be handed out.
inline ::tensorflow::VersionDef* BundleHeaderProto::release_version() {
  // @@protoc_insertion_point(field_release:tensorflow.BundleHeaderProto.version)
  
  ::tensorflow::VersionDef* temp = version_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  version_ = nullptr;
  return temp;
}
// Like release_version() but returns the raw pointer even when it is
// arena-owned; caller must not delete an arena-owned result.
inline ::tensorflow::VersionDef* BundleHeaderProto::unsafe_arena_release_version() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.BundleHeaderProto.version)
  
  ::tensorflow::VersionDef* temp = version_;
  version_ = nullptr;
  return temp;
}
// Lazily creates the submessage (on this message's arena, if any).
inline ::tensorflow::VersionDef* BundleHeaderProto::mutable_version() {
  
  if (version_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::VersionDef>(GetArenaNoVirtual());
    version_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.BundleHeaderProto.version)
  return version_;
}
// Takes ownership of |version| (nullptr clears the field). If the two
// objects live on different arenas, a compatible owned copy is made.
inline void BundleHeaderProto::set_allocated_version(::tensorflow::VersionDef* version) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    // Heap-owned previous value must be freed before overwriting.
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(version_);
  }
  if (version) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(version)->GetArena();
    if (message_arena != submessage_arena) {
      version = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, version, submessage_arena);
    }
    
  } else {
    
  }
  version_ = version;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.BundleHeaderProto.version)
}
607
608// -------------------------------------------------------------------
609
610// BundleEntryProto
611
// .tensorflow.DataType dtype = 1;
// Backed by a plain int member so unrecognized wire values survive a
// parse/serialize round trip; the getter casts to the enum type.
inline void BundleEntryProto::clear_dtype() {
  dtype_ = 0;
}
inline ::tensorflow::DataType BundleEntryProto::dtype() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.dtype)
  return static_cast< ::tensorflow::DataType >(dtype_);
}
inline void BundleEntryProto::set_dtype(::tensorflow::DataType value) {
  
  dtype_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.BundleEntryProto.dtype)
}
625
// .tensorflow.TensorShapeProto shape = 2;
inline bool BundleEntryProto::has_shape() const {
  // The shared default instance never reports submessages as present.
  return this != internal_default_instance() && shape_ != nullptr;
}
inline const ::tensorflow::TensorShapeProto& BundleEntryProto::shape() const {
  const ::tensorflow::TensorShapeProto* p = shape_;
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.shape)
  // Unset field: return the immutable default TensorShapeProto instance.
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::TensorShapeProto*>(
      &::tensorflow::_TensorShapeProto_default_instance_);
}
// Releases ownership of the submessage to the caller (field becomes unset).
// If this message lives on an arena, the caller instead receives a fresh
// heap copy, since arena memory cannot be handed out.
inline ::tensorflow::TensorShapeProto* BundleEntryProto::release_shape() {
  // @@protoc_insertion_point(field_release:tensorflow.BundleEntryProto.shape)
  
  ::tensorflow::TensorShapeProto* temp = shape_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  shape_ = nullptr;
  return temp;
}
// Like release_shape() but returns the raw pointer even when it is
// arena-owned; caller must not delete an arena-owned result.
inline ::tensorflow::TensorShapeProto* BundleEntryProto::unsafe_arena_release_shape() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.BundleEntryProto.shape)
  
  ::tensorflow::TensorShapeProto* temp = shape_;
  shape_ = nullptr;
  return temp;
}
// Lazily creates the submessage (on this message's arena, if any).
inline ::tensorflow::TensorShapeProto* BundleEntryProto::mutable_shape() {
  
  if (shape_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::TensorShapeProto>(GetArenaNoVirtual());
    shape_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.BundleEntryProto.shape)
  return shape_;
}
// Takes ownership of |shape| (nullptr clears the field). If the two
// objects live on different arenas, a compatible owned copy is made.
inline void BundleEntryProto::set_allocated_shape(::tensorflow::TensorShapeProto* shape) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    // Heap-owned previous value must be freed before overwriting.
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(shape_);
  }
  if (shape) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(shape)->GetArena();
    if (message_arena != submessage_arena) {
      shape = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, shape, submessage_arena);
    }
    
  } else {
    
  }
  shape_ = shape;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.BundleEntryProto.shape)
}
681
682// int32 shard_id = 3;
683inline void BundleEntryProto::clear_shard_id() {
684 shard_id_ = 0;
685}
686inline ::PROTOBUF_NAMESPACE_ID::int32 BundleEntryProto::shard_id() const {
687 // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.shard_id)
688 return shard_id_;
689}
690inline void BundleEntryProto::set_shard_id(::PROTOBUF_NAMESPACE_ID::int32 value) {
691
692 shard_id_ = value;
693 // @@protoc_insertion_point(field_set:tensorflow.BundleEntryProto.shard_id)
694}
695
// int64 offset = 4;
// Plain scalar accessors; PROTOBUF_LONGLONG spells the 0LL literal portably.
inline void BundleEntryProto::clear_offset() {
  offset_ = PROTOBUF_LONGLONG(0);
}
inline ::PROTOBUF_NAMESPACE_ID::int64 BundleEntryProto::offset() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.offset)
  return offset_;
}
inline void BundleEntryProto::set_offset(::PROTOBUF_NAMESPACE_ID::int64 value) {
  
  offset_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.BundleEntryProto.offset)
}
709
// int64 size = 5;
// Plain scalar accessors; PROTOBUF_LONGLONG spells the 0LL literal portably.
inline void BundleEntryProto::clear_size() {
  size_ = PROTOBUF_LONGLONG(0);
}
inline ::PROTOBUF_NAMESPACE_ID::int64 BundleEntryProto::size() const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.size)
  return size_;
}
inline void BundleEntryProto::set_size(::PROTOBUF_NAMESPACE_ID::int64 value) {
  
  size_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.BundleEntryProto.size)
}
723
724// fixed32 crc32c = 6;
725inline void BundleEntryProto::clear_crc32c() {
726 crc32c_ = 0u;
727}
728inline ::PROTOBUF_NAMESPACE_ID::uint32 BundleEntryProto::crc32c() const {
729 // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.crc32c)
730 return crc32c_;
731}
732inline void BundleEntryProto::set_crc32c(::PROTOBUF_NAMESPACE_ID::uint32 value) {
733
734 crc32c_ = value;
735 // @@protoc_insertion_point(field_set:tensorflow.BundleEntryProto.crc32c)
736}
737
// repeated .tensorflow.TensorSliceProto slices = 7;
// All accessors delegate directly to the underlying RepeatedPtrField.
inline int BundleEntryProto::slices_size() const {
  return slices_.size();
}
inline ::tensorflow::TensorSliceProto* BundleEntryProto::mutable_slices(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.BundleEntryProto.slices)
  return slices_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorSliceProto >*
BundleEntryProto::mutable_slices() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.BundleEntryProto.slices)
  return &slices_;
}
inline const ::tensorflow::TensorSliceProto& BundleEntryProto::slices(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.BundleEntryProto.slices)
  return slices_.Get(index);
}
// Appends a default-constructed element and returns it for in-place fill.
inline ::tensorflow::TensorSliceProto* BundleEntryProto::add_slices() {
  // @@protoc_insertion_point(field_add:tensorflow.BundleEntryProto.slices)
  return slices_.Add();
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorSliceProto >&
BundleEntryProto::slices() const {
  // @@protoc_insertion_point(field_list:tensorflow.BundleEntryProto.slices)
  return slices_;
}
764
765#ifdef __GNUC__
766 #pragma GCC diagnostic pop
767#endif // __GNUC__
768// -------------------------------------------------------------------
769
770
771// @@protoc_insertion_point(namespace_scope)
772
773} // namespace tensorflow
774
775PROTOBUF_NAMESPACE_OPEN
776
// Registers Endianness with protobuf's generic enum traits so templated
// helpers (e.g. reflection-based enum naming) can look up its descriptor.
template <> struct is_proto_enum< ::tensorflow::BundleHeaderProto_Endianness> : ::std::true_type {};
template <>
inline const EnumDescriptor* GetEnumDescriptor< ::tensorflow::BundleHeaderProto_Endianness>() {
  return ::tensorflow::BundleHeaderProto_Endianness_descriptor();
}
782
783PROTOBUF_NAMESPACE_CLOSE
784
785// @@protoc_insertion_point(global_scope)
786
787#include <google/protobuf/port_undef.inc>
#endif  // GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2ftensor_5fbundle_2eproto
789