// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tensorflow/core/framework/log_memory.proto

#ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto
#define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto

#include <limits>
#include <string>

#include <google/protobuf/port_def.inc>
#if PROTOBUF_VERSION < 3009000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 3009002 < PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif

#include <google/protobuf/port_undef.inc>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_table_driven.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/inlined_string_field.h>
#include <google/protobuf/metadata.h>
#include <google/protobuf/generated_message_reflection.h>
#include <google/protobuf/message.h>
#include <google/protobuf/repeated_field.h>  // IWYU pragma: export
#include <google/protobuf/extension_set.h>  // IWYU pragma: export
#include <google/protobuf/unknown_field_set.h>
#include "tensorflow/core/framework/tensor_description.pb.h"
// @@protoc_insertion_point(includes)
#include <google/protobuf/port_def.inc>
#define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto
PROTOBUF_NAMESPACE_OPEN
namespace internal {
class AnyMetadata;
}  // namespace internal
PROTOBUF_NAMESPACE_CLOSE

// Internal implementation detail -- do not use these members.
struct TableStruct_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto {
  static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTableField entries[]
    PROTOBUF_SECTION_VARIABLE(protodesc_cold);
  static const ::PROTOBUF_NAMESPACE_ID::internal::AuxillaryParseTableField aux[]
    PROTOBUF_SECTION_VARIABLE(protodesc_cold);
  static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTable schema[6]
    PROTOBUF_SECTION_VARIABLE(protodesc_cold);
  static const ::PROTOBUF_NAMESPACE_ID::internal::FieldMetadata field_metadata[];
  static const ::PROTOBUF_NAMESPACE_ID::internal::SerializationTable serialization_table[];
  static const ::PROTOBUF_NAMESPACE_ID::uint32 offsets[];
};
extern const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto;
namespace tensorflow {
class MemoryLogRawAllocation;
class MemoryLogRawAllocationDefaultTypeInternal;
extern MemoryLogRawAllocationDefaultTypeInternal _MemoryLogRawAllocation_default_instance_;
class MemoryLogRawDeallocation;
class MemoryLogRawDeallocationDefaultTypeInternal;
extern MemoryLogRawDeallocationDefaultTypeInternal _MemoryLogRawDeallocation_default_instance_;
class MemoryLogStep;
class MemoryLogStepDefaultTypeInternal;
extern MemoryLogStepDefaultTypeInternal _MemoryLogStep_default_instance_;
class MemoryLogTensorAllocation;
class MemoryLogTensorAllocationDefaultTypeInternal;
extern MemoryLogTensorAllocationDefaultTypeInternal _MemoryLogTensorAllocation_default_instance_;
class MemoryLogTensorDeallocation;
class MemoryLogTensorDeallocationDefaultTypeInternal;
extern MemoryLogTensorDeallocationDefaultTypeInternal _MemoryLogTensorDeallocation_default_instance_;
class MemoryLogTensorOutput;
class MemoryLogTensorOutputDefaultTypeInternal;
extern MemoryLogTensorOutputDefaultTypeInternal _MemoryLogTensorOutput_default_instance_;
}  // namespace tensorflow
PROTOBUF_NAMESPACE_OPEN
template<> ::tensorflow::MemoryLogRawAllocation* Arena::CreateMaybeMessage<::tensorflow::MemoryLogRawAllocation>(Arena*);
template<> ::tensorflow::MemoryLogRawDeallocation* Arena::CreateMaybeMessage<::tensorflow::MemoryLogRawDeallocation>(Arena*);
template<> ::tensorflow::MemoryLogStep* Arena::CreateMaybeMessage<::tensorflow::MemoryLogStep>(Arena*);
template<> ::tensorflow::MemoryLogTensorAllocation* Arena::CreateMaybeMessage<::tensorflow::MemoryLogTensorAllocation>(Arena*);
template<> ::tensorflow::MemoryLogTensorDeallocation* Arena::CreateMaybeMessage<::tensorflow::MemoryLogTensorDeallocation>(Arena*);
template<> ::tensorflow::MemoryLogTensorOutput* Arena::CreateMaybeMessage<::tensorflow::MemoryLogTensorOutput>(Arena*);
PROTOBUF_NAMESPACE_CLOSE
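// Illustrative only, not part of the protoc output: given the
// CreateMaybeMessage<> specializations above, these messages can be
// arena-allocated through the public Arena API, e.g.
//
//   ::PROTOBUF_NAMESPACE_ID::Arena arena;
//   tensorflow::MemoryLogStep* step =
//       ::PROTOBUF_NAMESPACE_ID::Arena::CreateMessage<tensorflow::MemoryLogStep>(&arena);
//   step->set_step_id(1);  // message is owned by the arena; do not delete it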
namespace tensorflow {

// ===================================================================

class MemoryLogStep :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.MemoryLogStep) */ {
 public:
  MemoryLogStep();
  virtual ~MemoryLogStep();

  MemoryLogStep(const MemoryLogStep& from);
  MemoryLogStep(MemoryLogStep&& from) noexcept
    : MemoryLogStep() {
    *this = ::std::move(from);
  }

  inline MemoryLogStep& operator=(const MemoryLogStep& from) {
    CopyFrom(from);
    return *this;
  }
  inline MemoryLogStep& operator=(MemoryLogStep&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const MemoryLogStep& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const MemoryLogStep* internal_default_instance() {
    return reinterpret_cast<const MemoryLogStep*>(
               &_MemoryLogStep_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    0;

  friend void swap(MemoryLogStep& a, MemoryLogStep& b) {
    a.Swap(&b);
  }
  inline void Swap(MemoryLogStep* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(MemoryLogStep* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline MemoryLogStep* New() const final {
    return CreateMaybeMessage<MemoryLogStep>(nullptr);
  }

  MemoryLogStep* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<MemoryLogStep>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const MemoryLogStep& from);
  void MergeFrom(const MemoryLogStep& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(MemoryLogStep* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.MemoryLogStep";
  }
  protected:
  explicit MemoryLogStep(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kHandleFieldNumber = 2,
    kStepIdFieldNumber = 1,
  };
  // string handle = 2;
  void clear_handle();
  const std::string& handle() const;
  void set_handle(const std::string& value);
  void set_handle(std::string&& value);
  void set_handle(const char* value);
  void set_handle(const char* value, size_t size);
  std::string* mutable_handle();
  std::string* release_handle();
  void set_allocated_handle(std::string* handle);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  std::string* unsafe_arena_release_handle();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  void unsafe_arena_set_allocated_handle(
      std::string* handle);

  // int64 step_id = 1;
  void clear_step_id();
  ::PROTOBUF_NAMESPACE_ID::int64 step_id() const;
  void set_step_id(::PROTOBUF_NAMESPACE_ID::int64 value);

  // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogStep)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr handle_;
  ::PROTOBUF_NAMESPACE_ID::int64 step_id_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto;
};
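// Illustrative usage sketch (hand-written, not protoc output): exercising the
// accessors declared above; SerializeToString() comes from the protobuf
// Message base class. Field values are hypothetical.
//
//   tensorflow::MemoryLogStep step;
//   step.set_step_id(42);
//   step.set_handle("session_run_0");
//   std::string wire;
//   step.SerializeToString(&wire);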
// -------------------------------------------------------------------

class MemoryLogTensorAllocation :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.MemoryLogTensorAllocation) */ {
 public:
  MemoryLogTensorAllocation();
  virtual ~MemoryLogTensorAllocation();

  MemoryLogTensorAllocation(const MemoryLogTensorAllocation& from);
  MemoryLogTensorAllocation(MemoryLogTensorAllocation&& from) noexcept
    : MemoryLogTensorAllocation() {
    *this = ::std::move(from);
  }

  inline MemoryLogTensorAllocation& operator=(const MemoryLogTensorAllocation& from) {
    CopyFrom(from);
    return *this;
  }
  inline MemoryLogTensorAllocation& operator=(MemoryLogTensorAllocation&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const MemoryLogTensorAllocation& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const MemoryLogTensorAllocation* internal_default_instance() {
    return reinterpret_cast<const MemoryLogTensorAllocation*>(
               &_MemoryLogTensorAllocation_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    1;

  friend void swap(MemoryLogTensorAllocation& a, MemoryLogTensorAllocation& b) {
    a.Swap(&b);
  }
  inline void Swap(MemoryLogTensorAllocation* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(MemoryLogTensorAllocation* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline MemoryLogTensorAllocation* New() const final {
    return CreateMaybeMessage<MemoryLogTensorAllocation>(nullptr);
  }

  MemoryLogTensorAllocation* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<MemoryLogTensorAllocation>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const MemoryLogTensorAllocation& from);
  void MergeFrom(const MemoryLogTensorAllocation& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(MemoryLogTensorAllocation* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.MemoryLogTensorAllocation";
  }
  protected:
  explicit MemoryLogTensorAllocation(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kKernelNameFieldNumber = 2,
    kTensorFieldNumber = 3,
    kStepIdFieldNumber = 1,
  };
  // string kernel_name = 2;
  void clear_kernel_name();
  const std::string& kernel_name() const;
  void set_kernel_name(const std::string& value);
  void set_kernel_name(std::string&& value);
  void set_kernel_name(const char* value);
  void set_kernel_name(const char* value, size_t size);
  std::string* mutable_kernel_name();
  std::string* release_kernel_name();
  void set_allocated_kernel_name(std::string* kernel_name);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  std::string* unsafe_arena_release_kernel_name();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  void unsafe_arena_set_allocated_kernel_name(
      std::string* kernel_name);

  // .tensorflow.TensorDescription tensor = 3;
  bool has_tensor() const;
  void clear_tensor();
  const ::tensorflow::TensorDescription& tensor() const;
  ::tensorflow::TensorDescription* release_tensor();
  ::tensorflow::TensorDescription* mutable_tensor();
  void set_allocated_tensor(::tensorflow::TensorDescription* tensor);
  void unsafe_arena_set_allocated_tensor(
      ::tensorflow::TensorDescription* tensor);
  ::tensorflow::TensorDescription* unsafe_arena_release_tensor();

  // int64 step_id = 1;
  void clear_step_id();
  ::PROTOBUF_NAMESPACE_ID::int64 step_id() const;
  void set_step_id(::PROTOBUF_NAMESPACE_ID::int64 value);

  // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogTensorAllocation)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr kernel_name_;
  ::tensorflow::TensorDescription* tensor_;
  ::PROTOBUF_NAMESPACE_ID::int64 step_id_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto;
};
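// Illustrative usage sketch (hand-written, not protoc output): the `tensor`
// field is a TensorDescription submessage declared in
// tensor_description.pb.h; mutable_tensor() lazily creates it and
// has_tensor() then reports presence. Field values are hypothetical.
//
//   tensorflow::MemoryLogTensorAllocation alloc;
//   alloc.set_step_id(42);
//   alloc.set_kernel_name("MatMul");
//   tensorflow::TensorDescription* desc = alloc.mutable_tensor();
//   (void)desc;  // fill in its fields via the TensorDescription accessors
//   bool present = alloc.has_tensor();  // true after mutable_tensor()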
// -------------------------------------------------------------------

class MemoryLogTensorDeallocation :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.MemoryLogTensorDeallocation) */ {
 public:
  MemoryLogTensorDeallocation();
  virtual ~MemoryLogTensorDeallocation();

  MemoryLogTensorDeallocation(const MemoryLogTensorDeallocation& from);
  MemoryLogTensorDeallocation(MemoryLogTensorDeallocation&& from) noexcept
    : MemoryLogTensorDeallocation() {
    *this = ::std::move(from);
  }

  inline MemoryLogTensorDeallocation& operator=(const MemoryLogTensorDeallocation& from) {
    CopyFrom(from);
    return *this;
  }
  inline MemoryLogTensorDeallocation& operator=(MemoryLogTensorDeallocation&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const MemoryLogTensorDeallocation& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const MemoryLogTensorDeallocation* internal_default_instance() {
    return reinterpret_cast<const MemoryLogTensorDeallocation*>(
               &_MemoryLogTensorDeallocation_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    2;

  friend void swap(MemoryLogTensorDeallocation& a, MemoryLogTensorDeallocation& b) {
    a.Swap(&b);
  }
  inline void Swap(MemoryLogTensorDeallocation* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(MemoryLogTensorDeallocation* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline MemoryLogTensorDeallocation* New() const final {
    return CreateMaybeMessage<MemoryLogTensorDeallocation>(nullptr);
  }

  MemoryLogTensorDeallocation* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<MemoryLogTensorDeallocation>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const MemoryLogTensorDeallocation& from);
  void MergeFrom(const MemoryLogTensorDeallocation& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(MemoryLogTensorDeallocation* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.MemoryLogTensorDeallocation";
  }
  protected:
  explicit MemoryLogTensorDeallocation(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kAllocatorNameFieldNumber = 2,
    kAllocationIdFieldNumber = 1,
  };
  // string allocator_name = 2;
  void clear_allocator_name();
  const std::string& allocator_name() const;
  void set_allocator_name(const std::string& value);
  void set_allocator_name(std::string&& value);
  void set_allocator_name(const char* value);
  void set_allocator_name(const char* value, size_t size);
  std::string* mutable_allocator_name();
  std::string* release_allocator_name();
  void set_allocated_allocator_name(std::string* allocator_name);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  std::string* unsafe_arena_release_allocator_name();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  void unsafe_arena_set_allocated_allocator_name(
      std::string* allocator_name);

  // int64 allocation_id = 1;
  void clear_allocation_id();
  ::PROTOBUF_NAMESPACE_ID::int64 allocation_id() const;
  void set_allocation_id(::PROTOBUF_NAMESPACE_ID::int64 value);

  // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogTensorDeallocation)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr allocator_name_;
  ::PROTOBUF_NAMESPACE_ID::int64 allocation_id_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto;
};
// -------------------------------------------------------------------

class MemoryLogTensorOutput :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.MemoryLogTensorOutput) */ {
 public:
  MemoryLogTensorOutput();
  virtual ~MemoryLogTensorOutput();

  MemoryLogTensorOutput(const MemoryLogTensorOutput& from);
  MemoryLogTensorOutput(MemoryLogTensorOutput&& from) noexcept
    : MemoryLogTensorOutput() {
    *this = ::std::move(from);
  }

  inline MemoryLogTensorOutput& operator=(const MemoryLogTensorOutput& from) {
    CopyFrom(from);
    return *this;
  }
  inline MemoryLogTensorOutput& operator=(MemoryLogTensorOutput&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const MemoryLogTensorOutput& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const MemoryLogTensorOutput* internal_default_instance() {
    return reinterpret_cast<const MemoryLogTensorOutput*>(
               &_MemoryLogTensorOutput_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    3;

  friend void swap(MemoryLogTensorOutput& a, MemoryLogTensorOutput& b) {
    a.Swap(&b);
  }
  inline void Swap(MemoryLogTensorOutput* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(MemoryLogTensorOutput* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline MemoryLogTensorOutput* New() const final {
    return CreateMaybeMessage<MemoryLogTensorOutput>(nullptr);
  }

  MemoryLogTensorOutput* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<MemoryLogTensorOutput>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const MemoryLogTensorOutput& from);
  void MergeFrom(const MemoryLogTensorOutput& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(MemoryLogTensorOutput* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.MemoryLogTensorOutput";
  }
  protected:
  explicit MemoryLogTensorOutput(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kKernelNameFieldNumber = 2,
    kTensorFieldNumber = 4,
    kStepIdFieldNumber = 1,
    kIndexFieldNumber = 3,
  };
  // string kernel_name = 2;
  void clear_kernel_name();
  const std::string& kernel_name() const;
  void set_kernel_name(const std::string& value);
  void set_kernel_name(std::string&& value);
  void set_kernel_name(const char* value);
  void set_kernel_name(const char* value, size_t size);
  std::string* mutable_kernel_name();
  std::string* release_kernel_name();
  void set_allocated_kernel_name(std::string* kernel_name);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  std::string* unsafe_arena_release_kernel_name();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  void unsafe_arena_set_allocated_kernel_name(
      std::string* kernel_name);

  // .tensorflow.TensorDescription tensor = 4;
  bool has_tensor() const;
  void clear_tensor();
  const ::tensorflow::TensorDescription& tensor() const;
  ::tensorflow::TensorDescription* release_tensor();
  ::tensorflow::TensorDescription* mutable_tensor();
  void set_allocated_tensor(::tensorflow::TensorDescription* tensor);
  void unsafe_arena_set_allocated_tensor(
      ::tensorflow::TensorDescription* tensor);
  ::tensorflow::TensorDescription* unsafe_arena_release_tensor();

  // int64 step_id = 1;
  void clear_step_id();
  ::PROTOBUF_NAMESPACE_ID::int64 step_id() const;
  void set_step_id(::PROTOBUF_NAMESPACE_ID::int64 value);

  // int32 index = 3;
  void clear_index();
  ::PROTOBUF_NAMESPACE_ID::int32 index() const;
  void set_index(::PROTOBUF_NAMESPACE_ID::int32 value);

  // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogTensorOutput)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr kernel_name_;
  ::tensorflow::TensorDescription* tensor_;
  ::PROTOBUF_NAMESPACE_ID::int64 step_id_;
  ::PROTOBUF_NAMESPACE_ID::int32 index_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto;
};
// -------------------------------------------------------------------

class MemoryLogRawAllocation :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.MemoryLogRawAllocation) */ {
 public:
  MemoryLogRawAllocation();
  virtual ~MemoryLogRawAllocation();

  MemoryLogRawAllocation(const MemoryLogRawAllocation& from);
  MemoryLogRawAllocation(MemoryLogRawAllocation&& from) noexcept
    : MemoryLogRawAllocation() {
    *this = ::std::move(from);
  }

  inline MemoryLogRawAllocation& operator=(const MemoryLogRawAllocation& from) {
    CopyFrom(from);
    return *this;
  }
  inline MemoryLogRawAllocation& operator=(MemoryLogRawAllocation&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const MemoryLogRawAllocation& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const MemoryLogRawAllocation* internal_default_instance() {
    return reinterpret_cast<const MemoryLogRawAllocation*>(
               &_MemoryLogRawAllocation_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    4;

  friend void swap(MemoryLogRawAllocation& a, MemoryLogRawAllocation& b) {
    a.Swap(&b);
  }
  inline void Swap(MemoryLogRawAllocation* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(MemoryLogRawAllocation* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline MemoryLogRawAllocation* New() const final {
    return CreateMaybeMessage<MemoryLogRawAllocation>(nullptr);
  }

  MemoryLogRawAllocation* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<MemoryLogRawAllocation>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const MemoryLogRawAllocation& from);
  void MergeFrom(const MemoryLogRawAllocation& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(MemoryLogRawAllocation* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.MemoryLogRawAllocation";
  }
  protected:
  explicit MemoryLogRawAllocation(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kOperationFieldNumber = 2,
    kAllocatorNameFieldNumber = 6,
    kStepIdFieldNumber = 1,
    kNumBytesFieldNumber = 3,
    kPtrFieldNumber = 4,
    kAllocationIdFieldNumber = 5,
  };
  // string operation = 2;
  void clear_operation();
  const std::string& operation() const;
  void set_operation(const std::string& value);
  void set_operation(std::string&& value);
  void set_operation(const char* value);
  void set_operation(const char* value, size_t size);
  std::string* mutable_operation();
  std::string* release_operation();
  void set_allocated_operation(std::string* operation);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  std::string* unsafe_arena_release_operation();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  void unsafe_arena_set_allocated_operation(
      std::string* operation);

  // string allocator_name = 6;
  void clear_allocator_name();
  const std::string& allocator_name() const;
  void set_allocator_name(const std::string& value);
  void set_allocator_name(std::string&& value);
  void set_allocator_name(const char* value);
  void set_allocator_name(const char* value, size_t size);
  std::string* mutable_allocator_name();
  std::string* release_allocator_name();
  void set_allocated_allocator_name(std::string* allocator_name);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  std::string* unsafe_arena_release_allocator_name();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  void unsafe_arena_set_allocated_allocator_name(
      std::string* allocator_name);

  // int64 step_id = 1;
  void clear_step_id();
  ::PROTOBUF_NAMESPACE_ID::int64 step_id() const;
  void set_step_id(::PROTOBUF_NAMESPACE_ID::int64 value);

  // int64 num_bytes = 3;
  void clear_num_bytes();
  ::PROTOBUF_NAMESPACE_ID::int64 num_bytes() const;
  void set_num_bytes(::PROTOBUF_NAMESPACE_ID::int64 value);

  // uint64 ptr = 4;
  void clear_ptr();
  ::PROTOBUF_NAMESPACE_ID::uint64 ptr() const;
  void set_ptr(::PROTOBUF_NAMESPACE_ID::uint64 value);

  // int64 allocation_id = 5;
  void clear_allocation_id();
  ::PROTOBUF_NAMESPACE_ID::int64 allocation_id() const;
  void set_allocation_id(::PROTOBUF_NAMESPACE_ID::int64 value);

  // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogRawAllocation)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr operation_;
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr allocator_name_;
  ::PROTOBUF_NAMESPACE_ID::int64 step_id_;
  ::PROTOBUF_NAMESPACE_ID::int64 num_bytes_;
  ::PROTOBUF_NAMESPACE_ID::uint64 ptr_;
  ::PROTOBUF_NAMESPACE_ID::int64 allocation_id_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto;
};
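// Illustrative usage sketch (hand-written, not protoc output): the scalar
// fields above all follow the same set_*/getter pattern. Field values are
// hypothetical.
//
//   tensorflow::MemoryLogRawAllocation raw;
//   raw.set_step_id(42);
//   raw.set_operation("Conv2D");
//   raw.set_allocator_name("gpu_allocator");
//   raw.set_num_bytes(4096);
//   raw.set_ptr(0x7f0000001000ULL);  // uint64 pointer value of the allocation
//   raw.set_allocation_id(7);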
// -------------------------------------------------------------------

class MemoryLogRawDeallocation :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.MemoryLogRawDeallocation) */ {
 public:
  MemoryLogRawDeallocation();
  virtual ~MemoryLogRawDeallocation();

  MemoryLogRawDeallocation(const MemoryLogRawDeallocation& from);
  MemoryLogRawDeallocation(MemoryLogRawDeallocation&& from) noexcept
    : MemoryLogRawDeallocation() {
    *this = ::std::move(from);
  }

  inline MemoryLogRawDeallocation& operator=(const MemoryLogRawDeallocation& from) {
    CopyFrom(from);
    return *this;
  }
  inline MemoryLogRawDeallocation& operator=(MemoryLogRawDeallocation&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const MemoryLogRawDeallocation& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const MemoryLogRawDeallocation* internal_default_instance() {
    return reinterpret_cast<const MemoryLogRawDeallocation*>(
               &_MemoryLogRawDeallocation_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    5;

  friend void swap(MemoryLogRawDeallocation& a, MemoryLogRawDeallocation& b) {
    a.Swap(&b);
  }
  inline void Swap(MemoryLogRawDeallocation* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(MemoryLogRawDeallocation* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline MemoryLogRawDeallocation* New() const final {
    return CreateMaybeMessage<MemoryLogRawDeallocation>(nullptr);
  }

  MemoryLogRawDeallocation* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<MemoryLogRawDeallocation>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const MemoryLogRawDeallocation& from);
  void MergeFrom(const MemoryLogRawDeallocation& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(MemoryLogRawDeallocation* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.MemoryLogRawDeallocation";
  }
  protected:
  explicit MemoryLogRawDeallocation(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kOperationFieldNumber = 2,
    kAllocatorNameFieldNumber = 4,
    kStepIdFieldNumber = 1,
    kAllocationIdFieldNumber = 3,
    kDeferredFieldNumber = 5,
  };
  // string operation = 2;
  void clear_operation();
  const std::string& operation() const;
  void set_operation(const std::string& value);
  void set_operation(std::string&& value);
  void set_operation(const char* value);
  void set_operation(const char* value, size_t size);
  std::string* mutable_operation();
  std::string* release_operation();
  void set_allocated_operation(std::string* operation);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  std::string* unsafe_arena_release_operation();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  void unsafe_arena_set_allocated_operation(
      std::string* operation);

  // string allocator_name = 4;
  void clear_allocator_name();
  const std::string& allocator_name() const;
  void set_allocator_name(const std::string& value);
  void set_allocator_name(std::string&& value);
  void set_allocator_name(const char* value);
  void set_allocator_name(const char* value, size_t size);
  std::string* mutable_allocator_name();
  std::string* release_allocator_name();
  void set_allocated_allocator_name(std::string* allocator_name);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  std::string* unsafe_arena_release_allocator_name();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  void unsafe_arena_set_allocated_allocator_name(
      std::string* allocator_name);

  // int64 step_id = 1;
  void clear_step_id();
  ::PROTOBUF_NAMESPACE_ID::int64 step_id() const;
  void set_step_id(::PROTOBUF_NAMESPACE_ID::int64 value);

  // int64 allocation_id = 3;
  void clear_allocation_id();
  ::PROTOBUF_NAMESPACE_ID::int64 allocation_id() const;
  void set_allocation_id(::PROTOBUF_NAMESPACE_ID::int64 value);

  // bool deferred = 5;
  void clear_deferred();
  bool deferred() const;
  void set_deferred(bool value);

  // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogRawDeallocation)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr operation_;
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr allocator_name_;
  ::PROTOBUF_NAMESPACE_ID::int64 step_id_;
  ::PROTOBUF_NAMESPACE_ID::int64 allocation_id_;
  bool deferred_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto;
};
1256// ===================================================================
1257
1258
1259// ===================================================================
1260
1261#ifdef __GNUC__
1262 #pragma GCC diagnostic push
1263 #pragma GCC diagnostic ignored "-Wstrict-aliasing"
1264#endif // __GNUC__
1265// MemoryLogStep
1266
1267// int64 step_id = 1;
1268inline void MemoryLogStep::clear_step_id() {
1269 step_id_ = PROTOBUF_LONGLONG(0);
1270}
1271inline ::PROTOBUF_NAMESPACE_ID::int64 MemoryLogStep::step_id() const {
1272 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogStep.step_id)
1273 return step_id_;
1274}
1275inline void MemoryLogStep::set_step_id(::PROTOBUF_NAMESPACE_ID::int64 value) {
1276
1277 step_id_ = value;
1278 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogStep.step_id)
1279}
1280
1281// string handle = 2;
1282inline void MemoryLogStep::clear_handle() {
1283 handle_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1284}
1285inline const std::string& MemoryLogStep::handle() const {
1286 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogStep.handle)
1287 return handle_.Get();
1288}
1289inline void MemoryLogStep::set_handle(const std::string& value) {
1290
1291 handle_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
1292 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogStep.handle)
1293}
1294inline void MemoryLogStep::set_handle(std::string&& value) {
1295
1296 handle_.Set(
1297 &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
1298 // @@protoc_insertion_point(field_set_rvalue:tensorflow.MemoryLogStep.handle)
1299}
1300inline void MemoryLogStep::set_handle(const char* value) {
1301 GOOGLE_DCHECK(value != nullptr);
1302
1303 handle_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
1304 GetArenaNoVirtual());
1305 // @@protoc_insertion_point(field_set_char:tensorflow.MemoryLogStep.handle)
1306}
1307inline void MemoryLogStep::set_handle(const char* value,
1308 size_t size) {
1309
1310 handle_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
1311 reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
1312 // @@protoc_insertion_point(field_set_pointer:tensorflow.MemoryLogStep.handle)
1313}
1314inline std::string* MemoryLogStep::mutable_handle() {
1315
1316 // @@protoc_insertion_point(field_mutable:tensorflow.MemoryLogStep.handle)
1317 return handle_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1318}
1319inline std::string* MemoryLogStep::release_handle() {
1320 // @@protoc_insertion_point(field_release:tensorflow.MemoryLogStep.handle)
1321
1322 return handle_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1323}
1324inline void MemoryLogStep::set_allocated_handle(std::string* handle) {
1325 if (handle != nullptr) {
1326
1327 } else {
1328
1329 }
1330 handle_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), handle,
1331 GetArenaNoVirtual());
1332 // @@protoc_insertion_point(field_set_allocated:tensorflow.MemoryLogStep.handle)
1333}
1334inline std::string* MemoryLogStep::unsafe_arena_release_handle() {
1335 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MemoryLogStep.handle)
1336 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1337
1338 return handle_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1339 GetArenaNoVirtual());
1340}
1341inline void MemoryLogStep::unsafe_arena_set_allocated_handle(
1342 std::string* handle) {
1343 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1344 if (handle != nullptr) {
1345
1346 } else {
1347
1348 }
1349 handle_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1350 handle, GetArenaNoVirtual());
1351 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MemoryLogStep.handle)
1352}
1353
1354// -------------------------------------------------------------------
1355
1356// MemoryLogTensorAllocation
1357
1358// int64 step_id = 1;
1359inline void MemoryLogTensorAllocation::clear_step_id() {
1360 step_id_ = PROTOBUF_LONGLONG(0);
1361}
1362inline ::PROTOBUF_NAMESPACE_ID::int64 MemoryLogTensorAllocation::step_id() const {
1363 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogTensorAllocation.step_id)
1364 return step_id_;
1365}
1366inline void MemoryLogTensorAllocation::set_step_id(::PROTOBUF_NAMESPACE_ID::int64 value) {
1367
1368 step_id_ = value;
1369 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogTensorAllocation.step_id)
1370}
1371
1372// string kernel_name = 2;
1373inline void MemoryLogTensorAllocation::clear_kernel_name() {
1374 kernel_name_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1375}
1376inline const std::string& MemoryLogTensorAllocation::kernel_name() const {
1377 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogTensorAllocation.kernel_name)
1378 return kernel_name_.Get();
1379}
1380inline void MemoryLogTensorAllocation::set_kernel_name(const std::string& value) {
1381
1382 kernel_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
1383 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogTensorAllocation.kernel_name)
1384}
1385inline void MemoryLogTensorAllocation::set_kernel_name(std::string&& value) {
1386
1387 kernel_name_.Set(
1388 &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
1389 // @@protoc_insertion_point(field_set_rvalue:tensorflow.MemoryLogTensorAllocation.kernel_name)
1390}
1391inline void MemoryLogTensorAllocation::set_kernel_name(const char* value) {
1392 GOOGLE_DCHECK(value != nullptr);
1393
1394 kernel_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
1395 GetArenaNoVirtual());
1396 // @@protoc_insertion_point(field_set_char:tensorflow.MemoryLogTensorAllocation.kernel_name)
1397}
1398inline void MemoryLogTensorAllocation::set_kernel_name(const char* value,
1399 size_t size) {
1400
1401 kernel_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
1402 reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
1403 // @@protoc_insertion_point(field_set_pointer:tensorflow.MemoryLogTensorAllocation.kernel_name)
1404}
1405inline std::string* MemoryLogTensorAllocation::mutable_kernel_name() {
1406
1407 // @@protoc_insertion_point(field_mutable:tensorflow.MemoryLogTensorAllocation.kernel_name)
1408 return kernel_name_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1409}
1410inline std::string* MemoryLogTensorAllocation::release_kernel_name() {
1411 // @@protoc_insertion_point(field_release:tensorflow.MemoryLogTensorAllocation.kernel_name)
1412
1413 return kernel_name_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1414}
1415inline void MemoryLogTensorAllocation::set_allocated_kernel_name(std::string* kernel_name) {
1421 kernel_name_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), kernel_name,
1422 GetArenaNoVirtual());
1423 // @@protoc_insertion_point(field_set_allocated:tensorflow.MemoryLogTensorAllocation.kernel_name)
1424}
1425inline std::string* MemoryLogTensorAllocation::unsafe_arena_release_kernel_name() {
1426 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MemoryLogTensorAllocation.kernel_name)
1427 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1428
1429 return kernel_name_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1430 GetArenaNoVirtual());
1431}
1432inline void MemoryLogTensorAllocation::unsafe_arena_set_allocated_kernel_name(
1433 std::string* kernel_name) {
1434 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1440 kernel_name_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1441 kernel_name, GetArenaNoVirtual());
1442 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MemoryLogTensorAllocation.kernel_name)
1443}
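// Example (illustrative sketch): the unsafe_arena_* variants above skip the
// copy/ownership bookkeeping done by release_kernel_name() and
// set_allocated_kernel_name(); per the GOOGLE_DCHECKs they may only be used
// when the message itself lives on an arena. The plain accessors work in
// either case:
//
//   #include <google/protobuf/arena.h>
//
//   google::protobuf::Arena arena;
//   auto* alloc = google::protobuf::Arena::CreateMessage<
//       tensorflow::MemoryLogTensorAllocation>(&arena);
//   alloc->set_step_id(42);
//   alloc->set_kernel_name("MatMul");  // safe with or without an arena
//   // No delete: the arena reclaims `alloc` when it is destroyed.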
1444
1445// .tensorflow.TensorDescription tensor = 3;
1446inline bool MemoryLogTensorAllocation::has_tensor() const {
1447 return this != internal_default_instance() && tensor_ != nullptr;
1448}
1449inline const ::tensorflow::TensorDescription& MemoryLogTensorAllocation::tensor() const {
1450 const ::tensorflow::TensorDescription* p = tensor_;
1451 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogTensorAllocation.tensor)
1452 return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::TensorDescription*>(
1453 &::tensorflow::_TensorDescription_default_instance_);
1454}
1455inline ::tensorflow::TensorDescription* MemoryLogTensorAllocation::release_tensor() {
1456 // @@protoc_insertion_point(field_release:tensorflow.MemoryLogTensorAllocation.tensor)
1457
1458 ::tensorflow::TensorDescription* temp = tensor_;
1459 if (GetArenaNoVirtual() != nullptr) {
1460 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
1461 }
1462 tensor_ = nullptr;
1463 return temp;
1464}
1465inline ::tensorflow::TensorDescription* MemoryLogTensorAllocation::unsafe_arena_release_tensor() {
1466 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MemoryLogTensorAllocation.tensor)
1467
1468 ::tensorflow::TensorDescription* temp = tensor_;
1469 tensor_ = nullptr;
1470 return temp;
1471}
1472inline ::tensorflow::TensorDescription* MemoryLogTensorAllocation::mutable_tensor() {
1473
1474 if (tensor_ == nullptr) {
1475 auto* p = CreateMaybeMessage<::tensorflow::TensorDescription>(GetArenaNoVirtual());
1476 tensor_ = p;
1477 }
1478 // @@protoc_insertion_point(field_mutable:tensorflow.MemoryLogTensorAllocation.tensor)
1479 return tensor_;
1480}
1481inline void MemoryLogTensorAllocation::set_allocated_tensor(::tensorflow::TensorDescription* tensor) {
1482 ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
1483 if (message_arena == nullptr) {
1484 delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(tensor_);
1485 }
1486 if (tensor) {
1487 ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
1488 reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(tensor)->GetArena();
1489 if (message_arena != submessage_arena) {
1490 tensor = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
1491 message_arena, tensor, submessage_arena);
    }
  }
1497 tensor_ = tensor;
1498 // @@protoc_insertion_point(field_set_allocated:tensorflow.MemoryLogTensorAllocation.tensor)
1499}
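// Example (illustrative sketch): for the message-typed `tensor` field,
// set_allocated_tensor() transfers ownership into the message and
// release_tensor() transfers it back out; has_tensor() reports presence.
// On a non-arena message:
//
//   tensorflow::MemoryLogTensorAllocation alloc;
//   alloc.set_kernel_name("MatMul");
//   auto* desc = new tensorflow::TensorDescription;
//   alloc.set_allocated_tensor(desc);             // alloc now owns desc
//   bool present = alloc.has_tensor();            // true
//   tensorflow::TensorDescription* back = alloc.release_tensor();
//   delete back;                                  // caller owns it again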
1500
1501// -------------------------------------------------------------------
1502
1503// MemoryLogTensorDeallocation
1504
1505// int64 allocation_id = 1;
1506inline void MemoryLogTensorDeallocation::clear_allocation_id() {
1507 allocation_id_ = PROTOBUF_LONGLONG(0);
1508}
1509inline ::PROTOBUF_NAMESPACE_ID::int64 MemoryLogTensorDeallocation::allocation_id() const {
1510 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogTensorDeallocation.allocation_id)
1511 return allocation_id_;
1512}
1513inline void MemoryLogTensorDeallocation::set_allocation_id(::PROTOBUF_NAMESPACE_ID::int64 value) {
1514
1515 allocation_id_ = value;
1516 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogTensorDeallocation.allocation_id)
1517}
1518
1519// string allocator_name = 2;
1520inline void MemoryLogTensorDeallocation::clear_allocator_name() {
1521 allocator_name_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1522}
1523inline const std::string& MemoryLogTensorDeallocation::allocator_name() const {
1524 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogTensorDeallocation.allocator_name)
1525 return allocator_name_.Get();
1526}
1527inline void MemoryLogTensorDeallocation::set_allocator_name(const std::string& value) {
1528
1529 allocator_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
1530 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogTensorDeallocation.allocator_name)
1531}
1532inline void MemoryLogTensorDeallocation::set_allocator_name(std::string&& value) {
1533
1534 allocator_name_.Set(
1535 &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
1536 // @@protoc_insertion_point(field_set_rvalue:tensorflow.MemoryLogTensorDeallocation.allocator_name)
1537}
1538inline void MemoryLogTensorDeallocation::set_allocator_name(const char* value) {
1539 GOOGLE_DCHECK(value != nullptr);
1540
1541 allocator_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
1542 GetArenaNoVirtual());
1543 // @@protoc_insertion_point(field_set_char:tensorflow.MemoryLogTensorDeallocation.allocator_name)
1544}
1545inline void MemoryLogTensorDeallocation::set_allocator_name(const char* value,
1546 size_t size) {
1547
1548 allocator_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
1549 reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
1550 // @@protoc_insertion_point(field_set_pointer:tensorflow.MemoryLogTensorDeallocation.allocator_name)
1551}
1552inline std::string* MemoryLogTensorDeallocation::mutable_allocator_name() {
1553
1554 // @@protoc_insertion_point(field_mutable:tensorflow.MemoryLogTensorDeallocation.allocator_name)
1555 return allocator_name_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1556}
1557inline std::string* MemoryLogTensorDeallocation::release_allocator_name() {
1558 // @@protoc_insertion_point(field_release:tensorflow.MemoryLogTensorDeallocation.allocator_name)
1559
1560 return allocator_name_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1561}
1562inline void MemoryLogTensorDeallocation::set_allocated_allocator_name(std::string* allocator_name) {
1568 allocator_name_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), allocator_name,
1569 GetArenaNoVirtual());
1570 // @@protoc_insertion_point(field_set_allocated:tensorflow.MemoryLogTensorDeallocation.allocator_name)
1571}
1572inline std::string* MemoryLogTensorDeallocation::unsafe_arena_release_allocator_name() {
1573 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MemoryLogTensorDeallocation.allocator_name)
1574 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1575
1576 return allocator_name_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1577 GetArenaNoVirtual());
1578}
1579inline void MemoryLogTensorDeallocation::unsafe_arena_set_allocated_allocator_name(
1580 std::string* allocator_name) {
1581 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1587 allocator_name_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1588 allocator_name, GetArenaNoVirtual());
1589 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MemoryLogTensorDeallocation.allocator_name)
1590}
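// Example (illustrative sketch): MemoryLogTensorDeallocation carries only the
// allocation_id / allocator_name pair identifying the buffer being freed, and
// round-trips through the standard Message serialization API:
//
//   tensorflow::MemoryLogTensorDeallocation dealloc;
//   dealloc.set_allocation_id(1);
//   dealloc.set_allocator_name("GPU_0_bfc");
//   std::string wire = dealloc.SerializeAsString();
//
//   tensorflow::MemoryLogTensorDeallocation copy;
//   bool ok = copy.ParseFromString(wire);         // ok == true, fields match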
1591
1592// -------------------------------------------------------------------
1593
1594// MemoryLogTensorOutput
1595
1596// int64 step_id = 1;
1597inline void MemoryLogTensorOutput::clear_step_id() {
1598 step_id_ = PROTOBUF_LONGLONG(0);
1599}
1600inline ::PROTOBUF_NAMESPACE_ID::int64 MemoryLogTensorOutput::step_id() const {
1601 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogTensorOutput.step_id)
1602 return step_id_;
1603}
1604inline void MemoryLogTensorOutput::set_step_id(::PROTOBUF_NAMESPACE_ID::int64 value) {
1605
1606 step_id_ = value;
1607 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogTensorOutput.step_id)
1608}
1609
1610// string kernel_name = 2;
1611inline void MemoryLogTensorOutput::clear_kernel_name() {
1612 kernel_name_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1613}
1614inline const std::string& MemoryLogTensorOutput::kernel_name() const {
1615 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogTensorOutput.kernel_name)
1616 return kernel_name_.Get();
1617}
1618inline void MemoryLogTensorOutput::set_kernel_name(const std::string& value) {
1619
1620 kernel_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
1621 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogTensorOutput.kernel_name)
1622}
1623inline void MemoryLogTensorOutput::set_kernel_name(std::string&& value) {
1624
1625 kernel_name_.Set(
1626 &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
1627 // @@protoc_insertion_point(field_set_rvalue:tensorflow.MemoryLogTensorOutput.kernel_name)
1628}
1629inline void MemoryLogTensorOutput::set_kernel_name(const char* value) {
1630 GOOGLE_DCHECK(value != nullptr);
1631
1632 kernel_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
1633 GetArenaNoVirtual());
1634 // @@protoc_insertion_point(field_set_char:tensorflow.MemoryLogTensorOutput.kernel_name)
1635}
1636inline void MemoryLogTensorOutput::set_kernel_name(const char* value,
1637 size_t size) {
1638
1639 kernel_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
1640 reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
1641 // @@protoc_insertion_point(field_set_pointer:tensorflow.MemoryLogTensorOutput.kernel_name)
1642}
1643inline std::string* MemoryLogTensorOutput::mutable_kernel_name() {
1644
1645 // @@protoc_insertion_point(field_mutable:tensorflow.MemoryLogTensorOutput.kernel_name)
1646 return kernel_name_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1647}
1648inline std::string* MemoryLogTensorOutput::release_kernel_name() {
1649 // @@protoc_insertion_point(field_release:tensorflow.MemoryLogTensorOutput.kernel_name)
1650
1651 return kernel_name_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1652}
1653inline void MemoryLogTensorOutput::set_allocated_kernel_name(std::string* kernel_name) {
1659 kernel_name_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), kernel_name,
1660 GetArenaNoVirtual());
1661 // @@protoc_insertion_point(field_set_allocated:tensorflow.MemoryLogTensorOutput.kernel_name)
1662}
1663inline std::string* MemoryLogTensorOutput::unsafe_arena_release_kernel_name() {
1664 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MemoryLogTensorOutput.kernel_name)
1665 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1666
1667 return kernel_name_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1668 GetArenaNoVirtual());
1669}
1670inline void MemoryLogTensorOutput::unsafe_arena_set_allocated_kernel_name(
1671 std::string* kernel_name) {
1672 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1678 kernel_name_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1679 kernel_name, GetArenaNoVirtual());
1680 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MemoryLogTensorOutput.kernel_name)
1681}
1682
1683// int32 index = 3;
1684inline void MemoryLogTensorOutput::clear_index() {
1685 index_ = 0;
1686}
1687inline ::PROTOBUF_NAMESPACE_ID::int32 MemoryLogTensorOutput::index() const {
1688 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogTensorOutput.index)
1689 return index_;
1690}
1691inline void MemoryLogTensorOutput::set_index(::PROTOBUF_NAMESPACE_ID::int32 value) {
1692
1693 index_ = value;
1694 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogTensorOutput.index)
1695}
1696
1697// .tensorflow.TensorDescription tensor = 4;
1698inline bool MemoryLogTensorOutput::has_tensor() const {
1699 return this != internal_default_instance() && tensor_ != nullptr;
1700}
1701inline const ::tensorflow::TensorDescription& MemoryLogTensorOutput::tensor() const {
1702 const ::tensorflow::TensorDescription* p = tensor_;
1703 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogTensorOutput.tensor)
1704 return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::TensorDescription*>(
1705 &::tensorflow::_TensorDescription_default_instance_);
1706}
1707inline ::tensorflow::TensorDescription* MemoryLogTensorOutput::release_tensor() {
1708 // @@protoc_insertion_point(field_release:tensorflow.MemoryLogTensorOutput.tensor)
1709
1710 ::tensorflow::TensorDescription* temp = tensor_;
1711 if (GetArenaNoVirtual() != nullptr) {
1712 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
1713 }
1714 tensor_ = nullptr;
1715 return temp;
1716}
1717inline ::tensorflow::TensorDescription* MemoryLogTensorOutput::unsafe_arena_release_tensor() {
1718 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MemoryLogTensorOutput.tensor)
1719
1720 ::tensorflow::TensorDescription* temp = tensor_;
1721 tensor_ = nullptr;
1722 return temp;
1723}
1724inline ::tensorflow::TensorDescription* MemoryLogTensorOutput::mutable_tensor() {
1725
1726 if (tensor_ == nullptr) {
1727 auto* p = CreateMaybeMessage<::tensorflow::TensorDescription>(GetArenaNoVirtual());
1728 tensor_ = p;
1729 }
1730 // @@protoc_insertion_point(field_mutable:tensorflow.MemoryLogTensorOutput.tensor)
1731 return tensor_;
1732}
1733inline void MemoryLogTensorOutput::set_allocated_tensor(::tensorflow::TensorDescription* tensor) {
1734 ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
1735 if (message_arena == nullptr) {
1736 delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(tensor_);
1737 }
1738 if (tensor) {
1739 ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
1740 reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(tensor)->GetArena();
1741 if (message_arena != submessage_arena) {
1742 tensor = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
1743 message_arena, tensor, submessage_arena);
    }
  }
1749 tensor_ = tensor;
1750 // @@protoc_insertion_point(field_set_allocated:tensorflow.MemoryLogTensorOutput.tensor)
1751}
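// Example (illustrative sketch): when the containing message is created on an
// arena, mutable_tensor() above allocates the TensorDescription submessage on
// that same arena, so nothing needs to be deleted manually:
//
//   google::protobuf::Arena arena;
//   auto* out = google::protobuf::Arena::CreateMessage<
//       tensorflow::MemoryLogTensorOutput>(&arena);
//   out->set_step_id(7);
//   out->set_kernel_name("Conv2D");
//   out->set_index(0);                  // which output of the kernel
//   out->mutable_tensor();              // submessage created on the same arena
//   // Everything is reclaimed when `arena` is destroyed.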
1752
1753// -------------------------------------------------------------------
1754
1755// MemoryLogRawAllocation
1756
1757// int64 step_id = 1;
1758inline void MemoryLogRawAllocation::clear_step_id() {
1759 step_id_ = PROTOBUF_LONGLONG(0);
1760}
1761inline ::PROTOBUF_NAMESPACE_ID::int64 MemoryLogRawAllocation::step_id() const {
1762 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogRawAllocation.step_id)
1763 return step_id_;
1764}
1765inline void MemoryLogRawAllocation::set_step_id(::PROTOBUF_NAMESPACE_ID::int64 value) {
1766
1767 step_id_ = value;
1768 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogRawAllocation.step_id)
1769}
1770
1771// string operation = 2;
1772inline void MemoryLogRawAllocation::clear_operation() {
1773 operation_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1774}
1775inline const std::string& MemoryLogRawAllocation::operation() const {
1776 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogRawAllocation.operation)
1777 return operation_.Get();
1778}
1779inline void MemoryLogRawAllocation::set_operation(const std::string& value) {
1780
1781 operation_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
1782 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogRawAllocation.operation)
1783}
1784inline void MemoryLogRawAllocation::set_operation(std::string&& value) {
1785
1786 operation_.Set(
1787 &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
1788 // @@protoc_insertion_point(field_set_rvalue:tensorflow.MemoryLogRawAllocation.operation)
1789}
1790inline void MemoryLogRawAllocation::set_operation(const char* value) {
1791 GOOGLE_DCHECK(value != nullptr);
1792
1793 operation_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
1794 GetArenaNoVirtual());
1795 // @@protoc_insertion_point(field_set_char:tensorflow.MemoryLogRawAllocation.operation)
1796}
1797inline void MemoryLogRawAllocation::set_operation(const char* value,
1798 size_t size) {
1799
1800 operation_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
1801 reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
1802 // @@protoc_insertion_point(field_set_pointer:tensorflow.MemoryLogRawAllocation.operation)
1803}
1804inline std::string* MemoryLogRawAllocation::mutable_operation() {
1805
1806 // @@protoc_insertion_point(field_mutable:tensorflow.MemoryLogRawAllocation.operation)
1807 return operation_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1808}
1809inline std::string* MemoryLogRawAllocation::release_operation() {
1810 // @@protoc_insertion_point(field_release:tensorflow.MemoryLogRawAllocation.operation)
1811
1812 return operation_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1813}
1814inline void MemoryLogRawAllocation::set_allocated_operation(std::string* operation) {
1820 operation_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), operation,
1821 GetArenaNoVirtual());
1822 // @@protoc_insertion_point(field_set_allocated:tensorflow.MemoryLogRawAllocation.operation)
1823}
1824inline std::string* MemoryLogRawAllocation::unsafe_arena_release_operation() {
1825 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MemoryLogRawAllocation.operation)
1826 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1827
1828 return operation_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1829 GetArenaNoVirtual());
1830}
1831inline void MemoryLogRawAllocation::unsafe_arena_set_allocated_operation(
1832 std::string* operation) {
1833 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1839 operation_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1840 operation, GetArenaNoVirtual());
1841 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MemoryLogRawAllocation.operation)
1842}
1843
1844// int64 num_bytes = 3;
1845inline void MemoryLogRawAllocation::clear_num_bytes() {
1846 num_bytes_ = PROTOBUF_LONGLONG(0);
1847}
1848inline ::PROTOBUF_NAMESPACE_ID::int64 MemoryLogRawAllocation::num_bytes() const {
1849 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogRawAllocation.num_bytes)
1850 return num_bytes_;
1851}
1852inline void MemoryLogRawAllocation::set_num_bytes(::PROTOBUF_NAMESPACE_ID::int64 value) {
1853
1854 num_bytes_ = value;
1855 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogRawAllocation.num_bytes)
1856}
1857
1858// uint64 ptr = 4;
1859inline void MemoryLogRawAllocation::clear_ptr() {
1860 ptr_ = PROTOBUF_ULONGLONG(0);
1861}
1862inline ::PROTOBUF_NAMESPACE_ID::uint64 MemoryLogRawAllocation::ptr() const {
1863 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogRawAllocation.ptr)
1864 return ptr_;
1865}
1866inline void MemoryLogRawAllocation::set_ptr(::PROTOBUF_NAMESPACE_ID::uint64 value) {
1867
1868 ptr_ = value;
1869 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogRawAllocation.ptr)
1870}
1871
1872// int64 allocation_id = 5;
1873inline void MemoryLogRawAllocation::clear_allocation_id() {
1874 allocation_id_ = PROTOBUF_LONGLONG(0);
1875}
1876inline ::PROTOBUF_NAMESPACE_ID::int64 MemoryLogRawAllocation::allocation_id() const {
1877 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogRawAllocation.allocation_id)
1878 return allocation_id_;
1879}
1880inline void MemoryLogRawAllocation::set_allocation_id(::PROTOBUF_NAMESPACE_ID::int64 value) {
1881
1882 allocation_id_ = value;
1883 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogRawAllocation.allocation_id)
1884}
1885
1886// string allocator_name = 6;
1887inline void MemoryLogRawAllocation::clear_allocator_name() {
1888 allocator_name_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1889}
1890inline const std::string& MemoryLogRawAllocation::allocator_name() const {
1891 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogRawAllocation.allocator_name)
1892 return allocator_name_.Get();
1893}
1894inline void MemoryLogRawAllocation::set_allocator_name(const std::string& value) {
1895
1896 allocator_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
1897 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogRawAllocation.allocator_name)
1898}
1899inline void MemoryLogRawAllocation::set_allocator_name(std::string&& value) {
1900
1901 allocator_name_.Set(
1902 &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
1903 // @@protoc_insertion_point(field_set_rvalue:tensorflow.MemoryLogRawAllocation.allocator_name)
1904}
1905inline void MemoryLogRawAllocation::set_allocator_name(const char* value) {
1906 GOOGLE_DCHECK(value != nullptr);
1907
1908 allocator_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
1909 GetArenaNoVirtual());
1910 // @@protoc_insertion_point(field_set_char:tensorflow.MemoryLogRawAllocation.allocator_name)
1911}
1912inline void MemoryLogRawAllocation::set_allocator_name(const char* value,
1913 size_t size) {
1914
1915 allocator_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
1916 reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
1917 // @@protoc_insertion_point(field_set_pointer:tensorflow.MemoryLogRawAllocation.allocator_name)
1918}
1919inline std::string* MemoryLogRawAllocation::mutable_allocator_name() {
1920
1921 // @@protoc_insertion_point(field_mutable:tensorflow.MemoryLogRawAllocation.allocator_name)
1922 return allocator_name_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1923}
1924inline std::string* MemoryLogRawAllocation::release_allocator_name() {
1925 // @@protoc_insertion_point(field_release:tensorflow.MemoryLogRawAllocation.allocator_name)
1926
1927 return allocator_name_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1928}
1929inline void MemoryLogRawAllocation::set_allocated_allocator_name(std::string* allocator_name) {
1935 allocator_name_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), allocator_name,
1936 GetArenaNoVirtual());
1937 // @@protoc_insertion_point(field_set_allocated:tensorflow.MemoryLogRawAllocation.allocator_name)
1938}
1939inline std::string* MemoryLogRawAllocation::unsafe_arena_release_allocator_name() {
1940 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MemoryLogRawAllocation.allocator_name)
1941 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1942
1943 return allocator_name_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1944 GetArenaNoVirtual());
1945}
1946inline void MemoryLogRawAllocation::unsafe_arena_set_allocated_allocator_name(
1947 std::string* allocator_name) {
1948 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
1954 allocator_name_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
1955 allocator_name, GetArenaNoVirtual());
1956 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MemoryLogRawAllocation.allocator_name)
1957}
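// Example (illustrative sketch): a raw allocation record stores the buffer
// address as a uint64 alongside the size and an allocation_id that can later
// be matched against a deallocation record:
//
//   #include <cstdint>
//
//   static char buffer[4096];
//   tensorflow::MemoryLogRawAllocation raw;
//   raw.set_step_id(7);
//   raw.set_operation("MatMul");
//   raw.set_num_bytes(sizeof(buffer));
//   raw.set_ptr(reinterpret_cast<std::uint64_t>(buffer));  // address as uint64
//   raw.set_allocation_id(1);
//   raw.set_allocator_name("GPU_0_bfc");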
1958
1959// -------------------------------------------------------------------
1960
1961// MemoryLogRawDeallocation
1962
1963// int64 step_id = 1;
1964inline void MemoryLogRawDeallocation::clear_step_id() {
1965 step_id_ = PROTOBUF_LONGLONG(0);
1966}
1967inline ::PROTOBUF_NAMESPACE_ID::int64 MemoryLogRawDeallocation::step_id() const {
1968 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogRawDeallocation.step_id)
1969 return step_id_;
1970}
1971inline void MemoryLogRawDeallocation::set_step_id(::PROTOBUF_NAMESPACE_ID::int64 value) {
1972
1973 step_id_ = value;
1974 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogRawDeallocation.step_id)
1975}
1976
1977// string operation = 2;
1978inline void MemoryLogRawDeallocation::clear_operation() {
1979 operation_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
1980}
1981inline const std::string& MemoryLogRawDeallocation::operation() const {
1982 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogRawDeallocation.operation)
1983 return operation_.Get();
1984}
1985inline void MemoryLogRawDeallocation::set_operation(const std::string& value) {
1986
1987 operation_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
1988 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogRawDeallocation.operation)
1989}
1990inline void MemoryLogRawDeallocation::set_operation(std::string&& value) {
1991
1992 operation_.Set(
1993 &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
1994 // @@protoc_insertion_point(field_set_rvalue:tensorflow.MemoryLogRawDeallocation.operation)
1995}
1996inline void MemoryLogRawDeallocation::set_operation(const char* value) {
1997 GOOGLE_DCHECK(value != nullptr);
1998
1999 operation_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
2000 GetArenaNoVirtual());
2001 // @@protoc_insertion_point(field_set_char:tensorflow.MemoryLogRawDeallocation.operation)
2002}
2003inline void MemoryLogRawDeallocation::set_operation(const char* value,
2004 size_t size) {
2005
2006 operation_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
2007 reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
2008 // @@protoc_insertion_point(field_set_pointer:tensorflow.MemoryLogRawDeallocation.operation)
2009}
2010inline std::string* MemoryLogRawDeallocation::mutable_operation() {
2011
2012 // @@protoc_insertion_point(field_mutable:tensorflow.MemoryLogRawDeallocation.operation)
2013 return operation_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
2014}
2015inline std::string* MemoryLogRawDeallocation::release_operation() {
2016 // @@protoc_insertion_point(field_release:tensorflow.MemoryLogRawDeallocation.operation)
2017
2018 return operation_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
2019}
2020inline void MemoryLogRawDeallocation::set_allocated_operation(std::string* operation) {
2026 operation_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), operation,
2027 GetArenaNoVirtual());
2028 // @@protoc_insertion_point(field_set_allocated:tensorflow.MemoryLogRawDeallocation.operation)
2029}
2030inline std::string* MemoryLogRawDeallocation::unsafe_arena_release_operation() {
2031 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MemoryLogRawDeallocation.operation)
2032 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
2033
2034 return operation_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
2035 GetArenaNoVirtual());
2036}
2037inline void MemoryLogRawDeallocation::unsafe_arena_set_allocated_operation(
2038 std::string* operation) {
2039 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
2045 operation_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
2046 operation, GetArenaNoVirtual());
2047 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MemoryLogRawDeallocation.operation)
2048}
2049
2050// int64 allocation_id = 3;
2051inline void MemoryLogRawDeallocation::clear_allocation_id() {
2052 allocation_id_ = PROTOBUF_LONGLONG(0);
2053}
2054inline ::PROTOBUF_NAMESPACE_ID::int64 MemoryLogRawDeallocation::allocation_id() const {
2055 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogRawDeallocation.allocation_id)
2056 return allocation_id_;
2057}
2058inline void MemoryLogRawDeallocation::set_allocation_id(::PROTOBUF_NAMESPACE_ID::int64 value) {
2059
2060 allocation_id_ = value;
2061 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogRawDeallocation.allocation_id)
2062}
2063
2064// string allocator_name = 4;
2065inline void MemoryLogRawDeallocation::clear_allocator_name() {
2066 allocator_name_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
2067}
2068inline const std::string& MemoryLogRawDeallocation::allocator_name() const {
2069 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogRawDeallocation.allocator_name)
2070 return allocator_name_.Get();
2071}
2072inline void MemoryLogRawDeallocation::set_allocator_name(const std::string& value) {
2073
2074 allocator_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
2075 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogRawDeallocation.allocator_name)
2076}
2077inline void MemoryLogRawDeallocation::set_allocator_name(std::string&& value) {
2078
2079 allocator_name_.Set(
2080 &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
2081 // @@protoc_insertion_point(field_set_rvalue:tensorflow.MemoryLogRawDeallocation.allocator_name)
2082}
2083inline void MemoryLogRawDeallocation::set_allocator_name(const char* value) {
2084 GOOGLE_DCHECK(value != nullptr);
2085
2086 allocator_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
2087 GetArenaNoVirtual());
2088 // @@protoc_insertion_point(field_set_char:tensorflow.MemoryLogRawDeallocation.allocator_name)
2089}
2090inline void MemoryLogRawDeallocation::set_allocator_name(const char* value,
2091 size_t size) {
2092
2093 allocator_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
2094 reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
2095 // @@protoc_insertion_point(field_set_pointer:tensorflow.MemoryLogRawDeallocation.allocator_name)
2096}
2097inline std::string* MemoryLogRawDeallocation::mutable_allocator_name() {
2098
2099 // @@protoc_insertion_point(field_mutable:tensorflow.MemoryLogRawDeallocation.allocator_name)
2100 return allocator_name_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
2101}
2102inline std::string* MemoryLogRawDeallocation::release_allocator_name() {
2103 // @@protoc_insertion_point(field_release:tensorflow.MemoryLogRawDeallocation.allocator_name)
2104
2105 return allocator_name_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
2106}
2107inline void MemoryLogRawDeallocation::set_allocated_allocator_name(std::string* allocator_name) {
2113 allocator_name_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), allocator_name,
2114 GetArenaNoVirtual());
2115 // @@protoc_insertion_point(field_set_allocated:tensorflow.MemoryLogRawDeallocation.allocator_name)
2116}
2117inline std::string* MemoryLogRawDeallocation::unsafe_arena_release_allocator_name() {
2118 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MemoryLogRawDeallocation.allocator_name)
2119 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
2120
2121 return allocator_name_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
2122 GetArenaNoVirtual());
2123}
2124inline void MemoryLogRawDeallocation::unsafe_arena_set_allocated_allocator_name(
2125 std::string* allocator_name) {
2126 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
2132 allocator_name_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
2133 allocator_name, GetArenaNoVirtual());
2134 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MemoryLogRawDeallocation.allocator_name)
2135}
2136
2137// bool deferred = 5;
2138inline void MemoryLogRawDeallocation::clear_deferred() {
2139 deferred_ = false;
2140}
2141inline bool MemoryLogRawDeallocation::deferred() const {
2142 // @@protoc_insertion_point(field_get:tensorflow.MemoryLogRawDeallocation.deferred)
2143 return deferred_;
2144}
2145inline void MemoryLogRawDeallocation::set_deferred(bool value) {
2146
2147 deferred_ = value;
2148 // @@protoc_insertion_point(field_set:tensorflow.MemoryLogRawDeallocation.deferred)
2149}
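// Example (illustrative sketch): a raw deallocation is matched to the earlier
// MemoryLogRawAllocation via allocation_id; `deferred` typically indicates the
// free is queued to happen later rather than performed immediately:
//
//   tensorflow::MemoryLogRawDeallocation dealloc;
//   dealloc.set_step_id(7);
//   dealloc.set_operation("MatMul");
//   dealloc.set_allocation_id(1);                 // same id as the allocation
//   dealloc.set_allocator_name("GPU_0_bfc");
//   dealloc.set_deferred(true);
//   std::string wire;
//   dealloc.SerializeToString(&wire);             // ready to be logged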
2150
2151#ifdef __GNUC__
2152 #pragma GCC diagnostic pop
2153#endif // __GNUC__
2154// -------------------------------------------------------------------
2155
2156// -------------------------------------------------------------------
2157
2158// -------------------------------------------------------------------
2159
2160// -------------------------------------------------------------------
2161
2162// -------------------------------------------------------------------
2163
2164
2165// @@protoc_insertion_point(namespace_scope)
2166
2167} // namespace tensorflow
2168
2169// @@protoc_insertion_point(global_scope)
2170
2171#include <google/protobuf/port_undef.inc>
#endif  // GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fframework_2flog_5fmemory_2eproto
2173