1// Generated by the protocol buffer compiler. DO NOT EDIT!
2// source: tensorflow/core/protobuf/config.proto
3
4#ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto
5#define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto
6
7#include <limits>
8#include <string>
9
10#include <google/protobuf/port_def.inc>
11#if PROTOBUF_VERSION < 3009000
12#error This file was generated by a newer version of protoc which is
13#error incompatible with your Protocol Buffer headers. Please update
14#error your headers.
15#endif
16#if 3009002 < PROTOBUF_MIN_PROTOC_VERSION
17#error This file was generated by an older version of protoc which is
18#error incompatible with your Protocol Buffer headers. Please
19#error regenerate this file with a newer version of protoc.
20#endif
21
22#include <google/protobuf/port_undef.inc>
23#include <google/protobuf/io/coded_stream.h>
24#include <google/protobuf/arena.h>
25#include <google/protobuf/arenastring.h>
26#include <google/protobuf/generated_message_table_driven.h>
27#include <google/protobuf/generated_message_util.h>
28#include <google/protobuf/inlined_string_field.h>
29#include <google/protobuf/metadata.h>
30#include <google/protobuf/generated_message_reflection.h>
31#include <google/protobuf/message.h>
32#include <google/protobuf/repeated_field.h> // IWYU pragma: export
33#include <google/protobuf/extension_set.h> // IWYU pragma: export
34#include <google/protobuf/map.h> // IWYU pragma: export
35#include <google/protobuf/map_entry.h>
36#include <google/protobuf/map_field_inl.h>
37#include <google/protobuf/generated_enum_reflection.h>
38#include <google/protobuf/unknown_field_set.h>
39#include "tensorflow/core/framework/cost_graph.pb.h"
40#include "tensorflow/core/framework/graph.pb.h"
41#include "tensorflow/core/framework/step_stats.pb.h"
42#include "tensorflow/core/protobuf/cluster.pb.h"
43#include "tensorflow/core/protobuf/coordination_config.pb.h"
44#include "tensorflow/core/protobuf/debug.pb.h"
45#include "tensorflow/core/protobuf/rewriter_config.pb.h"
46// @@protoc_insertion_point(includes)
47#include <google/protobuf/port_def.inc>
48#define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto
49PROTOBUF_NAMESPACE_OPEN
50namespace internal {
51class AnyMetadata;
52} // namespace internal
53PROTOBUF_NAMESPACE_CLOSE
54
55// Internal implementation detail -- do not use these members.
// Table-driven parse/serialize metadata for every message declared in this
// file. The arrays are defined in the generated .pb.cc; this header only
// declares them so sibling generated code can reference them. Internal to
// the protobuf runtime — never use directly.
struct TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto {
  // Per-field parse-table entries (placed in the protodesc_cold section:
  // only touched during descriptor/table initialization).
  static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTableField entries[]
    PROTOBUF_SECTION_VARIABLE(protodesc_cold);
  // Auxiliary entries for fields that need more than one table slot.
  static const ::PROTOBUF_NAMESPACE_ID::internal::AuxillaryParseTableField aux[]
    PROTOBUF_SECTION_VARIABLE(protodesc_cold);
  // One ParseTable per message type generated from this .proto (20 total).
  static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTable schema[20]
    PROTOBUF_SECTION_VARIABLE(protodesc_cold);
  static const ::PROTOBUF_NAMESPACE_ID::internal::FieldMetadata field_metadata[];
  static const ::PROTOBUF_NAMESPACE_ID::internal::SerializationTable serialization_table[];
  // Field offsets consumed by the reflection implementation.
  static const ::PROTOBUF_NAMESPACE_ID::uint32 offsets[];
};
67extern const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
68namespace tensorflow {
69class CallableOptions;
70class CallableOptionsDefaultTypeInternal;
71extern CallableOptionsDefaultTypeInternal _CallableOptions_default_instance_;
72class CallableOptions_FeedDevicesEntry_DoNotUse;
73class CallableOptions_FeedDevicesEntry_DoNotUseDefaultTypeInternal;
74extern CallableOptions_FeedDevicesEntry_DoNotUseDefaultTypeInternal _CallableOptions_FeedDevicesEntry_DoNotUse_default_instance_;
75class CallableOptions_FetchDevicesEntry_DoNotUse;
76class CallableOptions_FetchDevicesEntry_DoNotUseDefaultTypeInternal;
77extern CallableOptions_FetchDevicesEntry_DoNotUseDefaultTypeInternal _CallableOptions_FetchDevicesEntry_DoNotUse_default_instance_;
78class ConfigProto;
79class ConfigProtoDefaultTypeInternal;
80extern ConfigProtoDefaultTypeInternal _ConfigProto_default_instance_;
81class ConfigProto_DeviceCountEntry_DoNotUse;
82class ConfigProto_DeviceCountEntry_DoNotUseDefaultTypeInternal;
83extern ConfigProto_DeviceCountEntry_DoNotUseDefaultTypeInternal _ConfigProto_DeviceCountEntry_DoNotUse_default_instance_;
84class ConfigProto_Experimental;
85class ConfigProto_ExperimentalDefaultTypeInternal;
86extern ConfigProto_ExperimentalDefaultTypeInternal _ConfigProto_Experimental_default_instance_;
87class GPUOptions;
88class GPUOptionsDefaultTypeInternal;
89extern GPUOptionsDefaultTypeInternal _GPUOptions_default_instance_;
90class GPUOptions_Experimental;
91class GPUOptions_ExperimentalDefaultTypeInternal;
92extern GPUOptions_ExperimentalDefaultTypeInternal _GPUOptions_Experimental_default_instance_;
93class GPUOptions_Experimental_VirtualDevices;
94class GPUOptions_Experimental_VirtualDevicesDefaultTypeInternal;
95extern GPUOptions_Experimental_VirtualDevicesDefaultTypeInternal _GPUOptions_Experimental_VirtualDevices_default_instance_;
96class GraphOptions;
97class GraphOptionsDefaultTypeInternal;
98extern GraphOptionsDefaultTypeInternal _GraphOptions_default_instance_;
99class OptimizerOptions;
100class OptimizerOptionsDefaultTypeInternal;
101extern OptimizerOptionsDefaultTypeInternal _OptimizerOptions_default_instance_;
102class RPCOptions;
103class RPCOptionsDefaultTypeInternal;
104extern RPCOptionsDefaultTypeInternal _RPCOptions_default_instance_;
105class RunMetadata;
106class RunMetadataDefaultTypeInternal;
107extern RunMetadataDefaultTypeInternal _RunMetadata_default_instance_;
108class RunMetadata_FunctionGraphs;
109class RunMetadata_FunctionGraphsDefaultTypeInternal;
110extern RunMetadata_FunctionGraphsDefaultTypeInternal _RunMetadata_FunctionGraphs_default_instance_;
111class RunOptions;
112class RunOptionsDefaultTypeInternal;
113extern RunOptionsDefaultTypeInternal _RunOptions_default_instance_;
114class RunOptions_Experimental;
115class RunOptions_ExperimentalDefaultTypeInternal;
116extern RunOptions_ExperimentalDefaultTypeInternal _RunOptions_Experimental_default_instance_;
117class RunOptions_Experimental_RunHandlerPoolOptions;
118class RunOptions_Experimental_RunHandlerPoolOptionsDefaultTypeInternal;
119extern RunOptions_Experimental_RunHandlerPoolOptionsDefaultTypeInternal _RunOptions_Experimental_RunHandlerPoolOptions_default_instance_;
120class SessionMetadata;
121class SessionMetadataDefaultTypeInternal;
122extern SessionMetadataDefaultTypeInternal _SessionMetadata_default_instance_;
123class TensorConnection;
124class TensorConnectionDefaultTypeInternal;
125extern TensorConnectionDefaultTypeInternal _TensorConnection_default_instance_;
126class ThreadPoolOptionProto;
127class ThreadPoolOptionProtoDefaultTypeInternal;
128extern ThreadPoolOptionProtoDefaultTypeInternal _ThreadPoolOptionProto_default_instance_;
129} // namespace tensorflow
130PROTOBUF_NAMESPACE_OPEN
131template<> ::tensorflow::CallableOptions* Arena::CreateMaybeMessage<::tensorflow::CallableOptions>(Arena*);
132template<> ::tensorflow::CallableOptions_FeedDevicesEntry_DoNotUse* Arena::CreateMaybeMessage<::tensorflow::CallableOptions_FeedDevicesEntry_DoNotUse>(Arena*);
133template<> ::tensorflow::CallableOptions_FetchDevicesEntry_DoNotUse* Arena::CreateMaybeMessage<::tensorflow::CallableOptions_FetchDevicesEntry_DoNotUse>(Arena*);
134template<> ::tensorflow::ConfigProto* Arena::CreateMaybeMessage<::tensorflow::ConfigProto>(Arena*);
135template<> ::tensorflow::ConfigProto_DeviceCountEntry_DoNotUse* Arena::CreateMaybeMessage<::tensorflow::ConfigProto_DeviceCountEntry_DoNotUse>(Arena*);
136template<> ::tensorflow::ConfigProto_Experimental* Arena::CreateMaybeMessage<::tensorflow::ConfigProto_Experimental>(Arena*);
137template<> ::tensorflow::GPUOptions* Arena::CreateMaybeMessage<::tensorflow::GPUOptions>(Arena*);
138template<> ::tensorflow::GPUOptions_Experimental* Arena::CreateMaybeMessage<::tensorflow::GPUOptions_Experimental>(Arena*);
139template<> ::tensorflow::GPUOptions_Experimental_VirtualDevices* Arena::CreateMaybeMessage<::tensorflow::GPUOptions_Experimental_VirtualDevices>(Arena*);
140template<> ::tensorflow::GraphOptions* Arena::CreateMaybeMessage<::tensorflow::GraphOptions>(Arena*);
141template<> ::tensorflow::OptimizerOptions* Arena::CreateMaybeMessage<::tensorflow::OptimizerOptions>(Arena*);
142template<> ::tensorflow::RPCOptions* Arena::CreateMaybeMessage<::tensorflow::RPCOptions>(Arena*);
143template<> ::tensorflow::RunMetadata* Arena::CreateMaybeMessage<::tensorflow::RunMetadata>(Arena*);
144template<> ::tensorflow::RunMetadata_FunctionGraphs* Arena::CreateMaybeMessage<::tensorflow::RunMetadata_FunctionGraphs>(Arena*);
145template<> ::tensorflow::RunOptions* Arena::CreateMaybeMessage<::tensorflow::RunOptions>(Arena*);
146template<> ::tensorflow::RunOptions_Experimental* Arena::CreateMaybeMessage<::tensorflow::RunOptions_Experimental>(Arena*);
147template<> ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* Arena::CreateMaybeMessage<::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions>(Arena*);
148template<> ::tensorflow::SessionMetadata* Arena::CreateMaybeMessage<::tensorflow::SessionMetadata>(Arena*);
149template<> ::tensorflow::TensorConnection* Arena::CreateMaybeMessage<::tensorflow::TensorConnection>(Arena*);
150template<> ::tensorflow::ThreadPoolOptionProto* Arena::CreateMaybeMessage<::tensorflow::ThreadPoolOptionProto>(Arena*);
151PROTOBUF_NAMESPACE_CLOSE
152namespace tensorflow {
153
// C++ mirror of the proto enum tensorflow.OptimizerOptions.Level.
// The two sentinel enumerators pin the value range to full int32 so unknown
// wire values can be stored without truncation; never use them directly.
enum OptimizerOptions_Level : int {
  OptimizerOptions_Level_L1 = 0,
  OptimizerOptions_Level_L0 = -1,
  OptimizerOptions_Level_OptimizerOptions_Level_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(),
  OptimizerOptions_Level_OptimizerOptions_Level_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max()
};
// True iff |value| is a declared enumerator (defined in the .pb.cc).
bool OptimizerOptions_Level_IsValid(int value);
// Smallest/largest declared values (L0 = -1, L1 = 0). ARRAYSIZE is MAX + 1
// by protoc convention; note the negative L0 lies outside [0, ARRAYSIZE).
constexpr OptimizerOptions_Level OptimizerOptions_Level_Level_MIN = OptimizerOptions_Level_L0;
constexpr OptimizerOptions_Level OptimizerOptions_Level_Level_MAX = OptimizerOptions_Level_L1;
constexpr int OptimizerOptions_Level_Level_ARRAYSIZE = OptimizerOptions_Level_Level_MAX + 1;

const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* OptimizerOptions_Level_descriptor();
// Returns the proto name for |enum_t_value| via the enum's descriptor.
// Accepts the enum type itself or any integral type (checked at compile time).
template<typename T>
inline const std::string& OptimizerOptions_Level_Name(T enum_t_value) {
  static_assert(::std::is_same<T, OptimizerOptions_Level>::value ||
    ::std::is_integral<T>::value,
    "Incorrect type passed to function OptimizerOptions_Level_Name.");
  return ::PROTOBUF_NAMESPACE_ID::internal::NameOfEnum(
    OptimizerOptions_Level_descriptor(), enum_t_value);
}
// Parses an enumerator name into *value; returns false on unknown names.
inline bool OptimizerOptions_Level_Parse(
    const std::string& name, OptimizerOptions_Level* value) {
  return ::PROTOBUF_NAMESPACE_ID::internal::ParseNamedEnum<OptimizerOptions_Level>(
    OptimizerOptions_Level_descriptor(), name, value);
}
// C++ mirror of the proto enum tensorflow.OptimizerOptions.GlobalJitLevel.
// Sentinels pin the underlying type to full int32 range; do not use them.
enum OptimizerOptions_GlobalJitLevel : int {
  OptimizerOptions_GlobalJitLevel_DEFAULT = 0,
  OptimizerOptions_GlobalJitLevel_OFF = -1,
  OptimizerOptions_GlobalJitLevel_ON_1 = 1,
  OptimizerOptions_GlobalJitLevel_ON_2 = 2,
  OptimizerOptions_GlobalJitLevel_OptimizerOptions_GlobalJitLevel_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(),
  OptimizerOptions_GlobalJitLevel_OptimizerOptions_GlobalJitLevel_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max()
};
// True iff |value| is a declared enumerator (defined in the .pb.cc).
bool OptimizerOptions_GlobalJitLevel_IsValid(int value);
// Declared range is [OFF(-1), ON_2(2)]; ARRAYSIZE = MAX + 1 per protoc
// convention (the negative OFF lies outside [0, ARRAYSIZE)).
constexpr OptimizerOptions_GlobalJitLevel OptimizerOptions_GlobalJitLevel_GlobalJitLevel_MIN = OptimizerOptions_GlobalJitLevel_OFF;
constexpr OptimizerOptions_GlobalJitLevel OptimizerOptions_GlobalJitLevel_GlobalJitLevel_MAX = OptimizerOptions_GlobalJitLevel_ON_2;
constexpr int OptimizerOptions_GlobalJitLevel_GlobalJitLevel_ARRAYSIZE = OptimizerOptions_GlobalJitLevel_GlobalJitLevel_MAX + 1;

const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* OptimizerOptions_GlobalJitLevel_descriptor();
// Returns the proto name for |enum_t_value| via the enum's descriptor.
// Accepts the enum type itself or any integral type (checked at compile time).
template<typename T>
inline const std::string& OptimizerOptions_GlobalJitLevel_Name(T enum_t_value) {
  static_assert(::std::is_same<T, OptimizerOptions_GlobalJitLevel>::value ||
    ::std::is_integral<T>::value,
    "Incorrect type passed to function OptimizerOptions_GlobalJitLevel_Name.");
  return ::PROTOBUF_NAMESPACE_ID::internal::NameOfEnum(
    OptimizerOptions_GlobalJitLevel_descriptor(), enum_t_value);
}
// Parses an enumerator name into *value; returns false on unknown names.
inline bool OptimizerOptions_GlobalJitLevel_Parse(
    const std::string& name, OptimizerOptions_GlobalJitLevel* value) {
  return ::PROTOBUF_NAMESPACE_ID::internal::ParseNamedEnum<OptimizerOptions_GlobalJitLevel>(
    OptimizerOptions_GlobalJitLevel_descriptor(), name, value);
}
// C++ mirror of the proto enum
// tensorflow.ConfigProto.Experimental.MlirBridgeRollout.
// Sentinels pin the underlying type to full int32 range; do not use them.
enum ConfigProto_Experimental_MlirBridgeRollout : int {
  ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_UNSPECIFIED = 0,
  ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_ENABLED = 1,
  ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_DISABLED = 2,
  ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_SAFE_MODE_ENABLED = 3,
  ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_SAFE_MODE_FALLBACK_ENABLED = 4,
  ConfigProto_Experimental_MlirBridgeRollout_ConfigProto_Experimental_MlirBridgeRollout_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(),
  ConfigProto_Experimental_MlirBridgeRollout_ConfigProto_Experimental_MlirBridgeRollout_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max()
};
// True iff |value| is a declared enumerator (defined in the .pb.cc).
bool ConfigProto_Experimental_MlirBridgeRollout_IsValid(int value);
// Declared range is [UNSPECIFIED(0), SAFE_MODE_FALLBACK_ENABLED(4)].
constexpr ConfigProto_Experimental_MlirBridgeRollout ConfigProto_Experimental_MlirBridgeRollout_MlirBridgeRollout_MIN = ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_UNSPECIFIED;
constexpr ConfigProto_Experimental_MlirBridgeRollout ConfigProto_Experimental_MlirBridgeRollout_MlirBridgeRollout_MAX = ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_SAFE_MODE_FALLBACK_ENABLED;
constexpr int ConfigProto_Experimental_MlirBridgeRollout_MlirBridgeRollout_ARRAYSIZE = ConfigProto_Experimental_MlirBridgeRollout_MlirBridgeRollout_MAX + 1;

const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* ConfigProto_Experimental_MlirBridgeRollout_descriptor();
// Returns the proto name for |enum_t_value| via the enum's descriptor.
// Accepts the enum type itself or any integral type (checked at compile time).
template<typename T>
inline const std::string& ConfigProto_Experimental_MlirBridgeRollout_Name(T enum_t_value) {
  static_assert(::std::is_same<T, ConfigProto_Experimental_MlirBridgeRollout>::value ||
    ::std::is_integral<T>::value,
    "Incorrect type passed to function ConfigProto_Experimental_MlirBridgeRollout_Name.");
  return ::PROTOBUF_NAMESPACE_ID::internal::NameOfEnum(
    ConfigProto_Experimental_MlirBridgeRollout_descriptor(), enum_t_value);
}
// Parses an enumerator name into *value; returns false on unknown names.
inline bool ConfigProto_Experimental_MlirBridgeRollout_Parse(
    const std::string& name, ConfigProto_Experimental_MlirBridgeRollout* value) {
  return ::PROTOBUF_NAMESPACE_ID::internal::ParseNamedEnum<ConfigProto_Experimental_MlirBridgeRollout>(
    ConfigProto_Experimental_MlirBridgeRollout_descriptor(), name, value);
}
// C++ mirror of the proto enum tensorflow.RunOptions.TraceLevel.
// Sentinels pin the underlying type to full int32 range; do not use them.
enum RunOptions_TraceLevel : int {
  RunOptions_TraceLevel_NO_TRACE = 0,
  RunOptions_TraceLevel_SOFTWARE_TRACE = 1,
  RunOptions_TraceLevel_HARDWARE_TRACE = 2,
  RunOptions_TraceLevel_FULL_TRACE = 3,
  RunOptions_TraceLevel_RunOptions_TraceLevel_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(),
  RunOptions_TraceLevel_RunOptions_TraceLevel_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max()
};
// True iff |value| is a declared enumerator (defined in the .pb.cc).
bool RunOptions_TraceLevel_IsValid(int value);
// Declared range is [NO_TRACE(0), FULL_TRACE(3)]; ARRAYSIZE = MAX + 1.
constexpr RunOptions_TraceLevel RunOptions_TraceLevel_TraceLevel_MIN = RunOptions_TraceLevel_NO_TRACE;
constexpr RunOptions_TraceLevel RunOptions_TraceLevel_TraceLevel_MAX = RunOptions_TraceLevel_FULL_TRACE;
constexpr int RunOptions_TraceLevel_TraceLevel_ARRAYSIZE = RunOptions_TraceLevel_TraceLevel_MAX + 1;

const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* RunOptions_TraceLevel_descriptor();
// Returns the proto name for |enum_t_value| via the enum's descriptor.
// Accepts the enum type itself or any integral type (checked at compile time).
template<typename T>
inline const std::string& RunOptions_TraceLevel_Name(T enum_t_value) {
  static_assert(::std::is_same<T, RunOptions_TraceLevel>::value ||
    ::std::is_integral<T>::value,
    "Incorrect type passed to function RunOptions_TraceLevel_Name.");
  return ::PROTOBUF_NAMESPACE_ID::internal::NameOfEnum(
    RunOptions_TraceLevel_descriptor(), enum_t_value);
}
// Parses an enumerator name into *value; returns false on unknown names.
inline bool RunOptions_TraceLevel_Parse(
    const std::string& name, RunOptions_TraceLevel* value) {
  return ::PROTOBUF_NAMESPACE_ID::internal::ParseNamedEnum<RunOptions_TraceLevel>(
    RunOptions_TraceLevel_descriptor(), name, value);
}
261// ===================================================================
262
// Generated message class for tensorflow.GPUOptions.Experimental.VirtualDevices.
// Carries three repeated scalar fields — memory_limit_mb (float), priority
// (int32), and device_ordinal (int32) — presumably parallel arrays describing
// one virtual-device split (TODO confirm against the .proto comments).
// Non-inline members are defined in the generated .pb.cc.
class GPUOptions_Experimental_VirtualDevices :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.GPUOptions.Experimental.VirtualDevices) */ {
 public:
  GPUOptions_Experimental_VirtualDevices();
  virtual ~GPUOptions_Experimental_VirtualDevices();

  GPUOptions_Experimental_VirtualDevices(const GPUOptions_Experimental_VirtualDevices& from);
  GPUOptions_Experimental_VirtualDevices(GPUOptions_Experimental_VirtualDevices&& from) noexcept
    : GPUOptions_Experimental_VirtualDevices() {
    *this = ::std::move(from);
  }

  inline GPUOptions_Experimental_VirtualDevices& operator=(const GPUOptions_Experimental_VirtualDevices& from) {
    CopyFrom(from);
    return *this;
  }
  // Move-assignment only swaps when both messages live on the same arena;
  // across arenas it must fall back to a deep copy.
  inline GPUOptions_Experimental_VirtualDevices& operator=(GPUOptions_Experimental_VirtualDevices&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Arena that owns this message, or nullptr for heap allocation.
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const GPUOptions_Experimental_VirtualDevices& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  // Reinterprets the file-level default-instance storage declared above.
  static inline const GPUOptions_Experimental_VirtualDevices* internal_default_instance() {
    return reinterpret_cast<const GPUOptions_Experimental_VirtualDevices*>(
               &_GPUOptions_Experimental_VirtualDevices_default_instance_);
  }
  // Index of this message in the file's metadata tables (see schema[20]).
  static constexpr int kIndexInFileMessages =
    0;

  friend void swap(GPUOptions_Experimental_VirtualDevices& a, GPUOptions_Experimental_VirtualDevices& b) {
    a.Swap(&b);
  }
  // Cheap pointer-level swap only within one arena; across arenas falls
  // back to GenericSwap.
  inline void Swap(GPUOptions_Experimental_VirtualDevices* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages share an arena (DCHECK-enforced).
  void UnsafeArenaSwap(GPUOptions_Experimental_VirtualDevices* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline GPUOptions_Experimental_VirtualDevices* New() const final {
    return CreateMaybeMessage<GPUOptions_Experimental_VirtualDevices>(nullptr);
  }

  GPUOptions_Experimental_VirtualDevices* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<GPUOptions_Experimental_VirtualDevices>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const GPUOptions_Experimental_VirtualDevices& from);
  void MergeFrom(const GPUOptions_Experimental_VirtualDevices& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  // Parser entry point: either the experimental single-pass parser or the
  // classic CodedInputStream path, selected at protobuf build time.
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  // Size computed by the last ByteSizeLong() call.
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(GPUOptions_Experimental_VirtualDevices* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.GPUOptions.Experimental.VirtualDevices";
  }
  protected:
  // Arena constructor; reachable only through Arena::CreateMaybeMessage.
  explicit GPUOptions_Experimental_VirtualDevices(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Lazily assigns descriptors for the whole file, then returns this
  // message's metadata slot (kIndexInFileMessages).
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kMemoryLimitMbFieldNumber = 1,
    kPriorityFieldNumber = 2,
    kDeviceOrdinalFieldNumber = 3,
  };
  // repeated float memory_limit_mb = 1;
  int memory_limit_mb_size() const;
  void clear_memory_limit_mb();
  float memory_limit_mb(int index) const;
  void set_memory_limit_mb(int index, float value);
  void add_memory_limit_mb(float value);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedField< float >&
      memory_limit_mb() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedField< float >*
      mutable_memory_limit_mb();

  // repeated int32 priority = 2;
  int priority_size() const;
  void clear_priority();
  ::PROTOBUF_NAMESPACE_ID::int32 priority(int index) const;
  void set_priority(int index, ::PROTOBUF_NAMESPACE_ID::int32 value);
  void add_priority(::PROTOBUF_NAMESPACE_ID::int32 value);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >&
      priority() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >*
      mutable_priority();

  // repeated int32 device_ordinal = 3;
  int device_ordinal_size() const;
  void clear_device_ordinal();
  ::PROTOBUF_NAMESPACE_ID::int32 device_ordinal(int index) const;
  void set_device_ordinal(int index, ::PROTOBUF_NAMESPACE_ID::int32 value);
  void add_device_ordinal(::PROTOBUF_NAMESPACE_ID::int32 value);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >&
      device_ordinal() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >*
      mutable_device_ordinal();

  // @@protoc_insertion_point(class_scope:tensorflow.GPUOptions.Experimental.VirtualDevices)
 private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // One RepeatedField per proto field; each is paired with a mutable atomic
  // cached-byte-size counter updated during size computation/serialization.
  ::PROTOBUF_NAMESPACE_ID::RepeatedField< float > memory_limit_mb_;
  mutable std::atomic<int> _memory_limit_mb_cached_byte_size_;
  ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 > priority_;
  mutable std::atomic<int> _priority_cached_byte_size_;
  ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 > device_ordinal_;
  mutable std::atomic<int> _device_ordinal_cached_byte_size_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
450// -------------------------------------------------------------------
451
452class GPUOptions_Experimental :
453 public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.GPUOptions.Experimental) */ {
454 public:
455 GPUOptions_Experimental();
456 virtual ~GPUOptions_Experimental();
457
458 GPUOptions_Experimental(const GPUOptions_Experimental& from);
459 GPUOptions_Experimental(GPUOptions_Experimental&& from) noexcept
460 : GPUOptions_Experimental() {
461 *this = ::std::move(from);
462 }
463
464 inline GPUOptions_Experimental& operator=(const GPUOptions_Experimental& from) {
465 CopyFrom(from);
466 return *this;
467 }
468 inline GPUOptions_Experimental& operator=(GPUOptions_Experimental&& from) noexcept {
469 if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
470 if (this != &from) InternalSwap(&from);
471 } else {
472 CopyFrom(from);
473 }
474 return *this;
475 }
476
477 inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
478 return GetArenaNoVirtual();
479 }
480 inline void* GetMaybeArenaPointer() const final {
481 return MaybeArenaPtr();
482 }
483 static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
484 return GetDescriptor();
485 }
486 static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
487 return GetMetadataStatic().descriptor;
488 }
489 static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
490 return GetMetadataStatic().reflection;
491 }
492 static const GPUOptions_Experimental& default_instance();
493
494 static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY
495 static inline const GPUOptions_Experimental* internal_default_instance() {
496 return reinterpret_cast<const GPUOptions_Experimental*>(
497 &_GPUOptions_Experimental_default_instance_);
498 }
499 static constexpr int kIndexInFileMessages =
500 1;
501
502 friend void swap(GPUOptions_Experimental& a, GPUOptions_Experimental& b) {
503 a.Swap(&b);
504 }
505 inline void Swap(GPUOptions_Experimental* other) {
506 if (other == this) return;
507 if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
508 InternalSwap(other);
509 } else {
510 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
511 }
512 }
513 void UnsafeArenaSwap(GPUOptions_Experimental* other) {
514 if (other == this) return;
515 GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
516 InternalSwap(other);
517 }
518
519 // implements Message ----------------------------------------------
520
521 inline GPUOptions_Experimental* New() const final {
522 return CreateMaybeMessage<GPUOptions_Experimental>(nullptr);
523 }
524
525 GPUOptions_Experimental* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
526 return CreateMaybeMessage<GPUOptions_Experimental>(arena);
527 }
528 void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
529 void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
530 void CopyFrom(const GPUOptions_Experimental& from);
531 void MergeFrom(const GPUOptions_Experimental& from);
532 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
533 bool IsInitialized() const final;
534
535 size_t ByteSizeLong() const final;
536 #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
537 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
538 #else
539 bool MergePartialFromCodedStream(
540 ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
541 #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
542 void SerializeWithCachedSizes(
543 ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
544 ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
545 ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
546 int GetCachedSize() const final { return _cached_size_.Get(); }
547
548 private:
549 inline void SharedCtor();
550 inline void SharedDtor();
551 void SetCachedSize(int size) const final;
552 void InternalSwap(GPUOptions_Experimental* other);
553 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
554 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
555 return "tensorflow.GPUOptions.Experimental";
556 }
557 protected:
558 explicit GPUOptions_Experimental(::PROTOBUF_NAMESPACE_ID::Arena* arena);
559 private:
560 static void ArenaDtor(void* object);
561 inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
562 private:
563 inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
564 return _internal_metadata_.arena();
565 }
566 inline void* MaybeArenaPtr() const {
567 return _internal_metadata_.raw_arena_ptr();
568 }
569 public:
570
571 ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
572 private:
573 static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
574 ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
575 return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
576 }
577
578 public:
579
580 // nested types ----------------------------------------------------
581
582 typedef GPUOptions_Experimental_VirtualDevices VirtualDevices;
583
584 // accessors -------------------------------------------------------
585
586 enum : int {
587 kVirtualDevicesFieldNumber = 1,
588 kCollectiveRingOrderFieldNumber = 4,
589 kNumDevToDevCopyStreamsFieldNumber = 3,
590 kKernelTrackerMaxIntervalFieldNumber = 7,
591 kUseUnifiedMemoryFieldNumber = 2,
592 kTimestampedAllocatorFieldNumber = 5,
593 kUseCudaMallocAsyncFieldNumber = 11,
594 kDisallowRetryOnAllocationFailureFieldNumber = 12,
595 kKernelTrackerMaxBytesFieldNumber = 8,
596 kInternalFragmentationFractionFieldNumber = 10,
597 kKernelTrackerMaxPendingFieldNumber = 9,
598 };
599 // repeated .tensorflow.GPUOptions.Experimental.VirtualDevices virtual_devices = 1;
600 int virtual_devices_size() const;
601 void clear_virtual_devices();
602 ::tensorflow::GPUOptions_Experimental_VirtualDevices* mutable_virtual_devices(int index);
603 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GPUOptions_Experimental_VirtualDevices >*
604 mutable_virtual_devices();
605 const ::tensorflow::GPUOptions_Experimental_VirtualDevices& virtual_devices(int index) const;
606 ::tensorflow::GPUOptions_Experimental_VirtualDevices* add_virtual_devices();
607 const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GPUOptions_Experimental_VirtualDevices >&
608 virtual_devices() const;
609
610 // string collective_ring_order = 4;
611 void clear_collective_ring_order();
612 const std::string& collective_ring_order() const;
613 void set_collective_ring_order(const std::string& value);
614 void set_collective_ring_order(std::string&& value);
615 void set_collective_ring_order(const char* value);
616 void set_collective_ring_order(const char* value, size_t size);
617 std::string* mutable_collective_ring_order();
618 std::string* release_collective_ring_order();
619 void set_allocated_collective_ring_order(std::string* collective_ring_order);
620 GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
621 " string fields are deprecated and will be removed in a"
622 " future release.")
623 std::string* unsafe_arena_release_collective_ring_order();
624 GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
625 " string fields are deprecated and will be removed in a"
626 " future release.")
627 void unsafe_arena_set_allocated_collective_ring_order(
628 std::string* collective_ring_order);
629
630 // int32 num_dev_to_dev_copy_streams = 3;
631 void clear_num_dev_to_dev_copy_streams();
632 ::PROTOBUF_NAMESPACE_ID::int32 num_dev_to_dev_copy_streams() const;
633 void set_num_dev_to_dev_copy_streams(::PROTOBUF_NAMESPACE_ID::int32 value);
634
635 // int32 kernel_tracker_max_interval = 7;
636 void clear_kernel_tracker_max_interval();
637 ::PROTOBUF_NAMESPACE_ID::int32 kernel_tracker_max_interval() const;
638 void set_kernel_tracker_max_interval(::PROTOBUF_NAMESPACE_ID::int32 value);
639
640 // bool use_unified_memory = 2;
641 void clear_use_unified_memory();
642 bool use_unified_memory() const;
643 void set_use_unified_memory(bool value);
644
645 // bool timestamped_allocator = 5;
646 void clear_timestamped_allocator();
647 bool timestamped_allocator() const;
648 void set_timestamped_allocator(bool value);
649
650 // bool use_cuda_malloc_async = 11;
651 void clear_use_cuda_malloc_async();
652 bool use_cuda_malloc_async() const;
653 void set_use_cuda_malloc_async(bool value);
654
655 // bool disallow_retry_on_allocation_failure = 12;
656 void clear_disallow_retry_on_allocation_failure();
657 bool disallow_retry_on_allocation_failure() const;
658 void set_disallow_retry_on_allocation_failure(bool value);
659
660 // int32 kernel_tracker_max_bytes = 8;
661 void clear_kernel_tracker_max_bytes();
662 ::PROTOBUF_NAMESPACE_ID::int32 kernel_tracker_max_bytes() const;
663 void set_kernel_tracker_max_bytes(::PROTOBUF_NAMESPACE_ID::int32 value);
664
665 // double internal_fragmentation_fraction = 10;
666 void clear_internal_fragmentation_fraction();
667 double internal_fragmentation_fraction() const;
668 void set_internal_fragmentation_fraction(double value);
669
670 // int32 kernel_tracker_max_pending = 9;
671 void clear_kernel_tracker_max_pending();
672 ::PROTOBUF_NAMESPACE_ID::int32 kernel_tracker_max_pending() const;
673 void set_kernel_tracker_max_pending(::PROTOBUF_NAMESPACE_ID::int32 value);
674
675 // @@protoc_insertion_point(class_scope:tensorflow.GPUOptions.Experimental)
676 private:
677 class _Internal;
678
679 ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
680 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
681 typedef void InternalArenaConstructable_;
682 typedef void DestructorSkippable_;
683 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GPUOptions_Experimental_VirtualDevices > virtual_devices_;
684 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr collective_ring_order_;
685 ::PROTOBUF_NAMESPACE_ID::int32 num_dev_to_dev_copy_streams_;
686 ::PROTOBUF_NAMESPACE_ID::int32 kernel_tracker_max_interval_;
687 bool use_unified_memory_;
688 bool timestamped_allocator_;
689 bool use_cuda_malloc_async_;
690 bool disallow_retry_on_allocation_failure_;
691 ::PROTOBUF_NAMESPACE_ID::int32 kernel_tracker_max_bytes_;
692 double internal_fragmentation_fraction_;
693 ::PROTOBUF_NAMESPACE_ID::int32 kernel_tracker_max_pending_;
694 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
695 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
696};
697// -------------------------------------------------------------------
698
// Generated message class for `tensorflow.GPUOptions` (proto3; field
// semantics are documented in tensorflow/core/protobuf/config.proto).
// Arena-enabled: instances may live on a ::PROTOBUF_NAMESPACE_ID::Arena.
// Declaration order and the private member layout below must stay in sync
// with the compiler-generated .cc file — edit only comments by hand.
class GPUOptions :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.GPUOptions) */ {
 public:
  GPUOptions();
  virtual ~GPUOptions();

  GPUOptions(const GPUOptions& from);
  // Move construction delegates to the default constructor and then
  // move-assigns, leaving `from` in a valid default-like state.
  GPUOptions(GPUOptions&& from) noexcept
    : GPUOptions() {
    *this = ::std::move(from);
  }

  inline GPUOptions& operator=(const GPUOptions& from) {
    CopyFrom(from);
    return *this;
  }
  // Move assignment is a cheap internal swap only when both messages share
  // the same arena; across arenas it falls back to a deep copy.
  inline GPUOptions& operator=(GPUOptions&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const GPUOptions& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const GPUOptions* internal_default_instance() {
    return reinterpret_cast<const GPUOptions*>(
               &_GPUOptions_default_instance_);
  }
  // Index of this message within the file-level metadata tables.
  static constexpr int kIndexInFileMessages =
    2;

  friend void swap(GPUOptions& a, GPUOptions& b) {
    a.Swap(&b);
  }
  // Swap is O(1) when both messages are on the same arena; otherwise it uses
  // GenericSwap, which copies through a temporary.
  inline void Swap(GPUOptions* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
  void UnsafeArenaSwap(GPUOptions* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline GPUOptions* New() const final {
    return CreateMaybeMessage<GPUOptions>(nullptr);
  }

  GPUOptions* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<GPUOptions>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const GPUOptions& from);
  void MergeFrom(const GPUOptions& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(GPUOptions* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.GPUOptions";
  }
  protected:
  explicit GPUOptions(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Lazily resolves descriptors for this file, then returns this message's
  // metadata entry.
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  typedef GPUOptions_Experimental Experimental;

  // accessors -------------------------------------------------------

  enum : int {
    kAllocatorTypeFieldNumber = 2,
    kVisibleDeviceListFieldNumber = 5,
    kExperimentalFieldNumber = 9,
    kPerProcessGpuMemoryFractionFieldNumber = 1,
    kDeferredDeletionBytesFieldNumber = 3,
    kPollingActiveDelayUsecsFieldNumber = 6,
    kAllowGrowthFieldNumber = 4,
    kForceGpuCompatibleFieldNumber = 8,
    kPollingInactiveDelayMsecsFieldNumber = 7,
  };
  // string allocator_type = 2;
  void clear_allocator_type();
  const std::string& allocator_type() const;
  void set_allocator_type(const std::string& value);
  void set_allocator_type(std::string&& value);
  void set_allocator_type(const char* value);
  void set_allocator_type(const char* value, size_t size);
  std::string* mutable_allocator_type();
  // release_* transfers ownership of the string to the caller.
  std::string* release_allocator_type();
  // set_allocated_* takes ownership of the passed string (may be null).
  void set_allocated_allocator_type(std::string* allocator_type);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  "    string fields are deprecated and will be removed in a"
  "    future release.")
  std::string* unsafe_arena_release_allocator_type();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  "    string fields are deprecated and will be removed in a"
  "    future release.")
  void unsafe_arena_set_allocated_allocator_type(
      std::string* allocator_type);

  // string visible_device_list = 5;
  void clear_visible_device_list();
  const std::string& visible_device_list() const;
  void set_visible_device_list(const std::string& value);
  void set_visible_device_list(std::string&& value);
  void set_visible_device_list(const char* value);
  void set_visible_device_list(const char* value, size_t size);
  std::string* mutable_visible_device_list();
  std::string* release_visible_device_list();
  void set_allocated_visible_device_list(std::string* visible_device_list);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  "    string fields are deprecated and will be removed in a"
  "    future release.")
  std::string* unsafe_arena_release_visible_device_list();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  "    string fields are deprecated and will be removed in a"
  "    future release.")
  void unsafe_arena_set_allocated_visible_device_list(
      std::string* visible_device_list);

  // .tensorflow.GPUOptions.Experimental experimental = 9;
  bool has_experimental() const;
  void clear_experimental();
  const ::tensorflow::GPUOptions_Experimental& experimental() const;
  ::tensorflow::GPUOptions_Experimental* release_experimental();
  ::tensorflow::GPUOptions_Experimental* mutable_experimental();
  void set_allocated_experimental(::tensorflow::GPUOptions_Experimental* experimental);
  void unsafe_arena_set_allocated_experimental(
      ::tensorflow::GPUOptions_Experimental* experimental);
  ::tensorflow::GPUOptions_Experimental* unsafe_arena_release_experimental();

  // double per_process_gpu_memory_fraction = 1;
  void clear_per_process_gpu_memory_fraction();
  double per_process_gpu_memory_fraction() const;
  void set_per_process_gpu_memory_fraction(double value);

  // int64 deferred_deletion_bytes = 3;
  void clear_deferred_deletion_bytes();
  ::PROTOBUF_NAMESPACE_ID::int64 deferred_deletion_bytes() const;
  void set_deferred_deletion_bytes(::PROTOBUF_NAMESPACE_ID::int64 value);

  // int32 polling_active_delay_usecs = 6;
  void clear_polling_active_delay_usecs();
  ::PROTOBUF_NAMESPACE_ID::int32 polling_active_delay_usecs() const;
  void set_polling_active_delay_usecs(::PROTOBUF_NAMESPACE_ID::int32 value);

  // bool allow_growth = 4;
  void clear_allow_growth();
  bool allow_growth() const;
  void set_allow_growth(bool value);

  // bool force_gpu_compatible = 8;
  void clear_force_gpu_compatible();
  bool force_gpu_compatible() const;
  void set_force_gpu_compatible(bool value);

  // int32 polling_inactive_delay_msecs = 7;
  void clear_polling_inactive_delay_msecs();
  ::PROTOBUF_NAMESPACE_ID::int32 polling_inactive_delay_msecs() const;
  void set_polling_inactive_delay_msecs(::PROTOBUF_NAMESPACE_ID::int32 value);

  // @@protoc_insertion_point(class_scope:tensorflow.GPUOptions)
  private:
  class _Internal;

  // NOTE: member order below defines the object layout the generated .cc
  // relies on; do not reorder.
  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr allocator_type_;
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr visible_device_list_;
  ::tensorflow::GPUOptions_Experimental* experimental_;
  double per_process_gpu_memory_fraction_;
  ::PROTOBUF_NAMESPACE_ID::int64 deferred_deletion_bytes_;
  ::PROTOBUF_NAMESPACE_ID::int32 polling_active_delay_usecs_;
  bool allow_growth_;
  bool force_gpu_compatible_;
  ::PROTOBUF_NAMESPACE_ID::int32 polling_inactive_delay_msecs_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
945// -------------------------------------------------------------------
946
// Generated message class for `tensorflow.OptimizerOptions` (proto3; field
// semantics are documented in tensorflow/core/protobuf/config.proto).
// Arena-enabled. Declaration order and the private member layout below must
// stay in sync with the compiler-generated .cc file — edit only comments.
class OptimizerOptions :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.OptimizerOptions) */ {
 public:
  OptimizerOptions();
  virtual ~OptimizerOptions();

  OptimizerOptions(const OptimizerOptions& from);
  // Move construction delegates to the default constructor and then
  // move-assigns, leaving `from` in a valid default-like state.
  OptimizerOptions(OptimizerOptions&& from) noexcept
    : OptimizerOptions() {
    *this = ::std::move(from);
  }

  inline OptimizerOptions& operator=(const OptimizerOptions& from) {
    CopyFrom(from);
    return *this;
  }
  // Move assignment swaps internals only when both messages share an arena;
  // across arenas it falls back to a deep copy.
  inline OptimizerOptions& operator=(OptimizerOptions&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const OptimizerOptions& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const OptimizerOptions* internal_default_instance() {
    return reinterpret_cast<const OptimizerOptions*>(
               &_OptimizerOptions_default_instance_);
  }
  // Index of this message within the file-level metadata tables.
  static constexpr int kIndexInFileMessages =
    3;

  friend void swap(OptimizerOptions& a, OptimizerOptions& b) {
    a.Swap(&b);
  }
  // Swap is O(1) when both messages are on the same arena; otherwise it uses
  // GenericSwap, which copies through a temporary.
  inline void Swap(OptimizerOptions* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
  void UnsafeArenaSwap(OptimizerOptions* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline OptimizerOptions* New() const final {
    return CreateMaybeMessage<OptimizerOptions>(nullptr);
  }

  OptimizerOptions* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<OptimizerOptions>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const OptimizerOptions& from);
  void MergeFrom(const OptimizerOptions& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(OptimizerOptions* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.OptimizerOptions";
  }
  protected:
  explicit OptimizerOptions(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Lazily resolves descriptors for this file, then returns this message's
  // metadata entry.
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // Alias and constants for the nested `Level` enum, forwarding to the
  // file-scope OptimizerOptions_Level helpers generated alongside it.
  typedef OptimizerOptions_Level Level;
  static constexpr Level L1 =
    OptimizerOptions_Level_L1;
  static constexpr Level L0 =
    OptimizerOptions_Level_L0;
  static inline bool Level_IsValid(int value) {
    return OptimizerOptions_Level_IsValid(value);
  }
  static constexpr Level Level_MIN =
    OptimizerOptions_Level_Level_MIN;
  static constexpr Level Level_MAX =
    OptimizerOptions_Level_Level_MAX;
  static constexpr int Level_ARRAYSIZE =
    OptimizerOptions_Level_Level_ARRAYSIZE;
  static inline const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor*
  Level_descriptor() {
    return OptimizerOptions_Level_descriptor();
  }
  template<typename T>
  static inline const std::string& Level_Name(T enum_t_value) {
    static_assert(::std::is_same<T, Level>::value ||
      ::std::is_integral<T>::value,
      "Incorrect type passed to function Level_Name.");
    return OptimizerOptions_Level_Name(enum_t_value);
  }
  static inline bool Level_Parse(const std::string& name,
      Level* value) {
    return OptimizerOptions_Level_Parse(name, value);
  }

  // Alias and constants for the nested `GlobalJitLevel` enum, forwarding to
  // the file-scope OptimizerOptions_GlobalJitLevel helpers.
  typedef OptimizerOptions_GlobalJitLevel GlobalJitLevel;
  static constexpr GlobalJitLevel DEFAULT =
    OptimizerOptions_GlobalJitLevel_DEFAULT;
  static constexpr GlobalJitLevel OFF =
    OptimizerOptions_GlobalJitLevel_OFF;
  static constexpr GlobalJitLevel ON_1 =
    OptimizerOptions_GlobalJitLevel_ON_1;
  static constexpr GlobalJitLevel ON_2 =
    OptimizerOptions_GlobalJitLevel_ON_2;
  static inline bool GlobalJitLevel_IsValid(int value) {
    return OptimizerOptions_GlobalJitLevel_IsValid(value);
  }
  static constexpr GlobalJitLevel GlobalJitLevel_MIN =
    OptimizerOptions_GlobalJitLevel_GlobalJitLevel_MIN;
  static constexpr GlobalJitLevel GlobalJitLevel_MAX =
    OptimizerOptions_GlobalJitLevel_GlobalJitLevel_MAX;
  static constexpr int GlobalJitLevel_ARRAYSIZE =
    OptimizerOptions_GlobalJitLevel_GlobalJitLevel_ARRAYSIZE;
  static inline const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor*
  GlobalJitLevel_descriptor() {
    return OptimizerOptions_GlobalJitLevel_descriptor();
  }
  template<typename T>
  static inline const std::string& GlobalJitLevel_Name(T enum_t_value) {
    static_assert(::std::is_same<T, GlobalJitLevel>::value ||
      ::std::is_integral<T>::value,
      "Incorrect type passed to function GlobalJitLevel_Name.");
    return OptimizerOptions_GlobalJitLevel_Name(enum_t_value);
  }
  static inline bool GlobalJitLevel_Parse(const std::string& name,
      GlobalJitLevel* value) {
    return OptimizerOptions_GlobalJitLevel_Parse(name, value);
  }

  // accessors -------------------------------------------------------

  enum : int {
    kOptLevelFieldNumber = 3,
    kDoCommonSubexpressionEliminationFieldNumber = 1,
    kDoConstantFoldingFieldNumber = 2,
    kDoFunctionInliningFieldNumber = 4,
    kCpuGlobalJitFieldNumber = 7,
    kMaxFoldedConstantInBytesFieldNumber = 6,
    kGlobalJitLevelFieldNumber = 5,
  };
  // .tensorflow.OptimizerOptions.Level opt_level = 3;
  void clear_opt_level();
  ::tensorflow::OptimizerOptions_Level opt_level() const;
  void set_opt_level(::tensorflow::OptimizerOptions_Level value);

  // bool do_common_subexpression_elimination = 1;
  void clear_do_common_subexpression_elimination();
  bool do_common_subexpression_elimination() const;
  void set_do_common_subexpression_elimination(bool value);

  // bool do_constant_folding = 2;
  void clear_do_constant_folding();
  bool do_constant_folding() const;
  void set_do_constant_folding(bool value);

  // bool do_function_inlining = 4;
  void clear_do_function_inlining();
  bool do_function_inlining() const;
  void set_do_function_inlining(bool value);

  // bool cpu_global_jit = 7;
  void clear_cpu_global_jit();
  bool cpu_global_jit() const;
  void set_cpu_global_jit(bool value);

  // int64 max_folded_constant_in_bytes = 6;
  void clear_max_folded_constant_in_bytes();
  ::PROTOBUF_NAMESPACE_ID::int64 max_folded_constant_in_bytes() const;
  void set_max_folded_constant_in_bytes(::PROTOBUF_NAMESPACE_ID::int64 value);

  // .tensorflow.OptimizerOptions.GlobalJitLevel global_jit_level = 5;
  void clear_global_jit_level();
  ::tensorflow::OptimizerOptions_GlobalJitLevel global_jit_level() const;
  void set_global_jit_level(::tensorflow::OptimizerOptions_GlobalJitLevel value);

  // @@protoc_insertion_point(class_scope:tensorflow.OptimizerOptions)
  private:
  class _Internal;

  // NOTE: member order below defines the object layout the generated .cc
  // relies on; do not reorder. Enum fields are stored as plain ints.
  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  int opt_level_;
  bool do_common_subexpression_elimination_;
  bool do_constant_folding_;
  bool do_function_inlining_;
  bool cpu_global_jit_;
  ::PROTOBUF_NAMESPACE_ID::int64 max_folded_constant_in_bytes_;
  int global_jit_level_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
1205// -------------------------------------------------------------------
1206
// Generated message class for `tensorflow.GraphOptions` (proto3; field
// semantics are documented in tensorflow/core/protobuf/config.proto).
// Arena-enabled. Declaration order and the private member layout below must
// stay in sync with the compiler-generated .cc file — edit only comments.
class GraphOptions :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.GraphOptions) */ {
 public:
  GraphOptions();
  virtual ~GraphOptions();

  GraphOptions(const GraphOptions& from);
  // Move construction delegates to the default constructor and then
  // move-assigns, leaving `from` in a valid default-like state.
  GraphOptions(GraphOptions&& from) noexcept
    : GraphOptions() {
    *this = ::std::move(from);
  }

  inline GraphOptions& operator=(const GraphOptions& from) {
    CopyFrom(from);
    return *this;
  }
  // Move assignment swaps internals only when both messages share an arena;
  // across arenas it falls back to a deep copy.
  inline GraphOptions& operator=(GraphOptions&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const GraphOptions& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const GraphOptions* internal_default_instance() {
    return reinterpret_cast<const GraphOptions*>(
               &_GraphOptions_default_instance_);
  }
  // Index of this message within the file-level metadata tables.
  static constexpr int kIndexInFileMessages =
    4;

  friend void swap(GraphOptions& a, GraphOptions& b) {
    a.Swap(&b);
  }
  // Swap is O(1) when both messages are on the same arena; otherwise it uses
  // GenericSwap, which copies through a temporary.
  inline void Swap(GraphOptions* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
  void UnsafeArenaSwap(GraphOptions* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline GraphOptions* New() const final {
    return CreateMaybeMessage<GraphOptions>(nullptr);
  }

  GraphOptions* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<GraphOptions>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const GraphOptions& from);
  void MergeFrom(const GraphOptions& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(GraphOptions* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.GraphOptions";
  }
  protected:
  explicit GraphOptions(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Lazily resolves descriptors for this file, then returns this message's
  // metadata entry.
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kOptimizerOptionsFieldNumber = 3,
    kRewriteOptionsFieldNumber = 10,
    kBuildCostModelFieldNumber = 4,
    kEnableRecvSchedulingFieldNumber = 2,
    kInferShapesFieldNumber = 5,
    kPlacePrunedGraphFieldNumber = 6,
    kEnableBfloat16SendrecvFieldNumber = 7,
    kTimelineStepFieldNumber = 8,
    kBuildCostModelAfterFieldNumber = 9,
  };
  // .tensorflow.OptimizerOptions optimizer_options = 3;
  // Submessage accessors: release_* transfers ownership to the caller,
  // set_allocated_* takes ownership of the passed pointer (may be null).
  bool has_optimizer_options() const;
  void clear_optimizer_options();
  const ::tensorflow::OptimizerOptions& optimizer_options() const;
  ::tensorflow::OptimizerOptions* release_optimizer_options();
  ::tensorflow::OptimizerOptions* mutable_optimizer_options();
  void set_allocated_optimizer_options(::tensorflow::OptimizerOptions* optimizer_options);
  void unsafe_arena_set_allocated_optimizer_options(
      ::tensorflow::OptimizerOptions* optimizer_options);
  ::tensorflow::OptimizerOptions* unsafe_arena_release_optimizer_options();

  // .tensorflow.RewriterConfig rewrite_options = 10;
  bool has_rewrite_options() const;
  void clear_rewrite_options();
  const ::tensorflow::RewriterConfig& rewrite_options() const;
  ::tensorflow::RewriterConfig* release_rewrite_options();
  ::tensorflow::RewriterConfig* mutable_rewrite_options();
  void set_allocated_rewrite_options(::tensorflow::RewriterConfig* rewrite_options);
  void unsafe_arena_set_allocated_rewrite_options(
      ::tensorflow::RewriterConfig* rewrite_options);
  ::tensorflow::RewriterConfig* unsafe_arena_release_rewrite_options();

  // int64 build_cost_model = 4;
  void clear_build_cost_model();
  ::PROTOBUF_NAMESPACE_ID::int64 build_cost_model() const;
  void set_build_cost_model(::PROTOBUF_NAMESPACE_ID::int64 value);

  // bool enable_recv_scheduling = 2;
  void clear_enable_recv_scheduling();
  bool enable_recv_scheduling() const;
  void set_enable_recv_scheduling(bool value);

  // bool infer_shapes = 5;
  void clear_infer_shapes();
  bool infer_shapes() const;
  void set_infer_shapes(bool value);

  // bool place_pruned_graph = 6;
  void clear_place_pruned_graph();
  bool place_pruned_graph() const;
  void set_place_pruned_graph(bool value);

  // bool enable_bfloat16_sendrecv = 7;
  void clear_enable_bfloat16_sendrecv();
  bool enable_bfloat16_sendrecv() const;
  void set_enable_bfloat16_sendrecv(bool value);

  // int32 timeline_step = 8;
  void clear_timeline_step();
  ::PROTOBUF_NAMESPACE_ID::int32 timeline_step() const;
  void set_timeline_step(::PROTOBUF_NAMESPACE_ID::int32 value);

  // int64 build_cost_model_after = 9;
  void clear_build_cost_model_after();
  ::PROTOBUF_NAMESPACE_ID::int64 build_cost_model_after() const;
  void set_build_cost_model_after(::PROTOBUF_NAMESPACE_ID::int64 value);

  // @@protoc_insertion_point(class_scope:tensorflow.GraphOptions)
  private:
  class _Internal;

  // NOTE: member order below defines the object layout the generated .cc
  // relies on; do not reorder.
  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::tensorflow::OptimizerOptions* optimizer_options_;
  ::tensorflow::RewriterConfig* rewrite_options_;
  ::PROTOBUF_NAMESPACE_ID::int64 build_cost_model_;
  bool enable_recv_scheduling_;
  bool infer_shapes_;
  bool place_pruned_graph_;
  bool enable_bfloat16_sendrecv_;
  ::PROTOBUF_NAMESPACE_ID::int32 timeline_step_;
  ::PROTOBUF_NAMESPACE_ID::int64 build_cost_model_after_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
1427// -------------------------------------------------------------------
1428
// -----------------------------------------------------------------------------
// Generated message class for `tensorflow.ThreadPoolOptionProto`.
// Fields (proto3): int32 num_threads = 1, string global_name = 2.
// Declarations only: definitions are emitted by protoc into the matching
// config.pb.cc. Do not hand-edit — regenerate from
// tensorflow/core/protobuf/config.proto instead.
// -----------------------------------------------------------------------------
class ThreadPoolOptionProto :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.ThreadPoolOptionProto) */ {
 public:
  ThreadPoolOptionProto();
  virtual ~ThreadPoolOptionProto();

  ThreadPoolOptionProto(const ThreadPoolOptionProto& from);
  // Move construction is implemented as default-construct + move-assign.
  ThreadPoolOptionProto(ThreadPoolOptionProto&& from) noexcept
    : ThreadPoolOptionProto() {
    *this = ::std::move(from);
  }

  inline ThreadPoolOptionProto& operator=(const ThreadPoolOptionProto& from) {
    CopyFrom(from);
    return *this;
  }
  // Move assignment is only a true move (InternalSwap) when both messages
  // live on the same arena; across arenas it falls back to a deep copy.
  inline ThreadPoolOptionProto& operator=(ThreadPoolOptionProto&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const ThreadPoolOptionProto& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const ThreadPoolOptionProto* internal_default_instance() {
    return reinterpret_cast<const ThreadPoolOptionProto*>(
               &_ThreadPoolOptionProto_default_instance_);
  }
  // Index of this message within this file's generated metadata tables.
  static constexpr int kIndexInFileMessages =
    5;

  friend void swap(ThreadPoolOptionProto& a, ThreadPoolOptionProto& b) {
    a.Swap(&b);
  }
  // Pointer-level swap only when both messages share an arena; otherwise
  // GenericSwap performs the (copying) safe exchange.
  inline void Swap(ThreadPoolOptionProto* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages are on the same arena (DCHECKed).
  void UnsafeArenaSwap(ThreadPoolOptionProto* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline ThreadPoolOptionProto* New() const final {
    return CreateMaybeMessage<ThreadPoolOptionProto>(nullptr);
  }

  ThreadPoolOptionProto* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<ThreadPoolOptionProto>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const ThreadPoolOptionProto& from);
  void MergeFrom(const ThreadPoolOptionProto& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(ThreadPoolOptionProto* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.ThreadPoolOptionProto";
  }
  protected:
  // Arena-enabled constructor used by CreateMaybeMessage.
  explicit ThreadPoolOptionProto(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Ensures the file's descriptors are assigned before metadata is handed out.
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kGlobalNameFieldNumber = 2,
    kNumThreadsFieldNumber = 1,
  };
  // string global_name = 2;
  void clear_global_name();
  const std::string& global_name() const;
  void set_global_name(const std::string& value);
  void set_global_name(std::string&& value);
  void set_global_name(const char* value);
  void set_global_name(const char* value, size_t size);
  std::string* mutable_global_name();
  std::string* release_global_name();
  void set_allocated_global_name(std::string* global_name);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  "    string fields are deprecated and will be removed in a"
  "    future release.")
  std::string* unsafe_arena_release_global_name();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  "    string fields are deprecated and will be removed in a"
  "    future release.")
  void unsafe_arena_set_allocated_global_name(
      std::string* global_name);

  // int32 num_threads = 1;
  void clear_num_threads();
  ::PROTOBUF_NAMESPACE_ID::int32 num_threads() const;
  void set_num_threads(::PROTOBUF_NAMESPACE_ID::int32 value);

  // @@protoc_insertion_point(class_scope:tensorflow.ThreadPoolOptionProto)
  private:
  class _Internal;

  // Arena/unknown-field bookkeeping shared by all generated messages.
  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage.
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr global_name_;
  ::PROTOBUF_NAMESPACE_ID::int32 num_threads_;
  // Cached byte size (see GetCachedSize/SetCachedSize).
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
1603// -------------------------------------------------------------------
1604
// -----------------------------------------------------------------------------
// Generated message class for `tensorflow.RPCOptions`.
// Fields (proto3): bool use_rpc_for_inprocess_master = 1,
// string compression_algorithm = 2, int32 compression_level = 3,
// bool cache_rpc_response = 4, bool disable_session_connection_sharing = 5,
// int32 num_channels_per_target = 6.
// Declarations only: definitions live in the generated config.pb.cc.
// Do not hand-edit — regenerate from tensorflow/core/protobuf/config.proto.
// -----------------------------------------------------------------------------
class RPCOptions :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.RPCOptions) */ {
 public:
  RPCOptions();
  virtual ~RPCOptions();

  RPCOptions(const RPCOptions& from);
  // Move construction is implemented as default-construct + move-assign.
  RPCOptions(RPCOptions&& from) noexcept
    : RPCOptions() {
    *this = ::std::move(from);
  }

  inline RPCOptions& operator=(const RPCOptions& from) {
    CopyFrom(from);
    return *this;
  }
  // Move assignment is only a true move (InternalSwap) when both messages
  // live on the same arena; across arenas it falls back to a deep copy.
  inline RPCOptions& operator=(RPCOptions&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const RPCOptions& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const RPCOptions* internal_default_instance() {
    return reinterpret_cast<const RPCOptions*>(
               &_RPCOptions_default_instance_);
  }
  // Index of this message within this file's generated metadata tables.
  static constexpr int kIndexInFileMessages =
    6;

  friend void swap(RPCOptions& a, RPCOptions& b) {
    a.Swap(&b);
  }
  // Pointer-level swap only when both messages share an arena; otherwise
  // GenericSwap performs the (copying) safe exchange.
  inline void Swap(RPCOptions* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages are on the same arena (DCHECKed).
  void UnsafeArenaSwap(RPCOptions* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline RPCOptions* New() const final {
    return CreateMaybeMessage<RPCOptions>(nullptr);
  }

  RPCOptions* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<RPCOptions>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const RPCOptions& from);
  void MergeFrom(const RPCOptions& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(RPCOptions* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.RPCOptions";
  }
  protected:
  // Arena-enabled constructor used by CreateMaybeMessage.
  explicit RPCOptions(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Ensures the file's descriptors are assigned before metadata is handed out.
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kCompressionAlgorithmFieldNumber = 2,
    kCompressionLevelFieldNumber = 3,
    kUseRpcForInprocessMasterFieldNumber = 1,
    kCacheRpcResponseFieldNumber = 4,
    kDisableSessionConnectionSharingFieldNumber = 5,
    kNumChannelsPerTargetFieldNumber = 6,
  };
  // string compression_algorithm = 2;
  void clear_compression_algorithm();
  const std::string& compression_algorithm() const;
  void set_compression_algorithm(const std::string& value);
  void set_compression_algorithm(std::string&& value);
  void set_compression_algorithm(const char* value);
  void set_compression_algorithm(const char* value, size_t size);
  std::string* mutable_compression_algorithm();
  std::string* release_compression_algorithm();
  void set_allocated_compression_algorithm(std::string* compression_algorithm);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  "    string fields are deprecated and will be removed in a"
  "    future release.")
  std::string* unsafe_arena_release_compression_algorithm();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  "    string fields are deprecated and will be removed in a"
  "    future release.")
  void unsafe_arena_set_allocated_compression_algorithm(
      std::string* compression_algorithm);

  // int32 compression_level = 3;
  void clear_compression_level();
  ::PROTOBUF_NAMESPACE_ID::int32 compression_level() const;
  void set_compression_level(::PROTOBUF_NAMESPACE_ID::int32 value);

  // bool use_rpc_for_inprocess_master = 1;
  void clear_use_rpc_for_inprocess_master();
  bool use_rpc_for_inprocess_master() const;
  void set_use_rpc_for_inprocess_master(bool value);

  // bool cache_rpc_response = 4;
  void clear_cache_rpc_response();
  bool cache_rpc_response() const;
  void set_cache_rpc_response(bool value);

  // bool disable_session_connection_sharing = 5;
  void clear_disable_session_connection_sharing();
  bool disable_session_connection_sharing() const;
  void set_disable_session_connection_sharing(bool value);

  // int32 num_channels_per_target = 6;
  void clear_num_channels_per_target();
  ::PROTOBUF_NAMESPACE_ID::int32 num_channels_per_target() const;
  void set_num_channels_per_target(::PROTOBUF_NAMESPACE_ID::int32 value);

  // @@protoc_insertion_point(class_scope:tensorflow.RPCOptions)
  private:
  class _Internal;

  // Arena/unknown-field bookkeeping shared by all generated messages.
  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage.
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr compression_algorithm_;
  ::PROTOBUF_NAMESPACE_ID::int32 compression_level_;
  bool use_rpc_for_inprocess_master_;
  bool cache_rpc_response_;
  bool disable_session_connection_sharing_;
  ::PROTOBUF_NAMESPACE_ID::int32 num_channels_per_target_;
  // Cached byte size (see GetCachedSize/SetCachedSize).
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
1807// -------------------------------------------------------------------
1808
// -----------------------------------------------------------------------------
// Generated message class for `tensorflow.SessionMetadata`.
// Fields (proto3): string name = 1, int64 version = 2.
// Declarations only: definitions live in the generated config.pb.cc.
// Do not hand-edit — regenerate from tensorflow/core/protobuf/config.proto.
// -----------------------------------------------------------------------------
class SessionMetadata :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.SessionMetadata) */ {
 public:
  SessionMetadata();
  virtual ~SessionMetadata();

  SessionMetadata(const SessionMetadata& from);
  // Move construction is implemented as default-construct + move-assign.
  SessionMetadata(SessionMetadata&& from) noexcept
    : SessionMetadata() {
    *this = ::std::move(from);
  }

  inline SessionMetadata& operator=(const SessionMetadata& from) {
    CopyFrom(from);
    return *this;
  }
  // Move assignment is only a true move (InternalSwap) when both messages
  // live on the same arena; across arenas it falls back to a deep copy.
  inline SessionMetadata& operator=(SessionMetadata&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const SessionMetadata& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const SessionMetadata* internal_default_instance() {
    return reinterpret_cast<const SessionMetadata*>(
               &_SessionMetadata_default_instance_);
  }
  // Index of this message within this file's generated metadata tables.
  static constexpr int kIndexInFileMessages =
    7;

  friend void swap(SessionMetadata& a, SessionMetadata& b) {
    a.Swap(&b);
  }
  // Pointer-level swap only when both messages share an arena; otherwise
  // GenericSwap performs the (copying) safe exchange.
  inline void Swap(SessionMetadata* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages are on the same arena (DCHECKed).
  void UnsafeArenaSwap(SessionMetadata* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline SessionMetadata* New() const final {
    return CreateMaybeMessage<SessionMetadata>(nullptr);
  }

  SessionMetadata* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<SessionMetadata>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const SessionMetadata& from);
  void MergeFrom(const SessionMetadata& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(SessionMetadata* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.SessionMetadata";
  }
  protected:
  // Arena-enabled constructor used by CreateMaybeMessage.
  explicit SessionMetadata(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Ensures the file's descriptors are assigned before metadata is handed out.
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kNameFieldNumber = 1,
    kVersionFieldNumber = 2,
  };
  // string name = 1;
  void clear_name();
  const std::string& name() const;
  void set_name(const std::string& value);
  void set_name(std::string&& value);
  void set_name(const char* value);
  void set_name(const char* value, size_t size);
  std::string* mutable_name();
  std::string* release_name();
  void set_allocated_name(std::string* name);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  "    string fields are deprecated and will be removed in a"
  "    future release.")
  std::string* unsafe_arena_release_name();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  "    string fields are deprecated and will be removed in a"
  "    future release.")
  void unsafe_arena_set_allocated_name(
      std::string* name);

  // int64 version = 2;
  void clear_version();
  ::PROTOBUF_NAMESPACE_ID::int64 version() const;
  void set_version(::PROTOBUF_NAMESPACE_ID::int64 value);

  // @@protoc_insertion_point(class_scope:tensorflow.SessionMetadata)
  private:
  class _Internal;

  // Arena/unknown-field bookkeeping shared by all generated messages.
  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage.
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr name_;
  ::PROTOBUF_NAMESPACE_ID::int64 version_;
  // Cached byte size (see GetCachedSize/SetCachedSize).
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
1983// -------------------------------------------------------------------
1984
// Generated map-entry type backing ConfigProto's `device_count` map
// (key: string, value: int32; full name
// "tensorflow.ConfigProto.DeviceCountEntry"). As the _DoNotUse suffix says,
// this is an implementation detail — use ConfigProto's map accessors instead.
class ConfigProto_DeviceCountEntry_DoNotUse : public ::PROTOBUF_NAMESPACE_ID::internal::MapEntry<ConfigProto_DeviceCountEntry_DoNotUse, 
    std::string, ::PROTOBUF_NAMESPACE_ID::int32,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32,
    0 > {
public:
  typedef ::PROTOBUF_NAMESPACE_ID::internal::MapEntry<ConfigProto_DeviceCountEntry_DoNotUse, 
    std::string, ::PROTOBUF_NAMESPACE_ID::int32,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32,
    0 > SuperType;
  ConfigProto_DeviceCountEntry_DoNotUse();
  ConfigProto_DeviceCountEntry_DoNotUse(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  void MergeFrom(const ConfigProto_DeviceCountEntry_DoNotUse& other);
  static const ConfigProto_DeviceCountEntry_DoNotUse* internal_default_instance() { return reinterpret_cast<const ConfigProto_DeviceCountEntry_DoNotUse*>(&_ConfigProto_DeviceCountEntry_DoNotUse_default_instance_); }
  // String keys are UTF-8-verified at parse time (proto3 requirement).
  static bool ValidateKey(std::string* s) {
    return ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(s->data(), s->size(), ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, "tensorflow.ConfigProto.DeviceCountEntry.key");
  }
  // int32 values need no validation.
  static bool ValidateValue(void*) { return true; }
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& other) final;
  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Index 8 in file_level_metadata is this map entry's descriptor slot.
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[8];
  }

  public:
};
2014
2015// -------------------------------------------------------------------
2016
2017class ConfigProto_Experimental :
2018 public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.ConfigProto.Experimental) */ {
2019 public:
2020 ConfigProto_Experimental();
2021 virtual ~ConfigProto_Experimental();
2022
2023 ConfigProto_Experimental(const ConfigProto_Experimental& from);
2024 ConfigProto_Experimental(ConfigProto_Experimental&& from) noexcept
2025 : ConfigProto_Experimental() {
2026 *this = ::std::move(from);
2027 }
2028
2029 inline ConfigProto_Experimental& operator=(const ConfigProto_Experimental& from) {
2030 CopyFrom(from);
2031 return *this;
2032 }
2033 inline ConfigProto_Experimental& operator=(ConfigProto_Experimental&& from) noexcept {
2034 if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
2035 if (this != &from) InternalSwap(&from);
2036 } else {
2037 CopyFrom(from);
2038 }
2039 return *this;
2040 }
2041
2042 inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
2043 return GetArenaNoVirtual();
2044 }
2045 inline void* GetMaybeArenaPointer() const final {
2046 return MaybeArenaPtr();
2047 }
2048 static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
2049 return GetDescriptor();
2050 }
2051 static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
2052 return GetMetadataStatic().descriptor;
2053 }
2054 static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
2055 return GetMetadataStatic().reflection;
2056 }
2057 static const ConfigProto_Experimental& default_instance();
2058
2059 static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY
2060 static inline const ConfigProto_Experimental* internal_default_instance() {
2061 return reinterpret_cast<const ConfigProto_Experimental*>(
2062 &_ConfigProto_Experimental_default_instance_);
2063 }
2064 static constexpr int kIndexInFileMessages =
2065 9;
2066
2067 friend void swap(ConfigProto_Experimental& a, ConfigProto_Experimental& b) {
2068 a.Swap(&b);
2069 }
2070 inline void Swap(ConfigProto_Experimental* other) {
2071 if (other == this) return;
2072 if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
2073 InternalSwap(other);
2074 } else {
2075 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
2076 }
2077 }
2078 void UnsafeArenaSwap(ConfigProto_Experimental* other) {
2079 if (other == this) return;
2080 GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
2081 InternalSwap(other);
2082 }
2083
2084 // implements Message ----------------------------------------------
2085
2086 inline ConfigProto_Experimental* New() const final {
2087 return CreateMaybeMessage<ConfigProto_Experimental>(nullptr);
2088 }
2089
2090 ConfigProto_Experimental* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
2091 return CreateMaybeMessage<ConfigProto_Experimental>(arena);
2092 }
2093 void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
2094 void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
2095 void CopyFrom(const ConfigProto_Experimental& from);
2096 void MergeFrom(const ConfigProto_Experimental& from);
2097 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
2098 bool IsInitialized() const final;
2099
2100 size_t ByteSizeLong() const final;
2101 #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
2102 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
2103 #else
2104 bool MergePartialFromCodedStream(
2105 ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
2106 #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
2107 void SerializeWithCachedSizes(
2108 ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
2109 ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
2110 ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
2111 int GetCachedSize() const final { return _cached_size_.Get(); }
2112
2113 private:
2114 inline void SharedCtor();
2115 inline void SharedDtor();
2116 void SetCachedSize(int size) const final;
2117 void InternalSwap(ConfigProto_Experimental* other);
2118 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
2119 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
2120 return "tensorflow.ConfigProto.Experimental";
2121 }
2122 protected:
2123 explicit ConfigProto_Experimental(::PROTOBUF_NAMESPACE_ID::Arena* arena);
2124 private:
2125 static void ArenaDtor(void* object);
2126 inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
2127 private:
2128 inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
2129 return _internal_metadata_.arena();
2130 }
2131 inline void* MaybeArenaPtr() const {
2132 return _internal_metadata_.raw_arena_ptr();
2133 }
2134 public:
2135
2136 ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
2137 private:
2138 static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
2139 ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
2140 return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
2141 }
2142
2143 public:
2144
2145 // nested types ----------------------------------------------------
2146
2147 typedef ConfigProto_Experimental_MlirBridgeRollout MlirBridgeRollout;
2148 static constexpr MlirBridgeRollout MLIR_BRIDGE_ROLLOUT_UNSPECIFIED =
2149 ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_UNSPECIFIED;
2150 static constexpr MlirBridgeRollout MLIR_BRIDGE_ROLLOUT_ENABLED =
2151 ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_ENABLED;
2152 static constexpr MlirBridgeRollout MLIR_BRIDGE_ROLLOUT_DISABLED =
2153 ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_DISABLED;
2154 static constexpr MlirBridgeRollout MLIR_BRIDGE_ROLLOUT_SAFE_MODE_ENABLED =
2155 ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_SAFE_MODE_ENABLED;
2156 static constexpr MlirBridgeRollout MLIR_BRIDGE_ROLLOUT_SAFE_MODE_FALLBACK_ENABLED =
2157 ConfigProto_Experimental_MlirBridgeRollout_MLIR_BRIDGE_ROLLOUT_SAFE_MODE_FALLBACK_ENABLED;
2158 static inline bool MlirBridgeRollout_IsValid(int value) {
2159 return ConfigProto_Experimental_MlirBridgeRollout_IsValid(value);
2160 }
2161 static constexpr MlirBridgeRollout MlirBridgeRollout_MIN =
2162 ConfigProto_Experimental_MlirBridgeRollout_MlirBridgeRollout_MIN;
2163 static constexpr MlirBridgeRollout MlirBridgeRollout_MAX =
2164 ConfigProto_Experimental_MlirBridgeRollout_MlirBridgeRollout_MAX;
2165 static constexpr int MlirBridgeRollout_ARRAYSIZE =
2166 ConfigProto_Experimental_MlirBridgeRollout_MlirBridgeRollout_ARRAYSIZE;
2167 static inline const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor*
2168 MlirBridgeRollout_descriptor() {
2169 return ConfigProto_Experimental_MlirBridgeRollout_descriptor();
2170 }
  // Returns the string name of the given MlirBridgeRollout value. The
  // static_assert restricts T to the enum type itself or an integral type,
  // rejecting anything else at compile time.
  template<typename T>
  static inline const std::string& MlirBridgeRollout_Name(T enum_t_value) {
    static_assert(::std::is_same<T, MlirBridgeRollout>::value ||
      ::std::is_integral<T>::value,
      "Incorrect type passed to function MlirBridgeRollout_Name.");
    return ConfigProto_Experimental_MlirBridgeRollout_Name(enum_t_value);
  }
  // Parses |name| into *value; returns false (leaving *value untouched by
  // convention of the generated parser) when the name is not a valid
  // MlirBridgeRollout constant.
  static inline bool MlirBridgeRollout_Parse(const std::string& name,
      MlirBridgeRollout* value) {
    return ConfigProto_Experimental_MlirBridgeRollout_Parse(name, value);
  }
2182
2183 // accessors -------------------------------------------------------
2184
2185 enum : int {
2186 kCollectiveGroupLeaderFieldNumber = 1,
2187 kExecutorTypeFieldNumber = 3,
2188 kSessionMetadataFieldNumber = 11,
2189 kCoordinationConfigFieldNumber = 23,
2190 kRecvBufMaxChunkFieldNumber = 4,
2191 kUseNumaAffinityFieldNumber = 5,
2192 kCollectiveDeterministicSequentialExecutionFieldNumber = 6,
2193 kCollectiveNcclFieldNumber = 7,
2194 kShareSessionStateInClusterspecPropagationFieldNumber = 8,
2195 kDisableThreadSpinningFieldNumber = 9,
2196 kShareClusterDevicesInSessionFieldNumber = 10,
2197 kOptimizeForStaticGraphFieldNumber = 12,
2198 kEnableMlirBridgeFieldNumber = 13,
2199 kMlirBridgeRolloutFieldNumber = 17,
2200 kXlaFusionAutotunerThreshFieldNumber = 15,
2201 kEnableMlirGraphOptimizationFieldNumber = 16,
2202 kDisableOutputPartitionGraphsFieldNumber = 14,
2203 kUseTfrtFieldNumber = 18,
2204 kDisableFunctionalOpsLoweringFieldNumber = 21,
2205 kXlaPreferSingleGraphClusterFieldNumber = 22,
2206 };
2207 // string collective_group_leader = 1;
2208 void clear_collective_group_leader();
2209 const std::string& collective_group_leader() const;
2210 void set_collective_group_leader(const std::string& value);
2211 void set_collective_group_leader(std::string&& value);
2212 void set_collective_group_leader(const char* value);
2213 void set_collective_group_leader(const char* value, size_t size);
2214 std::string* mutable_collective_group_leader();
2215 std::string* release_collective_group_leader();
2216 void set_allocated_collective_group_leader(std::string* collective_group_leader);
2217 GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
2218 " string fields are deprecated and will be removed in a"
2219 " future release.")
2220 std::string* unsafe_arena_release_collective_group_leader();
2221 GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
2222 " string fields are deprecated and will be removed in a"
2223 " future release.")
2224 void unsafe_arena_set_allocated_collective_group_leader(
2225 std::string* collective_group_leader);
2226
2227 // string executor_type = 3;
2228 void clear_executor_type();
2229 const std::string& executor_type() const;
2230 void set_executor_type(const std::string& value);
2231 void set_executor_type(std::string&& value);
2232 void set_executor_type(const char* value);
2233 void set_executor_type(const char* value, size_t size);
2234 std::string* mutable_executor_type();
2235 std::string* release_executor_type();
2236 void set_allocated_executor_type(std::string* executor_type);
2237 GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
2238 " string fields are deprecated and will be removed in a"
2239 " future release.")
2240 std::string* unsafe_arena_release_executor_type();
2241 GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
2242 " string fields are deprecated and will be removed in a"
2243 " future release.")
2244 void unsafe_arena_set_allocated_executor_type(
2245 std::string* executor_type);
2246
2247 // .tensorflow.SessionMetadata session_metadata = 11;
2248 bool has_session_metadata() const;
2249 void clear_session_metadata();
2250 const ::tensorflow::SessionMetadata& session_metadata() const;
2251 ::tensorflow::SessionMetadata* release_session_metadata();
2252 ::tensorflow::SessionMetadata* mutable_session_metadata();
2253 void set_allocated_session_metadata(::tensorflow::SessionMetadata* session_metadata);
2254 void unsafe_arena_set_allocated_session_metadata(
2255 ::tensorflow::SessionMetadata* session_metadata);
2256 ::tensorflow::SessionMetadata* unsafe_arena_release_session_metadata();
2257
2258 // .tensorflow.CoordinationServiceConfig coordination_config = 23;
2259 bool has_coordination_config() const;
2260 void clear_coordination_config();
2261 const ::tensorflow::CoordinationServiceConfig& coordination_config() const;
2262 ::tensorflow::CoordinationServiceConfig* release_coordination_config();
2263 ::tensorflow::CoordinationServiceConfig* mutable_coordination_config();
2264 void set_allocated_coordination_config(::tensorflow::CoordinationServiceConfig* coordination_config);
2265 void unsafe_arena_set_allocated_coordination_config(
2266 ::tensorflow::CoordinationServiceConfig* coordination_config);
2267 ::tensorflow::CoordinationServiceConfig* unsafe_arena_release_coordination_config();
2268
2269 // int32 recv_buf_max_chunk = 4;
2270 void clear_recv_buf_max_chunk();
2271 ::PROTOBUF_NAMESPACE_ID::int32 recv_buf_max_chunk() const;
2272 void set_recv_buf_max_chunk(::PROTOBUF_NAMESPACE_ID::int32 value);
2273
2274 // bool use_numa_affinity = 5;
2275 void clear_use_numa_affinity();
2276 bool use_numa_affinity() const;
2277 void set_use_numa_affinity(bool value);
2278
2279 // bool collective_deterministic_sequential_execution = 6;
2280 void clear_collective_deterministic_sequential_execution();
2281 bool collective_deterministic_sequential_execution() const;
2282 void set_collective_deterministic_sequential_execution(bool value);
2283
2284 // bool collective_nccl = 7;
2285 void clear_collective_nccl();
2286 bool collective_nccl() const;
2287 void set_collective_nccl(bool value);
2288
2289 // bool share_session_state_in_clusterspec_propagation = 8;
2290 void clear_share_session_state_in_clusterspec_propagation();
2291 bool share_session_state_in_clusterspec_propagation() const;
2292 void set_share_session_state_in_clusterspec_propagation(bool value);
2293
2294 // bool disable_thread_spinning = 9;
2295 void clear_disable_thread_spinning();
2296 bool disable_thread_spinning() const;
2297 void set_disable_thread_spinning(bool value);
2298
2299 // bool share_cluster_devices_in_session = 10;
2300 void clear_share_cluster_devices_in_session();
2301 bool share_cluster_devices_in_session() const;
2302 void set_share_cluster_devices_in_session(bool value);
2303
2304 // bool optimize_for_static_graph = 12;
2305 void clear_optimize_for_static_graph();
2306 bool optimize_for_static_graph() const;
2307 void set_optimize_for_static_graph(bool value);
2308
2309 // bool enable_mlir_bridge = 13;
2310 void clear_enable_mlir_bridge();
2311 bool enable_mlir_bridge() const;
2312 void set_enable_mlir_bridge(bool value);
2313
2314 // .tensorflow.ConfigProto.Experimental.MlirBridgeRollout mlir_bridge_rollout = 17;
2315 void clear_mlir_bridge_rollout();
2316 ::tensorflow::ConfigProto_Experimental_MlirBridgeRollout mlir_bridge_rollout() const;
2317 void set_mlir_bridge_rollout(::tensorflow::ConfigProto_Experimental_MlirBridgeRollout value);
2318
2319 // int64 xla_fusion_autotuner_thresh = 15;
2320 void clear_xla_fusion_autotuner_thresh();
2321 ::PROTOBUF_NAMESPACE_ID::int64 xla_fusion_autotuner_thresh() const;
2322 void set_xla_fusion_autotuner_thresh(::PROTOBUF_NAMESPACE_ID::int64 value);
2323
2324 // bool enable_mlir_graph_optimization = 16;
2325 void clear_enable_mlir_graph_optimization();
2326 bool enable_mlir_graph_optimization() const;
2327 void set_enable_mlir_graph_optimization(bool value);
2328
2329 // bool disable_output_partition_graphs = 14;
2330 void clear_disable_output_partition_graphs();
2331 bool disable_output_partition_graphs() const;
2332 void set_disable_output_partition_graphs(bool value);
2333
2334 // bool use_tfrt = 18;
2335 void clear_use_tfrt();
2336 bool use_tfrt() const;
2337 void set_use_tfrt(bool value);
2338
2339 // bool disable_functional_ops_lowering = 21;
2340 void clear_disable_functional_ops_lowering();
2341 bool disable_functional_ops_lowering() const;
2342 void set_disable_functional_ops_lowering(bool value);
2343
2344 // bool xla_prefer_single_graph_cluster = 22;
2345 void clear_xla_prefer_single_graph_cluster();
2346 bool xla_prefer_single_graph_cluster() const;
2347 void set_xla_prefer_single_graph_cluster(bool value);
2348
2349 // @@protoc_insertion_point(class_scope:tensorflow.ConfigProto.Experimental)
2350 private:
2351 class _Internal;
2352
2353 ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
2354 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2355 typedef void InternalArenaConstructable_;
2356 typedef void DestructorSkippable_;
2357 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr collective_group_leader_;
2358 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr executor_type_;
2359 ::tensorflow::SessionMetadata* session_metadata_;
2360 ::tensorflow::CoordinationServiceConfig* coordination_config_;
2361 ::PROTOBUF_NAMESPACE_ID::int32 recv_buf_max_chunk_;
2362 bool use_numa_affinity_;
2363 bool collective_deterministic_sequential_execution_;
2364 bool collective_nccl_;
2365 bool share_session_state_in_clusterspec_propagation_;
2366 bool disable_thread_spinning_;
2367 bool share_cluster_devices_in_session_;
2368 bool optimize_for_static_graph_;
2369 bool enable_mlir_bridge_;
2370 int mlir_bridge_rollout_;
2371 ::PROTOBUF_NAMESPACE_ID::int64 xla_fusion_autotuner_thresh_;
2372 bool enable_mlir_graph_optimization_;
2373 bool disable_output_partition_graphs_;
2374 bool use_tfrt_;
2375 bool disable_functional_ops_lowering_;
2376 bool xla_prefer_single_graph_cluster_;
2377 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2378 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
2379};
2380// -------------------------------------------------------------------
2381
// Generated message class for tensorflow.ConfigProto: the per-session
// configuration (device counts/filters, intra/inter-op thread pools, GPU,
// graph, RPC and cluster options, plus the nested Experimental block).
// NOTE(review): this file is emitted by protoc — the comments below are
// annotations only; do not hand-edit declarations, they must match the
// generated .pb.cc definitions exactly.
class ConfigProto :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.ConfigProto) */ {
 public:
  ConfigProto();
  virtual ~ConfigProto();

  ConfigProto(const ConfigProto& from);
  ConfigProto(ConfigProto&& from) noexcept
    : ConfigProto() {
    *this = ::std::move(from);
  }

  inline ConfigProto& operator=(const ConfigProto& from) {
    CopyFrom(from);
    return *this;
  }
  inline ConfigProto& operator=(ConfigProto&& from) noexcept {
    // Move is only a cheap pointer swap when both objects live on the same
    // arena; across arenas it degrades to a full copy.
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const ConfigProto& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const ConfigProto* internal_default_instance() {
    return reinterpret_cast<const ConfigProto*>(
               &_ConfigProto_default_instance_);
  }
  // Position of this message in the file's generated metadata tables.
  static constexpr int kIndexInFileMessages =
    10;

  friend void swap(ConfigProto& a, ConfigProto& b) {
    a.Swap(&b);
  }
  inline void Swap(ConfigProto* other) {
    if (other == this) return;
    // Same-arena swaps exchange internals; cross-arena must go through the
    // slower GenericSwap (copy-based) to keep arena ownership correct.
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(ConfigProto* other) {
    // Caller must guarantee both messages are on the same arena (DCHECKed).
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline ConfigProto* New() const final {
    return CreateMaybeMessage<ConfigProto>(nullptr);
  }

  ConfigProto* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<ConfigProto>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const ConfigProto& from);
  void MergeFrom(const ConfigProto& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(ConfigProto* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.ConfigProto";
  }
  protected:
  explicit ConfigProto(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    // Lazily registers descriptors for the whole file, then indexes the
    // file-level metadata table with this message's fixed position.
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  typedef ConfigProto_Experimental Experimental;

  // accessors -------------------------------------------------------

  // Field numbers as declared in config.proto; ordering below follows the
  // generator's layout (messages/strings first, then scalars), not numeric
  // order.
  enum : int {
    kDeviceCountFieldNumber = 1,
    kDeviceFiltersFieldNumber = 4,
    kSessionInterOpThreadPoolFieldNumber = 12,
    kGpuOptionsFieldNumber = 6,
    kGraphOptionsFieldNumber = 10,
    kRpcOptionsFieldNumber = 13,
    kClusterDefFieldNumber = 14,
    kExperimentalFieldNumber = 16,
    kIntraOpParallelismThreadsFieldNumber = 2,
    kPlacementPeriodFieldNumber = 3,
    kInterOpParallelismThreadsFieldNumber = 5,
    kUsePerSessionThreadsFieldNumber = 9,
    kAllowSoftPlacementFieldNumber = 7,
    kLogDevicePlacementFieldNumber = 8,
    kIsolateSessionStateFieldNumber = 15,
    kOperationTimeoutInMsFieldNumber = 11,
    kShareClusterDevicesInSessionFieldNumber = 17,
  };
  // map<string, int32> device_count = 1;
  int device_count_size() const;
  void clear_device_count();
  const ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::PROTOBUF_NAMESPACE_ID::int32 >&
      device_count() const;
  ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::PROTOBUF_NAMESPACE_ID::int32 >*
      mutable_device_count();

  // repeated string device_filters = 4;
  int device_filters_size() const;
  void clear_device_filters();
  const std::string& device_filters(int index) const;
  std::string* mutable_device_filters(int index);
  void set_device_filters(int index, const std::string& value);
  void set_device_filters(int index, std::string&& value);
  void set_device_filters(int index, const char* value);
  void set_device_filters(int index, const char* value, size_t size);
  std::string* add_device_filters();
  void add_device_filters(const std::string& value);
  void add_device_filters(std::string&& value);
  void add_device_filters(const char* value);
  void add_device_filters(const char* value, size_t size);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& device_filters() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_device_filters();

  // repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12;
  int session_inter_op_thread_pool_size() const;
  void clear_session_inter_op_thread_pool();
  ::tensorflow::ThreadPoolOptionProto* mutable_session_inter_op_thread_pool(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::ThreadPoolOptionProto >*
      mutable_session_inter_op_thread_pool();
  const ::tensorflow::ThreadPoolOptionProto& session_inter_op_thread_pool(int index) const;
  ::tensorflow::ThreadPoolOptionProto* add_session_inter_op_thread_pool();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::ThreadPoolOptionProto >&
      session_inter_op_thread_pool() const;

  // .tensorflow.GPUOptions gpu_options = 6;
  // release_* transfers ownership to the caller; set_allocated_* takes
  // ownership; the unsafe_arena_* variants skip arena ownership checks.
  bool has_gpu_options() const;
  void clear_gpu_options();
  const ::tensorflow::GPUOptions& gpu_options() const;
  ::tensorflow::GPUOptions* release_gpu_options();
  ::tensorflow::GPUOptions* mutable_gpu_options();
  void set_allocated_gpu_options(::tensorflow::GPUOptions* gpu_options);
  void unsafe_arena_set_allocated_gpu_options(
      ::tensorflow::GPUOptions* gpu_options);
  ::tensorflow::GPUOptions* unsafe_arena_release_gpu_options();

  // .tensorflow.GraphOptions graph_options = 10;
  bool has_graph_options() const;
  void clear_graph_options();
  const ::tensorflow::GraphOptions& graph_options() const;
  ::tensorflow::GraphOptions* release_graph_options();
  ::tensorflow::GraphOptions* mutable_graph_options();
  void set_allocated_graph_options(::tensorflow::GraphOptions* graph_options);
  void unsafe_arena_set_allocated_graph_options(
      ::tensorflow::GraphOptions* graph_options);
  ::tensorflow::GraphOptions* unsafe_arena_release_graph_options();

  // .tensorflow.RPCOptions rpc_options = 13;
  bool has_rpc_options() const;
  void clear_rpc_options();
  const ::tensorflow::RPCOptions& rpc_options() const;
  ::tensorflow::RPCOptions* release_rpc_options();
  ::tensorflow::RPCOptions* mutable_rpc_options();
  void set_allocated_rpc_options(::tensorflow::RPCOptions* rpc_options);
  void unsafe_arena_set_allocated_rpc_options(
      ::tensorflow::RPCOptions* rpc_options);
  ::tensorflow::RPCOptions* unsafe_arena_release_rpc_options();

  // .tensorflow.ClusterDef cluster_def = 14;
  bool has_cluster_def() const;
  void clear_cluster_def();
  const ::tensorflow::ClusterDef& cluster_def() const;
  ::tensorflow::ClusterDef* release_cluster_def();
  ::tensorflow::ClusterDef* mutable_cluster_def();
  void set_allocated_cluster_def(::tensorflow::ClusterDef* cluster_def);
  void unsafe_arena_set_allocated_cluster_def(
      ::tensorflow::ClusterDef* cluster_def);
  ::tensorflow::ClusterDef* unsafe_arena_release_cluster_def();

  // .tensorflow.ConfigProto.Experimental experimental = 16;
  bool has_experimental() const;
  void clear_experimental();
  const ::tensorflow::ConfigProto_Experimental& experimental() const;
  ::tensorflow::ConfigProto_Experimental* release_experimental();
  ::tensorflow::ConfigProto_Experimental* mutable_experimental();
  void set_allocated_experimental(::tensorflow::ConfigProto_Experimental* experimental);
  void unsafe_arena_set_allocated_experimental(
      ::tensorflow::ConfigProto_Experimental* experimental);
  ::tensorflow::ConfigProto_Experimental* unsafe_arena_release_experimental();

  // int32 intra_op_parallelism_threads = 2;
  void clear_intra_op_parallelism_threads();
  ::PROTOBUF_NAMESPACE_ID::int32 intra_op_parallelism_threads() const;
  void set_intra_op_parallelism_threads(::PROTOBUF_NAMESPACE_ID::int32 value);

  // int32 placement_period = 3;
  void clear_placement_period();
  ::PROTOBUF_NAMESPACE_ID::int32 placement_period() const;
  void set_placement_period(::PROTOBUF_NAMESPACE_ID::int32 value);

  // int32 inter_op_parallelism_threads = 5;
  void clear_inter_op_parallelism_threads();
  ::PROTOBUF_NAMESPACE_ID::int32 inter_op_parallelism_threads() const;
  void set_inter_op_parallelism_threads(::PROTOBUF_NAMESPACE_ID::int32 value);

  // bool use_per_session_threads = 9;
  void clear_use_per_session_threads();
  bool use_per_session_threads() const;
  void set_use_per_session_threads(bool value);

  // bool allow_soft_placement = 7;
  void clear_allow_soft_placement();
  bool allow_soft_placement() const;
  void set_allow_soft_placement(bool value);

  // bool log_device_placement = 8;
  void clear_log_device_placement();
  bool log_device_placement() const;
  void set_log_device_placement(bool value);

  // bool isolate_session_state = 15;
  void clear_isolate_session_state();
  bool isolate_session_state() const;
  void set_isolate_session_state(bool value);

  // int64 operation_timeout_in_ms = 11;
  void clear_operation_timeout_in_ms();
  ::PROTOBUF_NAMESPACE_ID::int64 operation_timeout_in_ms() const;
  void set_operation_timeout_in_ms(::PROTOBUF_NAMESPACE_ID::int64 value);

  // bool share_cluster_devices_in_session = 17;
  void clear_share_cluster_devices_in_session();
  bool share_cluster_devices_in_session() const;
  void set_share_cluster_devices_in_session(bool value);

  // @@protoc_insertion_point(class_scope:tensorflow.ConfigProto)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::PROTOBUF_NAMESPACE_ID::internal::MapField<
      ConfigProto_DeviceCountEntry_DoNotUse,
      std::string, ::PROTOBUF_NAMESPACE_ID::int32,
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32,
      0 > device_count_;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> device_filters_;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::ThreadPoolOptionProto > session_inter_op_thread_pool_;
  // Sub-message fields are lazily allocated pointers (null until set).
  ::tensorflow::GPUOptions* gpu_options_;
  ::tensorflow::GraphOptions* graph_options_;
  ::tensorflow::RPCOptions* rpc_options_;
  ::tensorflow::ClusterDef* cluster_def_;
  ::tensorflow::ConfigProto_Experimental* experimental_;
  ::PROTOBUF_NAMESPACE_ID::int32 intra_op_parallelism_threads_;
  ::PROTOBUF_NAMESPACE_ID::int32 placement_period_;
  ::PROTOBUF_NAMESPACE_ID::int32 inter_op_parallelism_threads_;
  bool use_per_session_threads_;
  bool allow_soft_placement_;
  bool log_device_placement_;
  bool isolate_session_state_;
  ::PROTOBUF_NAMESPACE_ID::int64 operation_timeout_in_ms_;
  bool share_cluster_devices_in_session_;
  // Serialized-size cache maintained by ByteSizeLong()/SetCachedSize().
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
2704// -------------------------------------------------------------------
2705
// Generated message class for
// tensorflow.RunOptions.Experimental.RunHandlerPoolOptions: carries a single
// int64 `priority` field (field number 1).
// NOTE(review): protoc-generated; comments are annotations only — do not
// hand-edit declarations, they must match the generated .pb.cc definitions.
class RunOptions_Experimental_RunHandlerPoolOptions :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.RunOptions.Experimental.RunHandlerPoolOptions) */ {
 public:
  RunOptions_Experimental_RunHandlerPoolOptions();
  virtual ~RunOptions_Experimental_RunHandlerPoolOptions();

  RunOptions_Experimental_RunHandlerPoolOptions(const RunOptions_Experimental_RunHandlerPoolOptions& from);
  RunOptions_Experimental_RunHandlerPoolOptions(RunOptions_Experimental_RunHandlerPoolOptions&& from) noexcept
    : RunOptions_Experimental_RunHandlerPoolOptions() {
    *this = ::std::move(from);
  }

  inline RunOptions_Experimental_RunHandlerPoolOptions& operator=(const RunOptions_Experimental_RunHandlerPoolOptions& from) {
    CopyFrom(from);
    return *this;
  }
  inline RunOptions_Experimental_RunHandlerPoolOptions& operator=(RunOptions_Experimental_RunHandlerPoolOptions&& from) noexcept {
    // Cheap swap only when both objects share an arena; otherwise copy.
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const RunOptions_Experimental_RunHandlerPoolOptions& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const RunOptions_Experimental_RunHandlerPoolOptions* internal_default_instance() {
    return reinterpret_cast<const RunOptions_Experimental_RunHandlerPoolOptions*>(
               &_RunOptions_Experimental_RunHandlerPoolOptions_default_instance_);
  }
  // Position of this message in the file's generated metadata tables.
  static constexpr int kIndexInFileMessages =
    11;

  friend void swap(RunOptions_Experimental_RunHandlerPoolOptions& a, RunOptions_Experimental_RunHandlerPoolOptions& b) {
    a.Swap(&b);
  }
  inline void Swap(RunOptions_Experimental_RunHandlerPoolOptions* other) {
    if (other == this) return;
    // Same-arena: internal swap; cross-arena: copy-based GenericSwap.
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(RunOptions_Experimental_RunHandlerPoolOptions* other) {
    // Caller must guarantee both messages are on the same arena (DCHECKed).
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline RunOptions_Experimental_RunHandlerPoolOptions* New() const final {
    return CreateMaybeMessage<RunOptions_Experimental_RunHandlerPoolOptions>(nullptr);
  }

  RunOptions_Experimental_RunHandlerPoolOptions* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<RunOptions_Experimental_RunHandlerPoolOptions>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const RunOptions_Experimental_RunHandlerPoolOptions& from);
  void MergeFrom(const RunOptions_Experimental_RunHandlerPoolOptions& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(RunOptions_Experimental_RunHandlerPoolOptions* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.RunOptions.Experimental.RunHandlerPoolOptions";
  }
  protected:
  explicit RunOptions_Experimental_RunHandlerPoolOptions(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    // Lazily registers descriptors for the file, then indexes this message's
    // slot in the file-level metadata table.
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kPriorityFieldNumber = 1,
  };
  // int64 priority = 1;
  void clear_priority();
  ::PROTOBUF_NAMESPACE_ID::int64 priority() const;
  void set_priority(::PROTOBUF_NAMESPACE_ID::int64 value);

  // @@protoc_insertion_point(class_scope:tensorflow.RunOptions.Experimental.RunHandlerPoolOptions)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::PROTOBUF_NAMESPACE_ID::int64 priority_;
  // Serialized-size cache maintained by ByteSizeLong()/SetCachedSize().
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
2858// -------------------------------------------------------------------
2859
// Generated message class for tensorflow.RunOptions.Experimental (proto3).
// Declared fields (see accessors below): run_handler_pool_options (#3,
// submessage), collective_graph_key (#1, int64), use_run_handler_pool (#2, bool).
// NOTE(review): this file is protoc output ("DO NOT EDIT" in the file header) —
// change config.proto and regenerate instead of editing by hand.
class RunOptions_Experimental :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.RunOptions.Experimental) */ {
 public:
  RunOptions_Experimental();
  virtual ~RunOptions_Experimental();

  RunOptions_Experimental(const RunOptions_Experimental& from);
  // Move construction delegates to move-assignment via the default ctor.
  RunOptions_Experimental(RunOptions_Experimental&& from) noexcept
    : RunOptions_Experimental() {
    *this = ::std::move(from);
  }

  inline RunOptions_Experimental& operator=(const RunOptions_Experimental& from) {
    CopyFrom(from);
    return *this;
  }
  inline RunOptions_Experimental& operator=(RunOptions_Experimental&& from) noexcept {
    // Cheap internal swap is only valid when both messages live on the same
    // arena; otherwise fall back to a deep copy.
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const RunOptions_Experimental& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const RunOptions_Experimental* internal_default_instance() {
    return reinterpret_cast<const RunOptions_Experimental*>(
               &_RunOptions_Experimental_default_instance_);
  }
  // Index of this message in config.proto's file-level metadata tables
  // (see GetMetadataStatic below).
  static constexpr int kIndexInFileMessages =
    12;

  friend void swap(RunOptions_Experimental& a, RunOptions_Experimental& b) {
    a.Swap(&b);
  }
  inline void Swap(RunOptions_Experimental* other) {
    if (other == this) return;
    // Same-arena swap is an internal pointer swap; cross-arena requires
    // GenericSwap, which copies.
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
  void UnsafeArenaSwap(RunOptions_Experimental* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline RunOptions_Experimental* New() const final {
    return CreateMaybeMessage<RunOptions_Experimental>(nullptr);
  }

  RunOptions_Experimental* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<RunOptions_Experimental>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const RunOptions_Experimental& from);
  void MergeFrom(const RunOptions_Experimental& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(RunOptions_Experimental* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.RunOptions.Experimental";
  }
  protected:
  explicit RunOptions_Experimental(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Lazily assigns descriptors for the whole file, then returns this
  // message's entry (indexed by kIndexInFileMessages).
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  typedef RunOptions_Experimental_RunHandlerPoolOptions RunHandlerPoolOptions;

  // accessors -------------------------------------------------------

  enum : int {
    kRunHandlerPoolOptionsFieldNumber = 3,
    kCollectiveGraphKeyFieldNumber = 1,
    kUseRunHandlerPoolFieldNumber = 2,
  };
  // .tensorflow.RunOptions.Experimental.RunHandlerPoolOptions run_handler_pool_options = 3;
  bool has_run_handler_pool_options() const;
  void clear_run_handler_pool_options();
  const ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions& run_handler_pool_options() const;
  ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* release_run_handler_pool_options();
  ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* mutable_run_handler_pool_options();
  void set_allocated_run_handler_pool_options(::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* run_handler_pool_options);
  void unsafe_arena_set_allocated_run_handler_pool_options(
      ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* run_handler_pool_options);
  ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* unsafe_arena_release_run_handler_pool_options();

  // int64 collective_graph_key = 1;
  void clear_collective_graph_key();
  ::PROTOBUF_NAMESPACE_ID::int64 collective_graph_key() const;
  void set_collective_graph_key(::PROTOBUF_NAMESPACE_ID::int64 value);

  // bool use_run_handler_pool = 2;
  void clear_use_run_handler_pool();
  bool use_run_handler_pool() const;
  void set_use_run_handler_pool(bool value);

  // @@protoc_insertion_point(class_scope:tensorflow.RunOptions.Experimental)
 private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; accessor/ownership semantics are defined in the
  // generated .cc / inline implementation section.
  ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* run_handler_pool_options_;
  ::PROTOBUF_NAMESPACE_ID::int64 collective_graph_key_;
  bool use_run_handler_pool_;
  // Serialized-size cache read by GetCachedSize().
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
3034// -------------------------------------------------------------------
3035
// Generated message class for tensorflow.RunOptions (proto3).
// Declared fields (see accessors below): debug_options (#6), experimental (#8),
// timeout_in_ms (#2, int64), trace_level (#1, enum TraceLevel),
// inter_op_thread_pool (#3, int32), output_partition_graphs (#5, bool),
// report_tensor_allocations_upon_oom (#7, bool).
// NOTE(review): protoc-generated — regenerate from config.proto; do not hand-edit.
class RunOptions :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.RunOptions) */ {
 public:
  RunOptions();
  virtual ~RunOptions();

  RunOptions(const RunOptions& from);
  // Move construction delegates to move-assignment via the default ctor.
  RunOptions(RunOptions&& from) noexcept
    : RunOptions() {
    *this = ::std::move(from);
  }

  inline RunOptions& operator=(const RunOptions& from) {
    CopyFrom(from);
    return *this;
  }
  inline RunOptions& operator=(RunOptions&& from) noexcept {
    // Internal swap only when both messages share an arena; deep copy otherwise.
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const RunOptions& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const RunOptions* internal_default_instance() {
    return reinterpret_cast<const RunOptions*>(
               &_RunOptions_default_instance_);
  }
  // Index of this message in config.proto's file-level metadata tables.
  static constexpr int kIndexInFileMessages =
    13;

  friend void swap(RunOptions& a, RunOptions& b) {
    a.Swap(&b);
  }
  inline void Swap(RunOptions* other) {
    if (other == this) return;
    // O(1) internal swap when arenas match; GenericSwap copies otherwise.
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
  void UnsafeArenaSwap(RunOptions* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline RunOptions* New() const final {
    return CreateMaybeMessage<RunOptions>(nullptr);
  }

  RunOptions* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<RunOptions>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const RunOptions& from);
  void MergeFrom(const RunOptions& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(RunOptions* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.RunOptions";
  }
  protected:
  explicit RunOptions(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Lazily assigns file descriptors, then returns this message's metadata entry.
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  typedef RunOptions_Experimental Experimental;

  // Aliases re-exporting the nested TraceLevel enum and its values under
  // RunOptions:: scope (e.g. RunOptions::FULL_TRACE).
  typedef RunOptions_TraceLevel TraceLevel;
  static constexpr TraceLevel NO_TRACE =
    RunOptions_TraceLevel_NO_TRACE;
  static constexpr TraceLevel SOFTWARE_TRACE =
    RunOptions_TraceLevel_SOFTWARE_TRACE;
  static constexpr TraceLevel HARDWARE_TRACE =
    RunOptions_TraceLevel_HARDWARE_TRACE;
  static constexpr TraceLevel FULL_TRACE =
    RunOptions_TraceLevel_FULL_TRACE;
  static inline bool TraceLevel_IsValid(int value) {
    return RunOptions_TraceLevel_IsValid(value);
  }
  static constexpr TraceLevel TraceLevel_MIN =
    RunOptions_TraceLevel_TraceLevel_MIN;
  static constexpr TraceLevel TraceLevel_MAX =
    RunOptions_TraceLevel_TraceLevel_MAX;
  static constexpr int TraceLevel_ARRAYSIZE =
    RunOptions_TraceLevel_TraceLevel_ARRAYSIZE;
  static inline const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor*
  TraceLevel_descriptor() {
    return RunOptions_TraceLevel_descriptor();
  }
  template<typename T>
  static inline const std::string& TraceLevel_Name(T enum_t_value) {
    // Restrict to the enum itself or an integral value; anything else is a
    // caller bug caught at compile time.
    static_assert(::std::is_same<T, TraceLevel>::value ||
      ::std::is_integral<T>::value,
      "Incorrect type passed to function TraceLevel_Name.");
    return RunOptions_TraceLevel_Name(enum_t_value);
  }
  static inline bool TraceLevel_Parse(const std::string& name,
      TraceLevel* value) {
    return RunOptions_TraceLevel_Parse(name, value);
  }

  // accessors -------------------------------------------------------

  enum : int {
    kDebugOptionsFieldNumber = 6,
    kExperimentalFieldNumber = 8,
    kTimeoutInMsFieldNumber = 2,
    kTraceLevelFieldNumber = 1,
    kInterOpThreadPoolFieldNumber = 3,
    kOutputPartitionGraphsFieldNumber = 5,
    kReportTensorAllocationsUponOomFieldNumber = 7,
  };
  // .tensorflow.DebugOptions debug_options = 6;
  bool has_debug_options() const;
  void clear_debug_options();
  const ::tensorflow::DebugOptions& debug_options() const;
  ::tensorflow::DebugOptions* release_debug_options();
  ::tensorflow::DebugOptions* mutable_debug_options();
  void set_allocated_debug_options(::tensorflow::DebugOptions* debug_options);
  void unsafe_arena_set_allocated_debug_options(
      ::tensorflow::DebugOptions* debug_options);
  ::tensorflow::DebugOptions* unsafe_arena_release_debug_options();

  // .tensorflow.RunOptions.Experimental experimental = 8;
  bool has_experimental() const;
  void clear_experimental();
  const ::tensorflow::RunOptions_Experimental& experimental() const;
  ::tensorflow::RunOptions_Experimental* release_experimental();
  ::tensorflow::RunOptions_Experimental* mutable_experimental();
  void set_allocated_experimental(::tensorflow::RunOptions_Experimental* experimental);
  void unsafe_arena_set_allocated_experimental(
      ::tensorflow::RunOptions_Experimental* experimental);
  ::tensorflow::RunOptions_Experimental* unsafe_arena_release_experimental();

  // int64 timeout_in_ms = 2;
  void clear_timeout_in_ms();
  ::PROTOBUF_NAMESPACE_ID::int64 timeout_in_ms() const;
  void set_timeout_in_ms(::PROTOBUF_NAMESPACE_ID::int64 value);

  // .tensorflow.RunOptions.TraceLevel trace_level = 1;
  void clear_trace_level();
  ::tensorflow::RunOptions_TraceLevel trace_level() const;
  void set_trace_level(::tensorflow::RunOptions_TraceLevel value);

  // int32 inter_op_thread_pool = 3;
  void clear_inter_op_thread_pool();
  ::PROTOBUF_NAMESPACE_ID::int32 inter_op_thread_pool() const;
  void set_inter_op_thread_pool(::PROTOBUF_NAMESPACE_ID::int32 value);

  // bool output_partition_graphs = 5;
  void clear_output_partition_graphs();
  bool output_partition_graphs() const;
  void set_output_partition_graphs(bool value);

  // bool report_tensor_allocations_upon_oom = 7;
  void clear_report_tensor_allocations_upon_oom();
  bool report_tensor_allocations_upon_oom() const;
  void set_report_tensor_allocations_upon_oom(bool value);

  // @@protoc_insertion_point(class_scope:tensorflow.RunOptions)
 private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; trace_level_ is stored as a plain int (proto3 open enum).
  ::tensorflow::DebugOptions* debug_options_;
  ::tensorflow::RunOptions_Experimental* experimental_;
  ::PROTOBUF_NAMESPACE_ID::int64 timeout_in_ms_;
  int trace_level_;
  ::PROTOBUF_NAMESPACE_ID::int32 inter_op_thread_pool_;
  bool output_partition_graphs_;
  bool report_tensor_allocations_upon_oom_;
  // Serialized-size cache read by GetCachedSize().
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
3278// -------------------------------------------------------------------
3279
// Generated message class for tensorflow.RunMetadata.FunctionGraphs (proto3).
// Declared fields (see accessors below): partition_graphs (#1, repeated
// GraphDef), pre_optimization_graph (#2), post_optimization_graph (#3).
// NOTE(review): protoc-generated — regenerate from config.proto; do not hand-edit.
class RunMetadata_FunctionGraphs :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.RunMetadata.FunctionGraphs) */ {
 public:
  RunMetadata_FunctionGraphs();
  virtual ~RunMetadata_FunctionGraphs();

  RunMetadata_FunctionGraphs(const RunMetadata_FunctionGraphs& from);
  // Move construction delegates to move-assignment via the default ctor.
  RunMetadata_FunctionGraphs(RunMetadata_FunctionGraphs&& from) noexcept
    : RunMetadata_FunctionGraphs() {
    *this = ::std::move(from);
  }

  inline RunMetadata_FunctionGraphs& operator=(const RunMetadata_FunctionGraphs& from) {
    CopyFrom(from);
    return *this;
  }
  inline RunMetadata_FunctionGraphs& operator=(RunMetadata_FunctionGraphs&& from) noexcept {
    // Internal swap only when both messages share an arena; deep copy otherwise.
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const RunMetadata_FunctionGraphs& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const RunMetadata_FunctionGraphs* internal_default_instance() {
    return reinterpret_cast<const RunMetadata_FunctionGraphs*>(
               &_RunMetadata_FunctionGraphs_default_instance_);
  }
  // Index of this message in config.proto's file-level metadata tables.
  static constexpr int kIndexInFileMessages =
    14;

  friend void swap(RunMetadata_FunctionGraphs& a, RunMetadata_FunctionGraphs& b) {
    a.Swap(&b);
  }
  inline void Swap(RunMetadata_FunctionGraphs* other) {
    if (other == this) return;
    // O(1) internal swap when arenas match; GenericSwap copies otherwise.
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
  void UnsafeArenaSwap(RunMetadata_FunctionGraphs* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline RunMetadata_FunctionGraphs* New() const final {
    return CreateMaybeMessage<RunMetadata_FunctionGraphs>(nullptr);
  }

  RunMetadata_FunctionGraphs* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<RunMetadata_FunctionGraphs>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const RunMetadata_FunctionGraphs& from);
  void MergeFrom(const RunMetadata_FunctionGraphs& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(RunMetadata_FunctionGraphs* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.RunMetadata.FunctionGraphs";
  }
  protected:
  explicit RunMetadata_FunctionGraphs(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Lazily assigns file descriptors, then returns this message's metadata entry.
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kPartitionGraphsFieldNumber = 1,
    kPreOptimizationGraphFieldNumber = 2,
    kPostOptimizationGraphFieldNumber = 3,
  };
  // repeated .tensorflow.GraphDef partition_graphs = 1;
  int partition_graphs_size() const;
  void clear_partition_graphs();
  ::tensorflow::GraphDef* mutable_partition_graphs(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GraphDef >*
      mutable_partition_graphs();
  const ::tensorflow::GraphDef& partition_graphs(int index) const;
  ::tensorflow::GraphDef* add_partition_graphs();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GraphDef >&
      partition_graphs() const;

  // .tensorflow.GraphDef pre_optimization_graph = 2;
  bool has_pre_optimization_graph() const;
  void clear_pre_optimization_graph();
  const ::tensorflow::GraphDef& pre_optimization_graph() const;
  ::tensorflow::GraphDef* release_pre_optimization_graph();
  ::tensorflow::GraphDef* mutable_pre_optimization_graph();
  void set_allocated_pre_optimization_graph(::tensorflow::GraphDef* pre_optimization_graph);
  void unsafe_arena_set_allocated_pre_optimization_graph(
      ::tensorflow::GraphDef* pre_optimization_graph);
  ::tensorflow::GraphDef* unsafe_arena_release_pre_optimization_graph();

  // .tensorflow.GraphDef post_optimization_graph = 3;
  bool has_post_optimization_graph() const;
  void clear_post_optimization_graph();
  const ::tensorflow::GraphDef& post_optimization_graph() const;
  ::tensorflow::GraphDef* release_post_optimization_graph();
  ::tensorflow::GraphDef* mutable_post_optimization_graph();
  void set_allocated_post_optimization_graph(::tensorflow::GraphDef* post_optimization_graph);
  void unsafe_arena_set_allocated_post_optimization_graph(
      ::tensorflow::GraphDef* post_optimization_graph);
  ::tensorflow::GraphDef* unsafe_arena_release_post_optimization_graph();

  // @@protoc_insertion_point(class_scope:tensorflow.RunMetadata.FunctionGraphs)
 private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; accessor/ownership semantics live in the generated .cc.
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GraphDef > partition_graphs_;
  ::tensorflow::GraphDef* pre_optimization_graph_;
  ::tensorflow::GraphDef* post_optimization_graph_;
  // Serialized-size cache read by GetCachedSize().
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
3464// -------------------------------------------------------------------
3465
// Generated message class for tensorflow.RunMetadata (proto3).
// Declared fields (see accessors below): partition_graphs (#3, repeated
// GraphDef), function_graphs (#4, repeated FunctionGraphs), step_stats (#1),
// cost_graph (#2), session_metadata (#5).
// NOTE(review): protoc-generated — regenerate from config.proto; do not hand-edit.
class RunMetadata :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.RunMetadata) */ {
 public:
  RunMetadata();
  virtual ~RunMetadata();

  RunMetadata(const RunMetadata& from);
  // Move construction delegates to move-assignment via the default ctor.
  RunMetadata(RunMetadata&& from) noexcept
    : RunMetadata() {
    *this = ::std::move(from);
  }

  inline RunMetadata& operator=(const RunMetadata& from) {
    CopyFrom(from);
    return *this;
  }
  inline RunMetadata& operator=(RunMetadata&& from) noexcept {
    // Internal swap only when both messages share an arena; deep copy otherwise.
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const RunMetadata& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const RunMetadata* internal_default_instance() {
    return reinterpret_cast<const RunMetadata*>(
               &_RunMetadata_default_instance_);
  }
  // Index of this message in config.proto's file-level metadata tables.
  static constexpr int kIndexInFileMessages =
    15;

  friend void swap(RunMetadata& a, RunMetadata& b) {
    a.Swap(&b);
  }
  inline void Swap(RunMetadata* other) {
    if (other == this) return;
    // O(1) internal swap when arenas match; GenericSwap copies otherwise.
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
  void UnsafeArenaSwap(RunMetadata* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline RunMetadata* New() const final {
    return CreateMaybeMessage<RunMetadata>(nullptr);
  }

  RunMetadata* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<RunMetadata>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const RunMetadata& from);
  void MergeFrom(const RunMetadata& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(RunMetadata* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.RunMetadata";
  }
  protected:
  explicit RunMetadata(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  // Lazily assigns file descriptors, then returns this message's metadata entry.
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  typedef RunMetadata_FunctionGraphs FunctionGraphs;

  // accessors -------------------------------------------------------

  enum : int {
    kPartitionGraphsFieldNumber = 3,
    kFunctionGraphsFieldNumber = 4,
    kStepStatsFieldNumber = 1,
    kCostGraphFieldNumber = 2,
    kSessionMetadataFieldNumber = 5,
  };
  // repeated .tensorflow.GraphDef partition_graphs = 3;
  int partition_graphs_size() const;
  void clear_partition_graphs();
  ::tensorflow::GraphDef* mutable_partition_graphs(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GraphDef >*
      mutable_partition_graphs();
  const ::tensorflow::GraphDef& partition_graphs(int index) const;
  ::tensorflow::GraphDef* add_partition_graphs();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GraphDef >&
      partition_graphs() const;

  // repeated .tensorflow.RunMetadata.FunctionGraphs function_graphs = 4;
  int function_graphs_size() const;
  void clear_function_graphs();
  ::tensorflow::RunMetadata_FunctionGraphs* mutable_function_graphs(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::RunMetadata_FunctionGraphs >*
      mutable_function_graphs();
  const ::tensorflow::RunMetadata_FunctionGraphs& function_graphs(int index) const;
  ::tensorflow::RunMetadata_FunctionGraphs* add_function_graphs();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::RunMetadata_FunctionGraphs >&
      function_graphs() const;

  // .tensorflow.StepStats step_stats = 1;
  bool has_step_stats() const;
  void clear_step_stats();
  const ::tensorflow::StepStats& step_stats() const;
  ::tensorflow::StepStats* release_step_stats();
  ::tensorflow::StepStats* mutable_step_stats();
  void set_allocated_step_stats(::tensorflow::StepStats* step_stats);
  void unsafe_arena_set_allocated_step_stats(
      ::tensorflow::StepStats* step_stats);
  ::tensorflow::StepStats* unsafe_arena_release_step_stats();

  // .tensorflow.CostGraphDef cost_graph = 2;
  bool has_cost_graph() const;
  void clear_cost_graph();
  const ::tensorflow::CostGraphDef& cost_graph() const;
  ::tensorflow::CostGraphDef* release_cost_graph();
  ::tensorflow::CostGraphDef* mutable_cost_graph();
  void set_allocated_cost_graph(::tensorflow::CostGraphDef* cost_graph);
  void unsafe_arena_set_allocated_cost_graph(
      ::tensorflow::CostGraphDef* cost_graph);
  ::tensorflow::CostGraphDef* unsafe_arena_release_cost_graph();

  // .tensorflow.SessionMetadata session_metadata = 5;
  bool has_session_metadata() const;
  void clear_session_metadata();
  const ::tensorflow::SessionMetadata& session_metadata() const;
  ::tensorflow::SessionMetadata* release_session_metadata();
  ::tensorflow::SessionMetadata* mutable_session_metadata();
  void set_allocated_session_metadata(::tensorflow::SessionMetadata* session_metadata);
  void unsafe_arena_set_allocated_session_metadata(
      ::tensorflow::SessionMetadata* session_metadata);
  ::tensorflow::SessionMetadata* unsafe_arena_release_session_metadata();

  // @@protoc_insertion_point(class_scope:tensorflow.RunMetadata)
 private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; accessor/ownership semantics live in the generated .cc.
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GraphDef > partition_graphs_;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::RunMetadata_FunctionGraphs > function_graphs_;
  ::tensorflow::StepStats* step_stats_;
  ::tensorflow::CostGraphDef* cost_graph_;
  ::tensorflow::SessionMetadata* session_metadata_;
  // Serialized-size cache read by GetCachedSize().
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
3678// -------------------------------------------------------------------
3679
// Generated message class for tensorflow.TensorConnection (config.proto).
// A TensorConnection names a (from_tensor -> to_tensor) substitution used by
// CallableOptions.tensor_connection. NOTE(review): this file is protoc
// output ("DO NOT EDIT"); these declarations must stay in sync with the
// definitions emitted in config.pb.cc — regenerate rather than hand-edit.
class TensorConnection :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.TensorConnection) */ {
 public:
  TensorConnection();
  virtual ~TensorConnection();

  TensorConnection(const TensorConnection& from);
  // Move construction delegates to move-assignment below.
  TensorConnection(TensorConnection&& from) noexcept
    : TensorConnection() {
    *this = ::std::move(from);
  }

  inline TensorConnection& operator=(const TensorConnection& from) {
    CopyFrom(from);
    return *this;
  }
  // Move-assign only swaps when both objects live on the same arena;
  // otherwise a deep copy is the only safe option.
  inline TensorConnection& operator=(TensorConnection&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const TensorConnection& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const TensorConnection* internal_default_instance() {
    return reinterpret_cast<const TensorConnection*>(
               &_TensorConnection_default_instance_);
  }
  // Index of this message within the file's generated metadata tables.
  static constexpr int kIndexInFileMessages =
    16;

  friend void swap(TensorConnection& a, TensorConnection& b) {
    a.Swap(&b);
  }
  // Swap is only O(1) when both messages share an arena; cross-arena
  // swaps fall back to GenericSwap (copies).
  inline void Swap(TensorConnection* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(TensorConnection* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline TensorConnection* New() const final {
    return CreateMaybeMessage<TensorConnection>(nullptr);
  }

  TensorConnection* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<TensorConnection>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const TensorConnection& from);
  void MergeFrom(const TensorConnection& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(TensorConnection* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.TensorConnection";
  }
  protected:
  explicit TensorConnection(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kFromTensorFieldNumber = 1,
    kToTensorFieldNumber = 2,
  };
  // string from_tensor = 1;
  void clear_from_tensor();
  const std::string& from_tensor() const;
  void set_from_tensor(const std::string& value);
  void set_from_tensor(std::string&& value);
  void set_from_tensor(const char* value);
  void set_from_tensor(const char* value, size_t size);
  std::string* mutable_from_tensor();
  std::string* release_from_tensor();
  void set_allocated_from_tensor(std::string* from_tensor);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  std::string* unsafe_arena_release_from_tensor();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  void unsafe_arena_set_allocated_from_tensor(
      std::string* from_tensor);

  // string to_tensor = 2;
  void clear_to_tensor();
  const std::string& to_tensor() const;
  void set_to_tensor(const std::string& value);
  void set_to_tensor(std::string&& value);
  void set_to_tensor(const char* value);
  void set_to_tensor(const char* value, size_t size);
  std::string* mutable_to_tensor();
  std::string* release_to_tensor();
  void set_allocated_to_tensor(std::string* to_tensor);
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  std::string* unsafe_arena_release_to_tensor();
  GOOGLE_PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
  " string fields are deprecated and will be removed in a"
  " future release.")
  void unsafe_arena_set_allocated_to_tensor(
      std::string* to_tensor);

  // @@protoc_insertion_point(class_scope:tensorflow.TensorConnection)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage: arena-aware string pointers for the two string fields.
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr from_tensor_;
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr to_tensor_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
3869// -------------------------------------------------------------------
3870
// Generated map-entry type backing CallableOptions.feed_devices
// (map<string, string>). Internal to the protobuf runtime — the
// "_DoNotUse" suffix marks it as not part of the public API.
class CallableOptions_FeedDevicesEntry_DoNotUse : public ::PROTOBUF_NAMESPACE_ID::internal::MapEntry<CallableOptions_FeedDevicesEntry_DoNotUse, 
    std::string, std::string,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
    0 > {
public:
  typedef ::PROTOBUF_NAMESPACE_ID::internal::MapEntry<CallableOptions_FeedDevicesEntry_DoNotUse, 
    std::string, std::string,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
    0 > SuperType;
  CallableOptions_FeedDevicesEntry_DoNotUse();
  CallableOptions_FeedDevicesEntry_DoNotUse(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  void MergeFrom(const CallableOptions_FeedDevicesEntry_DoNotUse& other);
  static const CallableOptions_FeedDevicesEntry_DoNotUse* internal_default_instance() { return reinterpret_cast<const CallableOptions_FeedDevicesEntry_DoNotUse*>(&_CallableOptions_FeedDevicesEntry_DoNotUse_default_instance_); }
  // Keys and values are proto3 strings, so both are UTF-8 validated on parse.
  static bool ValidateKey(std::string* s) {
    return ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(s->data(), s->size(), ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, "tensorflow.CallableOptions.FeedDevicesEntry.key");
  }
  static bool ValidateValue(std::string* s) {
    return ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(s->data(), s->size(), ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, "tensorflow.CallableOptions.FeedDevicesEntry.value");
  }
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& other) final;
  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    // 17 is this entry's fixed slot in the file-level metadata table.
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[17];
  }

  public:
};
3902
3903// -------------------------------------------------------------------
3904
// Generated map-entry type backing CallableOptions.fetch_devices
// (map<string, string>). Internal to the protobuf runtime — the
// "_DoNotUse" suffix marks it as not part of the public API.
class CallableOptions_FetchDevicesEntry_DoNotUse : public ::PROTOBUF_NAMESPACE_ID::internal::MapEntry<CallableOptions_FetchDevicesEntry_DoNotUse, 
    std::string, std::string,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
    0 > {
public:
  typedef ::PROTOBUF_NAMESPACE_ID::internal::MapEntry<CallableOptions_FetchDevicesEntry_DoNotUse, 
    std::string, std::string,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
    0 > SuperType;
  CallableOptions_FetchDevicesEntry_DoNotUse();
  CallableOptions_FetchDevicesEntry_DoNotUse(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  void MergeFrom(const CallableOptions_FetchDevicesEntry_DoNotUse& other);
  static const CallableOptions_FetchDevicesEntry_DoNotUse* internal_default_instance() { return reinterpret_cast<const CallableOptions_FetchDevicesEntry_DoNotUse*>(&_CallableOptions_FetchDevicesEntry_DoNotUse_default_instance_); }
  // Keys and values are proto3 strings, so both are UTF-8 validated on parse.
  static bool ValidateKey(std::string* s) {
    return ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(s->data(), s->size(), ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, "tensorflow.CallableOptions.FetchDevicesEntry.key");
  }
  static bool ValidateValue(std::string* s) {
    return ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(s->data(), s->size(), ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, "tensorflow.CallableOptions.FetchDevicesEntry.value");
  }
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& other) final;
  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    // 18 is this entry's fixed slot in the file-level metadata table.
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[18];
  }

  public:
};
3936
3937// -------------------------------------------------------------------
3938
// Generated message class for tensorflow.CallableOptions (config.proto):
// the feeds, fetches, targets, tensor connections, device placements and
// run options that define a Session "callable". NOTE(review): protoc
// output ("DO NOT EDIT"); these declarations must stay in sync with the
// definitions emitted in config.pb.cc — regenerate rather than hand-edit.
class CallableOptions :
    public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:tensorflow.CallableOptions) */ {
 public:
  CallableOptions();
  virtual ~CallableOptions();

  CallableOptions(const CallableOptions& from);
  // Move construction delegates to move-assignment below.
  CallableOptions(CallableOptions&& from) noexcept
    : CallableOptions() {
    *this = ::std::move(from);
  }

  inline CallableOptions& operator=(const CallableOptions& from) {
    CopyFrom(from);
    return *this;
  }
  // Move-assign only swaps when both objects live on the same arena;
  // otherwise a deep copy is the only safe option.
  inline CallableOptions& operator=(CallableOptions&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArena() const final {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const final {
    return MaybeArenaPtr();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
    return GetDescriptor();
  }
  static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
    return GetMetadataStatic().descriptor;
  }
  static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
    return GetMetadataStatic().reflection;
  }
  static const CallableOptions& default_instance();

  static void InitAsDefaultInstance();  // FOR INTERNAL USE ONLY
  static inline const CallableOptions* internal_default_instance() {
    return reinterpret_cast<const CallableOptions*>(
               &_CallableOptions_default_instance_);
  }
  // Index of this message within the file's generated metadata tables.
  static constexpr int kIndexInFileMessages =
    19;

  friend void swap(CallableOptions& a, CallableOptions& b) {
    a.Swap(&b);
  }
  // Swap is only O(1) when both messages share an arena; cross-arena
  // swaps fall back to GenericSwap (copies).
  inline void Swap(CallableOptions* other) {
    if (other == this) return;
    if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(CallableOptions* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  inline CallableOptions* New() const final {
    return CreateMaybeMessage<CallableOptions>(nullptr);
  }

  CallableOptions* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CallableOptions>(arena);
  }
  void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
  void CopyFrom(const CallableOptions& from);
  void MergeFrom(const CallableOptions& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  #else
  bool MergePartialFromCodedStream(
      ::PROTOBUF_NAMESPACE_ID::io::CodedInputStream* input) final;
  #endif  // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
  void SerializeWithCachedSizes(
      ::PROTOBUF_NAMESPACE_ID::io::CodedOutputStream* output) const final;
  ::PROTOBUF_NAMESPACE_ID::uint8* InternalSerializeWithCachedSizesToArray(
      ::PROTOBUF_NAMESPACE_ID::uint8* target) const final;
  int GetCachedSize() const final { return _cached_size_.Get(); }

  private:
  inline void SharedCtor();
  inline void SharedDtor();
  void SetCachedSize(int size) const final;
  void InternalSwap(CallableOptions* other);
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.CallableOptions";
  }
  protected:
  explicit CallableOptions(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  private:
  inline ::PROTOBUF_NAMESPACE_ID::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
  private:
  static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
    ::PROTOBUF_NAMESPACE_ID::internal::AssignDescriptors(&::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto);
    return ::descriptor_table_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto.file_level_metadata[kIndexInFileMessages];
  }

  public:

  // nested types ----------------------------------------------------


  // accessors -------------------------------------------------------

  enum : int {
    kFeedFieldNumber = 1,
    kFetchFieldNumber = 2,
    kTargetFieldNumber = 3,
    kTensorConnectionFieldNumber = 5,
    kFeedDevicesFieldNumber = 6,
    kFetchDevicesFieldNumber = 7,
    kRunOptionsFieldNumber = 4,
    kFetchSkipSyncFieldNumber = 8,
  };
  // repeated string feed = 1;
  int feed_size() const;
  void clear_feed();
  const std::string& feed(int index) const;
  std::string* mutable_feed(int index);
  void set_feed(int index, const std::string& value);
  void set_feed(int index, std::string&& value);
  void set_feed(int index, const char* value);
  void set_feed(int index, const char* value, size_t size);
  std::string* add_feed();
  void add_feed(const std::string& value);
  void add_feed(std::string&& value);
  void add_feed(const char* value);
  void add_feed(const char* value, size_t size);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& feed() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_feed();

  // repeated string fetch = 2;
  int fetch_size() const;
  void clear_fetch();
  const std::string& fetch(int index) const;
  std::string* mutable_fetch(int index);
  void set_fetch(int index, const std::string& value);
  void set_fetch(int index, std::string&& value);
  void set_fetch(int index, const char* value);
  void set_fetch(int index, const char* value, size_t size);
  std::string* add_fetch();
  void add_fetch(const std::string& value);
  void add_fetch(std::string&& value);
  void add_fetch(const char* value);
  void add_fetch(const char* value, size_t size);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& fetch() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_fetch();

  // repeated string target = 3;
  int target_size() const;
  void clear_target();
  const std::string& target(int index) const;
  std::string* mutable_target(int index);
  void set_target(int index, const std::string& value);
  void set_target(int index, std::string&& value);
  void set_target(int index, const char* value);
  void set_target(int index, const char* value, size_t size);
  std::string* add_target();
  void add_target(const std::string& value);
  void add_target(std::string&& value);
  void add_target(const char* value);
  void add_target(const char* value, size_t size);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& target() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_target();

  // repeated .tensorflow.TensorConnection tensor_connection = 5;
  int tensor_connection_size() const;
  void clear_tensor_connection();
  ::tensorflow::TensorConnection* mutable_tensor_connection(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorConnection >*
      mutable_tensor_connection();
  const ::tensorflow::TensorConnection& tensor_connection(int index) const;
  ::tensorflow::TensorConnection* add_tensor_connection();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorConnection >&
      tensor_connection() const;

  // map<string, string> feed_devices = 6;
  int feed_devices_size() const;
  void clear_feed_devices();
  const ::PROTOBUF_NAMESPACE_ID::Map< std::string, std::string >&
      feed_devices() const;
  ::PROTOBUF_NAMESPACE_ID::Map< std::string, std::string >*
      mutable_feed_devices();

  // map<string, string> fetch_devices = 7;
  int fetch_devices_size() const;
  void clear_fetch_devices();
  const ::PROTOBUF_NAMESPACE_ID::Map< std::string, std::string >&
      fetch_devices() const;
  ::PROTOBUF_NAMESPACE_ID::Map< std::string, std::string >*
      mutable_fetch_devices();

  // .tensorflow.RunOptions run_options = 4;
  bool has_run_options() const;
  void clear_run_options();
  const ::tensorflow::RunOptions& run_options() const;
  ::tensorflow::RunOptions* release_run_options();
  ::tensorflow::RunOptions* mutable_run_options();
  void set_allocated_run_options(::tensorflow::RunOptions* run_options);
  void unsafe_arena_set_allocated_run_options(
      ::tensorflow::RunOptions* run_options);
  ::tensorflow::RunOptions* unsafe_arena_release_run_options();

  // bool fetch_skip_sync = 8;
  void clear_fetch_skip_sync();
  bool fetch_skip_sync() const;
  void set_fetch_skip_sync(bool value);

  // @@protoc_insertion_point(class_scope:tensorflow.CallableOptions)
  private:
  class _Internal;

  ::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage, ordered as protoc laid it out (repeated fields, maps,
  // then singular message pointer and scalar).
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> feed_;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> fetch_;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> target_;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorConnection > tensor_connection_;
  ::PROTOBUF_NAMESPACE_ID::internal::MapField<
      CallableOptions_FeedDevicesEntry_DoNotUse,
      std::string, std::string,
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
      0 > feed_devices_;
  ::PROTOBUF_NAMESPACE_ID::internal::MapField<
      CallableOptions_FetchDevicesEntry_DoNotUse,
      std::string, std::string,
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
      0 > fetch_devices_;
  ::tensorflow::RunOptions* run_options_;
  bool fetch_skip_sync_;
  mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto;
};
4205// ===================================================================
4206
4207
4208// ===================================================================
4209
4210#ifdef __GNUC__
4211 #pragma GCC diagnostic push
4212 #pragma GCC diagnostic ignored "-Wstrict-aliasing"
4213#endif // __GNUC__
4214// GPUOptions_Experimental_VirtualDevices
4215
// repeated float memory_limit_mb = 1;
// Generated inline accessors (size/clear/get/set/add/list/mutable) for the
// repeated float field; all delegate to the RepeatedField member.
inline int GPUOptions_Experimental_VirtualDevices::memory_limit_mb_size() const {
  return memory_limit_mb_.size();
}
inline void GPUOptions_Experimental_VirtualDevices::clear_memory_limit_mb() {
  memory_limit_mb_.Clear();
}
inline float GPUOptions_Experimental_VirtualDevices::memory_limit_mb(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.VirtualDevices.memory_limit_mb)
  return memory_limit_mb_.Get(index);
}
inline void GPUOptions_Experimental_VirtualDevices::set_memory_limit_mb(int index, float value) {
  memory_limit_mb_.Set(index, value);
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.VirtualDevices.memory_limit_mb)
}
inline void GPUOptions_Experimental_VirtualDevices::add_memory_limit_mb(float value) {
  memory_limit_mb_.Add(value);
  // @@protoc_insertion_point(field_add:tensorflow.GPUOptions.Experimental.VirtualDevices.memory_limit_mb)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< float >&
GPUOptions_Experimental_VirtualDevices::memory_limit_mb() const {
  // @@protoc_insertion_point(field_list:tensorflow.GPUOptions.Experimental.VirtualDevices.memory_limit_mb)
  return memory_limit_mb_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< float >*
GPUOptions_Experimental_VirtualDevices::mutable_memory_limit_mb() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.GPUOptions.Experimental.VirtualDevices.memory_limit_mb)
  return &memory_limit_mb_;
}
4245
// repeated int32 priority = 2;
// Generated inline accessors for the repeated int32 field; all delegate
// to the RepeatedField member.
inline int GPUOptions_Experimental_VirtualDevices::priority_size() const {
  return priority_.size();
}
inline void GPUOptions_Experimental_VirtualDevices::clear_priority() {
  priority_.Clear();
}
inline ::PROTOBUF_NAMESPACE_ID::int32 GPUOptions_Experimental_VirtualDevices::priority(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.VirtualDevices.priority)
  return priority_.Get(index);
}
inline void GPUOptions_Experimental_VirtualDevices::set_priority(int index, ::PROTOBUF_NAMESPACE_ID::int32 value) {
  priority_.Set(index, value);
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.VirtualDevices.priority)
}
inline void GPUOptions_Experimental_VirtualDevices::add_priority(::PROTOBUF_NAMESPACE_ID::int32 value) {
  priority_.Add(value);
  // @@protoc_insertion_point(field_add:tensorflow.GPUOptions.Experimental.VirtualDevices.priority)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >&
GPUOptions_Experimental_VirtualDevices::priority() const {
  // @@protoc_insertion_point(field_list:tensorflow.GPUOptions.Experimental.VirtualDevices.priority)
  return priority_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >*
GPUOptions_Experimental_VirtualDevices::mutable_priority() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.GPUOptions.Experimental.VirtualDevices.priority)
  return &priority_;
}
4275
// repeated int32 device_ordinal = 3;
// Generated inline accessors for the repeated int32 field; all delegate
// to the RepeatedField member.
inline int GPUOptions_Experimental_VirtualDevices::device_ordinal_size() const {
  return device_ordinal_.size();
}
inline void GPUOptions_Experimental_VirtualDevices::clear_device_ordinal() {
  device_ordinal_.Clear();
}
inline ::PROTOBUF_NAMESPACE_ID::int32 GPUOptions_Experimental_VirtualDevices::device_ordinal(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.VirtualDevices.device_ordinal)
  return device_ordinal_.Get(index);
}
inline void GPUOptions_Experimental_VirtualDevices::set_device_ordinal(int index, ::PROTOBUF_NAMESPACE_ID::int32 value) {
  device_ordinal_.Set(index, value);
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.VirtualDevices.device_ordinal)
}
inline void GPUOptions_Experimental_VirtualDevices::add_device_ordinal(::PROTOBUF_NAMESPACE_ID::int32 value) {
  device_ordinal_.Add(value);
  // @@protoc_insertion_point(field_add:tensorflow.GPUOptions.Experimental.VirtualDevices.device_ordinal)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >&
GPUOptions_Experimental_VirtualDevices::device_ordinal() const {
  // @@protoc_insertion_point(field_list:tensorflow.GPUOptions.Experimental.VirtualDevices.device_ordinal)
  return device_ordinal_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >*
GPUOptions_Experimental_VirtualDevices::mutable_device_ordinal() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.GPUOptions.Experimental.VirtualDevices.device_ordinal)
  return &device_ordinal_;
}
4305
4306// -------------------------------------------------------------------
4307
4308// GPUOptions_Experimental
4309
// repeated .tensorflow.GPUOptions.Experimental.VirtualDevices virtual_devices = 1;
// Generated inline accessors for the repeated message field; all delegate
// to the RepeatedPtrField member.
inline int GPUOptions_Experimental::virtual_devices_size() const {
  return virtual_devices_.size();
}
inline void GPUOptions_Experimental::clear_virtual_devices() {
  virtual_devices_.Clear();
}
inline ::tensorflow::GPUOptions_Experimental_VirtualDevices* GPUOptions_Experimental::mutable_virtual_devices(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.GPUOptions.Experimental.virtual_devices)
  return virtual_devices_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GPUOptions_Experimental_VirtualDevices >*
GPUOptions_Experimental::mutable_virtual_devices() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.GPUOptions.Experimental.virtual_devices)
  return &virtual_devices_;
}
inline const ::tensorflow::GPUOptions_Experimental_VirtualDevices& GPUOptions_Experimental::virtual_devices(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.virtual_devices)
  return virtual_devices_.Get(index);
}
// Appends a new default-constructed VirtualDevices element and returns it.
inline ::tensorflow::GPUOptions_Experimental_VirtualDevices* GPUOptions_Experimental::add_virtual_devices() {
  // @@protoc_insertion_point(field_add:tensorflow.GPUOptions.Experimental.virtual_devices)
  return virtual_devices_.Add();
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GPUOptions_Experimental_VirtualDevices >&
GPUOptions_Experimental::virtual_devices() const {
  // @@protoc_insertion_point(field_list:tensorflow.GPUOptions.Experimental.virtual_devices)
  return virtual_devices_;
}
4339
// bool use_unified_memory = 2;
// Generated inline accessors for the bool field; clear() restores the
// proto3 default (false).
inline void GPUOptions_Experimental::clear_use_unified_memory() {
  use_unified_memory_ = false;
}
inline bool GPUOptions_Experimental::use_unified_memory() const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.use_unified_memory)
  return use_unified_memory_;
}
inline void GPUOptions_Experimental::set_use_unified_memory(bool value) {
  
  use_unified_memory_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.use_unified_memory)
}
4353
4354// int32 num_dev_to_dev_copy_streams = 3;
4355inline void GPUOptions_Experimental::clear_num_dev_to_dev_copy_streams() {
4356 num_dev_to_dev_copy_streams_ = 0;
4357}
4358inline ::PROTOBUF_NAMESPACE_ID::int32 GPUOptions_Experimental::num_dev_to_dev_copy_streams() const {
4359 // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.num_dev_to_dev_copy_streams)
4360 return num_dev_to_dev_copy_streams_;
4361}
4362inline void GPUOptions_Experimental::set_num_dev_to_dev_copy_streams(::PROTOBUF_NAMESPACE_ID::int32 value) {
4363
4364 num_dev_to_dev_copy_streams_ = value;
4365 // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.num_dev_to_dev_copy_streams)
4366}
4367
4368// string collective_ring_order = 4;
4369inline void GPUOptions_Experimental::clear_collective_ring_order() {
4370 collective_ring_order_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
4371}
4372inline const std::string& GPUOptions_Experimental::collective_ring_order() const {
4373 // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.collective_ring_order)
4374 return collective_ring_order_.Get();
4375}
4376inline void GPUOptions_Experimental::set_collective_ring_order(const std::string& value) {
4377
4378 collective_ring_order_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
4379 // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.collective_ring_order)
4380}
4381inline void GPUOptions_Experimental::set_collective_ring_order(std::string&& value) {
4382
4383 collective_ring_order_.Set(
4384 &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
4385 // @@protoc_insertion_point(field_set_rvalue:tensorflow.GPUOptions.Experimental.collective_ring_order)
4386}
4387inline void GPUOptions_Experimental::set_collective_ring_order(const char* value) {
4388 GOOGLE_DCHECK(value != nullptr);
4389
4390 collective_ring_order_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
4391 GetArenaNoVirtual());
4392 // @@protoc_insertion_point(field_set_char:tensorflow.GPUOptions.Experimental.collective_ring_order)
4393}
4394inline void GPUOptions_Experimental::set_collective_ring_order(const char* value,
4395 size_t size) {
4396
4397 collective_ring_order_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
4398 reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
4399 // @@protoc_insertion_point(field_set_pointer:tensorflow.GPUOptions.Experimental.collective_ring_order)
4400}
4401inline std::string* GPUOptions_Experimental::mutable_collective_ring_order() {
4402
4403 // @@protoc_insertion_point(field_mutable:tensorflow.GPUOptions.Experimental.collective_ring_order)
4404 return collective_ring_order_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
4405}
4406inline std::string* GPUOptions_Experimental::release_collective_ring_order() {
4407 // @@protoc_insertion_point(field_release:tensorflow.GPUOptions.Experimental.collective_ring_order)
4408
4409 return collective_ring_order_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
4410}
4411inline void GPUOptions_Experimental::set_allocated_collective_ring_order(std::string* collective_ring_order) {
4412 if (collective_ring_order != nullptr) {
4413
4414 } else {
4415
4416 }
4417 collective_ring_order_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), collective_ring_order,
4418 GetArenaNoVirtual());
4419 // @@protoc_insertion_point(field_set_allocated:tensorflow.GPUOptions.Experimental.collective_ring_order)
4420}
4421inline std::string* GPUOptions_Experimental::unsafe_arena_release_collective_ring_order() {
4422 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.GPUOptions.Experimental.collective_ring_order)
4423 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
4424
4425 return collective_ring_order_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
4426 GetArenaNoVirtual());
4427}
4428inline void GPUOptions_Experimental::unsafe_arena_set_allocated_collective_ring_order(
4429 std::string* collective_ring_order) {
4430 GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
4431 if (collective_ring_order != nullptr) {
4432
4433 } else {
4434
4435 }
4436 collective_ring_order_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
4437 collective_ring_order, GetArenaNoVirtual());
4438 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.GPUOptions.Experimental.collective_ring_order)
4439}
4440
4441// bool timestamped_allocator = 5;
4442inline void GPUOptions_Experimental::clear_timestamped_allocator() {
4443 timestamped_allocator_ = false;
4444}
4445inline bool GPUOptions_Experimental::timestamped_allocator() const {
4446 // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.timestamped_allocator)
4447 return timestamped_allocator_;
4448}
4449inline void GPUOptions_Experimental::set_timestamped_allocator(bool value) {
4450
4451 timestamped_allocator_ = value;
4452 // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.timestamped_allocator)
4453}
4454
4455// int32 kernel_tracker_max_interval = 7;
4456inline void GPUOptions_Experimental::clear_kernel_tracker_max_interval() {
4457 kernel_tracker_max_interval_ = 0;
4458}
4459inline ::PROTOBUF_NAMESPACE_ID::int32 GPUOptions_Experimental::kernel_tracker_max_interval() const {
4460 // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.kernel_tracker_max_interval)
4461 return kernel_tracker_max_interval_;
4462}
4463inline void GPUOptions_Experimental::set_kernel_tracker_max_interval(::PROTOBUF_NAMESPACE_ID::int32 value) {
4464
4465 kernel_tracker_max_interval_ = value;
4466 // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.kernel_tracker_max_interval)
4467}
4468
4469// int32 kernel_tracker_max_bytes = 8;
4470inline void GPUOptions_Experimental::clear_kernel_tracker_max_bytes() {
4471 kernel_tracker_max_bytes_ = 0;
4472}
4473inline ::PROTOBUF_NAMESPACE_ID::int32 GPUOptions_Experimental::kernel_tracker_max_bytes() const {
4474 // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.kernel_tracker_max_bytes)
4475 return kernel_tracker_max_bytes_;
4476}
4477inline void GPUOptions_Experimental::set_kernel_tracker_max_bytes(::PROTOBUF_NAMESPACE_ID::int32 value) {
4478
4479 kernel_tracker_max_bytes_ = value;
4480 // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.kernel_tracker_max_bytes)
4481}
4482
4483// int32 kernel_tracker_max_pending = 9;
4484inline void GPUOptions_Experimental::clear_kernel_tracker_max_pending() {
4485 kernel_tracker_max_pending_ = 0;
4486}
4487inline ::PROTOBUF_NAMESPACE_ID::int32 GPUOptions_Experimental::kernel_tracker_max_pending() const {
4488 // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.kernel_tracker_max_pending)
4489 return kernel_tracker_max_pending_;
4490}
4491inline void GPUOptions_Experimental::set_kernel_tracker_max_pending(::PROTOBUF_NAMESPACE_ID::int32 value) {
4492
4493 kernel_tracker_max_pending_ = value;
4494 // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.kernel_tracker_max_pending)
4495}
4496
4497// double internal_fragmentation_fraction = 10;
4498inline void GPUOptions_Experimental::clear_internal_fragmentation_fraction() {
4499 internal_fragmentation_fraction_ = 0;
4500}
4501inline double GPUOptions_Experimental::internal_fragmentation_fraction() const {
4502 // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.internal_fragmentation_fraction)
4503 return internal_fragmentation_fraction_;
4504}
4505inline void GPUOptions_Experimental::set_internal_fragmentation_fraction(double value) {
4506
4507 internal_fragmentation_fraction_ = value;
4508 // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.internal_fragmentation_fraction)
4509}
4510
4511// bool use_cuda_malloc_async = 11;
4512inline void GPUOptions_Experimental::clear_use_cuda_malloc_async() {
4513 use_cuda_malloc_async_ = false;
4514}
4515inline bool GPUOptions_Experimental::use_cuda_malloc_async() const {
4516 // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.use_cuda_malloc_async)
4517 return use_cuda_malloc_async_;
4518}
4519inline void GPUOptions_Experimental::set_use_cuda_malloc_async(bool value) {
4520
4521 use_cuda_malloc_async_ = value;
4522 // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.use_cuda_malloc_async)
4523}
4524
4525// bool disallow_retry_on_allocation_failure = 12;
4526inline void GPUOptions_Experimental::clear_disallow_retry_on_allocation_failure() {
4527 disallow_retry_on_allocation_failure_ = false;
4528}
4529inline bool GPUOptions_Experimental::disallow_retry_on_allocation_failure() const {
4530 // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.Experimental.disallow_retry_on_allocation_failure)
4531 return disallow_retry_on_allocation_failure_;
4532}
4533inline void GPUOptions_Experimental::set_disallow_retry_on_allocation_failure(bool value) {
4534
4535 disallow_retry_on_allocation_failure_ = value;
4536 // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.Experimental.disallow_retry_on_allocation_failure)
4537}
4538
// -------------------------------------------------------------------

// GPUOptions
//
// Protoc-generated accessors (regenerate from config.proto rather than
// hand-editing).  Scalars are plain member reads/writes; string fields
// (allocator_type, visible_device_list) are ArenaStringPtr-backed and thread
// GetArenaNoVirtual() through every mutator; the `experimental` submessage is
// lazily allocated and arena-aware.  The @@protoc_insertion_point comments
// are plugin markers and must be preserved verbatim.

// double per_process_gpu_memory_fraction = 1;
inline void GPUOptions::clear_per_process_gpu_memory_fraction() {
  per_process_gpu_memory_fraction_ = 0;
}
inline double GPUOptions::per_process_gpu_memory_fraction() const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.per_process_gpu_memory_fraction)
  return per_process_gpu_memory_fraction_;
}
inline void GPUOptions::set_per_process_gpu_memory_fraction(double value) {

  per_process_gpu_memory_fraction_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.per_process_gpu_memory_fraction)
}

// bool allow_growth = 4;
inline void GPUOptions::clear_allow_growth() {
  allow_growth_ = false;
}
inline bool GPUOptions::allow_growth() const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.allow_growth)
  return allow_growth_;
}
inline void GPUOptions::set_allow_growth(bool value) {

  allow_growth_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.allow_growth)
}

// string allocator_type = 2;
// All allocator_type mutators pass the process-wide empty-string default plus
// this message's arena so storage is allocated in the right place.
inline void GPUOptions::clear_allocator_type() {
  allocator_type_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline const std::string& GPUOptions::allocator_type() const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.allocator_type)
  return allocator_type_.Get();
}
inline void GPUOptions::set_allocator_type(const std::string& value) {

  allocator_type_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.allocator_type)
}
inline void GPUOptions::set_allocator_type(std::string&& value) {

  allocator_type_.Set(
    &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_rvalue:tensorflow.GPUOptions.allocator_type)
}
inline void GPUOptions::set_allocator_type(const char* value) {
  GOOGLE_DCHECK(value != nullptr);

  allocator_type_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
              GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_char:tensorflow.GPUOptions.allocator_type)
}
// Length-delimited overload: `value` need not be NUL-terminated.
inline void GPUOptions::set_allocator_type(const char* value,
    size_t size) {

  allocator_type_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
      reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_pointer:tensorflow.GPUOptions.allocator_type)
}
inline std::string* GPUOptions::mutable_allocator_type() {

  // @@protoc_insertion_point(field_mutable:tensorflow.GPUOptions.allocator_type)
  return allocator_type_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Transfers the string out of the message; the caller owns the returned
// pointer and the field is reset to the default.
inline std::string* GPUOptions::release_allocator_type() {
  // @@protoc_insertion_point(field_release:tensorflow.GPUOptions.allocator_type)

  return allocator_type_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Takes ownership of `allocator_type` (may be nullptr to clear).  The empty
// if/else is a protoc placeholder: proto3 singular strings need no has-bit
// bookkeeping.
inline void GPUOptions::set_allocated_allocator_type(std::string* allocator_type) {
  if (allocator_type != nullptr) {

  } else {

  }
  allocator_type_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), allocator_type,
      GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GPUOptions.allocator_type)
}
// unsafe_arena_* variants skip ownership fixups and are debug-checked to be
// used only when this message lives on an arena.
inline std::string* GPUOptions::unsafe_arena_release_allocator_type() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.GPUOptions.allocator_type)
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);

  return allocator_type_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      GetArenaNoVirtual());
}
inline void GPUOptions::unsafe_arena_set_allocated_allocator_type(
    std::string* allocator_type) {
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  if (allocator_type != nullptr) {

  } else {

  }
  allocator_type_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      allocator_type, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.GPUOptions.allocator_type)
}

// int64 deferred_deletion_bytes = 3;
inline void GPUOptions::clear_deferred_deletion_bytes() {
  deferred_deletion_bytes_ = PROTOBUF_LONGLONG(0);
}
inline ::PROTOBUF_NAMESPACE_ID::int64 GPUOptions::deferred_deletion_bytes() const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.deferred_deletion_bytes)
  return deferred_deletion_bytes_;
}
inline void GPUOptions::set_deferred_deletion_bytes(::PROTOBUF_NAMESPACE_ID::int64 value) {

  deferred_deletion_bytes_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.deferred_deletion_bytes)
}

// string visible_device_list = 5;
// Same ArenaStringPtr pattern as allocator_type above.
inline void GPUOptions::clear_visible_device_list() {
  visible_device_list_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline const std::string& GPUOptions::visible_device_list() const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.visible_device_list)
  return visible_device_list_.Get();
}
inline void GPUOptions::set_visible_device_list(const std::string& value) {

  visible_device_list_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.visible_device_list)
}
inline void GPUOptions::set_visible_device_list(std::string&& value) {

  visible_device_list_.Set(
    &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_rvalue:tensorflow.GPUOptions.visible_device_list)
}
inline void GPUOptions::set_visible_device_list(const char* value) {
  GOOGLE_DCHECK(value != nullptr);

  visible_device_list_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
              GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_char:tensorflow.GPUOptions.visible_device_list)
}
// Length-delimited overload: `value` need not be NUL-terminated.
inline void GPUOptions::set_visible_device_list(const char* value,
    size_t size) {

  visible_device_list_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
      reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_pointer:tensorflow.GPUOptions.visible_device_list)
}
inline std::string* GPUOptions::mutable_visible_device_list() {

  // @@protoc_insertion_point(field_mutable:tensorflow.GPUOptions.visible_device_list)
  return visible_device_list_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Transfers the string out of the message; the caller owns the returned
// pointer and the field is reset to the default.
inline std::string* GPUOptions::release_visible_device_list() {
  // @@protoc_insertion_point(field_release:tensorflow.GPUOptions.visible_device_list)

  return visible_device_list_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Takes ownership of `visible_device_list` (may be nullptr to clear).
inline void GPUOptions::set_allocated_visible_device_list(std::string* visible_device_list) {
  if (visible_device_list != nullptr) {

  } else {

  }
  visible_device_list_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), visible_device_list,
      GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GPUOptions.visible_device_list)
}
// unsafe_arena_* variants are debug-checked to be used only on arena.
inline std::string* GPUOptions::unsafe_arena_release_visible_device_list() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.GPUOptions.visible_device_list)
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);

  return visible_device_list_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      GetArenaNoVirtual());
}
inline void GPUOptions::unsafe_arena_set_allocated_visible_device_list(
    std::string* visible_device_list) {
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  if (visible_device_list != nullptr) {

  } else {

  }
  visible_device_list_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      visible_device_list, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.GPUOptions.visible_device_list)
}

// int32 polling_active_delay_usecs = 6;
inline void GPUOptions::clear_polling_active_delay_usecs() {
  polling_active_delay_usecs_ = 0;
}
inline ::PROTOBUF_NAMESPACE_ID::int32 GPUOptions::polling_active_delay_usecs() const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.polling_active_delay_usecs)
  return polling_active_delay_usecs_;
}
inline void GPUOptions::set_polling_active_delay_usecs(::PROTOBUF_NAMESPACE_ID::int32 value) {

  polling_active_delay_usecs_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.polling_active_delay_usecs)
}

// int32 polling_inactive_delay_msecs = 7;
inline void GPUOptions::clear_polling_inactive_delay_msecs() {
  polling_inactive_delay_msecs_ = 0;
}
inline ::PROTOBUF_NAMESPACE_ID::int32 GPUOptions::polling_inactive_delay_msecs() const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.polling_inactive_delay_msecs)
  return polling_inactive_delay_msecs_;
}
inline void GPUOptions::set_polling_inactive_delay_msecs(::PROTOBUF_NAMESPACE_ID::int32 value) {

  polling_inactive_delay_msecs_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.polling_inactive_delay_msecs)
}

// bool force_gpu_compatible = 8;
inline void GPUOptions::clear_force_gpu_compatible() {
  force_gpu_compatible_ = false;
}
inline bool GPUOptions::force_gpu_compatible() const {
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.force_gpu_compatible)
  return force_gpu_compatible_;
}
inline void GPUOptions::set_force_gpu_compatible(bool value) {

  force_gpu_compatible_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GPUOptions.force_gpu_compatible)
}

// .tensorflow.GPUOptions.Experimental experimental = 9;
// Submessage accessors: the field is a lazily allocated pointer; a null
// pointer means "unset" and the getter falls back to the shared default
// instance.
inline bool GPUOptions::has_experimental() const {
  return this != internal_default_instance() && experimental_ != nullptr;
}
inline void GPUOptions::clear_experimental() {
  // Only delete when heap-allocated; arena storage is reclaimed with the arena.
  if (GetArenaNoVirtual() == nullptr && experimental_ != nullptr) {
    delete experimental_;
  }
  experimental_ = nullptr;
}
inline const ::tensorflow::GPUOptions_Experimental& GPUOptions::experimental() const {
  const ::tensorflow::GPUOptions_Experimental* p = experimental_;
  // @@protoc_insertion_point(field_get:tensorflow.GPUOptions.experimental)
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::GPUOptions_Experimental*>(
      &::tensorflow::_GPUOptions_Experimental_default_instance_);
}
// Releases ownership to the caller.  If this message lives on an arena, a
// heap copy is returned (DuplicateIfNonNull) so the caller truly owns it.
inline ::tensorflow::GPUOptions_Experimental* GPUOptions::release_experimental() {
  // @@protoc_insertion_point(field_release:tensorflow.GPUOptions.experimental)

  ::tensorflow::GPUOptions_Experimental* temp = experimental_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  experimental_ = nullptr;
  return temp;
}
// Like release_experimental() but without the arena copy; the returned
// pointer may still be arena-owned.
inline ::tensorflow::GPUOptions_Experimental* GPUOptions::unsafe_arena_release_experimental() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.GPUOptions.experimental)

  ::tensorflow::GPUOptions_Experimental* temp = experimental_;
  experimental_ = nullptr;
  return temp;
}
// Allocates the submessage on first use (on this message's arena, if any).
inline ::tensorflow::GPUOptions_Experimental* GPUOptions::mutable_experimental() {

  if (experimental_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::GPUOptions_Experimental>(GetArenaNoVirtual());
    experimental_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.GPUOptions.experimental)
  return experimental_;
}
// Takes ownership of `experimental`; deletes any previous heap value and
// copies the new one onto this message's arena if their arenas differ.
inline void GPUOptions::set_allocated_experimental(::tensorflow::GPUOptions_Experimental* experimental) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    delete experimental_;
  }
  if (experimental) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      ::PROTOBUF_NAMESPACE_ID::Arena::GetArena(experimental);
    if (message_arena != submessage_arena) {
      experimental = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, experimental, submessage_arena);
    }

  } else {

  }
  experimental_ = experimental;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GPUOptions.experimental)
}
4834
// -------------------------------------------------------------------

// OptimizerOptions
//
// Protoc-generated scalar/enum accessors (regenerate from config.proto rather
// than hand-editing).  clear_* resets to the proto3 default (false / 0); enum
// fields are stored in an int member and cast back to the enum type on read.

// bool do_common_subexpression_elimination = 1;
inline void OptimizerOptions::clear_do_common_subexpression_elimination() {
  do_common_subexpression_elimination_ = false;
}
inline bool OptimizerOptions::do_common_subexpression_elimination() const {
  // @@protoc_insertion_point(field_get:tensorflow.OptimizerOptions.do_common_subexpression_elimination)
  return do_common_subexpression_elimination_;
}
inline void OptimizerOptions::set_do_common_subexpression_elimination(bool value) {

  do_common_subexpression_elimination_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.OptimizerOptions.do_common_subexpression_elimination)
}

// bool do_constant_folding = 2;
inline void OptimizerOptions::clear_do_constant_folding() {
  do_constant_folding_ = false;
}
inline bool OptimizerOptions::do_constant_folding() const {
  // @@protoc_insertion_point(field_get:tensorflow.OptimizerOptions.do_constant_folding)
  return do_constant_folding_;
}
inline void OptimizerOptions::set_do_constant_folding(bool value) {

  do_constant_folding_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.OptimizerOptions.do_constant_folding)
}

// int64 max_folded_constant_in_bytes = 6;
inline void OptimizerOptions::clear_max_folded_constant_in_bytes() {
  max_folded_constant_in_bytes_ = PROTOBUF_LONGLONG(0);
}
inline ::PROTOBUF_NAMESPACE_ID::int64 OptimizerOptions::max_folded_constant_in_bytes() const {
  // @@protoc_insertion_point(field_get:tensorflow.OptimizerOptions.max_folded_constant_in_bytes)
  return max_folded_constant_in_bytes_;
}
inline void OptimizerOptions::set_max_folded_constant_in_bytes(::PROTOBUF_NAMESPACE_ID::int64 value) {

  max_folded_constant_in_bytes_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.OptimizerOptions.max_folded_constant_in_bytes)
}

// bool do_function_inlining = 4;
inline void OptimizerOptions::clear_do_function_inlining() {
  do_function_inlining_ = false;
}
inline bool OptimizerOptions::do_function_inlining() const {
  // @@protoc_insertion_point(field_get:tensorflow.OptimizerOptions.do_function_inlining)
  return do_function_inlining_;
}
inline void OptimizerOptions::set_do_function_inlining(bool value) {

  do_function_inlining_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.OptimizerOptions.do_function_inlining)
}

// .tensorflow.OptimizerOptions.Level opt_level = 3;
inline void OptimizerOptions::clear_opt_level() {
  opt_level_ = 0;
}
inline ::tensorflow::OptimizerOptions_Level OptimizerOptions::opt_level() const {
  // @@protoc_insertion_point(field_get:tensorflow.OptimizerOptions.opt_level)
  // Stored as int; cast back to the enum type on read.
  return static_cast< ::tensorflow::OptimizerOptions_Level >(opt_level_);
}
inline void OptimizerOptions::set_opt_level(::tensorflow::OptimizerOptions_Level value) {

  opt_level_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.OptimizerOptions.opt_level)
}

// .tensorflow.OptimizerOptions.GlobalJitLevel global_jit_level = 5;
inline void OptimizerOptions::clear_global_jit_level() {
  global_jit_level_ = 0;
}
inline ::tensorflow::OptimizerOptions_GlobalJitLevel OptimizerOptions::global_jit_level() const {
  // @@protoc_insertion_point(field_get:tensorflow.OptimizerOptions.global_jit_level)
  return static_cast< ::tensorflow::OptimizerOptions_GlobalJitLevel >(global_jit_level_);
}
inline void OptimizerOptions::set_global_jit_level(::tensorflow::OptimizerOptions_GlobalJitLevel value) {

  global_jit_level_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.OptimizerOptions.global_jit_level)
}

// bool cpu_global_jit = 7;
inline void OptimizerOptions::clear_cpu_global_jit() {
  cpu_global_jit_ = false;
}
inline bool OptimizerOptions::cpu_global_jit() const {
  // @@protoc_insertion_point(field_get:tensorflow.OptimizerOptions.cpu_global_jit)
  return cpu_global_jit_;
}
inline void OptimizerOptions::set_cpu_global_jit(bool value) {

  cpu_global_jit_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.OptimizerOptions.cpu_global_jit)
}
4936
// -------------------------------------------------------------------

// GraphOptions
//
// Protoc-generated accessors (regenerate from config.proto rather than
// hand-editing).  Scalars are plain member reads/writes with clear_*
// resetting to the proto3 default; the `optimizer_options` submessage is a
// lazily allocated, arena-aware pointer.

// bool enable_recv_scheduling = 2;
inline void GraphOptions::clear_enable_recv_scheduling() {
  enable_recv_scheduling_ = false;
}
inline bool GraphOptions::enable_recv_scheduling() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOptions.enable_recv_scheduling)
  return enable_recv_scheduling_;
}
inline void GraphOptions::set_enable_recv_scheduling(bool value) {

  enable_recv_scheduling_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GraphOptions.enable_recv_scheduling)
}

// .tensorflow.OptimizerOptions optimizer_options = 3;
// Submessage accessors: a null pointer means "unset"; the getter falls back
// to the shared default instance.
inline bool GraphOptions::has_optimizer_options() const {
  return this != internal_default_instance() && optimizer_options_ != nullptr;
}
inline void GraphOptions::clear_optimizer_options() {
  // Only delete when heap-allocated; arena storage is reclaimed with the arena.
  if (GetArenaNoVirtual() == nullptr && optimizer_options_ != nullptr) {
    delete optimizer_options_;
  }
  optimizer_options_ = nullptr;
}
inline const ::tensorflow::OptimizerOptions& GraphOptions::optimizer_options() const {
  const ::tensorflow::OptimizerOptions* p = optimizer_options_;
  // @@protoc_insertion_point(field_get:tensorflow.GraphOptions.optimizer_options)
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::OptimizerOptions*>(
      &::tensorflow::_OptimizerOptions_default_instance_);
}
// Releases ownership to the caller.  If this message lives on an arena, a
// heap copy is returned (DuplicateIfNonNull) so the caller truly owns it.
inline ::tensorflow::OptimizerOptions* GraphOptions::release_optimizer_options() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphOptions.optimizer_options)

  ::tensorflow::OptimizerOptions* temp = optimizer_options_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  optimizer_options_ = nullptr;
  return temp;
}
// Like release_optimizer_options() but without the arena copy; the returned
// pointer may still be arena-owned.
inline ::tensorflow::OptimizerOptions* GraphOptions::unsafe_arena_release_optimizer_options() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.GraphOptions.optimizer_options)

  ::tensorflow::OptimizerOptions* temp = optimizer_options_;
  optimizer_options_ = nullptr;
  return temp;
}
// Allocates the submessage on first use (on this message's arena, if any).
inline ::tensorflow::OptimizerOptions* GraphOptions::mutable_optimizer_options() {

  if (optimizer_options_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::OptimizerOptions>(GetArenaNoVirtual());
    optimizer_options_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOptions.optimizer_options)
  return optimizer_options_;
}
// Takes ownership of `optimizer_options`; deletes any previous heap value and
// copies the new one onto this message's arena if their arenas differ.
inline void GraphOptions::set_allocated_optimizer_options(::tensorflow::OptimizerOptions* optimizer_options) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    delete optimizer_options_;
  }
  if (optimizer_options) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      ::PROTOBUF_NAMESPACE_ID::Arena::GetArena(optimizer_options);
    if (message_arena != submessage_arena) {
      optimizer_options = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, optimizer_options, submessage_arena);
    }

  } else {

  }
  optimizer_options_ = optimizer_options;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOptions.optimizer_options)
}

// int64 build_cost_model = 4;
inline void GraphOptions::clear_build_cost_model() {
  build_cost_model_ = PROTOBUF_LONGLONG(0);
}
inline ::PROTOBUF_NAMESPACE_ID::int64 GraphOptions::build_cost_model() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOptions.build_cost_model)
  return build_cost_model_;
}
inline void GraphOptions::set_build_cost_model(::PROTOBUF_NAMESPACE_ID::int64 value) {

  build_cost_model_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GraphOptions.build_cost_model)
}

// int64 build_cost_model_after = 9;
inline void GraphOptions::clear_build_cost_model_after() {
  build_cost_model_after_ = PROTOBUF_LONGLONG(0);
}
inline ::PROTOBUF_NAMESPACE_ID::int64 GraphOptions::build_cost_model_after() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOptions.build_cost_model_after)
  return build_cost_model_after_;
}
inline void GraphOptions::set_build_cost_model_after(::PROTOBUF_NAMESPACE_ID::int64 value) {

  build_cost_model_after_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GraphOptions.build_cost_model_after)
}

// bool infer_shapes = 5;
inline void GraphOptions::clear_infer_shapes() {
  infer_shapes_ = false;
}
inline bool GraphOptions::infer_shapes() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOptions.infer_shapes)
  return infer_shapes_;
}
inline void GraphOptions::set_infer_shapes(bool value) {

  infer_shapes_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GraphOptions.infer_shapes)
}

// bool place_pruned_graph = 6;
inline void GraphOptions::clear_place_pruned_graph() {
  place_pruned_graph_ = false;
}
inline bool GraphOptions::place_pruned_graph() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOptions.place_pruned_graph)
  return place_pruned_graph_;
}
inline void GraphOptions::set_place_pruned_graph(bool value) {

  place_pruned_graph_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GraphOptions.place_pruned_graph)
}

// bool enable_bfloat16_sendrecv = 7;
inline void GraphOptions::clear_enable_bfloat16_sendrecv() {
  enable_bfloat16_sendrecv_ = false;
}
inline bool GraphOptions::enable_bfloat16_sendrecv() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOptions.enable_bfloat16_sendrecv)
  return enable_bfloat16_sendrecv_;
}
inline void GraphOptions::set_enable_bfloat16_sendrecv(bool value) {

  enable_bfloat16_sendrecv_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.GraphOptions.enable_bfloat16_sendrecv)
}

// int32 timeline_step = 8;
inline void GraphOptions::clear_timeline_step() {
  timeline_step_ = 0;
}
5091inline ::PROTOBUF_NAMESPACE_ID::int32 GraphOptions::timeline_step() const {
5092 // @@protoc_insertion_point(field_get:tensorflow.GraphOptions.timeline_step)
5093 return timeline_step_;
5094}
5095inline void GraphOptions::set_timeline_step(::PROTOBUF_NAMESPACE_ID::int32 value) {
5096
5097 timeline_step_ = value;
5098 // @@protoc_insertion_point(field_set:tensorflow.GraphOptions.timeline_step)
5099}
5100
// .tensorflow.RewriterConfig rewrite_options = 10;
inline bool GraphOptions::has_rewrite_options() const {
  // The shared default instance never reports submessages as present.
  return this != internal_default_instance() && rewrite_options_ != nullptr;
}
inline const ::tensorflow::RewriterConfig& GraphOptions::rewrite_options() const {
  const ::tensorflow::RewriterConfig* p = rewrite_options_;
  // @@protoc_insertion_point(field_get:tensorflow.GraphOptions.rewrite_options)
  // Reads of an unset field return the immutable default instance.
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::RewriterConfig*>(
      &::tensorflow::_RewriterConfig_default_instance_);
}
// Detaches the submessage and hands ownership to the caller.
inline ::tensorflow::RewriterConfig* GraphOptions::release_rewrite_options() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphOptions.rewrite_options)
  
  ::tensorflow::RewriterConfig* temp = rewrite_options_;
  if (GetArenaNoVirtual() != nullptr) {
    // Arena-owned storage cannot leave the arena: give the caller a
    // heap-allocated duplicate instead.
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  rewrite_options_ = nullptr;
  return temp;
}
// Like release_rewrite_options() but skips the arena duplicate; the returned
// pointer may still be arena-owned, so the caller must not delete it.
inline ::tensorflow::RewriterConfig* GraphOptions::unsafe_arena_release_rewrite_options() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.GraphOptions.rewrite_options)
  
  ::tensorflow::RewriterConfig* temp = rewrite_options_;
  rewrite_options_ = nullptr;
  return temp;
}
// Returns a mutable submessage, lazily creating it (on this message's arena,
// if any) when the field is unset.
inline ::tensorflow::RewriterConfig* GraphOptions::mutable_rewrite_options() {
  
  if (rewrite_options_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::RewriterConfig>(GetArenaNoVirtual());
    rewrite_options_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOptions.rewrite_options)
  return rewrite_options_;
}
// Installs |rewrite_options| as the submessage, taking ownership; nullptr
// clears the field.
inline void GraphOptions::set_allocated_rewrite_options(::tensorflow::RewriterConfig* rewrite_options) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    // Heap-backed message: free the old submessage. The MessageLite cast is
    // the generated pattern for message types defined in another file.
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(rewrite_options_);
  }
  if (rewrite_options) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(rewrite_options)->GetArena();
    if (message_arena != submessage_arena) {
      // Mismatched arenas: copy onto |message_arena| so lifetimes agree.
      rewrite_options = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, rewrite_options, submessage_arena);
    }
    
  } else {
    
  }
  rewrite_options_ = rewrite_options;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOptions.rewrite_options)
}
5156
5157// -------------------------------------------------------------------
5158
5159// ThreadPoolOptionProto
5160
5161// int32 num_threads = 1;
5162inline void ThreadPoolOptionProto::clear_num_threads() {
5163 num_threads_ = 0;
5164}
5165inline ::PROTOBUF_NAMESPACE_ID::int32 ThreadPoolOptionProto::num_threads() const {
5166 // @@protoc_insertion_point(field_get:tensorflow.ThreadPoolOptionProto.num_threads)
5167 return num_threads_;
5168}
5169inline void ThreadPoolOptionProto::set_num_threads(::PROTOBUF_NAMESPACE_ID::int32 value) {
5170
5171 num_threads_ = value;
5172 // @@protoc_insertion_point(field_set:tensorflow.ThreadPoolOptionProto.num_threads)
5173}
5174
// string global_name = 2;
// All accessors below are arena-aware; the process-wide empty-string
// singleton serves as the field's default value.
inline void ThreadPoolOptionProto::clear_global_name() {
  global_name_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline const std::string& ThreadPoolOptionProto::global_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.ThreadPoolOptionProto.global_name)
  return global_name_.Get();
}
inline void ThreadPoolOptionProto::set_global_name(const std::string& value) {
  
  global_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set:tensorflow.ThreadPoolOptionProto.global_name)
}
// Rvalue overload: moves |value| into the field when possible.
inline void ThreadPoolOptionProto::set_global_name(std::string&& value) {
  
  global_name_.Set(
    &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_rvalue:tensorflow.ThreadPoolOptionProto.global_name)
}
inline void ThreadPoolOptionProto::set_global_name(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  
  global_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
              GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_char:tensorflow.ThreadPoolOptionProto.global_name)
}
// Pointer+length overload; |value| need not be NUL-terminated.
inline void ThreadPoolOptionProto::set_global_name(const char* value,
    size_t size) {
  
  global_name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
      reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_pointer:tensorflow.ThreadPoolOptionProto.global_name)
}
inline std::string* ThreadPoolOptionProto::mutable_global_name() {
  
  // @@protoc_insertion_point(field_mutable:tensorflow.ThreadPoolOptionProto.global_name)
  return global_name_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Detaches the string and hands ownership to the caller.
inline std::string* ThreadPoolOptionProto::release_global_name() {
  // @@protoc_insertion_point(field_release:tensorflow.ThreadPoolOptionProto.global_name)
  
  return global_name_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Takes ownership of |global_name|; nullptr resets to the default.
inline void ThreadPoolOptionProto::set_allocated_global_name(std::string* global_name) {
  if (global_name != nullptr) {
    
  } else {
    
  }
  global_name_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), global_name,
      GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ThreadPoolOptionProto.global_name)
}
// Arena-only fast path (DCHECKs the arena); the returned pointer may be
// arena-owned — the caller must not delete it.
inline std::string* ThreadPoolOptionProto::unsafe_arena_release_global_name() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.ThreadPoolOptionProto.global_name)
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  
  return global_name_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      GetArenaNoVirtual());
}
// Arena-only fast path: stores |global_name| without copying; the string
// must outlive (or live on) this message's arena.
inline void ThreadPoolOptionProto::unsafe_arena_set_allocated_global_name(
    std::string* global_name) {
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  if (global_name != nullptr) {
    
  } else {
    
  }
  global_name_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      global_name, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.ThreadPoolOptionProto.global_name)
}
5247
5248// -------------------------------------------------------------------
5249
5250// RPCOptions
5251
5252// bool use_rpc_for_inprocess_master = 1;
5253inline void RPCOptions::clear_use_rpc_for_inprocess_master() {
5254 use_rpc_for_inprocess_master_ = false;
5255}
5256inline bool RPCOptions::use_rpc_for_inprocess_master() const {
5257 // @@protoc_insertion_point(field_get:tensorflow.RPCOptions.use_rpc_for_inprocess_master)
5258 return use_rpc_for_inprocess_master_;
5259}
5260inline void RPCOptions::set_use_rpc_for_inprocess_master(bool value) {
5261
5262 use_rpc_for_inprocess_master_ = value;
5263 // @@protoc_insertion_point(field_set:tensorflow.RPCOptions.use_rpc_for_inprocess_master)
5264}
5265
// string compression_algorithm = 2;
// All accessors below are arena-aware; the process-wide empty-string
// singleton serves as the field's default value.
inline void RPCOptions::clear_compression_algorithm() {
  compression_algorithm_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline const std::string& RPCOptions::compression_algorithm() const {
  // @@protoc_insertion_point(field_get:tensorflow.RPCOptions.compression_algorithm)
  return compression_algorithm_.Get();
}
inline void RPCOptions::set_compression_algorithm(const std::string& value) {
  
  compression_algorithm_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set:tensorflow.RPCOptions.compression_algorithm)
}
// Rvalue overload: moves |value| into the field when possible.
inline void RPCOptions::set_compression_algorithm(std::string&& value) {
  
  compression_algorithm_.Set(
    &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_rvalue:tensorflow.RPCOptions.compression_algorithm)
}
inline void RPCOptions::set_compression_algorithm(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  
  compression_algorithm_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
              GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_char:tensorflow.RPCOptions.compression_algorithm)
}
// Pointer+length overload; |value| need not be NUL-terminated.
inline void RPCOptions::set_compression_algorithm(const char* value,
    size_t size) {
  
  compression_algorithm_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
      reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_pointer:tensorflow.RPCOptions.compression_algorithm)
}
inline std::string* RPCOptions::mutable_compression_algorithm() {
  
  // @@protoc_insertion_point(field_mutable:tensorflow.RPCOptions.compression_algorithm)
  return compression_algorithm_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Detaches the string and hands ownership to the caller.
inline std::string* RPCOptions::release_compression_algorithm() {
  // @@protoc_insertion_point(field_release:tensorflow.RPCOptions.compression_algorithm)
  
  return compression_algorithm_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Takes ownership of |compression_algorithm|; nullptr resets to the default.
inline void RPCOptions::set_allocated_compression_algorithm(std::string* compression_algorithm) {
  if (compression_algorithm != nullptr) {
    
  } else {
    
  }
  compression_algorithm_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), compression_algorithm,
      GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_allocated:tensorflow.RPCOptions.compression_algorithm)
}
// Arena-only fast path (DCHECKs the arena); the returned pointer may be
// arena-owned — the caller must not delete it.
inline std::string* RPCOptions::unsafe_arena_release_compression_algorithm() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.RPCOptions.compression_algorithm)
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  
  return compression_algorithm_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      GetArenaNoVirtual());
}
// Arena-only fast path: stores |compression_algorithm| without copying; the
// string must outlive (or live on) this message's arena.
inline void RPCOptions::unsafe_arena_set_allocated_compression_algorithm(
    std::string* compression_algorithm) {
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  if (compression_algorithm != nullptr) {
    
  } else {
    
  }
  compression_algorithm_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      compression_algorithm, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.RPCOptions.compression_algorithm)
}
5338
5339// int32 compression_level = 3;
5340inline void RPCOptions::clear_compression_level() {
5341 compression_level_ = 0;
5342}
5343inline ::PROTOBUF_NAMESPACE_ID::int32 RPCOptions::compression_level() const {
5344 // @@protoc_insertion_point(field_get:tensorflow.RPCOptions.compression_level)
5345 return compression_level_;
5346}
5347inline void RPCOptions::set_compression_level(::PROTOBUF_NAMESPACE_ID::int32 value) {
5348
5349 compression_level_ = value;
5350 // @@protoc_insertion_point(field_set:tensorflow.RPCOptions.compression_level)
5351}
5352
5353// bool cache_rpc_response = 4;
5354inline void RPCOptions::clear_cache_rpc_response() {
5355 cache_rpc_response_ = false;
5356}
5357inline bool RPCOptions::cache_rpc_response() const {
5358 // @@protoc_insertion_point(field_get:tensorflow.RPCOptions.cache_rpc_response)
5359 return cache_rpc_response_;
5360}
5361inline void RPCOptions::set_cache_rpc_response(bool value) {
5362
5363 cache_rpc_response_ = value;
5364 // @@protoc_insertion_point(field_set:tensorflow.RPCOptions.cache_rpc_response)
5365}
5366
5367// bool disable_session_connection_sharing = 5;
5368inline void RPCOptions::clear_disable_session_connection_sharing() {
5369 disable_session_connection_sharing_ = false;
5370}
5371inline bool RPCOptions::disable_session_connection_sharing() const {
5372 // @@protoc_insertion_point(field_get:tensorflow.RPCOptions.disable_session_connection_sharing)
5373 return disable_session_connection_sharing_;
5374}
5375inline void RPCOptions::set_disable_session_connection_sharing(bool value) {
5376
5377 disable_session_connection_sharing_ = value;
5378 // @@protoc_insertion_point(field_set:tensorflow.RPCOptions.disable_session_connection_sharing)
5379}
5380
5381// int32 num_channels_per_target = 6;
5382inline void RPCOptions::clear_num_channels_per_target() {
5383 num_channels_per_target_ = 0;
5384}
5385inline ::PROTOBUF_NAMESPACE_ID::int32 RPCOptions::num_channels_per_target() const {
5386 // @@protoc_insertion_point(field_get:tensorflow.RPCOptions.num_channels_per_target)
5387 return num_channels_per_target_;
5388}
5389inline void RPCOptions::set_num_channels_per_target(::PROTOBUF_NAMESPACE_ID::int32 value) {
5390
5391 num_channels_per_target_ = value;
5392 // @@protoc_insertion_point(field_set:tensorflow.RPCOptions.num_channels_per_target)
5393}
5394
5395// -------------------------------------------------------------------
5396
5397// SessionMetadata
5398
// string name = 1;
// All accessors below are arena-aware; the process-wide empty-string
// singleton serves as the field's default value.
inline void SessionMetadata::clear_name() {
  name_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline const std::string& SessionMetadata::name() const {
  // @@protoc_insertion_point(field_get:tensorflow.SessionMetadata.name)
  return name_.Get();
}
inline void SessionMetadata::set_name(const std::string& value) {
  
  name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set:tensorflow.SessionMetadata.name)
}
// Rvalue overload: moves |value| into the field when possible.
inline void SessionMetadata::set_name(std::string&& value) {
  
  name_.Set(
    &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_rvalue:tensorflow.SessionMetadata.name)
}
inline void SessionMetadata::set_name(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  
  name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
              GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_char:tensorflow.SessionMetadata.name)
}
// Pointer+length overload; |value| need not be NUL-terminated.
inline void SessionMetadata::set_name(const char* value,
    size_t size) {
  
  name_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
      reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_pointer:tensorflow.SessionMetadata.name)
}
inline std::string* SessionMetadata::mutable_name() {
  
  // @@protoc_insertion_point(field_mutable:tensorflow.SessionMetadata.name)
  return name_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Detaches the string and hands ownership to the caller.
inline std::string* SessionMetadata::release_name() {
  // @@protoc_insertion_point(field_release:tensorflow.SessionMetadata.name)
  
  return name_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Takes ownership of |name|; nullptr resets to the default.
inline void SessionMetadata::set_allocated_name(std::string* name) {
  if (name != nullptr) {
    
  } else {
    
  }
  name_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), name,
      GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_allocated:tensorflow.SessionMetadata.name)
}
// Arena-only fast path (DCHECKs the arena); the returned pointer may be
// arena-owned — the caller must not delete it.
inline std::string* SessionMetadata::unsafe_arena_release_name() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.SessionMetadata.name)
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  
  return name_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      GetArenaNoVirtual());
}
// Arena-only fast path: stores |name| without copying; the string must
// outlive (or live on) this message's arena.
inline void SessionMetadata::unsafe_arena_set_allocated_name(
    std::string* name) {
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  if (name != nullptr) {
    
  } else {
    
  }
  name_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      name, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.SessionMetadata.name)
}
5471
5472// int64 version = 2;
5473inline void SessionMetadata::clear_version() {
5474 version_ = PROTOBUF_LONGLONG(0);
5475}
5476inline ::PROTOBUF_NAMESPACE_ID::int64 SessionMetadata::version() const {
5477 // @@protoc_insertion_point(field_get:tensorflow.SessionMetadata.version)
5478 return version_;
5479}
5480inline void SessionMetadata::set_version(::PROTOBUF_NAMESPACE_ID::int64 value) {
5481
5482 version_ = value;
5483 // @@protoc_insertion_point(field_set:tensorflow.SessionMetadata.version)
5484}
5485
5486// -------------------------------------------------------------------
5487
5488// -------------------------------------------------------------------
5489
5490// ConfigProto_Experimental
5491
// string collective_group_leader = 1;
// All accessors below are arena-aware; the process-wide empty-string
// singleton serves as the field's default value.
inline void ConfigProto_Experimental::clear_collective_group_leader() {
  collective_group_leader_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline const std::string& ConfigProto_Experimental::collective_group_leader() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.collective_group_leader)
  return collective_group_leader_.Get();
}
inline void ConfigProto_Experimental::set_collective_group_leader(const std::string& value) {
  
  collective_group_leader_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.collective_group_leader)
}
// Rvalue overload: moves |value| into the field when possible.
inline void ConfigProto_Experimental::set_collective_group_leader(std::string&& value) {
  
  collective_group_leader_.Set(
    &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_rvalue:tensorflow.ConfigProto.Experimental.collective_group_leader)
}
inline void ConfigProto_Experimental::set_collective_group_leader(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  
  collective_group_leader_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
              GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_char:tensorflow.ConfigProto.Experimental.collective_group_leader)
}
// Pointer+length overload; |value| need not be NUL-terminated.
inline void ConfigProto_Experimental::set_collective_group_leader(const char* value,
    size_t size) {
  
  collective_group_leader_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
      reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_pointer:tensorflow.ConfigProto.Experimental.collective_group_leader)
}
inline std::string* ConfigProto_Experimental::mutable_collective_group_leader() {
  
  // @@protoc_insertion_point(field_mutable:tensorflow.ConfigProto.Experimental.collective_group_leader)
  return collective_group_leader_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Detaches the string and hands ownership to the caller.
inline std::string* ConfigProto_Experimental::release_collective_group_leader() {
  // @@protoc_insertion_point(field_release:tensorflow.ConfigProto.Experimental.collective_group_leader)
  
  return collective_group_leader_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Takes ownership of |collective_group_leader|; nullptr resets to the default.
inline void ConfigProto_Experimental::set_allocated_collective_group_leader(std::string* collective_group_leader) {
  if (collective_group_leader != nullptr) {
    
  } else {
    
  }
  collective_group_leader_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), collective_group_leader,
      GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ConfigProto.Experimental.collective_group_leader)
}
// Arena-only fast path (DCHECKs the arena); the returned pointer may be
// arena-owned — the caller must not delete it.
inline std::string* ConfigProto_Experimental::unsafe_arena_release_collective_group_leader() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.ConfigProto.Experimental.collective_group_leader)
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  
  return collective_group_leader_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      GetArenaNoVirtual());
}
// Arena-only fast path: stores |collective_group_leader| without copying;
// the string must outlive (or live on) this message's arena.
inline void ConfigProto_Experimental::unsafe_arena_set_allocated_collective_group_leader(
    std::string* collective_group_leader) {
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  if (collective_group_leader != nullptr) {
    
  } else {
    
  }
  collective_group_leader_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      collective_group_leader, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.ConfigProto.Experimental.collective_group_leader)
}
5564
// string executor_type = 3;
// All accessors below are arena-aware; the process-wide empty-string
// singleton serves as the field's default value.
inline void ConfigProto_Experimental::clear_executor_type() {
  executor_type_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline const std::string& ConfigProto_Experimental::executor_type() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.executor_type)
  return executor_type_.Get();
}
inline void ConfigProto_Experimental::set_executor_type(const std::string& value) {
  
  executor_type_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.executor_type)
}
// Rvalue overload: moves |value| into the field when possible.
inline void ConfigProto_Experimental::set_executor_type(std::string&& value) {
  
  executor_type_.Set(
    &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_rvalue:tensorflow.ConfigProto.Experimental.executor_type)
}
inline void ConfigProto_Experimental::set_executor_type(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  
  executor_type_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
              GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_char:tensorflow.ConfigProto.Experimental.executor_type)
}
// Pointer+length overload; |value| need not be NUL-terminated.
inline void ConfigProto_Experimental::set_executor_type(const char* value,
    size_t size) {
  
  executor_type_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
      reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_pointer:tensorflow.ConfigProto.Experimental.executor_type)
}
inline std::string* ConfigProto_Experimental::mutable_executor_type() {
  
  // @@protoc_insertion_point(field_mutable:tensorflow.ConfigProto.Experimental.executor_type)
  return executor_type_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Detaches the string and hands ownership to the caller.
inline std::string* ConfigProto_Experimental::release_executor_type() {
  // @@protoc_insertion_point(field_release:tensorflow.ConfigProto.Experimental.executor_type)
  
  return executor_type_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
// Takes ownership of |executor_type|; nullptr resets to the default.
inline void ConfigProto_Experimental::set_allocated_executor_type(std::string* executor_type) {
  if (executor_type != nullptr) {
    
  } else {
    
  }
  executor_type_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), executor_type,
      GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ConfigProto.Experimental.executor_type)
}
// Arena-only fast path (DCHECKs the arena); the returned pointer may be
// arena-owned — the caller must not delete it.
inline std::string* ConfigProto_Experimental::unsafe_arena_release_executor_type() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.ConfigProto.Experimental.executor_type)
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  
  return executor_type_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      GetArenaNoVirtual());
}
// Arena-only fast path: stores |executor_type| without copying; the string
// must outlive (or live on) this message's arena.
inline void ConfigProto_Experimental::unsafe_arena_set_allocated_executor_type(
    std::string* executor_type) {
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  if (executor_type != nullptr) {
    
  } else {
    
  }
  executor_type_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      executor_type, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.ConfigProto.Experimental.executor_type)
}
5637
5638// int32 recv_buf_max_chunk = 4;
5639inline void ConfigProto_Experimental::clear_recv_buf_max_chunk() {
5640 recv_buf_max_chunk_ = 0;
5641}
5642inline ::PROTOBUF_NAMESPACE_ID::int32 ConfigProto_Experimental::recv_buf_max_chunk() const {
5643 // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.recv_buf_max_chunk)
5644 return recv_buf_max_chunk_;
5645}
5646inline void ConfigProto_Experimental::set_recv_buf_max_chunk(::PROTOBUF_NAMESPACE_ID::int32 value) {
5647
5648 recv_buf_max_chunk_ = value;
5649 // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.recv_buf_max_chunk)
5650}
5651
5652// bool use_numa_affinity = 5;
5653inline void ConfigProto_Experimental::clear_use_numa_affinity() {
5654 use_numa_affinity_ = false;
5655}
5656inline bool ConfigProto_Experimental::use_numa_affinity() const {
5657 // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.use_numa_affinity)
5658 return use_numa_affinity_;
5659}
5660inline void ConfigProto_Experimental::set_use_numa_affinity(bool value) {
5661
5662 use_numa_affinity_ = value;
5663 // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.use_numa_affinity)
5664}
5665
5666// bool collective_deterministic_sequential_execution = 6;
5667inline void ConfigProto_Experimental::clear_collective_deterministic_sequential_execution() {
5668 collective_deterministic_sequential_execution_ = false;
5669}
5670inline bool ConfigProto_Experimental::collective_deterministic_sequential_execution() const {
5671 // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.collective_deterministic_sequential_execution)
5672 return collective_deterministic_sequential_execution_;
5673}
5674inline void ConfigProto_Experimental::set_collective_deterministic_sequential_execution(bool value) {
5675
5676 collective_deterministic_sequential_execution_ = value;
5677 // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.collective_deterministic_sequential_execution)
5678}
5679
// The four bool fields below follow the standard proto3 scalar pattern:
// clear_*() resets to the default (false), the getter returns the stored
// value, and the setter assigns directly — no presence bit is tracked.
// bool collective_nccl = 7;
inline void ConfigProto_Experimental::clear_collective_nccl() {
  collective_nccl_ = false;
}
inline bool ConfigProto_Experimental::collective_nccl() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.collective_nccl)
  return collective_nccl_;
}
inline void ConfigProto_Experimental::set_collective_nccl(bool value) {
  
  collective_nccl_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.collective_nccl)
}

// bool share_session_state_in_clusterspec_propagation = 8;
inline void ConfigProto_Experimental::clear_share_session_state_in_clusterspec_propagation() {
  share_session_state_in_clusterspec_propagation_ = false;
}
inline bool ConfigProto_Experimental::share_session_state_in_clusterspec_propagation() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.share_session_state_in_clusterspec_propagation)
  return share_session_state_in_clusterspec_propagation_;
}
inline void ConfigProto_Experimental::set_share_session_state_in_clusterspec_propagation(bool value) {
  
  share_session_state_in_clusterspec_propagation_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.share_session_state_in_clusterspec_propagation)
}

// bool disable_thread_spinning = 9;
inline void ConfigProto_Experimental::clear_disable_thread_spinning() {
  disable_thread_spinning_ = false;
}
inline bool ConfigProto_Experimental::disable_thread_spinning() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.disable_thread_spinning)
  return disable_thread_spinning_;
}
inline void ConfigProto_Experimental::set_disable_thread_spinning(bool value) {
  
  disable_thread_spinning_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.disable_thread_spinning)
}

// bool share_cluster_devices_in_session = 10;
inline void ConfigProto_Experimental::clear_share_cluster_devices_in_session() {
  share_cluster_devices_in_session_ = false;
}
inline bool ConfigProto_Experimental::share_cluster_devices_in_session() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.share_cluster_devices_in_session)
  return share_cluster_devices_in_session_;
}
inline void ConfigProto_Experimental::set_share_cluster_devices_in_session(bool value) {
  
  share_cluster_devices_in_session_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.share_cluster_devices_in_session)
}
5735
// .tensorflow.SessionMetadata session_metadata = 11;
// Message-typed field: generated has/clear/get/release/mutable/set_allocated
// family with arena-aware ownership handling.
inline bool ConfigProto_Experimental::has_session_metadata() const {
  return this != internal_default_instance() && session_metadata_ != nullptr;
}
inline void ConfigProto_Experimental::clear_session_metadata() {
  // Only a heap-allocated submessage is deleted here; arena-owned storage is
  // reclaimed when the arena itself is destroyed.
  if (GetArenaNoVirtual() == nullptr && session_metadata_ != nullptr) {
    delete session_metadata_;
  }
  session_metadata_ = nullptr;
}
inline const ::tensorflow::SessionMetadata& ConfigProto_Experimental::session_metadata() const {
  const ::tensorflow::SessionMetadata* p = session_metadata_;
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.session_metadata)
  // An unset field reads as the shared immutable default instance.
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::SessionMetadata*>(
      &::tensorflow::_SessionMetadata_default_instance_);
}
inline ::tensorflow::SessionMetadata* ConfigProto_Experimental::release_session_metadata() {
  // @@protoc_insertion_point(field_release:tensorflow.ConfigProto.Experimental.session_metadata)
  // Transfers ownership to the caller; if this message lives on an arena the
  // submessage is duplicated onto the heap so the caller gets a deletable copy.
  
  ::tensorflow::SessionMetadata* temp = session_metadata_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  session_metadata_ = nullptr;
  return temp;
}
inline ::tensorflow::SessionMetadata* ConfigProto_Experimental::unsafe_arena_release_session_metadata() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.ConfigProto.Experimental.session_metadata)
  // Returns the raw pointer with no copy; the caller must respect arena
  // ownership (the pointer may still be arena-owned).
  
  ::tensorflow::SessionMetadata* temp = session_metadata_;
  session_metadata_ = nullptr;
  return temp;
}
inline ::tensorflow::SessionMetadata* ConfigProto_Experimental::mutable_session_metadata() {
  
  // Lazily creates the submessage (on this message's arena, if any).
  if (session_metadata_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::SessionMetadata>(GetArenaNoVirtual());
    session_metadata_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.ConfigProto.Experimental.session_metadata)
  return session_metadata_;
}
inline void ConfigProto_Experimental::set_allocated_session_metadata(::tensorflow::SessionMetadata* session_metadata) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  // Takes ownership of |session_metadata|; a previously held heap submessage
  // is freed first.
  if (message_arena == nullptr) {
    delete session_metadata_;
  }
  if (session_metadata) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      ::PROTOBUF_NAMESPACE_ID::Arena::GetArena(session_metadata);
    // Cross-arena (or arena/heap) transfer: copy the submessage so ownership
    // stays consistent on both sides.
    if (message_arena != submessage_arena) {
      session_metadata = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, session_metadata, submessage_arena);
    }
    
  } else {
    
  }
  session_metadata_ = session_metadata;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ConfigProto.Experimental.session_metadata)
}
5797
// Scalar proto3 accessors: clear_*() resets to the zero default, getters
// return the stored value, setters assign directly (no presence tracking).
// bool optimize_for_static_graph = 12;
inline void ConfigProto_Experimental::clear_optimize_for_static_graph() {
  optimize_for_static_graph_ = false;
}
inline bool ConfigProto_Experimental::optimize_for_static_graph() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.optimize_for_static_graph)
  return optimize_for_static_graph_;
}
inline void ConfigProto_Experimental::set_optimize_for_static_graph(bool value) {
  
  optimize_for_static_graph_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.optimize_for_static_graph)
}

// bool enable_mlir_bridge = 13;
inline void ConfigProto_Experimental::clear_enable_mlir_bridge() {
  enable_mlir_bridge_ = false;
}
inline bool ConfigProto_Experimental::enable_mlir_bridge() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.enable_mlir_bridge)
  return enable_mlir_bridge_;
}
inline void ConfigProto_Experimental::set_enable_mlir_bridge(bool value) {
  
  enable_mlir_bridge_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.enable_mlir_bridge)
}

// .tensorflow.ConfigProto.Experimental.MlirBridgeRollout mlir_bridge_rollout = 17;
// Enum field: stored as an int internally and cast to the enum type on read.
inline void ConfigProto_Experimental::clear_mlir_bridge_rollout() {
  mlir_bridge_rollout_ = 0;
}
inline ::tensorflow::ConfigProto_Experimental_MlirBridgeRollout ConfigProto_Experimental::mlir_bridge_rollout() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.mlir_bridge_rollout)
  return static_cast< ::tensorflow::ConfigProto_Experimental_MlirBridgeRollout >(mlir_bridge_rollout_);
}
inline void ConfigProto_Experimental::set_mlir_bridge_rollout(::tensorflow::ConfigProto_Experimental_MlirBridgeRollout value) {
  
  mlir_bridge_rollout_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.mlir_bridge_rollout)
}

// bool enable_mlir_graph_optimization = 16;
inline void ConfigProto_Experimental::clear_enable_mlir_graph_optimization() {
  enable_mlir_graph_optimization_ = false;
}
inline bool ConfigProto_Experimental::enable_mlir_graph_optimization() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.enable_mlir_graph_optimization)
  return enable_mlir_graph_optimization_;
}
inline void ConfigProto_Experimental::set_enable_mlir_graph_optimization(bool value) {
  
  enable_mlir_graph_optimization_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.enable_mlir_graph_optimization)
}

// bool disable_output_partition_graphs = 14;
inline void ConfigProto_Experimental::clear_disable_output_partition_graphs() {
  disable_output_partition_graphs_ = false;
}
inline bool ConfigProto_Experimental::disable_output_partition_graphs() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.disable_output_partition_graphs)
  return disable_output_partition_graphs_;
}
inline void ConfigProto_Experimental::set_disable_output_partition_graphs(bool value) {
  
  disable_output_partition_graphs_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.disable_output_partition_graphs)
}

// int64 xla_fusion_autotuner_thresh = 15;
// int64 default is spelled via PROTOBUF_LONGLONG for MSVC compatibility.
inline void ConfigProto_Experimental::clear_xla_fusion_autotuner_thresh() {
  xla_fusion_autotuner_thresh_ = PROTOBUF_LONGLONG(0);
}
inline ::PROTOBUF_NAMESPACE_ID::int64 ConfigProto_Experimental::xla_fusion_autotuner_thresh() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.xla_fusion_autotuner_thresh)
  return xla_fusion_autotuner_thresh_;
}
inline void ConfigProto_Experimental::set_xla_fusion_autotuner_thresh(::PROTOBUF_NAMESPACE_ID::int64 value) {
  
  xla_fusion_autotuner_thresh_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.xla_fusion_autotuner_thresh)
}

// bool use_tfrt = 18;
inline void ConfigProto_Experimental::clear_use_tfrt() {
  use_tfrt_ = false;
}
inline bool ConfigProto_Experimental::use_tfrt() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.use_tfrt)
  return use_tfrt_;
}
inline void ConfigProto_Experimental::set_use_tfrt(bool value) {
  
  use_tfrt_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.use_tfrt)
}

// bool disable_functional_ops_lowering = 21;
inline void ConfigProto_Experimental::clear_disable_functional_ops_lowering() {
  disable_functional_ops_lowering_ = false;
}
inline bool ConfigProto_Experimental::disable_functional_ops_lowering() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.disable_functional_ops_lowering)
  return disable_functional_ops_lowering_;
}
inline void ConfigProto_Experimental::set_disable_functional_ops_lowering(bool value) {
  
  disable_functional_ops_lowering_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.disable_functional_ops_lowering)
}

// bool xla_prefer_single_graph_cluster = 22;
inline void ConfigProto_Experimental::clear_xla_prefer_single_graph_cluster() {
  xla_prefer_single_graph_cluster_ = false;
}
inline bool ConfigProto_Experimental::xla_prefer_single_graph_cluster() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.xla_prefer_single_graph_cluster)
  return xla_prefer_single_graph_cluster_;
}
inline void ConfigProto_Experimental::set_xla_prefer_single_graph_cluster(bool value) {
  
  xla_prefer_single_graph_cluster_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.Experimental.xla_prefer_single_graph_cluster)
}
5923
// .tensorflow.CoordinationServiceConfig coordination_config = 23;
// Message-typed field accessors (no clear_* is defined in this section).
inline bool ConfigProto_Experimental::has_coordination_config() const {
  return this != internal_default_instance() && coordination_config_ != nullptr;
}
inline const ::tensorflow::CoordinationServiceConfig& ConfigProto_Experimental::coordination_config() const {
  const ::tensorflow::CoordinationServiceConfig* p = coordination_config_;
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.Experimental.coordination_config)
  // An unset field reads as the shared immutable default instance.
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::CoordinationServiceConfig*>(
      &::tensorflow::_CoordinationServiceConfig_default_instance_);
}
inline ::tensorflow::CoordinationServiceConfig* ConfigProto_Experimental::release_coordination_config() {
  // @@protoc_insertion_point(field_release:tensorflow.ConfigProto.Experimental.coordination_config)
  // Transfers ownership to the caller; duplicates onto the heap when this
  // message lives on an arena, so the caller always gets a deletable pointer.
  
  ::tensorflow::CoordinationServiceConfig* temp = coordination_config_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  coordination_config_ = nullptr;
  return temp;
}
inline ::tensorflow::CoordinationServiceConfig* ConfigProto_Experimental::unsafe_arena_release_coordination_config() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.ConfigProto.Experimental.coordination_config)
  // Raw pointer hand-off with no copy; caller must respect arena ownership.
  
  ::tensorflow::CoordinationServiceConfig* temp = coordination_config_;
  coordination_config_ = nullptr;
  return temp;
}
inline ::tensorflow::CoordinationServiceConfig* ConfigProto_Experimental::mutable_coordination_config() {
  
  // Lazily creates the submessage (on this message's arena, if any).
  if (coordination_config_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinationServiceConfig>(GetArenaNoVirtual());
    coordination_config_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.ConfigProto.Experimental.coordination_config)
  return coordination_config_;
}
inline void ConfigProto_Experimental::set_allocated_coordination_config(::tensorflow::CoordinationServiceConfig* coordination_config) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(coordination_config_);
  }
  if (coordination_config) {
    // NOTE(review): unlike the other message fields here, the generator fixed
    // submessage_arena to nullptr (the submessage is treated as heap-owned),
    // so when this message is on an arena a non-null input is always copied
    // onto it by GetOwnedMessage. This is standard protoc output for this
    // configuration — do not hand-edit.
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena = nullptr;
    if (message_arena != submessage_arena) {
      coordination_config = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, coordination_config, submessage_arena);
    }
    
  } else {
    
  }
  coordination_config_ = coordination_config;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ConfigProto.Experimental.coordination_config)
}
5978
5979// -------------------------------------------------------------------
5980
5981// ConfigProto
5982
// map<string, int32> device_count = 1;
// Map field: all operations delegate to the internal map-field wrapper.
inline int ConfigProto::device_count_size() const {
  return device_count_.size();
}
inline void ConfigProto::clear_device_count() {
  device_count_.Clear();
}
inline const ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::PROTOBUF_NAMESPACE_ID::int32 >&
ConfigProto::device_count() const {
  // @@protoc_insertion_point(field_map:tensorflow.ConfigProto.device_count)
  // Read-only view of the map.
  return device_count_.GetMap();
}
inline ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::PROTOBUF_NAMESPACE_ID::int32 >*
ConfigProto::mutable_device_count() {
  // @@protoc_insertion_point(field_mutable_map:tensorflow.ConfigProto.device_count)
  return device_count_.MutableMap();
}
6000
// Scalar proto3 accessors: clear_*() resets to the zero default, getters
// return the stored value, setters assign directly (no presence tracking).
// int32 intra_op_parallelism_threads = 2;
inline void ConfigProto::clear_intra_op_parallelism_threads() {
  intra_op_parallelism_threads_ = 0;
}
inline ::PROTOBUF_NAMESPACE_ID::int32 ConfigProto::intra_op_parallelism_threads() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.intra_op_parallelism_threads)
  return intra_op_parallelism_threads_;
}
inline void ConfigProto::set_intra_op_parallelism_threads(::PROTOBUF_NAMESPACE_ID::int32 value) {
  
  intra_op_parallelism_threads_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.intra_op_parallelism_threads)
}

// int32 inter_op_parallelism_threads = 5;
inline void ConfigProto::clear_inter_op_parallelism_threads() {
  inter_op_parallelism_threads_ = 0;
}
inline ::PROTOBUF_NAMESPACE_ID::int32 ConfigProto::inter_op_parallelism_threads() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.inter_op_parallelism_threads)
  return inter_op_parallelism_threads_;
}
inline void ConfigProto::set_inter_op_parallelism_threads(::PROTOBUF_NAMESPACE_ID::int32 value) {
  
  inter_op_parallelism_threads_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.inter_op_parallelism_threads)
}

// bool use_per_session_threads = 9;
inline void ConfigProto::clear_use_per_session_threads() {
  use_per_session_threads_ = false;
}
inline bool ConfigProto::use_per_session_threads() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.use_per_session_threads)
  return use_per_session_threads_;
}
inline void ConfigProto::set_use_per_session_threads(bool value) {
  
  use_per_session_threads_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.use_per_session_threads)
}
6042
// repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12;
// Repeated message field: thin wrappers around RepeatedPtrField. Indexed
// accessors do not bounds-check beyond the container's own checks.
inline int ConfigProto::session_inter_op_thread_pool_size() const {
  return session_inter_op_thread_pool_.size();
}
inline void ConfigProto::clear_session_inter_op_thread_pool() {
  session_inter_op_thread_pool_.Clear();
}
inline ::tensorflow::ThreadPoolOptionProto* ConfigProto::mutable_session_inter_op_thread_pool(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.ConfigProto.session_inter_op_thread_pool)
  return session_inter_op_thread_pool_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::ThreadPoolOptionProto >*
ConfigProto::mutable_session_inter_op_thread_pool() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.ConfigProto.session_inter_op_thread_pool)
  return &session_inter_op_thread_pool_;
}
inline const ::tensorflow::ThreadPoolOptionProto& ConfigProto::session_inter_op_thread_pool(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.session_inter_op_thread_pool)
  return session_inter_op_thread_pool_.Get(index);
}
inline ::tensorflow::ThreadPoolOptionProto* ConfigProto::add_session_inter_op_thread_pool() {
  // @@protoc_insertion_point(field_add:tensorflow.ConfigProto.session_inter_op_thread_pool)
  // Appends a new default-initialized element and returns it for filling in.
  return session_inter_op_thread_pool_.Add();
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::ThreadPoolOptionProto >&
ConfigProto::session_inter_op_thread_pool() const {
  // @@protoc_insertion_point(field_list:tensorflow.ConfigProto.session_inter_op_thread_pool)
  return session_inter_op_thread_pool_;
}
6072
// int32 placement_period = 3;
// Scalar proto3 accessors; clear resets to the zero default.
inline void ConfigProto::clear_placement_period() {
  placement_period_ = 0;
}
inline ::PROTOBUF_NAMESPACE_ID::int32 ConfigProto::placement_period() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.placement_period)
  return placement_period_;
}
inline void ConfigProto::set_placement_period(::PROTOBUF_NAMESPACE_ID::int32 value) {
  
  placement_period_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.placement_period)
}
6086
// repeated string device_filters = 4;
// Repeated string field: set_*(index, ...) overwrites an existing element,
// add_*(...) appends. The const char*/size overloads allow embedded NULs.
inline int ConfigProto::device_filters_size() const {
  return device_filters_.size();
}
inline void ConfigProto::clear_device_filters() {
  device_filters_.Clear();
}
inline const std::string& ConfigProto::device_filters(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.device_filters)
  return device_filters_.Get(index);
}
inline std::string* ConfigProto::mutable_device_filters(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.ConfigProto.device_filters)
  return device_filters_.Mutable(index);
}
inline void ConfigProto::set_device_filters(int index, const std::string& value) {
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.device_filters)
  device_filters_.Mutable(index)->assign(value);
}
inline void ConfigProto::set_device_filters(int index, std::string&& value) {
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.device_filters)
  // Rvalue overload: moves the string contents into the element.
  device_filters_.Mutable(index)->assign(std::move(value));
}
inline void ConfigProto::set_device_filters(int index, const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  device_filters_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set_char:tensorflow.ConfigProto.device_filters)
}
inline void ConfigProto::set_device_filters(int index, const char* value, size_t size) {
  device_filters_.Mutable(index)->assign(
    reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_set_pointer:tensorflow.ConfigProto.device_filters)
}
inline std::string* ConfigProto::add_device_filters() {
  // @@protoc_insertion_point(field_add_mutable:tensorflow.ConfigProto.device_filters)
  // Appends an empty string and returns it for the caller to fill in.
  return device_filters_.Add();
}
inline void ConfigProto::add_device_filters(const std::string& value) {
  device_filters_.Add()->assign(value);
  // @@protoc_insertion_point(field_add:tensorflow.ConfigProto.device_filters)
}
inline void ConfigProto::add_device_filters(std::string&& value) {
  device_filters_.Add(std::move(value));
  // @@protoc_insertion_point(field_add:tensorflow.ConfigProto.device_filters)
}
inline void ConfigProto::add_device_filters(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  device_filters_.Add()->assign(value);
  // @@protoc_insertion_point(field_add_char:tensorflow.ConfigProto.device_filters)
}
inline void ConfigProto::add_device_filters(const char* value, size_t size) {
  device_filters_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:tensorflow.ConfigProto.device_filters)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
ConfigProto::device_filters() const {
  // @@protoc_insertion_point(field_list:tensorflow.ConfigProto.device_filters)
  return device_filters_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
ConfigProto::mutable_device_filters() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.ConfigProto.device_filters)
  return &device_filters_;
}
6151
// .tensorflow.GPUOptions gpu_options = 6;
// Message-typed field: generated has/clear/get/release/mutable/set_allocated
// family with arena-aware ownership handling.
inline bool ConfigProto::has_gpu_options() const {
  return this != internal_default_instance() && gpu_options_ != nullptr;
}
inline void ConfigProto::clear_gpu_options() {
  // Only a heap-allocated submessage is deleted; arena storage is reclaimed
  // by the arena itself.
  if (GetArenaNoVirtual() == nullptr && gpu_options_ != nullptr) {
    delete gpu_options_;
  }
  gpu_options_ = nullptr;
}
inline const ::tensorflow::GPUOptions& ConfigProto::gpu_options() const {
  const ::tensorflow::GPUOptions* p = gpu_options_;
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.gpu_options)
  // An unset field reads as the shared immutable default instance.
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::GPUOptions*>(
      &::tensorflow::_GPUOptions_default_instance_);
}
inline ::tensorflow::GPUOptions* ConfigProto::release_gpu_options() {
  // @@protoc_insertion_point(field_release:tensorflow.ConfigProto.gpu_options)
  // Transfers ownership to the caller; duplicates onto the heap when this
  // message lives on an arena.
  
  ::tensorflow::GPUOptions* temp = gpu_options_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  gpu_options_ = nullptr;
  return temp;
}
inline ::tensorflow::GPUOptions* ConfigProto::unsafe_arena_release_gpu_options() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.ConfigProto.gpu_options)
  // Raw pointer hand-off with no copy; caller must respect arena ownership.
  
  ::tensorflow::GPUOptions* temp = gpu_options_;
  gpu_options_ = nullptr;
  return temp;
}
inline ::tensorflow::GPUOptions* ConfigProto::mutable_gpu_options() {
  
  // Lazily creates the submessage (on this message's arena, if any).
  if (gpu_options_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::GPUOptions>(GetArenaNoVirtual());
    gpu_options_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.ConfigProto.gpu_options)
  return gpu_options_;
}
inline void ConfigProto::set_allocated_gpu_options(::tensorflow::GPUOptions* gpu_options) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  // Takes ownership of |gpu_options|; a previously held heap submessage is
  // freed first.
  if (message_arena == nullptr) {
    delete gpu_options_;
  }
  if (gpu_options) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      ::PROTOBUF_NAMESPACE_ID::Arena::GetArena(gpu_options);
    // Cross-arena (or arena/heap) transfer: copy so ownership stays
    // consistent on both sides.
    if (message_arena != submessage_arena) {
      gpu_options = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, gpu_options, submessage_arena);
    }
    
  } else {
    
  }
  gpu_options_ = gpu_options;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ConfigProto.gpu_options)
}
6213
// Scalar proto3 bool accessors (no presence bit; clear resets to false).
// bool allow_soft_placement = 7;
inline void ConfigProto::clear_allow_soft_placement() {
  allow_soft_placement_ = false;
}
inline bool ConfigProto::allow_soft_placement() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.allow_soft_placement)
  return allow_soft_placement_;
}
inline void ConfigProto::set_allow_soft_placement(bool value) {
  
  allow_soft_placement_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.allow_soft_placement)
}

// bool log_device_placement = 8;
inline void ConfigProto::clear_log_device_placement() {
  log_device_placement_ = false;
}
inline bool ConfigProto::log_device_placement() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.log_device_placement)
  return log_device_placement_;
}
inline void ConfigProto::set_log_device_placement(bool value) {
  
  log_device_placement_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.log_device_placement)
}
6241
// .tensorflow.GraphOptions graph_options = 10;
// Message-typed field: generated has/clear/get/release/mutable/set_allocated
// family with arena-aware ownership handling.
inline bool ConfigProto::has_graph_options() const {
  return this != internal_default_instance() && graph_options_ != nullptr;
}
inline void ConfigProto::clear_graph_options() {
  // Only a heap-allocated submessage is deleted; arena storage is reclaimed
  // by the arena itself.
  if (GetArenaNoVirtual() == nullptr && graph_options_ != nullptr) {
    delete graph_options_;
  }
  graph_options_ = nullptr;
}
inline const ::tensorflow::GraphOptions& ConfigProto::graph_options() const {
  const ::tensorflow::GraphOptions* p = graph_options_;
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.graph_options)
  // An unset field reads as the shared immutable default instance.
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::GraphOptions*>(
      &::tensorflow::_GraphOptions_default_instance_);
}
inline ::tensorflow::GraphOptions* ConfigProto::release_graph_options() {
  // @@protoc_insertion_point(field_release:tensorflow.ConfigProto.graph_options)
  // Transfers ownership to the caller; duplicates onto the heap when this
  // message lives on an arena.
  
  ::tensorflow::GraphOptions* temp = graph_options_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  graph_options_ = nullptr;
  return temp;
}
inline ::tensorflow::GraphOptions* ConfigProto::unsafe_arena_release_graph_options() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.ConfigProto.graph_options)
  // Raw pointer hand-off with no copy; caller must respect arena ownership.
  
  ::tensorflow::GraphOptions* temp = graph_options_;
  graph_options_ = nullptr;
  return temp;
}
inline ::tensorflow::GraphOptions* ConfigProto::mutable_graph_options() {
  
  // Lazily creates the submessage (on this message's arena, if any).
  if (graph_options_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::GraphOptions>(GetArenaNoVirtual());
    graph_options_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.ConfigProto.graph_options)
  return graph_options_;
}
inline void ConfigProto::set_allocated_graph_options(::tensorflow::GraphOptions* graph_options) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  // Takes ownership of |graph_options|; a previously held heap submessage is
  // freed first.
  if (message_arena == nullptr) {
    delete graph_options_;
  }
  if (graph_options) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      ::PROTOBUF_NAMESPACE_ID::Arena::GetArena(graph_options);
    // Cross-arena (or arena/heap) transfer: copy so ownership stays
    // consistent on both sides.
    if (message_arena != submessage_arena) {
      graph_options = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, graph_options, submessage_arena);
    }
    
  } else {
    
  }
  graph_options_ = graph_options;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ConfigProto.graph_options)
}
6303
// int64 operation_timeout_in_ms = 11;
// Scalar proto3 accessors; int64 default spelled via PROTOBUF_LONGLONG for
// MSVC compatibility.
inline void ConfigProto::clear_operation_timeout_in_ms() {
  operation_timeout_in_ms_ = PROTOBUF_LONGLONG(0);
}
inline ::PROTOBUF_NAMESPACE_ID::int64 ConfigProto::operation_timeout_in_ms() const {
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.operation_timeout_in_ms)
  return operation_timeout_in_ms_;
}
inline void ConfigProto::set_operation_timeout_in_ms(::PROTOBUF_NAMESPACE_ID::int64 value) {
  
  operation_timeout_in_ms_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.operation_timeout_in_ms)
}
6317
// .tensorflow.RPCOptions rpc_options = 13;
// Message-typed field: generated has/clear/get/release/mutable/set_allocated
// family with arena-aware ownership handling.
inline bool ConfigProto::has_rpc_options() const {
  return this != internal_default_instance() && rpc_options_ != nullptr;
}
inline void ConfigProto::clear_rpc_options() {
  // Only a heap-allocated submessage is deleted; arena storage is reclaimed
  // by the arena itself.
  if (GetArenaNoVirtual() == nullptr && rpc_options_ != nullptr) {
    delete rpc_options_;
  }
  rpc_options_ = nullptr;
}
inline const ::tensorflow::RPCOptions& ConfigProto::rpc_options() const {
  const ::tensorflow::RPCOptions* p = rpc_options_;
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.rpc_options)
  // An unset field reads as the shared immutable default instance.
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::RPCOptions*>(
      &::tensorflow::_RPCOptions_default_instance_);
}
inline ::tensorflow::RPCOptions* ConfigProto::release_rpc_options() {
  // @@protoc_insertion_point(field_release:tensorflow.ConfigProto.rpc_options)
  // Transfers ownership to the caller; duplicates onto the heap when this
  // message lives on an arena.
  
  ::tensorflow::RPCOptions* temp = rpc_options_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  rpc_options_ = nullptr;
  return temp;
}
inline ::tensorflow::RPCOptions* ConfigProto::unsafe_arena_release_rpc_options() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.ConfigProto.rpc_options)
  // Raw pointer hand-off with no copy; caller must respect arena ownership.
  
  ::tensorflow::RPCOptions* temp = rpc_options_;
  rpc_options_ = nullptr;
  return temp;
}
inline ::tensorflow::RPCOptions* ConfigProto::mutable_rpc_options() {
  
  // Lazily creates the submessage (on this message's arena, if any).
  if (rpc_options_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::RPCOptions>(GetArenaNoVirtual());
    rpc_options_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.ConfigProto.rpc_options)
  return rpc_options_;
}
inline void ConfigProto::set_allocated_rpc_options(::tensorflow::RPCOptions* rpc_options) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  // Takes ownership of |rpc_options|; a previously held heap submessage is
  // freed first.
  if (message_arena == nullptr) {
    delete rpc_options_;
  }
  if (rpc_options) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      ::PROTOBUF_NAMESPACE_ID::Arena::GetArena(rpc_options);
    // Cross-arena (or arena/heap) transfer: copy so ownership stays
    // consistent on both sides.
    if (message_arena != submessage_arena) {
      rpc_options = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, rpc_options, submessage_arena);
    }
    
  } else {
    
  }
  rpc_options_ = rpc_options;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ConfigProto.rpc_options)
}
6379
// .tensorflow.ClusterDef cluster_def = 14;
// Message-typed field (no clear_* is defined in this section). The
// set_allocated variant goes through MessageLite casts — presumably because
// the generator treated ClusterDef as a cross-file type at emission time;
// behavior matches the other message fields.
inline bool ConfigProto::has_cluster_def() const {
  return this != internal_default_instance() && cluster_def_ != nullptr;
}
inline const ::tensorflow::ClusterDef& ConfigProto::cluster_def() const {
  const ::tensorflow::ClusterDef* p = cluster_def_;
  // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.cluster_def)
  // An unset field reads as the shared immutable default instance.
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::ClusterDef*>(
      &::tensorflow::_ClusterDef_default_instance_);
}
inline ::tensorflow::ClusterDef* ConfigProto::release_cluster_def() {
  // @@protoc_insertion_point(field_release:tensorflow.ConfigProto.cluster_def)
  // Transfers ownership to the caller; duplicates onto the heap when this
  // message lives on an arena.
  
  ::tensorflow::ClusterDef* temp = cluster_def_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  cluster_def_ = nullptr;
  return temp;
}
inline ::tensorflow::ClusterDef* ConfigProto::unsafe_arena_release_cluster_def() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.ConfigProto.cluster_def)
  // Raw pointer hand-off with no copy; caller must respect arena ownership.
  
  ::tensorflow::ClusterDef* temp = cluster_def_;
  cluster_def_ = nullptr;
  return temp;
}
inline ::tensorflow::ClusterDef* ConfigProto::mutable_cluster_def() {
  
  // Lazily creates the submessage (on this message's arena, if any).
  if (cluster_def_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::ClusterDef>(GetArenaNoVirtual());
    cluster_def_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.ConfigProto.cluster_def)
  return cluster_def_;
}
inline void ConfigProto::set_allocated_cluster_def(::tensorflow::ClusterDef* cluster_def) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  // Takes ownership of |cluster_def|; a previously held heap submessage is
  // freed first.
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(cluster_def_);
  }
  if (cluster_def) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(cluster_def)->GetArena();
    // Cross-arena (or arena/heap) transfer: copy so ownership stays
    // consistent on both sides.
    if (message_arena != submessage_arena) {
      cluster_def = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, cluster_def, submessage_arena);
    }
    
  } else {
    
  }
  cluster_def_ = cluster_def;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ConfigProto.cluster_def)
}
6435
6436// bool isolate_session_state = 15;
6437inline void ConfigProto::clear_isolate_session_state() {
6438 isolate_session_state_ = false;
6439}
6440inline bool ConfigProto::isolate_session_state() const {
6441 // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.isolate_session_state)
6442 return isolate_session_state_;
6443}
6444inline void ConfigProto::set_isolate_session_state(bool value) {
6445
6446 isolate_session_state_ = value;
6447 // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.isolate_session_state)
6448}
6449
6450// bool share_cluster_devices_in_session = 17;
6451inline void ConfigProto::clear_share_cluster_devices_in_session() {
6452 share_cluster_devices_in_session_ = false;
6453}
6454inline bool ConfigProto::share_cluster_devices_in_session() const {
6455 // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.share_cluster_devices_in_session)
6456 return share_cluster_devices_in_session_;
6457}
6458inline void ConfigProto::set_share_cluster_devices_in_session(bool value) {
6459
6460 share_cluster_devices_in_session_ = value;
6461 // @@protoc_insertion_point(field_set:tensorflow.ConfigProto.share_cluster_devices_in_session)
6462}
6463
6464// .tensorflow.ConfigProto.Experimental experimental = 16;
6465inline bool ConfigProto::has_experimental() const {
6466 return this != internal_default_instance() && experimental_ != nullptr;
6467}
6468inline void ConfigProto::clear_experimental() {
6469 if (GetArenaNoVirtual() == nullptr && experimental_ != nullptr) {
6470 delete experimental_;
6471 }
6472 experimental_ = nullptr;
6473}
6474inline const ::tensorflow::ConfigProto_Experimental& ConfigProto::experimental() const {
6475 const ::tensorflow::ConfigProto_Experimental* p = experimental_;
6476 // @@protoc_insertion_point(field_get:tensorflow.ConfigProto.experimental)
6477 return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::ConfigProto_Experimental*>(
6478 &::tensorflow::_ConfigProto_Experimental_default_instance_);
6479}
6480inline ::tensorflow::ConfigProto_Experimental* ConfigProto::release_experimental() {
6481 // @@protoc_insertion_point(field_release:tensorflow.ConfigProto.experimental)
6482
6483 ::tensorflow::ConfigProto_Experimental* temp = experimental_;
6484 if (GetArenaNoVirtual() != nullptr) {
6485 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
6486 }
6487 experimental_ = nullptr;
6488 return temp;
6489}
6490inline ::tensorflow::ConfigProto_Experimental* ConfigProto::unsafe_arena_release_experimental() {
6491 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.ConfigProto.experimental)
6492
6493 ::tensorflow::ConfigProto_Experimental* temp = experimental_;
6494 experimental_ = nullptr;
6495 return temp;
6496}
6497inline ::tensorflow::ConfigProto_Experimental* ConfigProto::mutable_experimental() {
6498
6499 if (experimental_ == nullptr) {
6500 auto* p = CreateMaybeMessage<::tensorflow::ConfigProto_Experimental>(GetArenaNoVirtual());
6501 experimental_ = p;
6502 }
6503 // @@protoc_insertion_point(field_mutable:tensorflow.ConfigProto.experimental)
6504 return experimental_;
6505}
6506inline void ConfigProto::set_allocated_experimental(::tensorflow::ConfigProto_Experimental* experimental) {
6507 ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
6508 if (message_arena == nullptr) {
6509 delete experimental_;
6510 }
6511 if (experimental) {
6512 ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
6513 ::PROTOBUF_NAMESPACE_ID::Arena::GetArena(experimental);
6514 if (message_arena != submessage_arena) {
6515 experimental = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
6516 message_arena, experimental, submessage_arena);
6517 }
6518
6519 } else {
6520
6521 }
6522 experimental_ = experimental;
6523 // @@protoc_insertion_point(field_set_allocated:tensorflow.ConfigProto.experimental)
6524}
6525
6526// -------------------------------------------------------------------
6527
// RunOptions_Experimental_RunHandlerPoolOptions

// int64 priority = 1;
// Plain scalar accessors: clear resets to the proto3 default (0).
inline void RunOptions_Experimental_RunHandlerPoolOptions::clear_priority() {
  priority_ = PROTOBUF_LONGLONG(0);
}
inline ::PROTOBUF_NAMESPACE_ID::int64 RunOptions_Experimental_RunHandlerPoolOptions::priority() const {
  // @@protoc_insertion_point(field_get:tensorflow.RunOptions.Experimental.RunHandlerPoolOptions.priority)
  return priority_;
}
inline void RunOptions_Experimental_RunHandlerPoolOptions::set_priority(::PROTOBUF_NAMESPACE_ID::int64 value) {

  priority_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.RunOptions.Experimental.RunHandlerPoolOptions.priority)
}
6543
6544// -------------------------------------------------------------------
6545
// RunOptions_Experimental

// int64 collective_graph_key = 1;
// Plain scalar accessors: clear resets to the proto3 default (0).
inline void RunOptions_Experimental::clear_collective_graph_key() {
  collective_graph_key_ = PROTOBUF_LONGLONG(0);
}
inline ::PROTOBUF_NAMESPACE_ID::int64 RunOptions_Experimental::collective_graph_key() const {
  // @@protoc_insertion_point(field_get:tensorflow.RunOptions.Experimental.collective_graph_key)
  return collective_graph_key_;
}
inline void RunOptions_Experimental::set_collective_graph_key(::PROTOBUF_NAMESPACE_ID::int64 value) {

  collective_graph_key_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.RunOptions.Experimental.collective_graph_key)
}

// bool use_run_handler_pool = 2;
// Plain scalar accessors: clear resets to the proto3 default (false).
inline void RunOptions_Experimental::clear_use_run_handler_pool() {
  use_run_handler_pool_ = false;
}
inline bool RunOptions_Experimental::use_run_handler_pool() const {
  // @@protoc_insertion_point(field_get:tensorflow.RunOptions.Experimental.use_run_handler_pool)
  return use_run_handler_pool_;
}
inline void RunOptions_Experimental::set_use_run_handler_pool(bool value) {

  use_run_handler_pool_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.RunOptions.Experimental.use_run_handler_pool)
}

// .tensorflow.RunOptions.Experimental.RunHandlerPoolOptions run_handler_pool_options = 3;
// Arena-aware accessors for a singular message field (generated pattern):
// - has_*: non-null pointer check (never true on the default instance);
// - clear_*: deletes only heap-owned values, arena frees its own;
// - release_*: returns a heap duplicate when the value is arena-owned;
// - unsafe_arena_release_*: returns the raw pointer with no duplication;
// - mutable_*: lazily creates the submessage on the owning arena;
// - set_allocated_*: takes ownership, copying across arenas when needed.
inline bool RunOptions_Experimental::has_run_handler_pool_options() const {
  return this != internal_default_instance() && run_handler_pool_options_ != nullptr;
}
inline void RunOptions_Experimental::clear_run_handler_pool_options() {
  if (GetArenaNoVirtual() == nullptr && run_handler_pool_options_ != nullptr) {
    delete run_handler_pool_options_;
  }
  run_handler_pool_options_ = nullptr;
}
inline const ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions& RunOptions_Experimental::run_handler_pool_options() const {
  const ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* p = run_handler_pool_options_;
  // @@protoc_insertion_point(field_get:tensorflow.RunOptions.Experimental.run_handler_pool_options)
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions*>(
      &::tensorflow::_RunOptions_Experimental_RunHandlerPoolOptions_default_instance_);
}
inline ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* RunOptions_Experimental::release_run_handler_pool_options() {
  // @@protoc_insertion_point(field_release:tensorflow.RunOptions.Experimental.run_handler_pool_options)

  ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* temp = run_handler_pool_options_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  run_handler_pool_options_ = nullptr;
  return temp;
}
inline ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* RunOptions_Experimental::unsafe_arena_release_run_handler_pool_options() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.RunOptions.Experimental.run_handler_pool_options)

  ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* temp = run_handler_pool_options_;
  run_handler_pool_options_ = nullptr;
  return temp;
}
inline ::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* RunOptions_Experimental::mutable_run_handler_pool_options() {

  if (run_handler_pool_options_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions>(GetArenaNoVirtual());
    run_handler_pool_options_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.RunOptions.Experimental.run_handler_pool_options)
  return run_handler_pool_options_;
}
inline void RunOptions_Experimental::set_allocated_run_handler_pool_options(::tensorflow::RunOptions_Experimental_RunHandlerPoolOptions* run_handler_pool_options) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    delete run_handler_pool_options_;
  }
  if (run_handler_pool_options) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      ::PROTOBUF_NAMESPACE_ID::Arena::GetArena(run_handler_pool_options);
    if (message_arena != submessage_arena) {
      run_handler_pool_options = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, run_handler_pool_options, submessage_arena);
    }

  } else {

  }
  run_handler_pool_options_ = run_handler_pool_options;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.RunOptions.Experimental.run_handler_pool_options)
}
6637
6638// -------------------------------------------------------------------
6639
// RunOptions

// .tensorflow.RunOptions.TraceLevel trace_level = 1;
// Enum stored as int; getter casts back to the enum type.
inline void RunOptions::clear_trace_level() {
  trace_level_ = 0;
}
inline ::tensorflow::RunOptions_TraceLevel RunOptions::trace_level() const {
  // @@protoc_insertion_point(field_get:tensorflow.RunOptions.trace_level)
  return static_cast< ::tensorflow::RunOptions_TraceLevel >(trace_level_);
}
inline void RunOptions::set_trace_level(::tensorflow::RunOptions_TraceLevel value) {

  trace_level_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.RunOptions.trace_level)
}

// int64 timeout_in_ms = 2;
// Plain scalar accessors: clear resets to the proto3 default (0).
inline void RunOptions::clear_timeout_in_ms() {
  timeout_in_ms_ = PROTOBUF_LONGLONG(0);
}
inline ::PROTOBUF_NAMESPACE_ID::int64 RunOptions::timeout_in_ms() const {
  // @@protoc_insertion_point(field_get:tensorflow.RunOptions.timeout_in_ms)
  return timeout_in_ms_;
}
inline void RunOptions::set_timeout_in_ms(::PROTOBUF_NAMESPACE_ID::int64 value) {

  timeout_in_ms_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.RunOptions.timeout_in_ms)
}

// int32 inter_op_thread_pool = 3;
inline void RunOptions::clear_inter_op_thread_pool() {
  inter_op_thread_pool_ = 0;
}
inline ::PROTOBUF_NAMESPACE_ID::int32 RunOptions::inter_op_thread_pool() const {
  // @@protoc_insertion_point(field_get:tensorflow.RunOptions.inter_op_thread_pool)
  return inter_op_thread_pool_;
}
inline void RunOptions::set_inter_op_thread_pool(::PROTOBUF_NAMESPACE_ID::int32 value) {

  inter_op_thread_pool_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.RunOptions.inter_op_thread_pool)
}

// bool output_partition_graphs = 5;
inline void RunOptions::clear_output_partition_graphs() {
  output_partition_graphs_ = false;
}
inline bool RunOptions::output_partition_graphs() const {
  // @@protoc_insertion_point(field_get:tensorflow.RunOptions.output_partition_graphs)
  return output_partition_graphs_;
}
inline void RunOptions::set_output_partition_graphs(bool value) {

  output_partition_graphs_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.RunOptions.output_partition_graphs)
}

// .tensorflow.DebugOptions debug_options = 6;
// Arena-aware accessors for a singular message field (generated pattern):
// release_* hands back a heap duplicate when arena-owned; unsafe_arena_release_*
// skips the copy; set_allocated_* takes ownership, copying across arenas when
// the incoming message lives on a different arena.
inline bool RunOptions::has_debug_options() const {
  return this != internal_default_instance() && debug_options_ != nullptr;
}
inline const ::tensorflow::DebugOptions& RunOptions::debug_options() const {
  const ::tensorflow::DebugOptions* p = debug_options_;
  // @@protoc_insertion_point(field_get:tensorflow.RunOptions.debug_options)
  // Falls back to the immutable shared default instance when unset.
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::DebugOptions*>(
      &::tensorflow::_DebugOptions_default_instance_);
}
inline ::tensorflow::DebugOptions* RunOptions::release_debug_options() {
  // @@protoc_insertion_point(field_release:tensorflow.RunOptions.debug_options)

  ::tensorflow::DebugOptions* temp = debug_options_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  debug_options_ = nullptr;
  return temp;
}
inline ::tensorflow::DebugOptions* RunOptions::unsafe_arena_release_debug_options() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.RunOptions.debug_options)

  ::tensorflow::DebugOptions* temp = debug_options_;
  debug_options_ = nullptr;
  return temp;
}
inline ::tensorflow::DebugOptions* RunOptions::mutable_debug_options() {

  // Lazily creates the submessage on this message's arena (heap if none).
  if (debug_options_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::DebugOptions>(GetArenaNoVirtual());
    debug_options_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.RunOptions.debug_options)
  return debug_options_;
}
inline void RunOptions::set_allocated_debug_options(::tensorflow::DebugOptions* debug_options) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(debug_options_);
  }
  if (debug_options) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(debug_options)->GetArena();
    if (message_arena != submessage_arena) {
      debug_options = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, debug_options, submessage_arena);
    }

  } else {

  }
  debug_options_ = debug_options;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.RunOptions.debug_options)
}

// bool report_tensor_allocations_upon_oom = 7;
inline void RunOptions::clear_report_tensor_allocations_upon_oom() {
  report_tensor_allocations_upon_oom_ = false;
}
inline bool RunOptions::report_tensor_allocations_upon_oom() const {
  // @@protoc_insertion_point(field_get:tensorflow.RunOptions.report_tensor_allocations_upon_oom)
  return report_tensor_allocations_upon_oom_;
}
inline void RunOptions::set_report_tensor_allocations_upon_oom(bool value) {

  report_tensor_allocations_upon_oom_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.RunOptions.report_tensor_allocations_upon_oom)
}

// .tensorflow.RunOptions.Experimental experimental = 8;
// Same arena-aware pattern as `debug_options` above; this field's type is
// declared in this file, so delete/GetArena are used directly without the
// MessageLite cast.
inline bool RunOptions::has_experimental() const {
  return this != internal_default_instance() && experimental_ != nullptr;
}
inline void RunOptions::clear_experimental() {
  if (GetArenaNoVirtual() == nullptr && experimental_ != nullptr) {
    delete experimental_;
  }
  experimental_ = nullptr;
}
inline const ::tensorflow::RunOptions_Experimental& RunOptions::experimental() const {
  const ::tensorflow::RunOptions_Experimental* p = experimental_;
  // @@protoc_insertion_point(field_get:tensorflow.RunOptions.experimental)
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::RunOptions_Experimental*>(
      &::tensorflow::_RunOptions_Experimental_default_instance_);
}
inline ::tensorflow::RunOptions_Experimental* RunOptions::release_experimental() {
  // @@protoc_insertion_point(field_release:tensorflow.RunOptions.experimental)

  ::tensorflow::RunOptions_Experimental* temp = experimental_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  experimental_ = nullptr;
  return temp;
}
inline ::tensorflow::RunOptions_Experimental* RunOptions::unsafe_arena_release_experimental() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.RunOptions.experimental)

  ::tensorflow::RunOptions_Experimental* temp = experimental_;
  experimental_ = nullptr;
  return temp;
}
inline ::tensorflow::RunOptions_Experimental* RunOptions::mutable_experimental() {

  if (experimental_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::RunOptions_Experimental>(GetArenaNoVirtual());
    experimental_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.RunOptions.experimental)
  return experimental_;
}
inline void RunOptions::set_allocated_experimental(::tensorflow::RunOptions_Experimental* experimental) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    delete experimental_;
  }
  if (experimental) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      ::PROTOBUF_NAMESPACE_ID::Arena::GetArena(experimental);
    if (message_arena != submessage_arena) {
      experimental = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, experimental, submessage_arena);
    }

  } else {

  }
  experimental_ = experimental;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.RunOptions.experimental)
}
6829
6830// -------------------------------------------------------------------
6831
// RunMetadata_FunctionGraphs

// repeated .tensorflow.GraphDef partition_graphs = 1;
// Thin wrappers over the underlying RepeatedPtrField.
inline int RunMetadata_FunctionGraphs::partition_graphs_size() const {
  return partition_graphs_.size();
}
inline ::tensorflow::GraphDef* RunMetadata_FunctionGraphs::mutable_partition_graphs(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.RunMetadata.FunctionGraphs.partition_graphs)
  return partition_graphs_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GraphDef >*
RunMetadata_FunctionGraphs::mutable_partition_graphs() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.RunMetadata.FunctionGraphs.partition_graphs)
  return &partition_graphs_;
}
inline const ::tensorflow::GraphDef& RunMetadata_FunctionGraphs::partition_graphs(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.RunMetadata.FunctionGraphs.partition_graphs)
  return partition_graphs_.Get(index);
}
inline ::tensorflow::GraphDef* RunMetadata_FunctionGraphs::add_partition_graphs() {
  // @@protoc_insertion_point(field_add:tensorflow.RunMetadata.FunctionGraphs.partition_graphs)
  // Appends a new default-constructed GraphDef and returns it for filling in.
  return partition_graphs_.Add();
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GraphDef >&
RunMetadata_FunctionGraphs::partition_graphs() const {
  // @@protoc_insertion_point(field_list:tensorflow.RunMetadata.FunctionGraphs.partition_graphs)
  return partition_graphs_;
}

// .tensorflow.GraphDef pre_optimization_graph = 2;
// Arena-aware accessors for a singular message field (generated pattern):
// release_* hands back a heap duplicate when arena-owned; unsafe_arena_release_*
// skips the copy; mutable_* lazily creates on the owning arena; set_allocated_*
// takes ownership, copying across arenas when needed.
inline bool RunMetadata_FunctionGraphs::has_pre_optimization_graph() const {
  return this != internal_default_instance() && pre_optimization_graph_ != nullptr;
}
inline const ::tensorflow::GraphDef& RunMetadata_FunctionGraphs::pre_optimization_graph() const {
  const ::tensorflow::GraphDef* p = pre_optimization_graph_;
  // @@protoc_insertion_point(field_get:tensorflow.RunMetadata.FunctionGraphs.pre_optimization_graph)
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::GraphDef*>(
      &::tensorflow::_GraphDef_default_instance_);
}
inline ::tensorflow::GraphDef* RunMetadata_FunctionGraphs::release_pre_optimization_graph() {
  // @@protoc_insertion_point(field_release:tensorflow.RunMetadata.FunctionGraphs.pre_optimization_graph)

  ::tensorflow::GraphDef* temp = pre_optimization_graph_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  pre_optimization_graph_ = nullptr;
  return temp;
}
inline ::tensorflow::GraphDef* RunMetadata_FunctionGraphs::unsafe_arena_release_pre_optimization_graph() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.RunMetadata.FunctionGraphs.pre_optimization_graph)

  ::tensorflow::GraphDef* temp = pre_optimization_graph_;
  pre_optimization_graph_ = nullptr;
  return temp;
}
inline ::tensorflow::GraphDef* RunMetadata_FunctionGraphs::mutable_pre_optimization_graph() {

  if (pre_optimization_graph_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::GraphDef>(GetArenaNoVirtual());
    pre_optimization_graph_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.RunMetadata.FunctionGraphs.pre_optimization_graph)
  return pre_optimization_graph_;
}
inline void RunMetadata_FunctionGraphs::set_allocated_pre_optimization_graph(::tensorflow::GraphDef* pre_optimization_graph) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(pre_optimization_graph_);
  }
  if (pre_optimization_graph) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(pre_optimization_graph)->GetArena();
    if (message_arena != submessage_arena) {
      pre_optimization_graph = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, pre_optimization_graph, submessage_arena);
    }

  } else {

  }
  pre_optimization_graph_ = pre_optimization_graph;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.RunMetadata.FunctionGraphs.pre_optimization_graph)
}

// .tensorflow.GraphDef post_optimization_graph = 3;
// Same arena-aware pattern as `pre_optimization_graph` above.
inline bool RunMetadata_FunctionGraphs::has_post_optimization_graph() const {
  return this != internal_default_instance() && post_optimization_graph_ != nullptr;
}
inline const ::tensorflow::GraphDef& RunMetadata_FunctionGraphs::post_optimization_graph() const {
  const ::tensorflow::GraphDef* p = post_optimization_graph_;
  // @@protoc_insertion_point(field_get:tensorflow.RunMetadata.FunctionGraphs.post_optimization_graph)
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::GraphDef*>(
      &::tensorflow::_GraphDef_default_instance_);
}
inline ::tensorflow::GraphDef* RunMetadata_FunctionGraphs::release_post_optimization_graph() {
  // @@protoc_insertion_point(field_release:tensorflow.RunMetadata.FunctionGraphs.post_optimization_graph)

  ::tensorflow::GraphDef* temp = post_optimization_graph_;
  if (GetArenaNoVirtual() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  post_optimization_graph_ = nullptr;
  return temp;
}
inline ::tensorflow::GraphDef* RunMetadata_FunctionGraphs::unsafe_arena_release_post_optimization_graph() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.RunMetadata.FunctionGraphs.post_optimization_graph)

  ::tensorflow::GraphDef* temp = post_optimization_graph_;
  post_optimization_graph_ = nullptr;
  return temp;
}
inline ::tensorflow::GraphDef* RunMetadata_FunctionGraphs::mutable_post_optimization_graph() {

  if (post_optimization_graph_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::GraphDef>(GetArenaNoVirtual());
    post_optimization_graph_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.RunMetadata.FunctionGraphs.post_optimization_graph)
  return post_optimization_graph_;
}
inline void RunMetadata_FunctionGraphs::set_allocated_post_optimization_graph(::tensorflow::GraphDef* post_optimization_graph) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(post_optimization_graph_);
  }
  if (post_optimization_graph) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(post_optimization_graph)->GetArena();
    if (message_arena != submessage_arena) {
      post_optimization_graph = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, post_optimization_graph, submessage_arena);
    }

  } else {

  }
  post_optimization_graph_ = post_optimization_graph;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.RunMetadata.FunctionGraphs.post_optimization_graph)
}
6972
6973// -------------------------------------------------------------------
6974
6975// RunMetadata
6976
6977// .tensorflow.StepStats step_stats = 1;
6978inline bool RunMetadata::has_step_stats() const {
6979 return this != internal_default_instance() && step_stats_ != nullptr;
6980}
6981inline const ::tensorflow::StepStats& RunMetadata::step_stats() const {
6982 const ::tensorflow::StepStats* p = step_stats_;
6983 // @@protoc_insertion_point(field_get:tensorflow.RunMetadata.step_stats)
6984 return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::StepStats*>(
6985 &::tensorflow::_StepStats_default_instance_);
6986}
6987inline ::tensorflow::StepStats* RunMetadata::release_step_stats() {
6988 // @@protoc_insertion_point(field_release:tensorflow.RunMetadata.step_stats)
6989
6990 ::tensorflow::StepStats* temp = step_stats_;
6991 if (GetArenaNoVirtual() != nullptr) {
6992 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
6993 }
6994 step_stats_ = nullptr;
6995 return temp;
6996}
6997inline ::tensorflow::StepStats* RunMetadata::unsafe_arena_release_step_stats() {
6998 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.RunMetadata.step_stats)
6999
7000 ::tensorflow::StepStats* temp = step_stats_;
7001 step_stats_ = nullptr;
7002 return temp;
7003}
7004inline ::tensorflow::StepStats* RunMetadata::mutable_step_stats() {
7005
7006 if (step_stats_ == nullptr) {
7007 auto* p = CreateMaybeMessage<::tensorflow::StepStats>(GetArenaNoVirtual());
7008 step_stats_ = p;
7009 }
7010 // @@protoc_insertion_point(field_mutable:tensorflow.RunMetadata.step_stats)
7011 return step_stats_;
7012}
7013inline void RunMetadata::set_allocated_step_stats(::tensorflow::StepStats* step_stats) {
7014 ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
7015 if (message_arena == nullptr) {
7016 delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(step_stats_);
7017 }
7018 if (step_stats) {
7019 ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
7020 reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(step_stats)->GetArena();
7021 if (message_arena != submessage_arena) {
7022 step_stats = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
7023 message_arena, step_stats, submessage_arena);
7024 }
7025
7026 } else {
7027
7028 }
7029 step_stats_ = step_stats;
7030 // @@protoc_insertion_point(field_set_allocated:tensorflow.RunMetadata.step_stats)
7031}
7032
7033// .tensorflow.CostGraphDef cost_graph = 2;
7034inline bool RunMetadata::has_cost_graph() const {
7035 return this != internal_default_instance() && cost_graph_ != nullptr;
7036}
7037inline const ::tensorflow::CostGraphDef& RunMetadata::cost_graph() const {
7038 const ::tensorflow::CostGraphDef* p = cost_graph_;
7039 // @@protoc_insertion_point(field_get:tensorflow.RunMetadata.cost_graph)
7040 return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::CostGraphDef*>(
7041 &::tensorflow::_CostGraphDef_default_instance_);
7042}
7043inline ::tensorflow::CostGraphDef* RunMetadata::release_cost_graph() {
7044 // @@protoc_insertion_point(field_release:tensorflow.RunMetadata.cost_graph)
7045
7046 ::tensorflow::CostGraphDef* temp = cost_graph_;
7047 if (GetArenaNoVirtual() != nullptr) {
7048 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
7049 }
7050 cost_graph_ = nullptr;
7051 return temp;
7052}
7053inline ::tensorflow::CostGraphDef* RunMetadata::unsafe_arena_release_cost_graph() {
7054 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.RunMetadata.cost_graph)
7055
7056 ::tensorflow::CostGraphDef* temp = cost_graph_;
7057 cost_graph_ = nullptr;
7058 return temp;
7059}
7060inline ::tensorflow::CostGraphDef* RunMetadata::mutable_cost_graph() {
7061
7062 if (cost_graph_ == nullptr) {
7063 auto* p = CreateMaybeMessage<::tensorflow::CostGraphDef>(GetArenaNoVirtual());
7064 cost_graph_ = p;
7065 }
7066 // @@protoc_insertion_point(field_mutable:tensorflow.RunMetadata.cost_graph)
7067 return cost_graph_;
7068}
7069inline void RunMetadata::set_allocated_cost_graph(::tensorflow::CostGraphDef* cost_graph) {
7070 ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
7071 if (message_arena == nullptr) {
7072 delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(cost_graph_);
7073 }
7074 if (cost_graph) {
7075 ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
7076 reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(cost_graph)->GetArena();
7077 if (message_arena != submessage_arena) {
7078 cost_graph = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
7079 message_arena, cost_graph, submessage_arena);
7080 }
7081
7082 } else {
7083
7084 }
7085 cost_graph_ = cost_graph;
7086 // @@protoc_insertion_point(field_set_allocated:tensorflow.RunMetadata.cost_graph)
7087}
7088
7089// repeated .tensorflow.GraphDef partition_graphs = 3;
7090inline int RunMetadata::partition_graphs_size() const {
7091 return partition_graphs_.size();
7092}
7093inline ::tensorflow::GraphDef* RunMetadata::mutable_partition_graphs(int index) {
7094 // @@protoc_insertion_point(field_mutable:tensorflow.RunMetadata.partition_graphs)
7095 return partition_graphs_.Mutable(index);
7096}
7097inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GraphDef >*
7098RunMetadata::mutable_partition_graphs() {
7099 // @@protoc_insertion_point(field_mutable_list:tensorflow.RunMetadata.partition_graphs)
7100 return &partition_graphs_;
7101}
7102inline const ::tensorflow::GraphDef& RunMetadata::partition_graphs(int index) const {
7103 // @@protoc_insertion_point(field_get:tensorflow.RunMetadata.partition_graphs)
7104 return partition_graphs_.Get(index);
7105}
7106inline ::tensorflow::GraphDef* RunMetadata::add_partition_graphs() {
7107 // @@protoc_insertion_point(field_add:tensorflow.RunMetadata.partition_graphs)
7108 return partition_graphs_.Add();
7109}
7110inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::GraphDef >&
7111RunMetadata::partition_graphs() const {
7112 // @@protoc_insertion_point(field_list:tensorflow.RunMetadata.partition_graphs)
7113 return partition_graphs_;
7114}
7115
// repeated .tensorflow.RunMetadata.FunctionGraphs function_graphs = 4;
// Standard protoc accessors for the repeated nested-message field
// `function_graphs` (RunMetadata.FunctionGraphs).
inline int RunMetadata::function_graphs_size() const {
  // Number of FunctionGraphs elements currently stored.
  return function_graphs_.size();
}
inline void RunMetadata::clear_function_graphs() {
  // Removes all elements from the repeated field.
  function_graphs_.Clear();
}
inline ::tensorflow::RunMetadata_FunctionGraphs* RunMetadata::mutable_function_graphs(int index) {
  // Mutable access to one element; `index` must be in [0, size).
  // @@protoc_insertion_point(field_mutable:tensorflow.RunMetadata.function_graphs)
  return function_graphs_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::RunMetadata_FunctionGraphs >*
RunMetadata::mutable_function_graphs() {
  // Mutable access to the whole repeated field.
  // @@protoc_insertion_point(field_mutable_list:tensorflow.RunMetadata.function_graphs)
  return &function_graphs_;
}
inline const ::tensorflow::RunMetadata_FunctionGraphs& RunMetadata::function_graphs(int index) const {
  // Read-only access to one element; `index` must be in [0, size).
  // @@protoc_insertion_point(field_get:tensorflow.RunMetadata.function_graphs)
  return function_graphs_.Get(index);
}
inline ::tensorflow::RunMetadata_FunctionGraphs* RunMetadata::add_function_graphs() {
  // Appends a new (default-constructed) element and returns it for filling in.
  // @@protoc_insertion_point(field_add:tensorflow.RunMetadata.function_graphs)
  return function_graphs_.Add();
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::RunMetadata_FunctionGraphs >&
RunMetadata::function_graphs() const {
  // Read-only access to the whole repeated field.
  // @@protoc_insertion_point(field_list:tensorflow.RunMetadata.function_graphs)
  return function_graphs_;
}
7145
// .tensorflow.SessionMetadata session_metadata = 5;
// Singular message field: `session_metadata_` is a lazily-allocated pointer
// that may live on this message's arena or on the heap.  The accessors below
// implement the generator's arena-aware ownership rules; their exact
// statement order matters and must not be changed by hand.
inline bool RunMetadata::has_session_metadata() const {
  // Present iff the pointer is set; the default instance never reports a
  // submessage as present.
  return this != internal_default_instance() && session_metadata_ != nullptr;
}
inline void RunMetadata::clear_session_metadata() {
  // Heap-allocated submessages are deleted; arena-allocated ones are only
  // disowned (the arena reclaims them later).
  if (GetArenaNoVirtual() == nullptr && session_metadata_ != nullptr) {
    delete session_metadata_;
  }
  session_metadata_ = nullptr;
}
inline const ::tensorflow::SessionMetadata& RunMetadata::session_metadata() const {
  const ::tensorflow::SessionMetadata* p = session_metadata_;
  // @@protoc_insertion_point(field_get:tensorflow.RunMetadata.session_metadata)
  // Unset fields read as the immutable default instance (never nullptr).
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::SessionMetadata*>(
      &::tensorflow::_SessionMetadata_default_instance_);
}
inline ::tensorflow::SessionMetadata* RunMetadata::release_session_metadata() {
  // @@protoc_insertion_point(field_release:tensorflow.RunMetadata.session_metadata)

  ::tensorflow::SessionMetadata* temp = session_metadata_;
  if (GetArenaNoVirtual() != nullptr) {
    // Caller expects heap ownership, so an arena-resident submessage is
    // duplicated onto the heap before being handed out.
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  session_metadata_ = nullptr;
  return temp;
}
inline ::tensorflow::SessionMetadata* RunMetadata::unsafe_arena_release_session_metadata() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.RunMetadata.session_metadata)

  // "Unsafe" variant: returns the raw pointer without copying; the caller is
  // responsible for respecting the arena's lifetime.
  ::tensorflow::SessionMetadata* temp = session_metadata_;
  session_metadata_ = nullptr;
  return temp;
}
inline ::tensorflow::SessionMetadata* RunMetadata::mutable_session_metadata() {

  // Lazily creates the submessage on this message's arena (or heap if none).
  if (session_metadata_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::SessionMetadata>(GetArenaNoVirtual());
    session_metadata_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.RunMetadata.session_metadata)
  return session_metadata_;
}
inline void RunMetadata::set_allocated_session_metadata(::tensorflow::SessionMetadata* session_metadata) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    // Heap-owned previous value must be freed before being replaced.
    delete session_metadata_;
  }
  if (session_metadata) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      ::PROTOBUF_NAMESPACE_ID::Arena::GetArena(session_metadata);
    if (message_arena != submessage_arena) {
      // Cross-arena (or arena/heap) handoff: copy the value so that it is
      // owned by this message's arena.
      session_metadata = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, session_metadata, submessage_arena);
    }

  } else {
    // Branch intentionally empty: proto3 keeps no has-bit to clear here.
  }
  session_metadata_ = session_metadata;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.RunMetadata.session_metadata)
}
7207
7208// -------------------------------------------------------------------
7209
7210// TensorConnection
7211
// string from_tensor = 1;
// Accessors for the proto3 string field `from_tensor`.  Storage is an
// internal ArenaStringPtr; every call passes the process-wide empty string as
// the "default value" sentinel, as that internal API requires.
inline void TensorConnection::clear_from_tensor() {
  // Resets the field to the (shared) empty string.
  from_tensor_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline const std::string& TensorConnection::from_tensor() const {
  // @@protoc_insertion_point(field_get:tensorflow.TensorConnection.from_tensor)
  return from_tensor_.Get();
}
// The five setters below differ only in the accepted argument type
// (const ref, rvalue, C string, pointer+length).
inline void TensorConnection::set_from_tensor(const std::string& value) {

  from_tensor_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set:tensorflow.TensorConnection.from_tensor)
}
inline void TensorConnection::set_from_tensor(std::string&& value) {

  from_tensor_.Set(
    &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_rvalue:tensorflow.TensorConnection.from_tensor)
}
inline void TensorConnection::set_from_tensor(const char* value) {
  GOOGLE_DCHECK(value != nullptr);

  from_tensor_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
              GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_char:tensorflow.TensorConnection.from_tensor)
}
inline void TensorConnection::set_from_tensor(const char* value,
    size_t size) {

  // Pointer+length form: `value` may contain embedded NULs.
  from_tensor_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
      reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_pointer:tensorflow.TensorConnection.from_tensor)
}
inline std::string* TensorConnection::mutable_from_tensor() {

  // @@protoc_insertion_point(field_mutable:tensorflow.TensorConnection.from_tensor)
  return from_tensor_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline std::string* TensorConnection::release_from_tensor() {
  // @@protoc_insertion_point(field_release:tensorflow.TensorConnection.from_tensor)

  // Transfers ownership of the string to the caller and clears the field.
  return from_tensor_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline void TensorConnection::set_allocated_from_tensor(std::string* from_tensor) {
  if (from_tensor != nullptr) {
    // Branch intentionally empty: proto3 keeps no has-bit for strings.
  } else {

  }
  from_tensor_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), from_tensor,
      GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_allocated:tensorflow.TensorConnection.from_tensor)
}
inline std::string* TensorConnection::unsafe_arena_release_from_tensor() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.TensorConnection.from_tensor)
  // Only valid on arena-allocated messages (enforced by the DCHECK below).
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);

  return from_tensor_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      GetArenaNoVirtual());
}
inline void TensorConnection::unsafe_arena_set_allocated_from_tensor(
    std::string* from_tensor) {
  // Only valid on arena-allocated messages (enforced by the DCHECK below).
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  if (from_tensor != nullptr) {

  } else {

  }
  from_tensor_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      from_tensor, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.TensorConnection.from_tensor)
}
7284
// string to_tensor = 2;
// Accessors for the proto3 string field `to_tensor`; identical in structure
// to the `from_tensor` accessors above (ArenaStringPtr with the shared empty
// string as the default-value sentinel).
inline void TensorConnection::clear_to_tensor() {
  // Resets the field to the (shared) empty string.
  to_tensor_.ClearToEmpty(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline const std::string& TensorConnection::to_tensor() const {
  // @@protoc_insertion_point(field_get:tensorflow.TensorConnection.to_tensor)
  return to_tensor_.Get();
}
// The five setters below differ only in the accepted argument type.
inline void TensorConnection::set_to_tensor(const std::string& value) {

  to_tensor_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set:tensorflow.TensorConnection.to_tensor)
}
inline void TensorConnection::set_to_tensor(std::string&& value) {

  to_tensor_.Set(
    &::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_rvalue:tensorflow.TensorConnection.to_tensor)
}
inline void TensorConnection::set_to_tensor(const char* value) {
  GOOGLE_DCHECK(value != nullptr);

  to_tensor_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
              GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_char:tensorflow.TensorConnection.to_tensor)
}
inline void TensorConnection::set_to_tensor(const char* value,
    size_t size) {

  // Pointer+length form: `value` may contain embedded NULs.
  to_tensor_.Set(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), ::std::string(
      reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_pointer:tensorflow.TensorConnection.to_tensor)
}
inline std::string* TensorConnection::mutable_to_tensor() {

  // @@protoc_insertion_point(field_mutable:tensorflow.TensorConnection.to_tensor)
  return to_tensor_.Mutable(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline std::string* TensorConnection::release_to_tensor() {
  // @@protoc_insertion_point(field_release:tensorflow.TensorConnection.to_tensor)

  // Transfers ownership of the string to the caller and clears the field.
  return to_tensor_.Release(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline void TensorConnection::set_allocated_to_tensor(std::string* to_tensor) {
  if (to_tensor != nullptr) {
    // Branch intentionally empty: proto3 keeps no has-bit for strings.
  } else {

  }
  to_tensor_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), to_tensor,
      GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_allocated:tensorflow.TensorConnection.to_tensor)
}
inline std::string* TensorConnection::unsafe_arena_release_to_tensor() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.TensorConnection.to_tensor)
  // Only valid on arena-allocated messages (enforced by the DCHECK below).
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);

  return to_tensor_.UnsafeArenaRelease(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      GetArenaNoVirtual());
}
inline void TensorConnection::unsafe_arena_set_allocated_to_tensor(
    std::string* to_tensor) {
  // Only valid on arena-allocated messages (enforced by the DCHECK below).
  GOOGLE_DCHECK(GetArenaNoVirtual() != nullptr);
  if (to_tensor != nullptr) {

  } else {

  }
  to_tensor_.UnsafeArenaSetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(),
      to_tensor, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.TensorConnection.to_tensor)
}
7357
7358// -------------------------------------------------------------------
7359
7360// -------------------------------------------------------------------
7361
7362// -------------------------------------------------------------------
7363
7364// CallableOptions
7365
// repeated string feed = 1;
// Accessors for the repeated string field `feed`, backed by a
// RepeatedPtrField<std::string>.  set_/add_ overloads differ only in the
// accepted argument type.
inline int CallableOptions::feed_size() const {
  return feed_.size();
}
inline void CallableOptions::clear_feed() {
  feed_.Clear();
}
inline const std::string& CallableOptions::feed(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.CallableOptions.feed)
  return feed_.Get(index);
}
inline std::string* CallableOptions::mutable_feed(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.CallableOptions.feed)
  return feed_.Mutable(index);
}
inline void CallableOptions::set_feed(int index, const std::string& value) {
  // @@protoc_insertion_point(field_set:tensorflow.CallableOptions.feed)
  feed_.Mutable(index)->assign(value);
}
inline void CallableOptions::set_feed(int index, std::string&& value) {
  // @@protoc_insertion_point(field_set:tensorflow.CallableOptions.feed)
  feed_.Mutable(index)->assign(std::move(value));
}
inline void CallableOptions::set_feed(int index, const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  feed_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set_char:tensorflow.CallableOptions.feed)
}
inline void CallableOptions::set_feed(int index, const char* value, size_t size) {
  // Pointer+length form: `value` may contain embedded NULs.
  feed_.Mutable(index)->assign(
    reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_set_pointer:tensorflow.CallableOptions.feed)
}
inline std::string* CallableOptions::add_feed() {
  // Appends an empty string and returns it for filling in.
  // @@protoc_insertion_point(field_add_mutable:tensorflow.CallableOptions.feed)
  return feed_.Add();
}
inline void CallableOptions::add_feed(const std::string& value) {
  feed_.Add()->assign(value);
  // @@protoc_insertion_point(field_add:tensorflow.CallableOptions.feed)
}
inline void CallableOptions::add_feed(std::string&& value) {
  feed_.Add(std::move(value));
  // @@protoc_insertion_point(field_add:tensorflow.CallableOptions.feed)
}
inline void CallableOptions::add_feed(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  feed_.Add()->assign(value);
  // @@protoc_insertion_point(field_add_char:tensorflow.CallableOptions.feed)
}
inline void CallableOptions::add_feed(const char* value, size_t size) {
  feed_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:tensorflow.CallableOptions.feed)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
CallableOptions::feed() const {
  // Read-only access to the whole repeated field.
  // @@protoc_insertion_point(field_list:tensorflow.CallableOptions.feed)
  return feed_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
CallableOptions::mutable_feed() {
  // Mutable access to the whole repeated field.
  // @@protoc_insertion_point(field_mutable_list:tensorflow.CallableOptions.feed)
  return &feed_;
}
7430
// repeated string fetch = 2;
// Accessors for the repeated string field `fetch`; identical in structure to
// the `feed` accessors above.
inline int CallableOptions::fetch_size() const {
  return fetch_.size();
}
inline void CallableOptions::clear_fetch() {
  fetch_.Clear();
}
inline const std::string& CallableOptions::fetch(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.CallableOptions.fetch)
  return fetch_.Get(index);
}
inline std::string* CallableOptions::mutable_fetch(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.CallableOptions.fetch)
  return fetch_.Mutable(index);
}
inline void CallableOptions::set_fetch(int index, const std::string& value) {
  // @@protoc_insertion_point(field_set:tensorflow.CallableOptions.fetch)
  fetch_.Mutable(index)->assign(value);
}
inline void CallableOptions::set_fetch(int index, std::string&& value) {
  // @@protoc_insertion_point(field_set:tensorflow.CallableOptions.fetch)
  fetch_.Mutable(index)->assign(std::move(value));
}
inline void CallableOptions::set_fetch(int index, const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  fetch_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set_char:tensorflow.CallableOptions.fetch)
}
inline void CallableOptions::set_fetch(int index, const char* value, size_t size) {
  // Pointer+length form: `value` may contain embedded NULs.
  fetch_.Mutable(index)->assign(
    reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_set_pointer:tensorflow.CallableOptions.fetch)
}
inline std::string* CallableOptions::add_fetch() {
  // Appends an empty string and returns it for filling in.
  // @@protoc_insertion_point(field_add_mutable:tensorflow.CallableOptions.fetch)
  return fetch_.Add();
}
inline void CallableOptions::add_fetch(const std::string& value) {
  fetch_.Add()->assign(value);
  // @@protoc_insertion_point(field_add:tensorflow.CallableOptions.fetch)
}
inline void CallableOptions::add_fetch(std::string&& value) {
  fetch_.Add(std::move(value));
  // @@protoc_insertion_point(field_add:tensorflow.CallableOptions.fetch)
}
inline void CallableOptions::add_fetch(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  fetch_.Add()->assign(value);
  // @@protoc_insertion_point(field_add_char:tensorflow.CallableOptions.fetch)
}
inline void CallableOptions::add_fetch(const char* value, size_t size) {
  fetch_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:tensorflow.CallableOptions.fetch)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
CallableOptions::fetch() const {
  // Read-only access to the whole repeated field.
  // @@protoc_insertion_point(field_list:tensorflow.CallableOptions.fetch)
  return fetch_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
CallableOptions::mutable_fetch() {
  // Mutable access to the whole repeated field.
  // @@protoc_insertion_point(field_mutable_list:tensorflow.CallableOptions.fetch)
  return &fetch_;
}
7495
// repeated string target = 3;
// Accessors for the repeated string field `target`; identical in structure to
// the `feed` accessors above.
inline int CallableOptions::target_size() const {
  return target_.size();
}
inline void CallableOptions::clear_target() {
  target_.Clear();
}
inline const std::string& CallableOptions::target(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.CallableOptions.target)
  return target_.Get(index);
}
inline std::string* CallableOptions::mutable_target(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.CallableOptions.target)
  return target_.Mutable(index);
}
inline void CallableOptions::set_target(int index, const std::string& value) {
  // @@protoc_insertion_point(field_set:tensorflow.CallableOptions.target)
  target_.Mutable(index)->assign(value);
}
inline void CallableOptions::set_target(int index, std::string&& value) {
  // @@protoc_insertion_point(field_set:tensorflow.CallableOptions.target)
  target_.Mutable(index)->assign(std::move(value));
}
inline void CallableOptions::set_target(int index, const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  target_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set_char:tensorflow.CallableOptions.target)
}
inline void CallableOptions::set_target(int index, const char* value, size_t size) {
  // Pointer+length form: `value` may contain embedded NULs.
  target_.Mutable(index)->assign(
    reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_set_pointer:tensorflow.CallableOptions.target)
}
inline std::string* CallableOptions::add_target() {
  // Appends an empty string and returns it for filling in.
  // @@protoc_insertion_point(field_add_mutable:tensorflow.CallableOptions.target)
  return target_.Add();
}
inline void CallableOptions::add_target(const std::string& value) {
  target_.Add()->assign(value);
  // @@protoc_insertion_point(field_add:tensorflow.CallableOptions.target)
}
inline void CallableOptions::add_target(std::string&& value) {
  target_.Add(std::move(value));
  // @@protoc_insertion_point(field_add:tensorflow.CallableOptions.target)
}
inline void CallableOptions::add_target(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  target_.Add()->assign(value);
  // @@protoc_insertion_point(field_add_char:tensorflow.CallableOptions.target)
}
inline void CallableOptions::add_target(const char* value, size_t size) {
  target_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:tensorflow.CallableOptions.target)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
CallableOptions::target() const {
  // Read-only access to the whole repeated field.
  // @@protoc_insertion_point(field_list:tensorflow.CallableOptions.target)
  return target_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
CallableOptions::mutable_target() {
  // Mutable access to the whole repeated field.
  // @@protoc_insertion_point(field_mutable_list:tensorflow.CallableOptions.target)
  return &target_;
}
7560
// .tensorflow.RunOptions run_options = 4;
// Singular message field: lazily-allocated pointer that may live on this
// message's arena or on the heap.  Same arena-ownership pattern as
// RunMetadata::session_metadata above; statement order is significant.
inline bool CallableOptions::has_run_options() const {
  // Present iff the pointer is set; the default instance never reports a
  // submessage as present.
  return this != internal_default_instance() && run_options_ != nullptr;
}
inline void CallableOptions::clear_run_options() {
  // Heap-allocated submessages are deleted; arena-allocated ones are only
  // disowned (the arena reclaims them later).
  if (GetArenaNoVirtual() == nullptr && run_options_ != nullptr) {
    delete run_options_;
  }
  run_options_ = nullptr;
}
inline const ::tensorflow::RunOptions& CallableOptions::run_options() const {
  const ::tensorflow::RunOptions* p = run_options_;
  // @@protoc_insertion_point(field_get:tensorflow.CallableOptions.run_options)
  // Unset fields read as the immutable default instance (never nullptr).
  return p != nullptr ? *p : *reinterpret_cast<const ::tensorflow::RunOptions*>(
      &::tensorflow::_RunOptions_default_instance_);
}
inline ::tensorflow::RunOptions* CallableOptions::release_run_options() {
  // @@protoc_insertion_point(field_release:tensorflow.CallableOptions.run_options)

  ::tensorflow::RunOptions* temp = run_options_;
  if (GetArenaNoVirtual() != nullptr) {
    // Caller expects heap ownership, so an arena-resident submessage is
    // duplicated onto the heap before being handed out.
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
  run_options_ = nullptr;
  return temp;
}
inline ::tensorflow::RunOptions* CallableOptions::unsafe_arena_release_run_options() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.CallableOptions.run_options)

  // "Unsafe" variant: returns the raw pointer without copying; the caller is
  // responsible for respecting the arena's lifetime.
  ::tensorflow::RunOptions* temp = run_options_;
  run_options_ = nullptr;
  return temp;
}
inline ::tensorflow::RunOptions* CallableOptions::mutable_run_options() {

  // Lazily creates the submessage on this message's arena (or heap if none).
  if (run_options_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::RunOptions>(GetArenaNoVirtual());
    run_options_ = p;
  }
  // @@protoc_insertion_point(field_mutable:tensorflow.CallableOptions.run_options)
  return run_options_;
}
inline void CallableOptions::set_allocated_run_options(::tensorflow::RunOptions* run_options) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaNoVirtual();
  if (message_arena == nullptr) {
    // Heap-owned previous value must be freed before being replaced.
    delete run_options_;
  }
  if (run_options) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
      ::PROTOBUF_NAMESPACE_ID::Arena::GetArena(run_options);
    if (message_arena != submessage_arena) {
      // Cross-arena (or arena/heap) handoff: copy the value so that it is
      // owned by this message's arena.
      run_options = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, run_options, submessage_arena);
    }

  } else {
    // Branch intentionally empty: proto3 keeps no has-bit to clear here.
  }
  run_options_ = run_options;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.CallableOptions.run_options)
}
7622
// repeated .tensorflow.TensorConnection tensor_connection = 5;
// Standard protoc accessors for the repeated message field
// `tensor_connection`.
inline int CallableOptions::tensor_connection_size() const {
  // Number of TensorConnection elements currently stored.
  return tensor_connection_.size();
}
inline void CallableOptions::clear_tensor_connection() {
  // Removes all elements from the repeated field.
  tensor_connection_.Clear();
}
inline ::tensorflow::TensorConnection* CallableOptions::mutable_tensor_connection(int index) {
  // Mutable access to one element; `index` must be in [0, size).
  // @@protoc_insertion_point(field_mutable:tensorflow.CallableOptions.tensor_connection)
  return tensor_connection_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorConnection >*
CallableOptions::mutable_tensor_connection() {
  // Mutable access to the whole repeated field.
  // @@protoc_insertion_point(field_mutable_list:tensorflow.CallableOptions.tensor_connection)
  return &tensor_connection_;
}
inline const ::tensorflow::TensorConnection& CallableOptions::tensor_connection(int index) const {
  // Read-only access to one element; `index` must be in [0, size).
  // @@protoc_insertion_point(field_get:tensorflow.CallableOptions.tensor_connection)
  return tensor_connection_.Get(index);
}
inline ::tensorflow::TensorConnection* CallableOptions::add_tensor_connection() {
  // Appends a new (default-constructed) element and returns it for filling in.
  // @@protoc_insertion_point(field_add:tensorflow.CallableOptions.tensor_connection)
  return tensor_connection_.Add();
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorConnection >&
CallableOptions::tensor_connection() const {
  // Read-only access to the whole repeated field.
  // @@protoc_insertion_point(field_list:tensorflow.CallableOptions.tensor_connection)
  return tensor_connection_;
}
7652
// map<string, string> feed_devices = 6;
// Accessors for the map field `feed_devices`, backed by an internal MapField;
// callers interact with it through the protobuf Map<string, string> view.
inline int CallableOptions::feed_devices_size() const {
  return feed_devices_.size();
}
inline void CallableOptions::clear_feed_devices() {
  feed_devices_.Clear();
}
inline const ::PROTOBUF_NAMESPACE_ID::Map< std::string, std::string >&
CallableOptions::feed_devices() const {
  // Read-only view of the map.
  // @@protoc_insertion_point(field_map:tensorflow.CallableOptions.feed_devices)
  return feed_devices_.GetMap();
}
inline ::PROTOBUF_NAMESPACE_ID::Map< std::string, std::string >*
CallableOptions::mutable_feed_devices() {
  // Mutable view of the map.
  // @@protoc_insertion_point(field_mutable_map:tensorflow.CallableOptions.feed_devices)
  return feed_devices_.MutableMap();
}

// map<string, string> fetch_devices = 7;
// Accessors for the map field `fetch_devices`; same structure as
// `feed_devices` above.
inline int CallableOptions::fetch_devices_size() const {
  return fetch_devices_.size();
}
inline void CallableOptions::clear_fetch_devices() {
  fetch_devices_.Clear();
}
inline const ::PROTOBUF_NAMESPACE_ID::Map< std::string, std::string >&
CallableOptions::fetch_devices() const {
  // Read-only view of the map.
  // @@protoc_insertion_point(field_map:tensorflow.CallableOptions.fetch_devices)
  return fetch_devices_.GetMap();
}
inline ::PROTOBUF_NAMESPACE_ID::Map< std::string, std::string >*
CallableOptions::mutable_fetch_devices() {
  // Mutable view of the map.
  // @@protoc_insertion_point(field_mutable_map:tensorflow.CallableOptions.fetch_devices)
  return fetch_devices_.MutableMap();
}
7688
// bool fetch_skip_sync = 8;
// Accessors for the scalar bool field `fetch_skip_sync` (proto3 default:
// false).
inline void CallableOptions::clear_fetch_skip_sync() {
  fetch_skip_sync_ = false;
}
inline bool CallableOptions::fetch_skip_sync() const {
  // @@protoc_insertion_point(field_get:tensorflow.CallableOptions.fetch_skip_sync)
  return fetch_skip_sync_;
}
inline void CallableOptions::set_fetch_skip_sync(bool value) {

  fetch_skip_sync_ = value;
  // @@protoc_insertion_point(field_set:tensorflow.CallableOptions.fetch_skip_sync)
}
7702
7703#ifdef __GNUC__
7704 #pragma GCC diagnostic pop
7705#endif // __GNUC__
7706// -------------------------------------------------------------------
7707
7708// -------------------------------------------------------------------
7709
7710// -------------------------------------------------------------------
7711
7712// -------------------------------------------------------------------
7713
7714// -------------------------------------------------------------------
7715
7716// -------------------------------------------------------------------
7717
7718// -------------------------------------------------------------------
7719
7720// -------------------------------------------------------------------
7721
7722// -------------------------------------------------------------------
7723
7724// -------------------------------------------------------------------
7725
7726// -------------------------------------------------------------------
7727
7728// -------------------------------------------------------------------
7729
7730// -------------------------------------------------------------------
7731
7732// -------------------------------------------------------------------
7733
7734// -------------------------------------------------------------------
7735
7736// -------------------------------------------------------------------
7737
7738// -------------------------------------------------------------------
7739
7740// -------------------------------------------------------------------
7741
7742// -------------------------------------------------------------------
7743
7744
7745// @@protoc_insertion_point(namespace_scope)
7746
7747} // namespace tensorflow
7748
7749PROTOBUF_NAMESPACE_OPEN
7750
// The specializations below register each generated enum with protobuf's
// reflection machinery: `is_proto_enum` lets templated code detect generated
// enums, and `GetEnumDescriptor` routes to the generated descriptor accessor.
template <> struct is_proto_enum< ::tensorflow::OptimizerOptions_Level> : ::std::true_type {};
template <>
inline const EnumDescriptor* GetEnumDescriptor< ::tensorflow::OptimizerOptions_Level>() {
  return ::tensorflow::OptimizerOptions_Level_descriptor();
}
template <> struct is_proto_enum< ::tensorflow::OptimizerOptions_GlobalJitLevel> : ::std::true_type {};
template <>
inline const EnumDescriptor* GetEnumDescriptor< ::tensorflow::OptimizerOptions_GlobalJitLevel>() {
  return ::tensorflow::OptimizerOptions_GlobalJitLevel_descriptor();
}
template <> struct is_proto_enum< ::tensorflow::ConfigProto_Experimental_MlirBridgeRollout> : ::std::true_type {};
template <>
inline const EnumDescriptor* GetEnumDescriptor< ::tensorflow::ConfigProto_Experimental_MlirBridgeRollout>() {
  return ::tensorflow::ConfigProto_Experimental_MlirBridgeRollout_descriptor();
}
template <> struct is_proto_enum< ::tensorflow::RunOptions_TraceLevel> : ::std::true_type {};
template <>
inline const EnumDescriptor* GetEnumDescriptor< ::tensorflow::RunOptions_TraceLevel>() {
  return ::tensorflow::RunOptions_TraceLevel_descriptor();
}
7771
7772PROTOBUF_NAMESPACE_CLOSE
7773
7774// @@protoc_insertion_point(global_scope)
7775
7776#include <google/protobuf/port_undef.inc>
#endif  // GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fconfig_2eproto
7778