/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

// Functions exposing MLIR conversions of TensorFlow GraphDefs, FunctionDefs,
// and SavedModels to the Python bindings.
// Migrated from previous SWIG file (mlir.i) authored by aminim@.
#ifndef TENSORFLOW_COMPILER_MLIR_PYTHON_MLIR_H_
#define TENSORFLOW_COMPILER_MLIR_PYTHON_MLIR_H_

#include <string>

#include "absl/strings/string_view.h"
#include "tensorflow/c/eager/c_api.h"
#include "tensorflow/c/tf_status.h"

namespace tensorflow {

// Simple wrapper to support tf.mlir.experimental.convert_graph_def.
// Load a GraphDef (binary or textual proto format), convert to MLIR, and
// (optionally) optimize the module before returning it as a string.
// This is an early experimental API; ideally we should return a wrapper object
// around a Python binding to the MLIR module.
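//
// A minimal usage sketch (the serialized GraphDef in `graphdef_bytes` and the
// pipeline name are illustrative assumptions):
//
//   TF_Status* status = TF_NewStatus();
//   std::string mlir_module = tensorflow::ImportGraphDef(
//       graphdef_bytes, /*pass_pipeline=*/"tf-standard-pipeline",
//       /*show_debug_info=*/false, status);
//   if (TF_GetCode(status) != TF_OK) {
//     // Conversion failed; see TF_Message(status) for details.
//   }
//   TF_DeleteStatus(status);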
std::string ImportGraphDef(const std::string &proto,
                           const std::string &pass_pipeline,
                           bool show_debug_info, TF_Status *status);

// Simple wrapper to support tf.mlir.experimental.convert_function.
// Load a FunctionDef (binary or textual proto format), convert to MLIR, and
// (optionally) optimize the module before returning it as a string.
// This is an early experimental API; ideally we should return a wrapper object
// around a Python binding to the MLIR module.
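//
// A minimal usage sketch (assumes `functiondef_bytes` holds a serialized
// FunctionDef; the eager context supplies the function library used during
// import, and the empty pass pipeline is assumed to skip optimization):
//
//   TF_Status* status = TF_NewStatus();
//   TFE_ContextOptions* opts = TFE_NewContextOptions();
//   TFE_Context* ctx = TFE_NewContext(opts, status);
//   TFE_DeleteContextOptions(opts);
//   std::string mlir_module = tensorflow::ImportFunction(
//       functiondef_bytes, /*pass_pipeline=*/"", /*show_debug_info=*/false,
//       ctx, status);
//   TFE_DeleteContext(ctx);
//   TF_DeleteStatus(status);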
std::string ImportFunction(const std::string &functiondef_proto,
                           const std::string &pass_pipeline,
                           bool show_debug_info, TFE_Context *context,
                           TF_Status *status);

// Overload of ImportGraphDef that additionally takes the names of the input
// nodes, the data types and shapes of those inputs, and the names of the
// output nodes, and passes them to the MLIR importer.
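//
// A minimal usage sketch (the string encodings are assumptions:
// comma-separated node names and dtypes, and per-input shapes separated by
// ':' with comma-separated dimensions):
//
//   TF_Status* status = TF_NewStatus();
//   std::string mlir_module = tensorflow::ImportGraphDef(
//       graphdef_bytes, /*pass_pipeline=*/"", /*show_debug_info=*/false,
//       /*input_names=*/"x,y", /*input_data_types=*/"DT_FLOAT,DT_INT32",
//       /*input_data_shapes=*/"1,224,224,3:1", /*output_names=*/"out",
//       status);
//   TF_DeleteStatus(status);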
std::string ImportGraphDef(const std::string &proto,
                           const std::string &pass_pipeline,
                           bool show_debug_info, absl::string_view input_names,
                           absl::string_view input_data_types,
                           absl::string_view input_data_shapes,
                           absl::string_view output_names, TF_Status *status);

// Load a SavedModel and return a textual MLIR string corresponding to it.
//
// Args:
//   saved_model_path: File path from which to load the SavedModel.
//   exported_names_str: Comma-separated list of names to export.
//                       Empty means "export all".
//
// Returns:
//   A string of textual MLIR representing the raw imported SavedModel.
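//
// A minimal usage sketch (the path is illustrative; an empty exported-names
// string exports everything):
//
//   TF_Status* status = TF_NewStatus();
//   std::string mlir_module = tensorflow::ExperimentalConvertSavedModelToMlir(
//       "/tmp/my_saved_model", /*exported_names_str=*/"",
//       /*show_debug_info=*/false, status);
//   TF_DeleteStatus(status);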
std::string ExperimentalConvertSavedModelToMlir(
    const std::string &saved_model_path, const std::string &exported_names_str,
    bool show_debug_info, TF_Status *status);

// Load a SavedModel V1 and return a textual MLIR string corresponding to it
// without any MLIR graph transformation.
//
// Args:
//   saved_model_path: File path from which to load the SavedModel.
//   exported_names_str: Comma-separated list of names to export.
//                       Empty means "export all".
//   tags: Tags to identify MetaGraphDef that need to be loaded.
//   upgrade_legacy: Boolean flag that indicates whether to upgrade legacy
//     graphs.
//
// Returns:
//   A string of textual MLIR representing the raw imported SavedModel.
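//
// A minimal usage sketch (the path and tag string are illustrative; "serve" is
// the conventional tag for a serving MetaGraphDef):
//
//   TF_Status* status = TF_NewStatus();
//   std::string mlir_module =
//       tensorflow::ExperimentalConvertSavedModelV1ToMlirLite(
//           "/tmp/my_v1_saved_model", /*exported_names_str=*/"",
//           /*tags=*/"serve", /*upgrade_legacy=*/true,
//           /*show_debug_info=*/false, status);
//   TF_DeleteStatus(status);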
std::string ExperimentalConvertSavedModelV1ToMlirLite(
    const std::string &saved_model_path, const std::string &exported_names_str,
    const std::string &tags, bool upgrade_legacy, bool show_debug_info,
    TF_Status *status);

// Load a SavedModel V1 and return a textual MLIR string corresponding to it.
//
// Args:
//   saved_model_path: File path from which to load the SavedModel.
//   exported_names_str: Comma-separated list of names to export.
//                       Empty means "export all".
//   tags: Tags to identify MetaGraphDef that need to be loaded.
//   lift_variables: Boolean flag that indicates whether to hoist variables
//     after loading the SavedModel.
//   upgrade_legacy: Boolean flag that indicates whether to upgrade legacy
//     graphs.
//
// Returns:
//   A string of textual MLIR representing the raw imported SavedModel.
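//
// Usage mirrors ExperimentalConvertSavedModelV1ToMlirLite above, with the
// additional lift_variables flag (sketch; path and tag string are
// illustrative):
//
//   TF_Status* status = TF_NewStatus();
//   std::string mlir_module =
//       tensorflow::ExperimentalConvertSavedModelV1ToMlir(
//           "/tmp/my_v1_saved_model", /*exported_names_str=*/"",
//           /*tags=*/"serve", /*lift_variables=*/true, /*upgrade_legacy=*/true,
//           /*show_debug_info=*/false, status);
//   TF_DeleteStatus(status);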
std::string ExperimentalConvertSavedModelV1ToMlir(
    const std::string &saved_model_path, const std::string &exported_names_str,
    const std::string &tags, bool lift_variables, bool upgrade_legacy,
    bool show_debug_info, TF_Status *status);

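// Run a textually specified pass pipeline over a textual MLIR module and
// return the resulting module as a string.
//
// A minimal usage sketch (the pipeline string is illustrative; `mlir_txt`
// holds a module produced by one of the import functions above):
//
//   TF_Status* status = TF_NewStatus();
//   std::string optimized = tensorflow::ExperimentalRunPassPipeline(
//       mlir_txt, /*pass_pipeline=*/"canonicalize", /*show_debug_info=*/false,
//       status);
//   TF_DeleteStatus(status);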
std::string ExperimentalRunPassPipeline(const std::string &mlir_txt,
                                        const std::string &pass_pipeline,
                                        bool show_debug_info,
                                        TF_Status *status);

}  // namespace tensorflow

#endif  // TENSORFLOW_COMPILER_MLIR_PYTHON_MLIR_H_