1 | #pragma once |
2 | #include <torch/csrc/Export.h> |
3 | #include <memory> |
4 | #include <ostream> |
5 | #include <string> |
6 | #include <unordered_map> |
7 | |
// `TorchScript` offers a simple logging facility that can be enabled by
// setting an environment variable `PYTORCH_JIT_LOG_LEVEL`.
10 | |
11 | // Logging is enabled on a per file basis. To enable logging in |
12 | // `dead_code_elimination.cpp`, `PYTORCH_JIT_LOG_LEVEL` should be |
13 | // set to `dead_code_elimination.cpp` or, simply, to `dead_code_elimination` |
14 | // (i.e. `PYTORCH_JIT_LOG_LEVEL=dead_code_elimination`). |
15 | |
16 | // Multiple files can be logged by separating each file name with a colon `:` as |
17 | // in the following example, |
18 | // `PYTORCH_JIT_LOG_LEVEL=dead_code_elimination:guard_elimination` |
19 | |
20 | // There are 3 logging levels available for your use ordered by the detail level |
21 | // from lowest to highest. |
22 | |
23 | // * `GRAPH_DUMP` should be used for printing entire graphs after optimization |
24 | // passes |
25 | // * `GRAPH_UPDATE` should be used for reporting graph transformations (i.e. |
26 | // node deletion, constant folding, etc) |
27 | // * `GRAPH_DEBUG` should be used for providing information useful for debugging |
28 | // the internals of a particular optimization pass or analysis |
29 | |
30 | // The default logging level is `GRAPH_DUMP` meaning that only `GRAPH_DUMP` |
31 | // statements will be enabled when one specifies a file(s) in |
32 | // `PYTORCH_JIT_LOG_LEVEL`. |
33 | |
// `GRAPH_UPDATE` can be enabled by prefixing a file name with a `>` as in
// `>alias_analysis`.
// `GRAPH_DEBUG` can be enabled by prefixing a file name with a `>>` as in
// `>>alias_analysis`.
38 | // `>>>` is also valid and **currently** is equivalent to `GRAPH_DEBUG` as there |
39 | // is no logging level that is higher than `GRAPH_DEBUG`. |
40 | |
41 | namespace torch { |
42 | namespace jit { |
43 | |
// Forward declarations of the JIT IR types referenced by the logging API
// below; this header only uses pointers/references to them.
struct Node;
struct Graph;

// Verbosity levels for JIT logging, ordered from least to most detailed.
// The numeric ordering matters: enabling a level also enables every level
// with a smaller value (see the `>` / `>>` prefix syntax described at the
// top of this file).
enum class JitLoggingLevels {
  GRAPH_DUMP = 0,
  GRAPH_UPDATE,
  GRAPH_DEBUG,
};
52 | |
53 | TORCH_API std::string get_jit_logging_levels(); |
54 | |
55 | TORCH_API void set_jit_logging_levels(std::string level); |
56 | |
57 | TORCH_API void set_jit_logging_output_stream(std::ostream& out_stream); |
58 | |
59 | TORCH_API std::ostream& get_jit_logging_output_stream(); |
60 | |
61 | TORCH_API std::string (const Node* node); |
62 | |
63 | TORCH_API std::string log_function(const std::shared_ptr<Graph>& graph); |
64 | |
65 | TORCH_API ::torch::jit::JitLoggingLevels jit_log_level(); |
66 | |
// Prefix every line in a multiline string \p IN_STR with \p PREFIX.
TORCH_API std::string jit_log_prefix(
    const std::string& prefix,
    const std::string& in_str);

// As above, but builds the per-line prefix from the logging \p level, the
// source file name \p fn, and the line number \p l. Used by the JIT_LOG
// macro below, which passes __FILE__ / __LINE__.
TORCH_API std::string jit_log_prefix(
    ::torch::jit::JitLoggingLevels level,
    const char* fn,
    int l,
    const std::string& in_str);

// Returns true if logging at \p level is enabled for the source file
// \p cfname (per the PYTORCH_JIT_LOG_LEVEL configuration).
TORCH_API bool is_enabled(
    const char* cfname,
    ::torch::jit::JitLoggingLevels level);

// Streams a human-readable representation of a logging level.
TORCH_API std::ostream& operator<<(
    std::ostream& out,
    ::torch::jit::JitLoggingLevels level);
85 | |
// Core logging macro: when `level` is enabled for the call site's file
// (__FILE__), prefix each line of the message with level/file/line
// information and emit it on the configured output stream.
// NOTE(review): the expansion is a bare `if` with no `do { } while (0)`
// wrapper, so this macro can capture a following `else`; do not use it as
// the unbraced body of an if/else.
#define JIT_LOG(level, ...) \
  if (is_enabled(__FILE__, level)) { \
    ::torch::jit::get_jit_logging_output_stream() \
        << ::torch::jit::jit_log_prefix( \
            level, __FILE__, __LINE__, ::c10::str(__VA_ARGS__)); \
  }

// tries to reconstruct original python source
#define SOURCE_DUMP(MSG, G) \
  JIT_LOG( \
      ::torch::jit::JitLoggingLevels::GRAPH_DUMP, \
      MSG, \
      "\n", \
      ::torch::jit::log_function(G));
// use GRAPH_DUMP for dumping graphs after optimization passes
#define GRAPH_DUMP(MSG, G) \
  JIT_LOG( \
      ::torch::jit::JitLoggingLevels::GRAPH_DUMP, MSG, "\n", (G)->toString());
// use GRAPH_UPDATE for reporting graph transformations (i.e. node deletion,
// constant folding, CSE)
#define GRAPH_UPDATE(...) \
  JIT_LOG(::torch::jit::JitLoggingLevels::GRAPH_UPDATE, __VA_ARGS__);
// use GRAPH_DEBUG to provide information useful for debugging a particular opt
// pass
#define GRAPH_DEBUG(...) \
  JIT_LOG(::torch::jit::JitLoggingLevels::GRAPH_DEBUG, __VA_ARGS__);
// use GRAPH_EXPORT to export a graph so that the IR can be loaded by a script
#define GRAPH_EXPORT(MSG, G) \
  JIT_LOG( \
      ::torch::jit::JitLoggingLevels::GRAPH_DEBUG, \
      MSG, \
      "\n<GRAPH_EXPORT>\n", \
      (G)->toString(), \
      "</GRAPH_EXPORT>");

// Runtime checks for whether a given level is enabled for the current
// file; useful to guard work that is expensive just to *build* a log
// message (e.g. stringifying a large graph).
#define GRAPH_DUMP_ENABLED \
  (is_enabled(__FILE__, ::torch::jit::JitLoggingLevels::GRAPH_DUMP))
#define GRAPH_UPDATE_ENABLED \
  (is_enabled(__FILE__, ::torch::jit::JitLoggingLevels::GRAPH_UPDATE))
#define GRAPH_DEBUG_ENABLED \
  (is_enabled(__FILE__, ::torch::jit::JitLoggingLevels::GRAPH_DEBUG))
127 | } // namespace jit |
128 | } // namespace torch |
129 | |