1/**
2 * Copyright (c) Glow Contributors. See CONTRIBUTORS file.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16#ifndef GLOW_BACKENDS_BACKENDUTILS_H
17#define GLOW_BACKENDS_BACKENDUTILS_H
18
19#include "glow/CodeGen/MemoryAllocator.h"
20#include "glow/IR/IR.h"
21
22#include <map>
23
24namespace glow {
25namespace runtime {
26
/// An enum to indicate what type each symbol in the bundle is.
enum class SymbolCategory {
  /// Intermediate (activation) buffer, allocated out of activations memory.
  Activation,
  /// Symbol backed by a Placeholder (mutable weight).
  Placeholder,
  /// Symbol backed by a Constant (immutable weight).
  Constant,
  /// A tensor view over a Placeholder-backed symbol.
  PlaceholderTensorView,
  /// A tensor view over a Constant-backed symbol.
  ConstantTensorView
};
35
/// Contains information for initialization and handling of symbol at runtime.
struct RuntimeSymbolInfo {
  /// The size in bytes.
  size_t size{0};
  /// Offset in bytes from the base address.
  size_t offset{0};
  /// Type of symbol.
  Type type;
  /// Is the symbol an input for the function
  /// (see RuntimeBundle::setInputsandOutputs()).
  bool input{true};
  /// Is the symbol an output for the function
  /// (see RuntimeBundle::setInputsandOutputs()).
  bool output{true};
  /// Indicates what category the symbol is (see SymbolCategory).
  SymbolCategory symbolCategory;
  /// Logical id assigned during the codegen (-1 if unused).
  /// This id is used during the codegen to order the tensor
  /// information in memory (e.g. is an index in the offsets array),
  /// it is also used in runtime to generate tensor metadata,
  /// when eager mode is enabled.
  int index{-1};
};
57
58using SymbolTableTy = std::map<std::string, RuntimeSymbolInfo>;
59
/// Contains the information needed to be passed forward from compile time to
/// runtime. In order to allocate and initialize memory.
class RuntimeBundle {
  /// Map from symbol name to a RuntimeSymbolInfo.
  SymbolTableTy symbolTable_;
  /// Pointer to memory containing the weights for execution.
  uint8_t *constants_{nullptr};
  /// Amount of memory needed for weights.
  size_t constantWeightVarsMemSize_{0};
  /// Amount of memory needed for mutable vars.
  size_t mutableWeightVarsMemSize_{0};
  /// Amount of memory needed for activations.
  size_t activationsMemSize_{0};
  /// True if the RuntimeBundle is valid, false if not.
  bool isValid_{false};

public:
  /// Get Constant Weights memory size.
  size_t getConstantWeightSize() const { return constantWeightVarsMemSize_; }
  /// Get Mutable Weights memory size.
  size_t getMutableWeightSize() const { return mutableWeightVarsMemSize_; }
  /// Get Activations Weights memory size.
  size_t getActivationsSize() const { return activationsMemSize_; }
  /// Get pointer to memory block of constants.
  uint8_t *getConstants() const { return constants_; }
  /// Set pointer to memory block of constants.
  void setConstants(uint8_t *constants) { constants_ = constants; }
  /// Helper function, gets offset of \p v.
  size_t getValueOffset(const Named *v) const;
  /// Helper function, gets symbol info for \p v.
  const RuntimeSymbolInfo &getSymbolInfo(const Named *v) const;
  /// Get a const reference to the symbol table.
  const SymbolTableTy &getSymbolTable() const { return symbolTable_; }
  /// Replace the current symbol table with a copy of \p symbolTable.
  void updateSymbolTable(const SymbolTableTy &symbolTable) {
    symbolTable_ = symbolTable;
  }
  /// At compile time condense constants to a single block of memory.
  /// This allows the graph to go away after compile time.
  /// Allocates a block of memory large enough to hold the constant weights,
  /// then walks the given function \p F and copies weights to their address
  /// as specified by offsets contained in symbolTable_.
  void collectConstants(const IRFunction *F);
  /// Overload of \ref collectConstants that collects the constants of
  /// module \p M.
  void collectConstants(const Module *M);
#if FACEBOOK_INTERNAL
  void collectConstants(const FXIRWrapper *F);
#endif
  /// Free constants.
  void freeConstants();

  /// Sets the input and output flags for each symbol in the symbolBundle.
  void setInputsandOutputs();

  /// Computes offsets and total allocation for Constants, Placeholders, and
  /// Activations to build runtime symbol table. Returns RuntimeBundle.
  static runtime::RuntimeBundle create(const IRFunction &F,
                                       MemoryAllocator &constantAllocator,
                                       MemoryAllocator &placeholderAllocator,
                                       MemoryAllocator &activationsAllocator);

  /// Computes offsets and total allocation for Constants, Placeholders, and
  /// Activations to build runtime symbol table. \returns RuntimeBundle.
  /// Constants and Placeholders are taken from \p F, and all Activations
  /// required by each function in \p funcs are placed into the same
  /// RuntimeBundle.
  static runtime::RuntimeBundle
  create(const Function &F, const std::vector<const IRFunction *> &funcs);

  /// Computes offsets and total allocations for Constants, Placeholders, and
  /// Activations to build runtime symbol table. \returns RuntimeBundle. Uses a
  /// single allocator \p allocator and allocates all buffers contiguously in
  /// the same block.
  static runtime::RuntimeBundle create(const IRFunction &F,
                                       MemoryAllocator &allocator);

  /// Build a runtime symbol table from a Function. Computes Constant and
  /// Placeholder sizes, but not Activations, since Functions are unserialized.
  /// Only use this method to generate bundles for backends that do not use
  /// Glow's IR.
  static runtime::RuntimeBundle create(const Function &F);

  /// Deleted default constructor. A properly constructed RuntimeBundle is
  /// necessary for correct execution using the HostManager.
  RuntimeBundle() = delete;

  // Constructor. NOTE: \p symbolTable is moved from (despite being taken by
  // lvalue reference) and is left in a moved-from state after construction.
  RuntimeBundle(SymbolTableTy &symbolTable, size_t constWeight,
                size_t mutableWeight, size_t activations)
      : symbolTable_(std::move(symbolTable)), constants_(nullptr),
        constantWeightVarsMemSize_(constWeight),
        mutableWeightVarsMemSize_(mutableWeight),
        activationsMemSize_(activations), isValid_(true) {}

  // Explicit copy constructor and deleted assignment operator. A RuntimeBundle
  // should be moved. It should only be copied if absolutely necessary and never
  // implicitly.
  explicit RuntimeBundle(const RuntimeBundle &) = default;
  RuntimeBundle &operator=(const RuntimeBundle &) = delete;

  // Move constructor and assignment operator.
  RuntimeBundle(RuntimeBundle &&rhs);
  RuntimeBundle &operator=(RuntimeBundle &&rhs);
};
162} // namespace runtime
163
/// Generates a struct named has_\p METHOD_NAME that looks for a method called
/// \p METHOD_NAME inside of ClassName with return type ReturnType.
/// The generated trait exposes a static boolean member `value` which is true
/// iff `std::declval<ClassName>().METHOD_NAME()` is well-formed AND returns
/// exactly ReturnType. Detection is done via SFINAE: if the decltype
/// expression in the first check() overload is ill-formed, overload
/// resolution of check<ClassName>(0) falls back to the variadic overload,
/// which yields std::false_type. (Comments must stay outside the macro body;
/// a `//` comment there would swallow the line-continuation backslash.)
#define CLASS_CONTAINS_METHOD(METHOD_NAME)                                     \
  template <typename ClassName, typename ReturnType>                           \
  struct has_##METHOD_NAME {                                                   \
  private:                                                                     \
    template <typename T>                                                      \
    static constexpr auto check(T *) ->                                        \
        typename std::is_same<decltype(std::declval<T>().METHOD_NAME()),       \
                              ReturnType>::type;                               \
    template <typename> static constexpr std::false_type check(...);           \
    typedef decltype(check<ClassName>(0)) type;                                \
                                                                               \
  public:                                                                      \
    static constexpr bool value = type::value;                                 \
  };

/// Use template meta-programming to check if typename ClassName contains
/// getFusedActivation() method. Below generates a struct named
/// has_getFusedActivation that looks for said method.
CLASS_CONTAINS_METHOD(getFusedActivation)
185
// Input/output classification helpers. The Value overloads inspect an IR
// weight directly; the Placeholder overloads inspect how the placeholder's
// weights are used by the given function(s).

/// If \p W is a weight that is read from, \returns true.
bool isInput(const Value *W);

/// If \p W is an output weight \returns true. This is determined by checking
/// if the weight has a user which uses it as a write output.
bool isOutput(const Value *W);

/// If \p PH is an output placeholder in the IRFunction \p F,
/// \returns true.
/// This is determined by checking if the PH has weights which are referenced by
/// other Instructions as OperandKind::InOut or OperandKind::Out.
bool isOutput(const Placeholder *PH, const IRFunction &F);

/// \returns true if \p PH is an output Placeholder for any function in \p
/// funcs.
bool isOutput(const Placeholder *PH,
              const std::vector<const Function *> &funcs);

/// If \p PH is an input placeholder in the IRFunction \p F,
/// \returns true.
/// This is determined by checking if the PH is always used as an @in parameter
/// by the current function.
bool isInput(const Placeholder *PH, const IRFunction &F);
209
210/// If \p N does not have fused activation \returns true.
211template <typename T,
212 std::enable_if_t<!has_getFusedActivation<T, FusedActivation>::value,
213 int> = 0>
214bool checkNoFusion(const T &N) {
215 (void)N;
216 return true;
217}
218
219/// If \p N does not have fused activation \returns true.
220template <typename T,
221 std::enable_if_t<has_getFusedActivation<T, FusedActivation>::value,
222 int> = 0>
223bool checkNoFusion(const T &N) {
224 if (N.getFusedActivation() != FusedActivation::NONE) {
225 report("Glow backend does not support fused Activations for: " +
226 std::string(N.getKindName()));
227 return false;
228 }
229 return true;
230}
231
/// Non-template convenience wrapper for Nodes.
/// If \p N does not have fused activation \returns true.
bool checkNoFusionForNode(const Node &N);

/// Non-template convenience wrapper for Instructions.
/// If \p I does not have fused activation \returns true.
bool checkNoFusionForInstr(const Instruction &I);
237
/// Contains information for placeholder during allocation.
struct PlaceholderInputOutputInfo {
  /// The placeholder address.
  const Placeholder *addr;
  /// Is the placeholder an input for the function.
  bool isInput;
  /// Is the placeholder an output for the function.
  bool isOutput;
};

/// Ordered collection of PlaceholderInputOutputInfo, as produced by
/// getContiguousPlaceHolder().
using ContiguousPlaceholders = std::vector<PlaceholderInputOutputInfo>;
249
/// Convert placeholders to be ordered as input|inputOutput|output|neither.
/// Packed into {Placeholder *, isInput, isOutput} as
/// PlaceholderInputOutputInfo. FUN could be Function or IRFunction. ARR could
/// be std::list<Placeholder *> or std::vector<const Placeholder *>.
template <typename FUN, typename ARR>
ContiguousPlaceholders getContiguousPlaceHolder(const ARR &holders,
                                                const FUN &F);

/// Allocate \p placeholders using the provided \p allocator and store the
/// allocation results into a \p symbolTable.
void allocatePlaceholders(const ContiguousPlaceholders &placeholders,
                          MemoryAllocator &allocator,
                          glow::runtime::SymbolTableTy &symbolTable);

/// Allocate \p constants using the provided \p allocator and store the
/// allocation results into a \p symbolTable.
void allocateConstants(const ConstList &constants, MemoryAllocator &allocator,
                       glow::runtime::SymbolTableTy &symbolTable);

/// Allocate \p constants using the provided \p allocator and store the
/// allocation results into a \p symbolTable. Overload of the above taking the
/// constants as a plain vector instead of a ConstList.
void allocateConstants(const std::vector<const glow::Constant *> &constants,
                       MemoryAllocator &allocator,
                       glow::runtime::SymbolTableTy &symbolTable);

/// Allocate activations from the instruction stream \p instrs using the
/// provided \p allocator and store the allocation results into a \p
/// symbolTable.
void allocateActivations(const glow::IRFunction::InstListTy &instrs,
                         MemoryAllocator &allocator,
                         glow::runtime::SymbolTableTy &symbolTable);

/// \returns true if \p V is capable of handling a partial tensor as input.
bool allowsPartialInput(const Placeholder *V, const Function *F);

/// \returns true if \p V requires last-element padding.
bool requiresPadding(const Placeholder *V, const Function *F);

/// \returns true if \p V is used in \p F; false otherwise.
bool usedInFunction(const Placeholder *V, const Function *F);
290
291} // end namespace glow
292#endif // GLOW_BACKENDS_BACKENDUTILS_H
293