1 | // Copyright 2011 Google Inc. All Rights Reserved. |
2 | // |
3 | // Licensed under the Apache License, Version 2.0 (the "License"); |
4 | // you may not use this file except in compliance with the License. |
5 | // You may obtain a copy of the License at |
6 | // |
7 | // http://www.apache.org/licenses/LICENSE-2.0 |
8 | // |
9 | // Unless required by applicable law or agreed to in writing, software |
10 | // distributed under the License is distributed on an "AS IS" BASIS, |
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
12 | // See the License for the specific language governing permissions and |
13 | // limitations under the License. |
14 | |
15 | #include "manifest_parser.h" |
16 | |
17 | #include <stdio.h> |
18 | #include <stdlib.h> |
19 | #include <vector> |
20 | |
21 | #include "graph.h" |
22 | #include "state.h" |
23 | #include "util.h" |
24 | #include "version.h" |
25 | |
26 | using namespace std; |
27 | |
// Constructs a parser that populates |state|. The parser starts out in
// the root binding scope (state->bindings_); subninja files get child
// scopes via ParseFileInclude.
ManifestParser::ManifestParser(State* state, FileReader* file_reader,
                               ManifestParserOptions options)
    : Parser(state, file_reader),
      options_(options), quiet_(false) {
  env_ = &state->bindings_;
}
34 | |
// Top-level parse loop: reads tokens from |input| until EOF, dispatching
// each manifest declaration (pool/build/rule/default/include/subninja or
// a global variable binding) to its handler. Returns false with *err set
// on the first error.
bool ManifestParser::Parse(const string& filename, const string& input,
                           string* err) {
  lexer_.Start(filename, input);

  for (;;) {
    Lexer::Token token = lexer_.ReadToken();
    switch (token) {
      case Lexer::POOL:
        if (!ParsePool(err))
          return false;
        break;
      case Lexer::BUILD:
        if (!ParseEdge(err))
          return false;
        break;
      case Lexer::RULE:
        if (!ParseRule(err))
          return false;
        break;
      case Lexer::DEFAULT:
        if (!ParseDefault(err))
          return false;
        break;
      case Lexer::IDENT: {
        // A bare identifier at top level starts a global "name = value"
        // binding; push the token back so ParseLet can re-read it.
        lexer_.UnreadToken();
        string name;
        EvalString let_value;
        if (!ParseLet(&name, &let_value, err))
          return false;
        string value = let_value.Evaluate(env_);
        // Check ninja_required_version immediately so we can exit
        // before encountering any syntactic surprises.
        if (name == "ninja_required_version" )
          CheckNinjaVersion(value);
        env_->AddBinding(name, value);
        break;
      }
      case Lexer::INCLUDE:
        // `include`: parse the referenced file in the current scope.
        if (!ParseFileInclude(false, err))
          return false;
        break;
      case Lexer::SUBNINJA:
        // `subninja`: parse the referenced file in a new child scope.
        if (!ParseFileInclude(true, err))
          return false;
        break;
      case Lexer::ERROR: {
        return lexer_.Error(lexer_.DescribeLastError(), err);
      }
      case Lexer::TEOF:
        return true;
      case Lexer::NEWLINE:
        // Blank lines between declarations are allowed.
        break;
      default:
        return lexer_.Error(string("unexpected " ) + Lexer::TokenName(token),
                            err);
    }
  }
  return false;  // not reached
}
94 | |
95 | |
96 | bool ManifestParser::ParsePool(string* err) { |
97 | string name; |
98 | if (!lexer_.ReadIdent(&name)) |
99 | return lexer_.Error("expected pool name" , err); |
100 | |
101 | if (!ExpectToken(Lexer::NEWLINE, err)) |
102 | return false; |
103 | |
104 | if (state_->LookupPool(name) != NULL) |
105 | return lexer_.Error("duplicate pool '" + name + "'" , err); |
106 | |
107 | int depth = -1; |
108 | |
109 | while (lexer_.PeekToken(Lexer::INDENT)) { |
110 | string key; |
111 | EvalString value; |
112 | if (!ParseLet(&key, &value, err)) |
113 | return false; |
114 | |
115 | if (key == "depth" ) { |
116 | string depth_string = value.Evaluate(env_); |
117 | depth = atol(depth_string.c_str()); |
118 | if (depth < 0) |
119 | return lexer_.Error("invalid pool depth" , err); |
120 | } else { |
121 | return lexer_.Error("unexpected variable '" + key + "'" , err); |
122 | } |
123 | } |
124 | |
125 | if (depth < 0) |
126 | return lexer_.Error("expected 'depth =' line" , err); |
127 | |
128 | state_->AddPool(new Pool(name, depth)); |
129 | return true; |
130 | } |
131 | |
132 | |
133 | bool ManifestParser::ParseRule(string* err) { |
134 | string name; |
135 | if (!lexer_.ReadIdent(&name)) |
136 | return lexer_.Error("expected rule name" , err); |
137 | |
138 | if (!ExpectToken(Lexer::NEWLINE, err)) |
139 | return false; |
140 | |
141 | if (env_->LookupRuleCurrentScope(name) != NULL) |
142 | return lexer_.Error("duplicate rule '" + name + "'" , err); |
143 | |
144 | Rule* rule = new Rule(name); // XXX scoped_ptr |
145 | |
146 | while (lexer_.PeekToken(Lexer::INDENT)) { |
147 | string key; |
148 | EvalString value; |
149 | if (!ParseLet(&key, &value, err)) |
150 | return false; |
151 | |
152 | if (Rule::IsReservedBinding(key)) { |
153 | rule->AddBinding(key, value); |
154 | } else { |
155 | // Die on other keyvals for now; revisit if we want to add a |
156 | // scope here. |
157 | return lexer_.Error("unexpected variable '" + key + "'" , err); |
158 | } |
159 | } |
160 | |
161 | if (rule->bindings_["rspfile" ].empty() != |
162 | rule->bindings_["rspfile_content" ].empty()) { |
163 | return lexer_.Error("rspfile and rspfile_content need to be " |
164 | "both specified" , err); |
165 | } |
166 | |
167 | if (rule->bindings_["command" ].empty()) |
168 | return lexer_.Error("expected 'command =' line" , err); |
169 | |
170 | env_->AddRule(rule); |
171 | return true; |
172 | } |
173 | |
174 | bool ManifestParser::ParseLet(string* key, EvalString* value, string* err) { |
175 | if (!lexer_.ReadIdent(key)) |
176 | return lexer_.Error("expected variable name" , err); |
177 | if (!ExpectToken(Lexer::EQUALS, err)) |
178 | return false; |
179 | if (!lexer_.ReadVarValue(value, err)) |
180 | return false; |
181 | return true; |
182 | } |
183 | |
184 | bool ManifestParser::ParseDefault(string* err) { |
185 | EvalString eval; |
186 | if (!lexer_.ReadPath(&eval, err)) |
187 | return false; |
188 | if (eval.empty()) |
189 | return lexer_.Error("expected target name" , err); |
190 | |
191 | do { |
192 | string path = eval.Evaluate(env_); |
193 | if (path.empty()) |
194 | return lexer_.Error("empty path" , err); |
195 | uint64_t slash_bits; // Unused because this only does lookup. |
196 | CanonicalizePath(&path, &slash_bits); |
197 | std::string default_err; |
198 | if (!state_->AddDefault(path, &default_err)) |
199 | return lexer_.Error(default_err, err); |
200 | |
201 | eval.Clear(); |
202 | if (!lexer_.ReadPath(&eval, err)) |
203 | return false; |
204 | } while (!eval.empty()); |
205 | |
206 | return ExpectToken(Lexer::NEWLINE, err); |
207 | } |
208 | |
// Parses one `build` statement and registers the resulting Edge with the
// State. Grammar handled here:
//   build <outs> [| <implicit outs>] : <rule> <ins> [| <implicit ins>]
//         [|| <order-only ins>] [|@ <validations>]
// optionally followed by indented "key = value" bindings on the edge.
bool ManifestParser::ParseEdge(string* err) {
  vector<EvalString> ins, outs, validations;

  {
    // Explicit outputs: every path before `|`, `||`, `|@`, or `:`.
    EvalString out;
    if (!lexer_.ReadPath(&out, err))
      return false;
    while (!out.empty()) {
      outs.push_back(out);

      out.Clear();
      if (!lexer_.ReadPath(&out, err))
        return false;
    }
  }

  // Add all implicit outs, counting how many as we go.
  // Implicit outputs occupy the tail of `outs`; the count is stored on
  // the edge below.
  int implicit_outs = 0;
  if (lexer_.PeekToken(Lexer::PIPE)) {
    for (;;) {
      EvalString out;
      if (!lexer_.ReadPath(&out, err))
        return false;
      if (out.empty())
        break;
      outs.push_back(out);
      ++implicit_outs;
    }
  }

  if (outs.empty())
    return lexer_.Error("expected path" , err);

  if (!ExpectToken(Lexer::COLON, err))
    return false;

  string rule_name;
  if (!lexer_.ReadIdent(&rule_name))
    return lexer_.Error("expected build command name" , err);

  // The rule must already be declared (in this scope or an outer one).
  const Rule* rule = env_->LookupRule(rule_name);
  if (!rule)
    return lexer_.Error("unknown build rule '" + rule_name + "'" , err);

  for (;;) {
    // XXX should we require one path here?
    EvalString in;
    if (!lexer_.ReadPath(&in, err))
      return false;
    if (in.empty())
      break;
    ins.push_back(in);
  }

  // Add all implicit deps, counting how many as we go.
  int implicit = 0;
  if (lexer_.PeekToken(Lexer::PIPE)) {
    for (;;) {
      EvalString in;
      if (!lexer_.ReadPath(&in, err))
        return false;
      if (in.empty())
        break;
      ins.push_back(in);
      ++implicit;
    }
  }

  // Add all order-only deps, counting how many as we go.
  int order_only = 0;
  if (lexer_.PeekToken(Lexer::PIPE2)) {
    for (;;) {
      EvalString in;
      if (!lexer_.ReadPath(&in, err))
        return false;
      if (in.empty())
        break;
      ins.push_back(in);
      ++order_only;
    }
  }

  // Add all validations, counting how many as we go.
  if (lexer_.PeekToken(Lexer::PIPEAT)) {
    for (;;) {
      EvalString validation;
      if (!lexer_.ReadPath(&validation, err))
        return false;
      if (validation.empty())
        break;
      validations.push_back(validation);
    }
  }

  if (!ExpectToken(Lexer::NEWLINE, err))
    return false;

  // Bindings on edges are rare, so allocate per-edge envs only when needed.
  bool has_indent_token = lexer_.PeekToken(Lexer::INDENT);
  BindingEnv* env = has_indent_token ? new BindingEnv(env_) : env_;
  while (has_indent_token) {
    string key;
    EvalString val;
    if (!ParseLet(&key, &val, err))
      return false;

    // Note: edge-local bindings are evaluated against the enclosing
    // scope (env_), not the per-edge env being built.
    env->AddBinding(key, val.Evaluate(env_));
    has_indent_token = lexer_.PeekToken(Lexer::INDENT);
  }

  Edge* edge = state_->AddEdge(rule);
  edge->env_ = env;

  // An edge may opt into a pool via a "pool" binding; the pool must
  // already have been declared.
  string pool_name = edge->GetBinding("pool" );
  if (!pool_name.empty()) {
    Pool* pool = state_->LookupPool(pool_name);
    if (pool == NULL)
      return lexer_.Error("unknown pool name '" + pool_name + "'" , err);
    edge->pool_ = pool;
  }

  edge->outputs_.reserve(outs.size());
  for (size_t i = 0, e = outs.size(); i != e; ++i) {
    string path = outs[i].Evaluate(env);
    if (path.empty())
      return lexer_.Error("empty path" , err);
    uint64_t slash_bits;
    CanonicalizePath(&path, &slash_bits);
    // AddOut fails when another edge already produces this output.
    if (!state_->AddOut(edge, path, slash_bits)) {
      if (options_.dupe_edge_action_ == kDupeEdgeActionError) {
        lexer_.Error("multiple rules generate " + path, err);
        return false;
      } else {
        if (!quiet_) {
          Warning(
              "multiple rules generate %s. builds involving this target will "
              "not be correct; continuing anyway" ,
              path.c_str());
        }
        // If the dropped duplicate lies in the implicit-out tail of
        // `outs`, shrink the implicit count to keep it consistent.
        if (e - i <= static_cast<size_t>(implicit_outs))
          --implicit_outs;
      }
    }
  }

  if (edge->outputs_.empty()) {
    // All outputs of the edge are already created by other edges. Don't add
    // this edge. Do this check before input nodes are connected to the edge.
    state_->edges_.pop_back();
    delete edge;
    return true;
  }
  edge->implicit_outs_ = implicit_outs;

  edge->inputs_.reserve(ins.size());
  for (vector<EvalString>::iterator i = ins.begin(); i != ins.end(); ++i) {
    string path = i->Evaluate(env);
    if (path.empty())
      return lexer_.Error("empty path" , err);
    uint64_t slash_bits;
    CanonicalizePath(&path, &slash_bits);
    state_->AddIn(edge, path, slash_bits);
  }
  // Implicit and order-only inputs occupy the tail of inputs_; the edge
  // distinguishes them only by these counts.
  edge->implicit_deps_ = implicit;
  edge->order_only_deps_ = order_only;

  edge->validations_.reserve(validations.size());
  for (std::vector<EvalString>::iterator v = validations.begin();
      v != validations.end(); ++v) {
    string path = v->Evaluate(env);
    if (path.empty())
      return lexer_.Error("empty path" , err);
    uint64_t slash_bits;
    CanonicalizePath(&path, &slash_bits);
    state_->AddValidation(edge, path, slash_bits);
  }

  if (options_.phony_cycle_action_ == kPhonyCycleActionWarn &&
      edge->maybe_phonycycle_diagnostic()) {
    // CMake 2.8.12.x and 3.0.x incorrectly write phony build statements
    // that reference themselves. Ninja used to tolerate these in the
    // build graph but that has since been fixed. Filter them out to
    // support users of those old CMake versions.
    Node* out = edge->outputs_[0];
    vector<Node*>::iterator new_end =
        remove(edge->inputs_.begin(), edge->inputs_.end(), out);
    if (new_end != edge->inputs_.end()) {
      edge->inputs_.erase(new_end, edge->inputs_.end());
      if (!quiet_) {
        Warning("phony target '%s' names itself as an input; "
                "ignoring [-w phonycycle=warn]" ,
                out->path().c_str());
      }
    }
  }

  // Lookup, validate, and save any dyndep binding. It will be used later
  // to load generated dependency information dynamically, but it must
  // be one of our manifest-specified inputs.
  string dyndep = edge->GetUnescapedDyndep();
  if (!dyndep.empty()) {
    uint64_t slash_bits;
    CanonicalizePath(&dyndep, &slash_bits);
    edge->dyndep_ = state_->GetNode(dyndep, slash_bits);
    edge->dyndep_->set_dyndep_pending(true);
    vector<Node*>::iterator dgi =
      std::find(edge->inputs_.begin(), edge->inputs_.end(), edge->dyndep_);
    if (dgi == edge->inputs_.end()) {
      return lexer_.Error("dyndep '" + dyndep + "' is not an input" , err);
    }
  }

  return true;
}
423 | |
424 | bool ManifestParser::ParseFileInclude(bool new_scope, string* err) { |
425 | EvalString eval; |
426 | if (!lexer_.ReadPath(&eval, err)) |
427 | return false; |
428 | string path = eval.Evaluate(env_); |
429 | |
430 | ManifestParser subparser(state_, file_reader_, options_); |
431 | if (new_scope) { |
432 | subparser.env_ = new BindingEnv(env_); |
433 | } else { |
434 | subparser.env_ = env_; |
435 | } |
436 | |
437 | if (!subparser.Load(path, err, &lexer_)) |
438 | return false; |
439 | |
440 | if (!ExpectToken(Lexer::NEWLINE, err)) |
441 | return false; |
442 | |
443 | return true; |
444 | } |
445 | |