| 1 | //===-- lib/Parser/preprocessor.cpp ---------------------------------------===// |
| 2 | // |
| 3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
| 4 | // See https://llvm.org/LICENSE.txt for license information. |
| 5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
| 6 | // |
| 7 | //===----------------------------------------------------------------------===// |
| 8 | |
| 9 | #include "flang/Parser/preprocessor.h" |
| 10 | |
| 11 | #include "prescan.h" |
| 12 | #include "flang/Common/idioms.h" |
| 13 | #include "flang/Parser/characters.h" |
| 14 | #include "flang/Parser/message.h" |
| 15 | #include "llvm/Support/FileSystem.h" |
| 16 | #include "llvm/Support/raw_ostream.h" |
| 17 | #include <algorithm> |
| 18 | #include <cinttypes> |
| 19 | #include <cstddef> |
| 20 | #include <ctime> |
| 21 | #include <map> |
| 22 | #include <memory> |
| 23 | #include <optional> |
| 24 | #include <set> |
| 25 | #include <string> |
| 26 | #include <utility> |
| 27 | #include <vector> |
| 28 | |
| 29 | namespace Fortran::parser { |
| 30 | |
| 31 | Definition::Definition( |
| 32 | const TokenSequence &repl, std::size_t firstToken, std::size_t tokens) |
| 33 | : replacement_{Tokenize({}, repl, firstToken, tokens)} {} |
| 34 | |
| 35 | Definition::Definition(const std::vector<std::string> &argNames, |
| 36 | const TokenSequence &repl, std::size_t firstToken, std::size_t tokens, |
| 37 | bool isVariadic) |
| 38 | : isFunctionLike_{true}, isVariadic_{isVariadic}, argNames_{argNames}, |
| 39 | replacement_{Tokenize(argNames, repl, firstToken, tokens)} {} |
| 40 | |
| 41 | Definition::Definition(const std::string &predefined, AllSources &sources) |
| 42 | : isPredefined_{true}, |
| 43 | replacement_{ |
| 44 | predefined, sources.AddCompilerInsertion(predefined).start()} {} |
| 45 | |
| 46 | bool Definition::set_isDisabled(bool disable) { |
| 47 | bool was{isDisabled_}; |
| 48 | isDisabled_ = disable; |
| 49 | return was; |
| 50 | } |
| 51 | |
| 52 | void Definition::Print(llvm::raw_ostream &out, const char *macroName) const { |
| 53 | if (!isFunctionLike_) { |
| 54 | // If it's not a function-like macro, then just print the replacement. |
| 55 | out << ' ' << replacement_.ToString(); |
| 56 | return; |
| 57 | } |
| 58 | |
| 59 | size_t argCount{argumentCount()}; |
| 60 | |
| 61 | out << '('; |
| 62 | for (size_t i{0}; i != argCount; ++i) { |
| 63 | if (i != 0) { |
| 64 | out << ", "; |
| 65 | } |
| 66 | out << argNames_[i]; |
| 67 | } |
| 68 | if (isVariadic_) { |
| 69 | out << ", ..." ; |
| 70 | } |
| 71 | out << ") " ; |
| 72 | |
| 73 | for (size_t i{0}, e{replacement_.SizeInTokens()}; i != e; ++i) { |
| 74 | std::string tok{replacement_.TokenAt(i).ToString()}; |
| 75 | if (size_t idx{GetArgumentIndex(tok)}; idx < argCount) { |
| 76 | out << argNames_[idx]; |
| 77 | } else { |
| 78 | out << tok; |
| 79 | } |
| 80 | } |
| 81 | } |
| 82 | |
| 83 | static bool IsLegalIdentifierStart(const CharBlock &cpl) { |
| 84 | return cpl.size() > 0 && IsLegalIdentifierStart(cpl[0]); |
| 85 | } |
| 86 | |
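| | // Convert a macro definition's replacement text to the internal form used by |
| | // Apply(): occurrences of the formal argument names are rewritten as the |
| | // two-character placeholders "~A", "~B", ..., so that substitution can be |
| | // done positionally. Illustrative example: with "#define SQR(x) ((x)*(x))" |
| | // the stored replacement reads "((~A)*(~A))". |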
| 87 | TokenSequence Definition::Tokenize(const std::vector<std::string> &argNames, |
| 88 | const TokenSequence &token, std::size_t firstToken, std::size_t tokens) { |
| 89 | std::map<std::string, std::string> args; |
| 90 | char argIndex{'A'}; |
| 91 | for (const std::string &arg : argNames) { |
| 92 | CHECK(args.find(arg) == args.end()); |
| 93 | args[arg] = "~"s + argIndex++; |
| 94 | } |
| 95 | TokenSequence result; |
| 96 | for (std::size_t j{0}; j < tokens; ++j) { |
| 97 | CharBlock tok{token.TokenAt(firstToken + j)}; |
| 98 | if (IsLegalIdentifierStart(tok)) { |
| 99 | auto it{args.find(tok.ToString())}; |
| 100 | if (it != args.end()) { |
| 101 | result.Put(it->second, token.GetTokenProvenance(j)); |
| 102 | continue; |
| 103 | } |
| 104 | } |
| 105 | result.AppendRange(token, firstToken + j, 1); |
| 106 | } |
| 107 | return result; |
| 108 | } |
| 109 | |
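| | // Map a "~X" placeholder back to its zero-based argument index; any other |
| | // token maps to argumentCount(), i.e. "not an argument". |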
| 110 | std::size_t Definition::GetArgumentIndex(const CharBlock &token) const { |
| 111 | if (token.size() >= 2 && token[0] == '~') { |
| 112 | return static_cast<size_t>(token[1] - 'A'); |
| 113 | } |
| 114 | return argumentCount(); |
| 115 | } |
| 116 | |
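| | // Implement the '#' stringification operator: wrap the argument's tokens in |
| | // double quotes, doubling any embedded '"' or '\' characters. |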
| 117 | static TokenSequence Stringify( |
| 118 | const TokenSequence &tokens, AllSources &allSources) { |
| 119 | TokenSequence result; |
| 120 | Provenance quoteProvenance{allSources.CompilerInsertionProvenance('"')}; |
| 121 | result.PutNextTokenChar('"', quoteProvenance); |
| 122 | for (std::size_t j{0}; j < tokens.SizeInTokens(); ++j) { |
| 123 | const CharBlock &token{tokens.TokenAt(j)}; |
| 124 | std::size_t bytes{token.size()}; |
| 125 | for (std::size_t k{0}; k < bytes; ++k) { |
| 126 | char ch{token[k]}; |
| 127 | Provenance from{tokens.GetTokenProvenance(j, k)}; |
| 128 | if (ch == '"' || ch == '\\') { |
| 129 | result.PutNextTokenChar(ch, from); |
| 130 | } |
| 131 | result.PutNextTokenChar(ch, from); |
| 132 | } |
| 133 | } |
| 134 | result.PutNextTokenChar('"', quoteProvenance); |
| 135 | result.CloseToken(); |
| 136 | return result; |
| 137 | } |
| 138 | |
| 139 | constexpr bool IsTokenPasting(CharBlock opr) { |
| 140 | return opr.size() == 2 && opr[0] == '#' && opr[1] == '#'; |
| 141 | } |
| 142 | |
| 143 | static bool AnyTokenPasting(const TokenSequence &text) { |
| 144 | std::size_t tokens{text.SizeInTokens()}; |
| 145 | for (std::size_t j{0}; j < tokens; ++j) { |
| 146 | if (IsTokenPasting(text.TokenAt(j))) { |
| 147 | return true; |
| 148 | } |
| 149 | } |
| 150 | return false; |
| 151 | } |
| 152 | |
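| | // Implement the '##' token-pasting operator: the '##' and any blanks around |
| | // it are dropped, and the preceding token is reopened so that the following |
| | // token's characters are appended to it, forming a single token. |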
| 153 | static TokenSequence TokenPasting(TokenSequence &&text) { |
| 154 | if (!AnyTokenPasting(text)) { |
| 155 | return std::move(text); |
| 156 | } |
| 157 | TokenSequence result; |
| 158 | std::size_t tokens{text.SizeInTokens()}; |
| 159 | bool pasting{false}; |
| 160 | for (std::size_t j{0}; j < tokens; ++j) { |
| 161 | if (IsTokenPasting(text.TokenAt(j))) { |
| 162 | if (!pasting) { |
| 163 | while (!result.empty() && |
| 164 | result.TokenAt(result.SizeInTokens() - 1).IsBlank()) { |
| 165 | result.pop_back(); |
| 166 | } |
| 167 | if (!result.empty()) { |
| 168 | result.ReopenLastToken(); |
| 169 | pasting = true; |
| 170 | } |
| 171 | } |
| 172 | } else if (pasting && text.TokenAt(j).IsBlank()) { |
| 173 | } else { |
| 174 | result.AppendRange(text, j, 1); |
| 175 | pasting = false; |
| 176 | } |
| 177 | } |
| 178 | return result; |
| 179 | } |
| 180 | |
| 181 | constexpr bool IsDefinedKeyword(CharBlock token) { |
| 182 | return token.size() == 7 && (token[0] == 'd' || token[0] == 'D') && |
| 183 | ToLowerCaseLetters(token.ToString()) == "defined" ; |
| 184 | } |
| 185 | |
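| | // Expand a function-like macro invocation: substitute the actual arguments |
| | // for the "~X" placeholders, honoring '#' stringification, '##' pasting, |
| | // __VA_ARGS__, and __VA_OPT__(...). |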
| 186 | TokenSequence Definition::Apply(const std::vector<TokenSequence> &args, |
| 187 | Prescanner &prescanner, bool inIfExpression) { |
| 188 | TokenSequence result; |
| 189 | bool skipping{false}; |
| 190 | int parenthesesNesting{0}; |
| 191 | std::size_t tokens{replacement_.SizeInTokens()}; |
| 192 | for (std::size_t j{0}; j < tokens; ++j) { |
| 193 | CharBlock token{replacement_.TokenAt(j)}; |
| 194 | std::size_t bytes{token.size()}; |
| 195 | if (skipping) { |
| 196 | char ch{token.OnlyNonBlank()}; |
| 197 | if (ch == '(') { |
| 198 | ++parenthesesNesting; |
| 199 | } else if (ch == ')') { |
| 200 | if (parenthesesNesting > 0) { |
| 201 | --parenthesesNesting; |
| 202 | } |
| 203 | skipping = parenthesesNesting > 0; |
| 204 | } |
| 205 | continue; |
| 206 | } |
| 207 | if (bytes == 2 && token[0] == '~') { // argument substitution |
| 208 | std::size_t index{GetArgumentIndex(token)}; |
| 209 | if (index >= args.size()) { |
| 210 | continue; |
| 211 | } |
| 212 | std::size_t prev{j}; |
| 213 | while (prev > 0 && replacement_.TokenAt(prev - 1).IsBlank()) { |
| 214 | --prev; |
| 215 | } |
| 216 | if (prev > 0 && replacement_.TokenAt(prev - 1).size() == 1 && |
| 217 | replacement_.TokenAt(prev - 1)[0] == |
| 218 | '#') { // stringify argument without macro replacement |
| 219 | std::size_t resultSize{result.SizeInTokens()}; |
| 220 | while (resultSize > 0 && result.TokenAt(resultSize - 1).IsBlank()) { |
| 221 | result.pop_back(); |
| 222 | --resultSize; |
| 223 | } |
| 224 | CHECK(resultSize > 0 && |
| 225 | result.TokenAt(resultSize - 1) == replacement_.TokenAt(prev - 1)); |
| 226 | result.pop_back(); |
| 227 | result.CopyAll(Stringify(args[index], prescanner.allSources())); |
| 228 | } else { |
| 229 | const TokenSequence *arg{&args[index]}; |
| 230 | std::optional<TokenSequence> replaced; |
| 231 | // Don't replace macros in the actual argument if it is preceded or |
| 232 | // followed by the token-pasting operator ## in the replacement text, |
| 233 | // or if we have to worry about "defined(X)"/"defined X" in an |
| 234 | // #if/#elif expression. |
| 235 | if (!inIfExpression && |
| 236 | (prev == 0 || !IsTokenPasting(replacement_.TokenAt(prev - 1)))) { |
| 237 | auto next{replacement_.SkipBlanks(j + 1)}; |
| 238 | if (next >= tokens || !IsTokenPasting(replacement_.TokenAt(next))) { |
| 239 | // Apply macro replacement to the actual argument |
| 240 | replaced = prescanner.preprocessor().MacroReplacement( |
| 241 | *arg, prescanner, nullptr, inIfExpression); |
| 242 | if (replaced) { |
| 243 | arg = &*replaced; |
| 244 | } |
| 245 | } |
| 246 | } |
| 247 | result.CopyAll(DEREF(arg)); |
| 248 | } |
| 249 | } else if (bytes == 11 && isVariadic_ && |
| 250 | token.ToString() == "__VA_ARGS__" ) { |
| 251 | Provenance commaProvenance{ |
| 252 | prescanner.preprocessor().allSources().CompilerInsertionProvenance( |
| 253 | ',')}; |
| 254 | for (std::size_t k{argumentCount()}; k < args.size(); ++k) { |
| 255 | if (k > argumentCount()) { |
| 256 | result.Put(","s , commaProvenance); |
| 257 | } |
| 258 | result.CopyAll(args[k]); |
| 259 | } |
| 260 | } else if (bytes == 10 && isVariadic_ && token.ToString() == "__VA_OPT__" && |
| 261 | j + 2 < tokens && replacement_.TokenAt(j + 1).OnlyNonBlank() == '(' && |
| 262 | parenthesesNesting == 0) { |
| 263 | parenthesesNesting = 1; |
| 264 | skipping = args.size() == argumentCount(); |
| 265 | ++j; |
| 266 | } else { |
| 267 | if (parenthesesNesting > 0) { |
| 268 | char ch{token.OnlyNonBlank()}; |
| 269 | if (ch == '(') { |
| 270 | ++parenthesesNesting; |
| 271 | } else if (ch == ')') { |
| 272 | if (--parenthesesNesting == 0) { |
| 273 | skipping = false; |
| 274 | continue; |
| 275 | } |
| 276 | } |
| 277 | } |
| 278 | result.AppendRange(replacement_, j); |
| 279 | } |
| 280 | } |
| 281 | return TokenPasting(std::move(result)); |
| 282 | } |
| 283 | |
| 284 | static std::string FormatTime(const std::time_t &now, const char *format) { |
| 285 | char buffer[16]; |
| 286 | return {buffer, |
| 287 | std::strftime(buffer, sizeof buffer, format, std::localtime(&now))}; |
| 288 | } |
| 289 | |
| 290 | Preprocessor::Preprocessor(AllSources &allSources) : allSources_{allSources} {} |
| 291 | |
| 292 | void Preprocessor::DefineStandardMacros() { |
| 293 | // Capture current local date & time once now to avoid having the values |
| 294 | // of __DATE__ or __TIME__ change during compilation. |
| 295 | std::time_t now; |
| 296 | std::time(&now); |
| 297 | Define("__DATE__"s , FormatTime(now, "\"%h %e %Y\"" )); // e.g., "Jun 16 1904" |
| 298 | Define("__TIME__"s , FormatTime(now, "\"%T\"" )); // e.g., "23:59:60" |
| 299 | // The values of these predefined macros depend on their invocation sites. |
| 300 | Define("__FILE__"s , "__FILE__"s ); |
| 301 | Define("__LINE__"s , "__LINE__"s ); |
| 302 | Define("__TIMESTAMP__"s , "__TIMESTAMP__"s ); |
| 303 | Define("__COUNTER__"s , "__COUNTER__"s ); |
| 304 | } |
| 305 | |
| 306 | static const std::string idChars{ |
| 307 | "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789"s }; |
| 308 | |
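| | // Parse the name and parenthesized argument list of a function-like macro |
| | // definition supplied as a string (e.g. from the command line): "PASTE(a,b)" |
| | // yields {"PASTE", "a", "b"}; returns std::nullopt if the string does not |
| | // have that form. |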
| 309 | static std::optional<std::vector<std::string>> TokenizeMacroNameAndArgs( |
| 310 | const std::string &str) { |
| 311 | // TODO: variadic macros on the command line (?) |
| 312 | std::vector<std::string> names; |
| 313 | for (std::string::size_type at{0};;) { |
| 314 | auto nameStart{str.find_first_not_of(" "s , at)}; |
| 315 | if (nameStart == str.npos) { |
| 316 | return std::nullopt; |
| 317 | } |
| 318 | auto nameEnd{str.find_first_not_of(idChars, nameStart)}; |
| 319 | if (nameEnd == str.npos) { |
| 320 | return std::nullopt; |
| 321 | } |
| 322 | auto punc{str.find_first_not_of(" "s , nameEnd)}; |
| 323 | if (punc == str.npos) { |
| 324 | return std::nullopt; |
| 325 | } |
| 326 | if ((at == 0 && str[punc] != '(') || |
| 327 | (at > 0 && str[punc] != ',' && str[punc] != ')')) { |
| 328 | return std::nullopt; |
| 329 | } |
| 330 | names.push_back(str.substr(nameStart, nameEnd - nameStart)); |
| 331 | at = punc + 1; |
| 332 | if (str[punc] == ')') { |
| 333 | if (str.find_first_not_of(" "s , at) != str.npos) { |
| 334 | return std::nullopt; |
| 335 | } else { |
| 336 | return names; |
| 337 | } |
| 338 | } |
| 339 | } |
| 340 | } |
| 341 | |
| 342 | TokenSequence Preprocessor::TokenizeMacroBody(const std::string &str) { |
| 343 | TokenSequence tokens; |
| 344 | Provenance provenance{allSources_.AddCompilerInsertion(str).start()}; |
| 345 | auto end{str.size()}; |
| 346 | for (std::string::size_type at{0}; at < end;) { |
| 347 | // Alternate between tokens that are identifiers (and therefore subject |
| 348 | // to argument replacement) and those that are not. |
| 349 | auto start{str.find_first_of(idChars, at)}; |
| 350 | if (start == str.npos) { |
| 351 | tokens.Put(str.substr(at), provenance + at); |
| 352 | break; |
| 353 | } else if (start > at) { |
| 354 | tokens.Put(str.substr(at, start - at), provenance + at); |
| 355 | } |
| 356 | at = str.find_first_not_of(idChars, start + 1); |
| 357 | if (at == str.npos) { |
| 358 | tokens.Put(str.substr(start), provenance + start); |
| 359 | break; |
| 360 | } else { |
| 361 | tokens.Put(str.substr(start, at - start), provenance + start); |
| 362 | } |
| 363 | } |
| 364 | return tokens; |
| 365 | } |
| 366 | |
| 367 | void Preprocessor::Define(const std::string ¯o, const std::string &value) { |
| 368 | if (auto lhs{TokenizeMacroNameAndArgs(macro)}) { |
| 369 | // function-like macro |
| 370 | CharBlock macroName{SaveTokenAsName(lhs->front())}; |
| 371 | auto iter{lhs->begin()}; |
| 372 | ++iter; |
| 373 | std::vector<std::string> argNames{iter, lhs->end()}; |
| 374 | auto rhs{TokenizeMacroBody(value)}; |
| 375 | definitions_.emplace(std::make_pair(macroName, |
| 376 | Definition{ |
| 377 | argNames, rhs, 0, rhs.SizeInTokens(), /*isVariadic=*/false})); |
| 378 | } else { // keyword macro |
| 379 | definitions_.emplace( |
| 380 | SaveTokenAsName(macro), Definition{value, allSources_}); |
| 381 | } |
| 382 | } |
| 383 | |
| 384 | void Preprocessor::Undefine(std::string macro) { definitions_.erase(macro); } |
| 385 | |
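| | // Perform macro replacement on a token sequence; returns std::nullopt when |
| | // nothing in the input would be replaced. If a function-like macro call is |
| | // still unclosed when the input runs out, *partialFunctionLikeMacro (when |
| | // non-null) is set to the token offset in the result at which that call |
| | // begins, so the caller can retry with more input appended. |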
| 386 | std::optional<TokenSequence> Preprocessor::MacroReplacement( |
| 387 | const TokenSequence &input, Prescanner &prescanner, |
| 388 | std::optional<std::size_t> *partialFunctionLikeMacro, bool inIfExpression) { |
| 389 | // Do a quick scan for any use of a defined name. |
| 390 | if (definitions_.empty()) { |
| 391 | return std::nullopt; |
| 392 | } |
| 393 | std::size_t tokens{input.SizeInTokens()}; |
| 394 | std::size_t j{0}; |
| 395 | for (; j < tokens; ++j) { |
| 396 | CharBlock token{input.TokenAt(j)}; |
| 397 | if (!token.empty() && IsLegalIdentifierStart(token[0]) && |
| 398 | (IsNameDefined(token) || (inIfExpression && IsDefinedKeyword(token)))) { |
| 399 | break; |
| 400 | } |
| 401 | } |
| 402 | if (j == tokens) { |
| 403 | return std::nullopt; // input contains nothing that would be replaced |
| 404 | } |
| 405 | TokenSequence result{input, 0, j}; |
| 406 | |
| 407 | // When a rescan after macro replacement fails due to an unclosed |
| 408 | // function-like macro call (no left parenthesis yet, or no closing |
| 409 | // parenthesis): if tokens remain in the input, append them to the |
| 410 | // replacement text and attempt to proceed. Otherwise, return, so that |
| 411 | // the caller may try again with the remaining tokens in its input. |
| 412 | auto CompleteFunctionLikeMacro{ |
| 413 | [this, &input, &prescanner, &result, &partialFunctionLikeMacro, |
| 414 | inIfExpression](std::size_t after, const TokenSequence &replacement, |
| 415 | std::size_t pFLMOffset) { |
| 416 | if (after < input.SizeInTokens()) { |
| 417 | result.AppendRange(replacement, 0, pFLMOffset); |
| 418 | TokenSequence suffix; |
| 419 | suffix.AppendRange( |
| 420 | replacement, pFLMOffset, replacement.SizeInTokens() - pFLMOffset); |
| 421 | suffix.AppendRange(input, after, input.SizeInTokens() - after); |
| 422 | auto further{ReplaceMacros( |
| 423 | suffix, prescanner, partialFunctionLikeMacro, inIfExpression)}; |
| 424 | if (partialFunctionLikeMacro && *partialFunctionLikeMacro) { |
| 425 | // still not closed |
| 426 | **partialFunctionLikeMacro += result.SizeInTokens(); |
| 427 | } |
| 428 | result.CopyAll(further); |
| 429 | return true; |
| 430 | } else { |
| 431 | if (partialFunctionLikeMacro) { |
| 432 | *partialFunctionLikeMacro = pFLMOffset + result.SizeInTokens(); |
| 433 | } |
| 434 | return false; |
| 435 | } |
| 436 | }}; |
| 437 | |
| 438 | for (; j < tokens; ++j) { |
| 439 | CharBlock token{input.TokenAt(j)}; |
| 440 | if (token.IsBlank() || !IsLegalIdentifierStart(token[0])) { |
| 441 | result.AppendRange(input, j); |
| 442 | continue; |
| 443 | } |
| 444 | // Process identifier in replacement text. |
| 445 | auto it{definitions_.find(token)}; |
| 446 | // Is this the X in "defined(X)" or "defined X" in an #if/#elif expression? |
| 447 | if (inIfExpression) { |
| 448 | if (auto prev{result.SkipBlanksBackwards(result.SizeInTokens())}) { |
| 449 | bool ok{true}; |
| 450 | std::optional<std::size_t> rightParenthesis; |
| 451 | if (result.TokenAt(*prev).OnlyNonBlank() == '(') { |
| 452 | prev = result.SkipBlanksBackwards(*prev); |
| 453 | rightParenthesis = input.SkipBlanks(j + 1); |
| 454 | ok = *rightParenthesis < tokens && |
| 455 | input.TokenAt(*rightParenthesis).OnlyNonBlank() == ')'; |
| 456 | } |
| 457 | if (ok && prev && IsDefinedKeyword(result.TokenAt(*prev))) { |
| 458 | result = TokenSequence{result, 0, *prev}; // trims off "defined (" |
| 459 | char truth{it != definitions_.end() ? '1' : '0'}; |
| 460 | result.Put(&truth, 1, allSources_.CompilerInsertionProvenance(truth)); |
| 461 | j = rightParenthesis.value_or(j); |
| 462 | continue; |
| 463 | } |
| 464 | } |
| 465 | } |
| 466 | if (it == definitions_.end()) { |
| 467 | result.AppendRange(input, j); |
| 468 | continue; |
| 469 | } |
| 470 | Definition *def{&it->second}; |
| 471 | if (def->isDisabled()) { |
| 472 | result.AppendRange(input, j); |
| 473 | continue; |
| 474 | } |
| 475 | if (!def->isFunctionLike()) { |
| 476 | if (def->isPredefined() && !def->replacement().empty()) { |
| 477 | std::string repl; |
| 478 | std::string name{def->replacement().TokenAt(0).ToString()}; |
| 479 | if (name == "__FILE__" ) { |
| 480 | repl = "\""s + |
| 481 | allSources_.GetPath(prescanner.GetCurrentProvenance()) + '"'; |
| 482 | } else if (name == "__LINE__" ) { |
| 483 | std::string buf; |
| 484 | llvm::raw_string_ostream ss{buf}; |
| 485 | ss << allSources_.GetLineNumber(prescanner.GetCurrentProvenance()); |
| 486 | repl = ss.str(); |
| 487 | } else if (name == "__TIMESTAMP__" ) { |
| 488 | auto path{allSources_.GetPath( |
| 489 | prescanner.GetCurrentProvenance(), /*topLevel=*/true)}; |
| 490 | llvm::sys::fs::file_status status; |
| 491 | repl = "??? ??? ?? ??:??:?? ????" ; |
| 492 | if (!llvm::sys::fs::status(path, status)) { |
| 493 | auto modTime{llvm::sys::toTimeT(status.getLastModificationTime())}; |
| 494 | if (std::string time{std::asctime(std::localtime(&modTime))}; |
| 495 | time.size() > 1 && time[time.size() - 1] == '\n') { |
| 496 | time.erase(time.size() - 1); // clip terminal '\n' |
| 497 | repl = "\""s + time + '"'; |
| 498 | } |
| 499 | } |
| 500 | } else if (name == "__COUNTER__" ) { |
| 501 | repl = std::to_string(counterVal_++); |
| 502 | } |
| 503 | if (!repl.empty()) { |
| 504 | ProvenanceRange insert{allSources_.AddCompilerInsertion(repl)}; |
| 505 | ProvenanceRange call{allSources_.AddMacroCall( |
| 506 | insert, input.GetTokenProvenanceRange(j), repl)}; |
| 507 | result.Put(repl, call.start()); |
| 508 | continue; |
| 509 | } |
| 510 | } |
| 511 | std::optional<std::size_t> partialFLM; |
| 512 | def->set_isDisabled(true); |
| 513 | TokenSequence replaced{TokenPasting(ReplaceMacros( |
| 514 | def->replacement(), prescanner, &partialFLM, inIfExpression))}; |
| 515 | def->set_isDisabled(false); |
| 516 | if (partialFLM && |
| 517 | CompleteFunctionLikeMacro(j + 1, replaced, *partialFLM)) { |
| 518 | return result; |
| 519 | } |
| 520 | if (!replaced.empty()) { |
| 521 | ProvenanceRange from{def->replacement().GetProvenanceRange()}; |
| 522 | ProvenanceRange use{input.GetTokenProvenanceRange(j)}; |
| 523 | ProvenanceRange newRange{ |
| 524 | allSources_.AddMacroCall(from, use, replaced.ToString())}; |
| 525 | result.CopyWithProvenance(replaced, newRange); |
| 526 | } |
| 527 | } else { |
| 528 | // Possible function-like macro call. Skip spaces and newlines to see |
| 529 | // whether '(' is next. |
| 530 | std::size_t k{j}; |
| 531 | bool leftParen{false}; |
| 532 | while (++k < tokens) { |
| 533 | const CharBlock &lookAhead{input.TokenAt(k)}; |
| 534 | if (!lookAhead.IsBlank() && lookAhead[0] != '\n') { |
| 535 | leftParen = lookAhead[0] == '(' && lookAhead.size() == 1; |
| 536 | break; |
| 537 | } |
| 538 | } |
| 539 | if (!leftParen) { |
| 540 | if (partialFunctionLikeMacro) { |
| 541 | *partialFunctionLikeMacro = result.SizeInTokens(); |
| 542 | result.AppendRange(input, j, tokens - j); |
| 543 | return result; |
| 544 | } else { |
| 545 | result.AppendRange(input, j); |
| 546 | continue; |
| 547 | } |
| 548 | } |
| 549 | std::vector<std::size_t> argStart{++k}; |
| 550 | for (int nesting{0}; k < tokens; ++k) { |
| 551 | CharBlock token{input.TokenAt(k)}; |
| 552 | char ch{token.OnlyNonBlank()}; |
| 553 | if (ch == '(') { |
| 554 | ++nesting; |
| 555 | } else if (ch == ')') { |
| 556 | if (nesting == 0) { |
| 557 | break; |
| 558 | } |
| 559 | --nesting; |
| 560 | } else if (ch == ',' && nesting == 0) { |
| 561 | argStart.push_back(k + 1); |
| 562 | } |
| 563 | } |
| 564 | if (argStart.size() == 1 && k == argStart[0] && |
| 565 | def->argumentCount() == 0) { |
| 566 | // Subtle: () is zero arguments, not one empty argument, |
| 567 | // unless one argument was expected. |
| 568 | argStart.clear(); |
| 569 | } |
| 570 | if (k >= tokens && partialFunctionLikeMacro) { |
| 571 | *partialFunctionLikeMacro = result.SizeInTokens(); |
| 572 | result.AppendRange(input, j, tokens - j); |
| 573 | return result; |
| 574 | } else if (k >= tokens || argStart.size() < def->argumentCount() || |
| 575 | (argStart.size() > def->argumentCount() && !def->isVariadic())) { |
| 576 | result.AppendRange(input, j); |
| 577 | continue; |
| 578 | } |
| 579 | std::vector<TokenSequence> args; |
| 580 | for (std::size_t n{0}; n < argStart.size(); ++n) { |
| 581 | std::size_t at{argStart[n]}; |
| 582 | std::size_t count{ |
| 583 | (n + 1 == argStart.size() ? k : argStart[n + 1] - 1) - at}; |
| 584 | args.emplace_back(TokenSequence(input, at, count)); |
| 585 | } |
| 586 | TokenSequence applied{def->Apply(args, prescanner, inIfExpression)}; |
| 587 | std::optional<std::size_t> partialFLM; |
| 588 | def->set_isDisabled(true); |
| 589 | TokenSequence replaced{ReplaceMacros( |
| 590 | std::move(applied), prescanner, &partialFLM, inIfExpression)}; |
| 591 | def->set_isDisabled(false); |
| 592 | if (partialFLM && |
| 593 | CompleteFunctionLikeMacro(k + 1, replaced, *partialFLM)) { |
| 594 | return result; |
| 595 | } |
| 596 | if (!replaced.empty()) { |
| 597 | ProvenanceRange from{def->replacement().GetProvenanceRange()}; |
| 598 | ProvenanceRange use{input.GetIntervalProvenanceRange(j, k - j)}; |
| 599 | ProvenanceRange newRange{ |
| 600 | allSources_.AddMacroCall(from, use, replaced.ToString())}; |
| 601 | result.CopyWithProvenance(replaced, newRange); |
| 602 | } |
| 603 | j = k; // advance to the terminal ')' |
| 604 | } |
| 605 | } |
| 606 | return result; |
| 607 | } |
| 608 | |
| 609 | TokenSequence Preprocessor::ReplaceMacros(const TokenSequence &tokens, |
| 610 | Prescanner &prescanner, |
| 611 | std::optional<std::size_t> *partialFunctionLikeMacro, bool inIfExpression) { |
| 612 | if (std::optional<TokenSequence> repl{MacroReplacement( |
| 613 | tokens, prescanner, partialFunctionLikeMacro, inIfExpression)}) { |
| 614 | return std::move(*repl); |
| 615 | } |
| 616 | return tokens; |
| 617 | } |
| 618 | |
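| | // Process one preprocessor directive line: after the leading '#', dispatch on |
| | // the directive name (#define, #undef, #if/#ifdef/#ifndef/#elif/#else/#endif, |
| | // #error, #warning, #comment/#note, #include, #line, and bare line markers). |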
| 619 | void Preprocessor::Directive(const TokenSequence &dir, Prescanner &prescanner) { |
| 620 | std::size_t tokens{dir.SizeInTokens()}; |
| 621 | std::size_t j{dir.SkipBlanks(0)}; |
| 622 | if (j == tokens) { |
| 623 | return; |
| 624 | } |
| 625 | if (dir.TokenAt(j).ToString() != "#" ) { |
| 626 | prescanner.Say(dir.GetTokenProvenanceRange(j), "missing '#'"_err_en_US ); |
| 627 | return; |
| 628 | } |
| 629 | j = dir.SkipBlanks(j + 1); |
| 630 | while (tokens > 0 && dir.TokenAt(tokens - 1).IsBlank()) { |
| 631 | --tokens; |
| 632 | } |
| 633 | if (j == tokens) { |
| 634 | return; |
| 635 | } |
| 636 | if (IsDecimalDigit(dir.TokenAt(j)[0]) || dir.TokenAt(j)[0] == '"') { |
| 637 | LineDirective(dir, j, prescanner); |
| 638 | return; |
| 639 | } |
| 640 | std::size_t dirOffset{j}; |
| 641 | std::string dirName{ToLowerCaseLetters(dir.TokenAt(dirOffset).ToString())}; |
| 642 | j = dir.SkipBlanks(j + 1); |
| 643 | CharBlock nameToken; |
| 644 | if (j < tokens && IsLegalIdentifierStart(dir.TokenAt(j)[0])) { |
| 645 | nameToken = dir.TokenAt(j); |
| 646 | } |
| 647 | if (dirName == "line" ) { |
| 648 | LineDirective(dir, j, prescanner); |
| 649 | } else if (dirName == "define" ) { |
| 650 | if (nameToken.empty()) { |
| 651 | prescanner.Say(dir.GetTokenProvenanceRange(j < tokens ? j : tokens - 1), |
| 652 | "#define: missing or invalid name"_err_en_US ); |
| 653 | return; |
| 654 | } |
| 655 | nameToken = SaveTokenAsName(nameToken); |
| 656 | definitions_.erase(nameToken); |
| 657 | if (++j < tokens && dir.TokenAt(j).OnlyNonBlank() == '(') { |
| 658 | j = dir.SkipBlanks(j + 1); |
| 659 | std::vector<std::string> argName; |
| 660 | bool isVariadic{false}; |
| 661 | if (dir.TokenAt(j).OnlyNonBlank() != ')') { |
| 662 | while (true) { |
| 663 | std::string an{dir.TokenAt(j).ToString()}; |
| 664 | if (an == "..." ) { |
| 665 | isVariadic = true; |
| 666 | } else { |
| 667 | if (an.empty() || !IsLegalIdentifierStart(an[0])) { |
| 668 | prescanner.Say(dir.GetTokenProvenanceRange(j), |
| 669 | "#define: missing or invalid argument name"_err_en_US ); |
| 670 | return; |
| 671 | } |
| 672 | argName.push_back(an); |
| 673 | } |
| 674 | j = dir.SkipBlanks(j + 1); |
| 675 | if (j == tokens) { |
| 676 | prescanner.Say(dir.GetTokenProvenanceRange(tokens - 1), |
| 677 | "#define: malformed argument list"_err_en_US ); |
| 678 | return; |
| 679 | } |
| 680 | char punc{dir.TokenAt(j).OnlyNonBlank()}; |
| 681 | if (punc == ')') { |
| 682 | break; |
| 683 | } |
| 684 | if (isVariadic || punc != ',') { |
| 685 | prescanner.Say(dir.GetTokenProvenanceRange(j), |
| 686 | "#define: malformed argument list"_err_en_US ); |
| 687 | return; |
| 688 | } |
| 689 | j = dir.SkipBlanks(j + 1); |
| 690 | if (j == tokens) { |
| 691 | prescanner.Say(dir.GetTokenProvenanceRange(tokens - 1), |
| 692 | "#define: malformed argument list"_err_en_US ); |
| 693 | return; |
| 694 | } |
| 695 | } |
| 696 | if (std::set<std::string>(argName.begin(), argName.end()).size() != |
| 697 | argName.size()) { |
| 698 | prescanner.Say(dir.GetTokenProvenance(dirOffset), |
| 699 | "#define: argument names are not distinct"_err_en_US ); |
| 700 | return; |
| 701 | } |
| 702 | } |
| 703 | j = dir.SkipBlanks(j + 1); |
| 704 | definitions_.emplace(std::make_pair( |
| 705 | nameToken, Definition{argName, dir, j, tokens - j, isVariadic})); |
| 706 | } else { |
| 707 | j = dir.SkipBlanks(j + 1); |
| 708 | definitions_.emplace( |
| 709 | std::make_pair(nameToken, Definition{dir, j, tokens - j})); |
| 710 | } |
| 711 | } else if (dirName == "undef" ) { |
| 712 | if (nameToken.empty()) { |
| 713 | prescanner.Say( |
| 714 | dir.GetIntervalProvenanceRange(dirOffset, tokens - dirOffset), |
| 715 | "# missing or invalid name"_err_en_US ); |
| 716 | } else { |
| 717 | if (dir.IsAnythingLeft(++j)) { |
| 718 | if (prescanner.features().ShouldWarn( |
| 719 | common::UsageWarning::Portability)) { |
| 720 | prescanner.Say(common::UsageWarning::Portability, |
| 721 | dir.GetIntervalProvenanceRange(j, tokens - j), |
| 722 | "#undef: excess tokens at end of directive"_port_en_US ); |
| 723 | } |
| 724 | } else { |
| 725 | definitions_.erase(nameToken); |
| 726 | } |
| 727 | } |
| 728 | } else if (dirName == "ifdef" || dirName == "ifndef" ) { |
| 729 | bool doThen{false}; |
| 730 | if (nameToken.empty()) { |
| 731 | prescanner.Say( |
| 732 | dir.GetIntervalProvenanceRange(dirOffset, tokens - dirOffset), |
| 733 | "#%s: missing name"_err_en_US , dirName); |
| 734 | } else { |
| 735 | if (dir.IsAnythingLeft(++j)) { |
| 736 | if (prescanner.features().ShouldWarn( |
| 737 | common::UsageWarning::Portability)) { |
| 738 | prescanner.Say(common::UsageWarning::Portability, |
| 739 | dir.GetIntervalProvenanceRange(j, tokens - j), |
| 740 | "#%s: excess tokens at end of directive"_port_en_US , dirName); |
| 741 | } |
| 742 | } |
| 743 | doThen = IsNameDefined(nameToken) == (dirName == "ifdef" ); |
| 744 | } |
| 745 | if (doThen) { |
| 746 | ifStack_.push(CanDeadElseAppear::Yes); |
| 747 | } else { |
| 748 | SkipDisabledConditionalCode(dirName, IsElseActive::Yes, prescanner, |
| 749 | dir.GetTokenProvenance(dirOffset)); |
| 750 | } |
| 751 | } else if (dirName == "if" ) { |
| 752 | if (IsIfPredicateTrue(dir, j, tokens - j, prescanner)) { |
| 753 | ifStack_.push(CanDeadElseAppear::Yes); |
| 754 | } else { |
| 755 | SkipDisabledConditionalCode(dirName, IsElseActive::Yes, prescanner, |
| 756 | dir.GetTokenProvenanceRange(dirOffset)); |
| 757 | } |
| 758 | } else if (dirName == "else" ) { |
| 759 | if (dir.IsAnythingLeft(j)) { |
| 760 | if (prescanner.features().ShouldWarn(common::UsageWarning::Portability)) { |
| 761 | prescanner.Say(common::UsageWarning::Portability, |
| 762 | dir.GetIntervalProvenanceRange(j, tokens - j), |
| 763 | "#else: excess tokens at end of directive"_port_en_US ); |
| 764 | } |
| 765 | } |
| 766 | if (ifStack_.empty()) { |
| 767 | prescanner.Say(dir.GetTokenProvenanceRange(dirOffset), |
| 768 | "#else: not nested within #if, #ifdef, or #ifndef"_err_en_US ); |
| 769 | } else if (ifStack_.top() != CanDeadElseAppear::Yes) { |
| 770 | prescanner.Say(dir.GetTokenProvenanceRange(dirOffset), |
| 771 | "#else: already appeared within this #if, #ifdef, or #ifndef"_err_en_US ); |
| 772 | } else { |
| 773 | ifStack_.pop(); |
| 774 | SkipDisabledConditionalCode("else" , IsElseActive::No, prescanner, |
| 775 | dir.GetTokenProvenanceRange(dirOffset)); |
| 776 | } |
| 777 | } else if (dirName == "elif" ) { |
| 778 | if (ifStack_.empty()) { |
| 779 | prescanner.Say(dir.GetTokenProvenanceRange(dirOffset), |
| 780 | "#elif: not nested within #if, #ifdef, or #ifndef"_err_en_US ); |
| 781 | } else if (ifStack_.top() != CanDeadElseAppear::Yes) { |
| 782 | prescanner.Say(dir.GetTokenProvenanceRange(dirOffset), |
| 783 | "#elif: #else previously appeared within this #if, #ifdef, or #ifndef"_err_en_US ); |
| 784 | } else { |
| 785 | ifStack_.pop(); |
| 786 | SkipDisabledConditionalCode("elif" , IsElseActive::No, prescanner, |
| 787 | dir.GetTokenProvenanceRange(dirOffset)); |
| 788 | } |
| 789 | } else if (dirName == "endif" ) { |
| 790 | if (dir.IsAnythingLeft(j)) { |
| 791 | if (prescanner.features().ShouldWarn(common::UsageWarning::Portability)) { |
| 792 | prescanner.Say(common::UsageWarning::Portability, |
| 793 | dir.GetIntervalProvenanceRange(j, tokens - j), |
| 794 | "#endif: excess tokens at end of directive"_port_en_US ); |
| 795 | } |
| 796 | } else if (ifStack_.empty()) { |
| 797 | prescanner.Say(dir.GetTokenProvenanceRange(dirOffset), |
| 798 | "#endif: no #if, #ifdef, or #ifndef"_err_en_US ); |
| 799 | } else { |
| 800 | ifStack_.pop(); |
| 801 | } |
| 802 | } else if (dirName == "error" ) { |
| 803 | prescanner.Say( |
| 804 | dir.GetIntervalProvenanceRange(dirOffset, tokens - dirOffset), |
| 805 | "%s"_err_en_US , dir.ToString()); |
| 806 | } else if (dirName == "warning" ) { |
| 807 | prescanner.Say( |
| 808 | dir.GetIntervalProvenanceRange(dirOffset, tokens - dirOffset), |
| 809 | "%s"_warn_en_US , dir.ToString()); |
| 810 | } else if (dirName == "comment" || dirName == "note" ) { |
| 811 | prescanner.Say( |
| 812 | dir.GetIntervalProvenanceRange(dirOffset, tokens - dirOffset), |
| 813 | "%s"_en_US , dir.ToString()); |
| 814 | } else if (dirName == "include" ) { |
| 815 | if (j == tokens) { |
| 816 | prescanner.Say( |
| 817 | dir.GetIntervalProvenanceRange(dirOffset, tokens - dirOffset), |
| 818 | "#include: missing name of file to include"_err_en_US ); |
| 819 | return; |
| 820 | } |
| 821 | std::optional<std::string> prependPath; |
| 822 | TokenSequence path{dir, j, tokens - j}; |
| 823 | std::string include{path.TokenAt(0).ToString()}; |
| 824 | if (include != "<" && include.substr(0, 1) != "\"" && |
| 825 | include.substr(0, 1) != "'" ) { |
| 826 | path = ReplaceMacros(path, prescanner); |
| 827 | include = path.empty() ? ""s : path.TokenAt(0).ToString(); |
| 828 | } |
| 829 | auto pathTokens{path.SizeInTokens()}; |
| 830 | std::size_t k{0}; |
| 831 | if (include == "<" ) { // #include <foo> |
| 832 | k = 1; |
| 833 | if (k >= pathTokens) { |
| 834 | prescanner.Say(dir.GetIntervalProvenanceRange(j, pathTokens), |
| 835 | "#include: file name missing"_err_en_US ); |
| 836 | return; |
| 837 | } |
| 838 | while (k < pathTokens && path.TokenAt(k) != ">" ) { |
| 839 | ++k; |
| 840 | } |
| 841 | if (k >= pathTokens) { |
| 842 | if (prescanner.features().ShouldWarn( |
| 843 | common::UsageWarning::Portability)) { |
| 844 | prescanner.Say(common::UsageWarning::Portability, |
| 845 | dir.GetIntervalProvenanceRange(j, tokens - j), |
| 846 | "#include: expected '>' at end of included file"_port_en_US ); |
| 847 | } |
| 848 | } |
| 849 | TokenSequence braced{path, 1, k - 1}; |
| 850 | include = braced.ToString(); |
| 851 | } else if ((include.substr(0, 1) == "\"" || include.substr(0, 1) == "'" ) && |
| 852 | include.front() == include.back()) { |
| 853 | // #include "foo" and #include 'foo' |
| 854 | include = include.substr(1, include.size() - 2); |
| 855 | // Start search in directory of file containing the directive |
| 856 | auto prov{dir.GetTokenProvenanceRange(dirOffset).start()}; |
| 857 | if (const auto *currentFile{allSources_.GetSourceFile(prov)}) { |
| 858 | prependPath = DirectoryName(currentFile->path()); |
| 859 | } |
| 860 | } else { |
| 861 | prescanner.Say(dir.GetTokenProvenanceRange(j < tokens ? j : tokens - 1), |
| 862 | "#include %s: expected name of file to include"_err_en_US , |
| 863 | path.ToString()); |
| 864 | return; |
| 865 | } |
| 866 | if (include.empty()) { |
| 867 | prescanner.Say(dir.GetTokenProvenanceRange(dirOffset), |
| 868 | "#include %s: empty include file name"_err_en_US , path.ToString()); |
| 869 | return; |
| 870 | } |
| 871 | k = path.SkipBlanks(k + 1); |
| 872 | if (k < pathTokens && path.TokenAt(k).ToString() != "!" ) { |
| 873 | if (prescanner.features().ShouldWarn(common::UsageWarning::Portability)) { |
| 874 | prescanner.Say(common::UsageWarning::Portability, |
| 875 | dir.GetIntervalProvenanceRange(j, tokens - j), |
| 876 | "#include: extra stuff ignored after file name"_port_en_US ); |
| 877 | } |
| 878 | } |
| 879 | std::string buf; |
| 880 | llvm::raw_string_ostream error{buf}; |
| 881 | if (const SourceFile * |
| 882 | included{allSources_.Open(include, error, std::move(prependPath))}) { |
| 883 | if (included->bytes() > 0) { |
| 884 | ProvenanceRange fileRange{ |
| 885 | allSources_.AddIncludedFile(*included, dir.GetProvenanceRange())}; |
| 886 | Prescanner{prescanner, *this, /*isNestedInIncludeDirective=*/true} |
| 887 | .set_encoding(included->encoding()) |
| 888 | .Prescan(fileRange); |
| 889 | } |
| 890 | } else { |
| 891 | prescanner.Say(dir.GetTokenProvenanceRange(j), "#include: %s"_err_en_US , |
| 892 | error.str()); |
| 893 | } |
| 894 | } else { |
| 895 | prescanner.Say(dir.GetTokenProvenanceRange(dirOffset), |
| 896 | "#%s: unknown or unimplemented directive"_err_en_US , dirName); |
| 897 | } |
| 898 | } |
| 899 | |
| 900 | void Preprocessor::PrintMacros(llvm::raw_ostream &out) const { |
| 901 | // std::set is ordered. Use that to print the macros in |
| 902 | // alphabetical order. |
| 903 | std::set<std::string> macroNames; |
| 904 | for (const auto &[name, _] : definitions_) { |
| 905 | macroNames.insert(name.ToString()); |
| 906 | } |
| 907 | |
| 908 | for (const std::string &name : macroNames) { |
| 909 | out << "#define " << name; |
| 910 | definitions_.at(name).Print(out, name.c_str()); |
| 911 | out << '\n'; |
| 912 | } |
| 913 | } |
| 914 | |
| 915 | CharBlock Preprocessor::SaveTokenAsName(const CharBlock &t) { |
| 916 | names_.push_back(t.ToString()); |
| 917 | return {names_.back().data(), names_.back().size()}; |
| 918 | } |
| 919 | |
| 920 | bool Preprocessor::IsNameDefined(const CharBlock &token) { |
| 921 | return definitions_.find(token) != definitions_.end(); |
| 922 | } |
| 923 | |
| 924 | bool Preprocessor::IsNameDefinedEmpty(const CharBlock &token) { |
| 925 | if (auto it{definitions_.find(token)}; it != definitions_.end()) { |
| 926 | const Definition &def{it->second}; |
| 927 | return !def.isFunctionLike() && def.replacement().SizeInChars() == 0; |
| 928 | } else { |
| 929 | return false; |
| 930 | } |
| 931 | } |
| 932 | |
| 933 | bool Preprocessor::IsFunctionLikeDefinition(const CharBlock &token) { |
| 934 | auto it{definitions_.find(token)}; |
| 935 | return it != definitions_.end() && it->second.isFunctionLike(); |
| 936 | } |
| 937 | |
| 938 | static std::string GetDirectiveName( |
| 939 | const TokenSequence &line, std::size_t *rest) { |
| 940 | std::size_t tokens{line.SizeInTokens()}; |
| 941 | std::size_t j{line.SkipBlanks(0)}; |
| 942 | if (j == tokens || line.TokenAt(j).ToString() != "#" ) { |
| 943 | *rest = tokens; |
| 944 | return "" ; |
| 945 | } |
| 946 | j = line.SkipBlanks(j + 1); |
| 947 | if (j == tokens) { |
| 948 | *rest = tokens; |
| 949 | return "" ; |
| 950 | } |
| 951 | *rest = line.SkipBlanks(j + 1); |
| 952 | return ToLowerCaseLetters(line.TokenAt(j).ToString()); |
| 953 | } |
| 954 | |
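| | // Skip the lines of a false conditional branch until the matching #endif, or |
| | // until an #else or a true #elif at the same nesting level reactivates |
| | // processing. |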
| 955 | void Preprocessor::SkipDisabledConditionalCode(const std::string &dirName, |
| 956 | IsElseActive isElseActive, Prescanner &prescanner, |
| 957 | ProvenanceRange provenanceRange) { |
| 958 | int nesting{0}; |
| 959 | while (!prescanner.IsAtEnd()) { |
| 960 | if (!prescanner.IsNextLinePreprocessorDirective()) { |
| 961 | prescanner.NextLine(); |
| 962 | continue; |
| 963 | } |
| 964 | TokenSequence line{prescanner.TokenizePreprocessorDirective()}; |
| 965 | std::size_t rest{0}; |
| 966 | std::string dn{GetDirectiveName(line, &rest)}; |
| 967 | if (dn == "ifdef" || dn == "ifndef" || dn == "if" ) { |
| 968 | ++nesting; |
| 969 | } else if (dn == "endif" ) { |
| 970 | if (nesting-- == 0) { |
| 971 | return; |
| 972 | } |
| 973 | } else if (isElseActive == IsElseActive::Yes && nesting == 0) { |
| 974 | if (dn == "else" ) { |
| 975 | ifStack_.push(CanDeadElseAppear::No); |
| 976 | return; |
| 977 | } |
| 978 | if (dn == "elif" && |
| 979 | IsIfPredicateTrue( |
| 980 | line, rest, line.SizeInTokens() - rest, prescanner)) { |
| 981 | ifStack_.push(CanDeadElseAppear::Yes); |
| 982 | return; |
| 983 | } |
| 984 | } |
| 985 | } |
| 986 | prescanner.Say(provenanceRange, "#%s: missing #endif"_err_en_US , dirName); |
| 987 | } |
| 988 | |
| 989 | // Precedence level codes used here to accommodate mixed Fortran and C: |
| 990 | // 15: parentheses and constants, logical !, bitwise ~ |
| 991 | // 14: unary + and - |
| 992 | // 13: ** |
| 993 | // 12: *, /, % (modulus) |
| 994 | // 11: + and - |
| 995 | // 10: << and >> |
| 996 | // 9: bitwise & |
| 997 | // 8: bitwise ^ |
| 998 | // 7: bitwise | |
| 999 | // 6: relations (.EQ., ==, &c.) |
| 1000 | // 5: .NOT. |
| 1001 | // 4: .AND., && |
| 1002 | // 3: .OR., || |
| 1003 | // 2: .EQV. and .NEQV. / .XOR. |
| 1004 | // 1: ? : |
| 1005 | // 0: , |
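| | // Evaluate a #if/#elif expression by precedence-climbing recursion. |
| | // Relational and logical operators follow the Fortran convention of -1 (all |
| | // bits set) for true and 0 for false, so both "nonzero is true" tests and |
| | // bitwise combinations work; e.g. "1 < 2" yields -1, and "(1 < 2) && 1" is |
| | // nonzero. |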
| 1006 | static std::int64_t ExpressionValue(const TokenSequence &token, |
| 1007 | int minimumPrecedence, std::size_t *atToken, |
| 1008 | std::optional<Message> *error) { |
| 1009 | enum Operator { |
| 1010 | PARENS, |
| 1011 | CONST, |
| 1012 | NOTZERO, // ! |
| 1013 | COMPLEMENT, // ~ |
| 1014 | UPLUS, |
| 1015 | UMINUS, |
| 1016 | POWER, |
| 1017 | TIMES, |
| 1018 | DIVIDE, |
| 1019 | MODULUS, |
| 1020 | ADD, |
| 1021 | SUBTRACT, |
| 1022 | LEFTSHIFT, |
| 1023 | RIGHTSHIFT, |
| 1024 | BITAND, |
| 1025 | BITXOR, |
| 1026 | BITOR, |
| 1027 | LT, |
| 1028 | LE, |
| 1029 | EQ, |
| 1030 | NE, |
| 1031 | GE, |
| 1032 | GT, |
| 1033 | NOT, |
| 1034 | AND, |
| 1035 | OR, |
| 1036 | EQV, |
| 1037 | NEQV, |
| 1038 | SELECT, |
| 1039 | COMMA |
| 1040 | }; |
| 1041 | static const int precedence[]{ |
| 1042 | 15, 15, 15, 15, // (), const, !, ~ |
| 1043 | 14, 14, // unary +, - |
| 1044 | 13, 12, 12, 12, 11, 11, 10, 10, // **, *, /, %, +, -, <<, >> |
| 1045 | 9, 8, 7, // &, ^, | |
| 1046 | 6, 6, 6, 6, 6, 6, // relations .LT. to .GT. |
| 1047 | 5, 4, 3, 2, 2, // .NOT., .AND., .OR., .EQV., .NEQV. |
| 1048 | 1, 0 // ?: and , |
| 1049 | }; |
| 1050 | static const int operandPrecedence[]{0, -1, 15, 15, 15, 15, 13, 12, 12, 12, |
| 1051 | 11, 11, 11, 11, 9, 8, 7, 7, 7, 7, 7, 7, 7, 6, 4, 3, 3, 3, 1, 0}; |
| 1052 | |
| 1053 | static std::map<std::string, enum Operator> opNameMap; |
| 1054 | if (opNameMap.empty()) { |
| 1055 | opNameMap["(" ] = PARENS; |
| 1056 | opNameMap["!" ] = NOTZERO; |
| 1057 | opNameMap["~" ] = COMPLEMENT; |
| 1058 | opNameMap["**" ] = POWER; |
| 1059 | opNameMap["*" ] = TIMES; |
| 1060 | opNameMap["/" ] = DIVIDE; |
| 1061 | opNameMap["%" ] = MODULUS; |
| 1062 | opNameMap["+" ] = ADD; |
| 1063 | opNameMap["-" ] = SUBTRACT; |
| 1064 | opNameMap["<<" ] = LEFTSHIFT; |
| 1065 | opNameMap[">>" ] = RIGHTSHIFT; |
| 1066 | opNameMap["&" ] = BITAND; |
| 1067 | opNameMap["^" ] = BITXOR; |
| 1068 | opNameMap["|" ] = BITOR; |
| 1069 | opNameMap[".lt." ] = opNameMap["<" ] = LT; |
| 1070 | opNameMap[".le." ] = opNameMap["<=" ] = LE; |
| 1071 | opNameMap[".eq." ] = opNameMap["==" ] = EQ; |
| 1072 | opNameMap[".ne." ] = opNameMap["/=" ] = opNameMap["!=" ] = NE; |
| 1073 | opNameMap[".ge." ] = opNameMap[">=" ] = GE; |
| 1074 | opNameMap[".gt." ] = opNameMap[">" ] = GT; |
| 1075 | opNameMap[".not." ] = NOT; |
| 1076 | opNameMap[".and." ] = opNameMap[".a." ] = opNameMap["&&" ] = AND; |
| 1077 | opNameMap[".or." ] = opNameMap[".o." ] = opNameMap["||" ] = OR; |
| 1078 | opNameMap[".eqv." ] = EQV; |
| 1079 | opNameMap[".neqv." ] = opNameMap[".xor." ] = opNameMap[".x." ] = NEQV; |
| 1080 | opNameMap["?" ] = SELECT; |
| 1081 | opNameMap["," ] = COMMA; |
| 1082 | } |
| 1083 | |
| 1084 | std::size_t tokens{token.SizeInTokens()}; |
| 1085 | CHECK(tokens > 0); |
| 1086 | if (*atToken >= tokens) { |
| 1087 | *error = |
| 1088 | Message{token.GetProvenanceRange(), "incomplete expression"_err_en_US }; |
| 1089 | return 0; |
| 1090 | } |
| 1091 | |
| 1092 | // Parse and evaluate a primary or a unary operator and its operand. |
| 1093 | std::size_t opAt{*atToken}; |
| 1094 | std::string t{token.TokenAt(opAt).ToString()}; |
| 1095 | enum Operator op; |
| 1096 | std::int64_t left{0}; |
| 1097 | if (t == "(" ) { |
| 1098 | op = PARENS; |
| 1099 | } else if (IsDecimalDigit(t[0])) { |
| 1100 | op = CONST; |
| 1101 | std::size_t consumed{0}; |
| 1102 | left = std::stoll(t, &consumed, 0 /*base to be detected*/); |
| 1103 | if (consumed < t.size()) { |
| 1104 | *error = Message{token.GetTokenProvenanceRange(opAt), |
| 1105 | "Uninterpretable numeric constant '%s'"_err_en_US , t}; |
| 1106 | return 0; |
| 1107 | } |
| 1108 | } else if (IsLegalIdentifierStart(t[0])) { |
| 1109 | // undefined macro name -> zero |
| 1110 | // TODO: BOZ constants? |
| 1111 | op = CONST; |
| 1112 | } else if (t == "+" ) { |
| 1113 | op = UPLUS; |
| 1114 | } else if (t == "-" ) { |
| 1115 | op = UMINUS; |
| 1116 | } else if (t == "." && *atToken + 2 < tokens && |
| 1117 | ToLowerCaseLetters(token.TokenAt(*atToken + 1).ToString()) == "not" && |
| 1118 | token.TokenAt(*atToken + 2).ToString() == "." ) { |
| 1119 | op = NOT; |
| 1120 | *atToken += 2; |
| 1121 | } else { |
| 1122 | auto it{opNameMap.find(t)}; |
| 1123 | if (it != opNameMap.end()) { |
| 1124 | op = it->second; |
| 1125 | } else { |
| 1126 | *error = Message{token.GetTokenProvenanceRange(opAt), |
| 1127 | "operand expected in expression"_err_en_US }; |
| 1128 | return 0; |
| 1129 | } |
| 1130 | } |
| 1131 | if (precedence[op] < minimumPrecedence) { |
| 1132 | *error = Message{token.GetTokenProvenanceRange(opAt), |
| 1133 | "operator precedence error"_err_en_US }; |
| 1134 | return 0; |
| 1135 | } |
| 1136 | ++*atToken; |
| 1137 | if (op != CONST) { |
| 1138 | left = ExpressionValue(token, operandPrecedence[op], atToken, error); |
| 1139 | if (*error) { |
| 1140 | return 0; |
| 1141 | } |
| 1142 | switch (op) { |
| 1143 | case PARENS: |
| 1144 | if (*atToken < tokens && token.TokenAt(*atToken).OnlyNonBlank() == ')') { |
| 1145 | ++*atToken; |
| 1146 | break; |
| 1147 | } |
| 1148 | if (*atToken >= tokens) { |
| 1149 | *error = Message{token.GetProvenanceRange(), |
| 1150 | "')' missing from expression"_err_en_US }; |
| 1151 | } else { |
| 1152 | *error = Message{ |
| 1153 | token.GetTokenProvenanceRange(*atToken), "expected ')'"_err_en_US }; |
| 1154 | } |
| 1155 | return 0; |
| 1156 | case NOTZERO: |
| 1157 | left = !left; |
| 1158 | break; |
| 1159 | case COMPLEMENT: |
| 1160 | left = ~left; |
| 1161 | break; |
| 1162 | case UPLUS: |
| 1163 | break; |
| 1164 | case UMINUS: |
| 1165 | left = -left; |
| 1166 | break; |
| 1167 | case NOT: |
| 1168 | left = -!left; |
| 1169 | break; |
| 1170 | default: |
| 1171 | CRASH_NO_CASE; |
| 1172 | } |
| 1173 | } |
| 1174 | |
| 1175 | // Parse and evaluate binary operators and their second operands, if present. |
| 1176 | while (*atToken < tokens) { |
| 1177 | int advance{1}; |
| 1178 | t = token.TokenAt(*atToken).ToString(); |
| 1179 | if (t == "." && *atToken + 2 < tokens && |
| 1180 | token.TokenAt(*atToken + 2).ToString() == "." ) { |
| 1181 | t += ToLowerCaseLetters(token.TokenAt(*atToken + 1).ToString()) + '.'; |
| 1182 | advance = 3; |
| 1183 | } |
| 1184 | auto it{opNameMap.find(t)}; |
| 1185 | if (it == opNameMap.end()) { |
| 1186 | break; |
| 1187 | } |
| 1188 | op = it->second; |
| 1189 | if (op < POWER || precedence[op] < minimumPrecedence) { |
| 1190 | break; |
| 1191 | } |
| 1192 | opAt = *atToken; |
| 1193 | *atToken += advance; |
| 1194 | |
| 1195 | std::int64_t right{ |
| 1196 | ExpressionValue(token, operandPrecedence[op], atToken, error)}; |
| 1197 | if (*error) { |
| 1198 | return 0; |
| 1199 | } |
| 1200 | |
| 1201 | switch (op) { |
| 1202 | case POWER: |
| 1203 | if (left == 0) { |
| 1204 | if (right < 0) { |
| 1205 | *error = Message{token.GetTokenProvenanceRange(opAt), |
| 1206 | "0 ** negative power"_err_en_US }; |
| 1207 | } |
| 1208 | } else if (left != 1 && right != 1) { |
| 1209 | if (right <= 0) { |
| 1210 | left = !right; |
| 1211 | } else { |
| 1212 | std::int64_t power{1}; |
| 1213 | for (; right > 0; --right) { |
| 1214 | if ((power * left) / left != power) { |
| 1215 | *error = Message{token.GetTokenProvenanceRange(opAt), |
| 1216 | "overflow in exponentation"_err_en_US }; |
| 1217 | left = 1; |
| 1218 | } |
| 1219 | power *= left; |
| 1220 | } |
| 1221 | left = power; |
| 1222 | } |
| 1223 | } |
| 1224 | break; |
| 1225 | case TIMES: |
| 1226 | if (left != 0 && right != 0 && ((left * right) / left) != right) { |
| 1227 | *error = Message{token.GetTokenProvenanceRange(opAt), |
| 1228 | "overflow in multiplication"_err_en_US }; |
| 1229 | } |
| 1230 | left = left * right; |
| 1231 | break; |
| 1232 | case DIVIDE: |
| 1233 | if (right == 0) { |
| 1234 | *error = Message{ |
| 1235 | token.GetTokenProvenanceRange(opAt), "division by zero"_err_en_US }; |
| 1236 | left = 0; |
| 1237 | } else { |
| 1238 | left = left / right; |
| 1239 | } |
| 1240 | break; |
| 1241 | case MODULUS: |
| 1242 | if (right == 0) { |
| 1243 | *error = Message{ |
| 1244 | token.GetTokenProvenanceRange(opAt), "modulus by zero"_err_en_US }; |
| 1245 | left = 0; |
| 1246 | } else { |
| 1247 | left = left % right; |
| 1248 | } |
| 1249 | break; |
| 1250 | case ADD: |
| 1251 | if ((left < 0) == (right < 0) && (left < 0) != (left + right < 0)) { |
| 1252 | *error = Message{token.GetTokenProvenanceRange(opAt), |
| 1253 | "overflow in addition"_err_en_US }; |
| 1254 | } |
| 1255 | left = left + right; |
| 1256 | break; |
| 1257 | case SUBTRACT: |
| 1258 | if ((left < 0) != (right < 0) && (left < 0) == (left - right < 0)) { |
| 1259 | *error = Message{token.GetTokenProvenanceRange(opAt), |
| 1260 | "overflow in subtraction"_err_en_US }; |
| 1261 | } |
| 1262 | left = left - right; |
| 1263 | break; |
| 1264 | case LEFTSHIFT: |
| 1265 | if (right < 0 || right > 64) { |
| 1266 | *error = Message{token.GetTokenProvenanceRange(opAt), |
| 1267 | "bad left shift count"_err_en_US }; |
| 1268 | } |
| 1269 | left = right >= 64 ? 0 : left << right; |
| 1270 | break; |
| 1271 | case RIGHTSHIFT: |
| 1272 | if (right < 0 || right > 64) { |
| 1273 | *error = Message{token.GetTokenProvenanceRange(opAt), |
| 1274 | "bad right shift count"_err_en_US }; |
| 1275 | } |
| 1276 | left = right >= 64 ? 0 : left >> right; |
| 1277 | break; |
| 1278 | case BITAND: |
| 1279 | case AND: |
| 1280 | left = left & right; |
| 1281 | break; |
| 1282 | case BITXOR: |
| 1283 | left = left ^ right; |
| 1284 | break; |
| 1285 | case BITOR: |
| 1286 | case OR: |
| 1287 | left = left | right; |
| 1288 | break; |
| 1289 | case LT: |
| 1290 | left = -(left < right); |
| 1291 | break; |
| 1292 | case LE: |
| 1293 | left = -(left <= right); |
| 1294 | break; |
| 1295 | case EQ: |
| 1296 | left = -(left == right); |
| 1297 | break; |
| 1298 | case NE: |
| 1299 | left = -(left != right); |
| 1300 | break; |
| 1301 | case GE: |
| 1302 | left = -(left >= right); |
| 1303 | break; |
| 1304 | case GT: |
| 1305 | left = -(left > right); |
| 1306 | break; |
| 1307 | case EQV: |
| 1308 | left = -(!left == !right); |
| 1309 | break; |
| 1310 | case NEQV: |
| 1311 | left = -(!left != !right); |
| 1312 | break; |
| 1313 | case SELECT: |
| 1314 | if (*atToken >= tokens || token.TokenAt(*atToken).ToString() != ":" ) { |
| 1315 | *error = Message{token.GetTokenProvenanceRange(opAt), |
| 1316 | "':' required in selection expression"_err_en_US }; |
| 1317 | return 0; |
| 1318 | } else { |
| 1319 | ++*atToken; |
| 1320 | std::int64_t third{ |
| 1321 | ExpressionValue(token, operandPrecedence[op], atToken, error)}; |
| 1322 | left = left != 0 ? right : third; |
| 1323 | } |
| 1324 | break; |
| 1325 | case COMMA: |
| 1326 | left = right; |
| 1327 | break; |
| 1328 | default: |
| 1329 | CRASH_NO_CASE; |
| 1330 | } |
| 1331 | } |
| 1332 | return left; |
| 1333 | } |
| 1334 | |
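| | // Evaluate the controlling expression of an #if or #elif: apply macro |
| | // replacement (with "defined" handling), remove blanks, and evaluate; any |
| | // nonzero value is true. Excess tokens after the expression are diagnosed |
| | // unless they begin with '!' (a Fortran-style comment). |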
| 1335 | bool Preprocessor::IsIfPredicateTrue(const TokenSequence &directive, |
| 1336 | std::size_t first, std::size_t exprTokens, Prescanner &prescanner) { |
| 1337 | TokenSequence expr{directive, first, exprTokens}; |
| 1338 | TokenSequence replaced{ |
| 1339 | ReplaceMacros(expr, prescanner, nullptr, /*inIfExpression=*/true)}; |
| 1340 | if (replaced.HasBlanks()) { |
| 1341 | replaced.RemoveBlanks(); |
| 1342 | } |
| 1343 | if (replaced.empty()) { |
| 1344 | prescanner.Say(expr.GetProvenanceRange(), "empty expression"_err_en_US ); |
| 1345 | return false; |
| 1346 | } |
| 1347 | std::size_t atToken{0}; |
| 1348 | std::optional<Message> error; |
| 1349 | bool result{ExpressionValue(replaced, 0, &atToken, &error) != 0}; |
| 1350 | if (error) { |
| 1351 | prescanner.Say(std::move(*error)); |
| 1352 | } else if (atToken < replaced.SizeInTokens() && |
| 1353 | replaced.TokenAt(atToken).ToString() != "!" ) { |
| 1354 | prescanner.Say(replaced.GetIntervalProvenanceRange( |
| 1355 | atToken, replaced.SizeInTokens() - atToken), |
| 1356 | atToken == 0 ? "could not parse any expression"_err_en_US |
| 1357 | : "excess characters after expression"_err_en_US ); |
| 1358 | } |
| 1359 | return result; |
| 1360 | } |
| 1361 | |
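| | // Process a #line directive or a bare line marker (e.g. # 123 "file"): |
| | // record the file name and/or line number override for the current source |
| | // file so that subsequent positions are reported accordingly. |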
| 1362 | void Preprocessor::LineDirective( |
| 1363 | const TokenSequence &dir, std::size_t j, Prescanner &prescanner) { |
| 1364 | std::size_t tokens{dir.SizeInTokens()}; |
| 1365 | const std::string *linePath{nullptr}; |
| 1366 | std::optional<int> lineNumber; |
| 1367 | SourceFile *sourceFile{nullptr}; |
| 1368 | std::optional<SourcePosition> pos; |
| 1369 | for (; j < tokens; j = dir.SkipBlanks(j + 1)) { |
| 1370 | std::string tstr{dir.TokenAt(j).ToString()}; |
| 1371 | Provenance provenance{dir.GetTokenProvenance(j)}; |
| 1372 | if (!pos) { |
| 1373 | pos = allSources_.GetSourcePosition(provenance); |
| 1374 | } |
| 1375 | if (!sourceFile && pos) { |
| 1376 | sourceFile = const_cast<SourceFile *>(&*pos->sourceFile); |
| 1377 | } |
| 1378 | if (tstr.front() == '"' && tstr.back() == '"') { |
| 1379 | tstr = tstr.substr(1, tstr.size() - 2); |
| 1380 | if (!tstr.empty() && sourceFile) { |
| 1381 | linePath = &sourceFile->SavePath(std::move(tstr)); |
| 1382 | } |
| 1383 | } else if (IsDecimalDigit(tstr[0])) { |
| 1384 | if (!lineNumber) { // ignore later column number |
| 1385 | int ln{0}; |
| 1386 | for (char c : tstr) { |
| 1387 | if (IsDecimalDigit(c)) { |
| 1388 | int nln{10 * ln + c - '0'}; |
| 1389 | if (nln / 10 == ln && nln % 10 == c - '0') { |
| 1390 | ln = nln; |
| 1391 | continue; |
| 1392 | } |
| 1393 | } |
| 1394 | prescanner.Say(provenance, |
| 1395 | "bad line number '%s' in #line directive"_err_en_US , tstr); |
| 1396 | return; |
| 1397 | } |
| 1398 | lineNumber = ln; |
| 1399 | } |
| 1400 | } else { |
| 1401 | prescanner.Say( |
| 1402 | provenance, "bad token '%s' in #line directive"_err_en_US , tstr); |
| 1403 | return; |
| 1404 | } |
| 1405 | } |
| 1406 | if (lineNumber && sourceFile) { |
| 1407 | CHECK(pos); |
| 1408 | if (!linePath) { |
| 1409 | linePath = &*pos->path; |
| 1410 | } |
| 1411 | sourceFile->LineDirective(pos->trueLineNumber + 1, *linePath, *lineNumber); |
| 1412 | } |
| 1413 | } |
| 1414 | |
| 1415 | } // namespace Fortran::parser |
| 1416 | |