// Part of the Carbon Language project, under the Apache License v2.0 with LLVM
// Exceptions. See /LICENSE for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

#ifndef CARBON_TOOLCHAIN_PARSE_TREE_NODE_DIAGNOSTIC_CONVERTER_H_
#define CARBON_TOOLCHAIN_PARSE_TREE_NODE_DIAGNOSTIC_CONVERTER_H_

#include "toolchain/diagnostics/diagnostic_emitter.h"
#include "toolchain/lex/tokenized_buffer.h"
#include "toolchain/parse/tree.h"

namespace Carbon::Parse {
  10. class NodeLoc {
  11. public:
  12. // NOLINTNEXTLINE(google-explicit-constructor)
  13. NodeLoc(NodeId node_id) : NodeLoc(node_id, false) {}
  14. NodeLoc(NodeId node_id, bool token_only)
  15. : node_id_(node_id), token_only_(token_only) {}
  16. // TODO: Have some other way of representing diagnostic that applies to a file
  17. // as a whole.
  18. // NOLINTNEXTLINE(google-explicit-constructor)
  19. NodeLoc(InvalidNodeId node_id) : NodeLoc(node_id, false) {}
  20. auto node_id() const -> NodeId { return node_id_; }
  21. auto token_only() const -> bool { return token_only_; }
  22. private:
  23. NodeId node_id_;
  24. bool token_only_;
  25. };
  26. class NodeLocConverter : public DiagnosticConverter<NodeLoc> {
  27. public:
  28. explicit NodeLocConverter(const Lex::TokenizedBuffer* tokens,
  29. llvm::StringRef filename, const Tree* parse_tree)
  30. : token_converter_(tokens),
  31. filename_(filename),
  32. parse_tree_(parse_tree) {}
  33. // Map the given token into a diagnostic location.
  34. auto ConvertLoc(NodeLoc node_loc, ContextFnT context_fn) const
  35. -> DiagnosticLoc override {
  36. // Support the invalid token as a way to emit only the filename, when there
  37. // is no line association.
  38. if (!node_loc.node_id().is_valid()) {
  39. return {.filename = filename_};
  40. }
  41. if (node_loc.token_only()) {
  42. return token_converter_.ConvertLoc(
  43. parse_tree_->node_token(node_loc.node_id()), context_fn);
  44. }
  45. // Construct a location that encompasses all tokens that descend from this
  46. // node (including the root).
  47. Lex::TokenIndex start_token = parse_tree_->node_token(node_loc.node_id());
  48. Lex::TokenIndex end_token = start_token;
  49. for (NodeId desc : parse_tree_->postorder(node_loc.node_id())) {
  50. Lex::TokenIndex desc_token = parse_tree_->node_token(desc);
  51. if (!desc_token.is_valid()) {
  52. continue;
  53. }
  54. if (desc_token < start_token) {
  55. start_token = desc_token;
  56. } else if (desc_token > end_token) {
  57. end_token = desc_token;
  58. }
  59. }
  60. DiagnosticLoc start_loc =
  61. token_converter_.ConvertLoc(start_token, context_fn);
  62. if (start_token == end_token) {
  63. return start_loc;
  64. }
  65. DiagnosticLoc end_loc = token_converter_.ConvertLoc(end_token, context_fn);
  66. // For multiline locations we simply return the rest of the line for now
  67. // since true multiline locations are not yet supported.
  68. if (start_loc.line_number != end_loc.line_number) {
  69. start_loc.length = start_loc.line.size() - start_loc.column_number + 1;
  70. } else {
  71. if (start_loc.column_number != end_loc.column_number) {
  72. start_loc.length =
  73. end_loc.column_number + end_loc.length - start_loc.column_number;
  74. }
  75. }
  76. return start_loc;
  77. }
  78. private:
  79. Lex::TokenDiagnosticConverter token_converter_;
  80. llvm::StringRef filename_;
  81. const Tree* parse_tree_;
  82. };
}  // namespace Carbon::Parse

#endif  // CARBON_TOOLCHAIN_PARSE_TREE_NODE_DIAGNOSTIC_CONVERTER_H_