Skip to content

Commit

Permalink
Use whisker::expected for lex_result
Browse files Browse the repository at this point in the history
Summary: This change makes it consistent with `parse_result` in {D62281498}.

Reviewed By: yoney

Differential Revision: D62281774

fbshipit-source-id: 20e24f9d7ef62de62baef0762edc0b91bf39b981
  • Loading branch information
praihan authored and facebook-github-bot committed Sep 11, 2024
1 parent 11f263d commit 16ad6b1
Showing 1 changed file with 48 additions and 25 deletions.
73 changes: 48 additions & 25 deletions third-party/thrift/src/thrift/compiler/whisker/lexer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
#include <thrift/compiler/whisker/detail/overload.h>
#include <thrift/compiler/whisker/detail/string.h>
#include <thrift/compiler/whisker/diagnostic.h>
#include <thrift/compiler/whisker/expected.h>
#include <thrift/compiler/whisker/lexer.h>

#include <cstdlib>
Expand Down Expand Up @@ -103,14 +104,13 @@ void skip_whitespace(detail::lexer_scan_window* scan) {
}

/**
 * Result of a scan which resulted in a token.
 * The (now advanced) scan_window is packaged with the token so the lexer can
 * move up its cursor.
 */
struct lexed_token {
  lexed_token(token t, const detail::lexer_scan_window& advanced)
      : value_{std::move(t)}, new_head_{advanced.make_fresh()} {}

  /**
   * Advances the provided scan_window to the end of the token, then returns
   * the token. This is rvalue-qualified: consuming the token invalidates this
   * lexed_token, which has no further use afterwards.
   */
  [[nodiscard]] token advance_to_token(detail::lexer_scan_window* scan) && {
    assert(scan);
    *scan = std::move(new_head_);
    return std::move(value_);
  }

 private:
  // The token produced by the scan.
  token value_;
  // A fresh scan_window positioned immediately after the token's end.
  detail::lexer_scan_window new_head_;
};

/**
 * Marker struct to indicate that a scan did not result in a token.
 * Serves as the error type of lex_result's underlying whisker::expected, so a
 * failed scan carries no payload beyond "no token matched here".
 */
struct no_lex_result {};

/**
 * Result of a scan which either produced a token or did not match.
 * On success, the (now advanced) scan_window is packaged with the token so the
 * lexer can move up its cursor. Implemented on top of
 * whisker::expected<lexed_token, no_lex_result> for consistency with
 * parse_result.
 */
struct [[nodiscard]] lex_result : private expected<lexed_token, no_lex_result> {
 private:
  using base = expected<lexed_token, no_lex_result>;

 public:
  lex_result(token t, const detail::lexer_scan_window& advanced)
      : base(std::in_place, std::move(t), advanced) {}
  /* implicit */ lex_result(no_lex_result) : base(unexpect) {}

  /**
   * Advances the provided scan_window past the lexed token and returns the
   * token. Precondition: has_value() is true.
   */
  [[nodiscard]] token advance_to_token(detail::lexer_scan_window* scan) && {
    assert(has_value());
    return std::move(**this).advance_to_token(scan);
  }

  // Expose only the observers from the underlying expected; the mutating API
  // stays private so callers go through advance_to_token().
  using base::operator bool;
  using base::has_value;
  using base::operator*;
  using base::operator->;
};

// Returns the skipped scan_window if the comment is escaped "{{--"
Expand All @@ -151,15 +174,15 @@ lex_result lex_punctuation(detail::lexer_scan_window scan) {
if (auto punct = token_detail::to_tok(c); punct != tok::error) {
return lex_result(token(punct, scan.range()), scan);
}
return std::nullopt;
return no_lex_result();
}

// Looks for identifiers or keywords. This implementation assumes that all
// keywords *could* have been valid identifiers.
lex_result lex_identifier_or_keyword(detail::lexer_scan_window scan) {
char c = scan.advance();
if (!is_identifier_start(c)) {
return std::nullopt;
return no_lex_result();
}
// This implementation assumes that is_identifier_continuation() implies
// is_identifier_start(). Otherwise, one word (i.e. no whitespace in between)
Expand All @@ -182,7 +205,7 @@ lex_result lex_identifier_or_keyword(detail::lexer_scan_window scan) {
lex_result lex_path_component(detail::lexer_scan_window scan) {
char c = scan.advance();
if (!is_path_component_start(c)) {
return std::nullopt;
return no_lex_result();
}
while (scan.can_advance() && is_path_component_continuation(scan.peek())) {
scan.advance();
Expand All @@ -194,7 +217,7 @@ lex_result lex_path_component(detail::lexer_scan_window scan) {
// Lexes a single path separator ('/') at the head of the scan window.
// Returns no_lex_result when the next character is not '/'. The conditional
// relies on lex_result's implicit conversion from no_lex_result.
lex_result lex_path_separator(detail::lexer_scan_window scan) {
  return scan.advance() == '/'
      ? lex_result(token(tok::slash, scan.range()), scan)
      : no_lex_result();
}

lex_result lex_i64_literal(
Expand Down Expand Up @@ -235,7 +258,7 @@ lex_result lex_i64_literal(
minus_scan.advance();
return lex_result(diagnoser.unexpected_token(minus_scan), minus_scan);
}
return std::nullopt;
return no_lex_result();
}

char* end = nullptr;
Expand Down Expand Up @@ -276,7 +299,7 @@ lex_result lex_string_literal(
detail::lexer_scan_window scan, lexer::diagnoser diagnoser) {
char c = scan.advance();
if (c != '"') {
return std::nullopt;
return no_lex_result();
}
std::string value;
while (scan.can_advance()) {
Expand Down Expand Up @@ -319,7 +342,7 @@ lex_result lex_comment_close(detail::lexer_scan_window scan, bool escaped) {
// "--"
for (int i = 0; i < 2; ++i) {
if (scan.advance() != '-') {
return std::nullopt;
return no_lex_result();
}
}
// ignore the escape syntax
Expand All @@ -328,7 +351,7 @@ lex_result lex_comment_close(detail::lexer_scan_window scan, bool escaped) {
// "}}"
for (int i = 0; i < 2; ++i) {
if (scan.advance() != '}') {
return std::nullopt;
return no_lex_result();
}
}
return lex_result(token(tok::close, scan.range()), scan);
Expand All @@ -340,7 +363,7 @@ lex_result lex_close(detail::lexer_scan_window scan) {
// "}}"
for (int i = 0; i < 2; ++i) {
if (scan.advance() != '}') {
return std::nullopt;
return no_lex_result();
}
}
return lex_result(token(tok::close, scan.range()), scan);
Expand All @@ -361,7 +384,7 @@ lex_result lex_template_part(
if (lex_result string_literal = lex_string_literal(scan, diagnoser)) {
return string_literal;
}
return std::nullopt;
return no_lex_result();
}

} // namespace
Expand Down

0 comments on commit 16ad6b1

Please sign in to comment.