0001-use-re2-StringPiece-for-RegexTokenizer-Tokenize.patch 1.6 KB

From e19df25a06cb62b9e49b937c17d391d3b90bb3aa Mon Sep 17 00:00:00 2001
From: Robert Ogden <robertogden@chromium.org>
Date: Wed, 25 May 2022 10:52:32 -0700
Subject: [PATCH 1/9] use re2 StringPiece for RegexTokenizer Tokenize

---
 .../cc/text/tokenizers/regex_tokenizer.cc | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/text/tokenizers/regex_tokenizer.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/text/tokenizers/regex_tokenizer.cc
index 564f5f63a0584..832f9df42f824 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/text/tokenizers/regex_tokenizer.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/text/tokenizers/regex_tokenizer.cc
@@ -61,15 +61,15 @@ RegexTokenizer::RegexTokenizer(const std::string& regex_pattern,
 }
 
 TokenizerResult RegexTokenizer::Tokenize(const std::string& input) {
-  absl::string_view leftover(input.data());
-  absl::string_view last_end = leftover;
+  re2::StringPiece leftover(input.data());
+  re2::StringPiece last_end = leftover;
   TokenizerResult result;
 
   // Keep looking for split points until we have reached the end of the input.
-  absl::string_view extracted_delim_token;
+  re2::StringPiece extracted_delim_token;
   while (RE2::FindAndConsume(&leftover, delim_re_, &extracted_delim_token)) {
-    absl::string_view token(last_end.data(),
+    re2::StringPiece token(last_end.data(),
                             extracted_delim_token.data() - last_end.data());
     bool has_non_empty_token = token.length() > 0;
--
2.36.1.124.g0e6072fb45-goog
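
For reference, below is a minimal standalone sketch of the FindAndConsume loop this patch touches. It assumes only <re2/re2.h> and the standard library; SplitOnDelimiter, kInput, and the delimiter pattern are illustrative names, not part of tflite_support, and the sketch approximates rather than reproduces RegexTokenizer::Tokenize. Passing re2::StringPiece arguments matches the types RE2::FindAndConsume expects, which is presumably why the patch switches away from absl::string_view in builds where the two are distinct types.

#include <re2/re2.h>

#include <iostream>
#include <string>
#include <vector>

// Splits `input` on matches of `delim_re`, mirroring the patched loop:
// FindAndConsume advances `leftover` past each delimiter match and captures
// the matched delimiter into `extracted_delim_token`.
std::vector<std::string> SplitOnDelimiter(const std::string& input,
                                          const RE2& delim_re) {
  std::vector<std::string> tokens;
  re2::StringPiece leftover(input);
  re2::StringPiece last_end = leftover;
  re2::StringPiece extracted_delim_token;
  while (RE2::FindAndConsume(&leftover, delim_re, &extracted_delim_token)) {
    // The token is the span between the end of the previous delimiter and
    // the start of the one just consumed.
    re2::StringPiece token(last_end.data(),
                           extracted_delim_token.data() - last_end.data());
    if (!token.empty()) tokens.emplace_back(token.data(), token.size());
    last_end = leftover;
  }
  // Whatever remains after the final delimiter is the last token.
  if (!last_end.empty()) tokens.emplace_back(last_end.data(), last_end.size());
  return tokens;
}

int main() {
  // The delimiter pattern needs one capture group so FindAndConsume can
  // report where it matched; the tokenizer wraps its pattern the same way.
  const RE2 delim_re("([\\s,]+)");
  const std::string kInput = "hello, world  regex tokenizer";
  for (const std::string& t : SplitOnDelimiter(kInput, delim_re)) {
    std::cout << t << "\n";  // prints: hello / world / regex / tokenizer
  }
  return 0;
}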