From a531e55610df260911dfa8038014d19dcff570d7 Mon Sep 17 00:00:00 2001
From: Daniel Cheng <[email protected]>
Date: Wed, 20 Sep 2023 09:07:05 -0700
Subject: [PATCH 10/10] Prefix ASSIGN_OR_RETURN and RETURN_IF_ERROR macros with
TFLITE_.
Chrome's //base defines macros with the same names but different definitions. Prefix the third-party ones to avoid the conflict.
Change-Id: I5e4fd29433981b000555669ec3c16e8219ab44f4
---
.../cc/port/default/status_macros.h | 30 ++--
.../cc/port/default/tflite_wrapper.cc | 30 ++--
.../cc/task/audio/audio_classifier.cc | 12 +-
.../cc/task/audio/audio_embedder.cc | 12 +-
.../cc/task/audio/utils/audio_utils.cc | 2 +-
.../cc/task/audio/utils/wav_io.cc | 34 ++--
.../cc/task/audio/utils/wav_io.h | 2 +-
.../cc/task/core/base_task_api.h | 4 +-
.../cc/task/core/classification_head.cc | 14 +-
.../cc/task/core/external_file_handler.cc | 2 +-
.../cc/task/core/score_calibration.cc | 2 +-
.../cc/task/core/task_api_factory.h | 14 +-
.../cc/task/core/task_utils.h | 10 +-
.../cc/task/core/tflite_engine.cc | 12 +-
.../cc/task/processor/audio_preprocessor.cc | 10 +-
.../cc/task/processor/bert_preprocessor.cc | 12 +-
.../processor/classification_postprocessor.cc | 8 +-
.../processor/classification_postprocessor.h | 4 +-
.../task/processor/embedding_postprocessor.cc | 4 +-
.../cc/task/processor/embedding_searcher.cc | 26 +--
.../cc/task/processor/image_preprocessor.cc | 12 +-
.../cc/task/processor/processor.h | 2 +-
.../cc/task/processor/regex_preprocessor.cc | 14 +-
.../cc/task/processor/search_postprocessor.cc | 16 +-
...universal_sentence_encoder_preprocessor.cc | 6 +-
.../cc/task/text/bert_clu_annotator.cc | 38 ++---
.../cc/task/text/bert_nl_classifier.cc | 10 +-
.../cc/task/text/bert_question_answerer.cc | 26 +--
.../cc/task/text/clu_lib/slot_repr.cc | 12 +-
.../task/text/clu_lib/slot_tagging_output.cc | 4 +-
.../cc/task/text/clu_lib/tflite_modules.cc | 22 +--
.../task/text/nlclassifier/nl_classifier.cc | 22 +--
.../cc/task/text/text_embedder.cc | 22 +--
.../cc/task/text/text_searcher.cc | 18 +-
.../text/universal_sentence_encoder_qa.cc | 24 +--
.../task/vision/core/base_vision_task_api.h | 2 +-
.../task/vision/core/classification_head.cc | 14 +-
.../cc/task/vision/image_classifier.cc | 28 ++--
.../cc/task/vision/image_embedder.cc | 14 +-
.../cc/task/vision/image_searcher.cc | 10 +-
.../cc/task/vision/image_segmenter.cc | 30 ++--
.../cc/task/vision/object_detector.cc | 54 +++---
.../vision/utils/frame_buffer_common_utils.cc | 10 +-
.../task/vision/utils/frame_buffer_utils.cc | 16 +-
.../task/vision/utils/image_tensor_specs.cc | 8 +-
.../cc/task/vision/utils/image_utils.cc | 4 +-
.../vision/utils/libyuv_frame_buffer_utils.cc | 156 +++++++++---------
.../cc/task/vision/utils/score_calibration.cc | 2 +-
.../task/processor/embedding_searcher_test.cc | 6 +-
.../test/task/vision/image_classifier_test.cc | 6 +-
.../test/task/vision/image_segmenter_test.cc | 8 +-
.../test/task/vision/object_detector_test.cc | 14 +-
.../cc/text/tokenizers/tokenizer_utils.cc | 6 +-
.../cc/utils/jni_utils.cc | 2 +-
.../sentencepiece_detokenizer_op.cc | 6 +-
.../sentencepiece_tokenizer_op.cc | 14 +-
.../audio/desktop/audio_classifier_lib.cc | 8 +-
.../text/desktop/bert_nl_classifier_demo.cc | 2 +-
.../desktop/bert_question_answerer_demo.cc | 2 +-
.../task/text/desktop/nl_classifier_demo.cc | 2 +-
.../task/text/desktop/text_embedder_demo.cc | 8 +-
.../task/text/desktop/text_searcher_demo.cc | 4 +-
.../vision/desktop/image_classifier_demo.cc | 6 +-
.../vision/desktop/image_embedder_demo.cc | 16 +-
.../vision/desktop/image_searcher_demo.cc | 6 +-
.../vision/desktop/image_segmenter_demo.cc | 12 +-
.../vision/desktop/object_detector_demo.cc | 10 +-
.../java/src/native/task/vision/jni_utils.cc | 2 +-
.../metadata/cc/metadata_extractor.cc | 12 +-
.../scann_ondevice/cc/index.cc | 4 +-
.../scann_ondevice/cc/index_builder.cc | 2 +-
71 files changed, 499 insertions(+), 499 deletions(-)
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/port/default/status_macros.h b/third_party/tflite_support/src/tensorflow_lite_support/cc/port/default/status_macros.h
index 8970a0e3ca3a2..14029a1f02124 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/port/default/status_macros.h
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/port/default/status_macros.h
@@ -25,11 +25,11 @@ limitations under the License.
//
// For example:
// absl::Status MultiStepFunction() {
-// RETURN_IF_ERROR(Function(args...));
-// RETURN_IF_ERROR(foo.Method(args...));
+// TFLITE_RETURN_IF_ERROR(Function(args...));
+// TFLITE_RETURN_IF_ERROR(foo.Method(args...));
// return absl::OkStatus();
// }
-#define RETURN_IF_ERROR(expr) \
+#define TFLITE_RETURN_IF_ERROR(expr) \
TFLITE_STATUS_MACROS_IMPL_ELSE_BLOCKER_ \
if (::tflite::support::status_macro_internal::StatusAdaptorForMacros \
status_macro_internal_adaptor = {(expr)}) { \
@@ -49,8 +49,8 @@ limitations under the License.
//
// Interface:
//
-// ASSIGN_OR_RETURN(lhs, rexpr)
-// ASSIGN_OR_RETURN(lhs, rexpr, error_expression);
+// TFLITE_ASSIGN_OR_RETURN(lhs, rexpr)
+// TFLITE_ASSIGN_OR_RETURN(lhs, rexpr, error_expression);
//
// WARNING: if lhs is parenthesized, the parentheses are removed. See examples
// for more details.
@@ -60,35 +60,35 @@ limitations under the License.
//
// Example: Declaring and initializing a new variable (ValueType can be anything
// that can be initialized with assignment, including references):
-// ASSIGN_OR_RETURN(ValueType value, MaybeGetValue(arg));
+// TFLITE_ASSIGN_OR_RETURN(ValueType value, MaybeGetValue(arg));
//
// Example: Assigning to an existing variable:
// ValueType value;
-// ASSIGN_OR_RETURN(value, MaybeGetValue(arg));
+// TFLITE_ASSIGN_OR_RETURN(value, MaybeGetValue(arg));
//
// Example: Assigning to an expression with side effects:
// MyProto data;
-// ASSIGN_OR_RETURN(*data.mutable_str(), MaybeGetValue(arg));
+// TFLITE_ASSIGN_OR_RETURN(*data.mutable_str(), MaybeGetValue(arg));
// // No field "str" is added on error.
//
// Example: Assigning to a std::unique_ptr.
-// ASSIGN_OR_RETURN(std::unique_ptr<T> ptr, MaybeGetPtr(arg));
+// TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<T> ptr, MaybeGetPtr(arg));
//
// Example: Assigning to a map. Because of C preprocessor
// limitation, the type used in ASSIGN_OR_RETURN cannot contain comma, so
// wrap lhs in parentheses:
-// ASSIGN_OR_RETURN((absl::flat_hash_map<Foo, Bar> my_map), GetMap());
+// TFLITE_ASSIGN_OR_RETURN((absl::flat_hash_map<Foo, Bar> my_map), GetMap());
// Or use auto if the type is obvious enough:
-// ASSIGN_OR_RETURN(const auto& my_map, GetMapRef());
+// TFLITE_ASSIGN_OR_RETURN(const auto& my_map, GetMapRef());
//
// Example: Assigning to structured bindings. The same situation with comma as
// in map, so wrap the statement in parentheses.
-// ASSIGN_OR_RETURN((const auto& [first, second]), GetPair());
+// TFLITE_ASSIGN_OR_RETURN((const auto& [first, second]), GetPair());
#if defined(_WIN32)
-#define ASSIGN_OR_RETURN(_1, _2, ...) TFLITE_ASSIGN_OR_RETURN_IMPL_2(_1, _2)
+#define TFLITE_ASSIGN_OR_RETURN(_1, _2, ...) TFLITE_ASSIGN_OR_RETURN_IMPL_2(_1, _2)
#else
-#define ASSIGN_OR_RETURN(...) \
+#define TFLITE_ASSIGN_OR_RETURN(...) \
TFLITE_STATUS_MACROS_IMPL_GET_VARIADIC_( \
(__VA_ARGS__, TFLITE_STATUS_MACROS_IMPL_ASSIGN_OR_RETURN_3_, \
TFLITE_STATUS_MACROS_IMPL_ASSIGN_OR_RETURN_2_)) \
@@ -203,7 +203,7 @@ constexpr bool TFLSHasPotentialConditionalOperator(const char* lhs, int index) {
// because it thinks you might want the else to bind to the first if. This
// leads to problems with code like:
//
-// if (do_expr) RETURN_IF_ERROR(expr) << "Some message";
+// if (do_expr) TFLITE_RETURN_IF_ERROR(expr) << "Some message";
//
// The "switch (0) case 0:" idiom is used to suppress this.
#define TFLITE_STATUS_MACROS_IMPL_ELSE_BLOCKER_ \
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/port/default/tflite_wrapper.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/port/default/tflite_wrapper.cc
index 70fedca9a3f22..f579669e87e03 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/port/default/tflite_wrapper.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/port/default/tflite_wrapper.cc
@@ -82,7 +82,7 @@ absl::Status TfLiteInterpreterWrapper::InitializeWithFallback(
const InterpreterCreationResources& resources,
std::unique_ptr<tflite::Interpreter>* interpreter_out)
-> absl::Status {
- RETURN_IF_ERROR(interpreter_initializer(interpreter_out));
+ TFLITE_RETURN_IF_ERROR(interpreter_initializer(interpreter_out));
if (*interpreter_out != nullptr &&
resources.optional_delegate != nullptr) {
TfLiteStatus status =
@@ -90,7 +90,7 @@ absl::Status TfLiteInterpreterWrapper::InitializeWithFallback(
->ModifyGraphWithDelegate(resources.optional_delegate);
if (status != kTfLiteOk) {
*interpreter_out = nullptr;
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
absl::InvalidArgumentError("Applying delegate failed"));
}
}
@@ -112,7 +112,7 @@ absl::Status TfLiteInterpreterWrapper::InitializeWithFallback(
interpreter_initializer_ = std::move(interpreter_initializer);
// Sanity check and copy ComputeSettings.
- RETURN_IF_ERROR(SanityCheckComputeSettings(compute_settings));
+ TFLITE_RETURN_IF_ERROR(SanityCheckComputeSettings(compute_settings));
compute_settings_ = compute_settings;
if (compute_settings_.has_settings_to_test_locally()) {
flatbuffers::FlatBufferBuilder mini_benchmark_settings_fbb;
@@ -174,13 +174,13 @@ absl::Status TfLiteInterpreterWrapper::InitializeWithFallbackAndResize(
delegate_.reset(nullptr);
} else {
// Initialize delegate and add it to 'resources'.
- RETURN_IF_ERROR(InitializeDelegate());
+ TFLITE_RETURN_IF_ERROR(InitializeDelegate());
resources.optional_delegate = delegate_.get();
}
absl::Status status = interpreter_initializer_(resources, &interpreter_);
if (resources.optional_delegate == nullptr) {
- RETURN_IF_ERROR(status);
+ TFLITE_RETURN_IF_ERROR(status);
}
if (resources.optional_delegate != nullptr && !status.ok()) {
// Any error when constructing the interpreter is assumed to be a delegate
@@ -191,7 +191,7 @@ absl::Status TfLiteInterpreterWrapper::InitializeWithFallbackAndResize(
if (fallback_on_compilation_error_) {
InterpreterCreationResources fallback_resources{};
fallback_resources.optional_delegate = nullptr;
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
interpreter_initializer_(fallback_resources, &interpreter_));
} else {
// If instructed not to fallback, return error.
@@ -201,7 +201,7 @@ absl::Status TfLiteInterpreterWrapper::InitializeWithFallbackAndResize(
}
}
- RETURN_IF_ERROR(resize(interpreter_.get()));
+ TFLITE_RETURN_IF_ERROR(resize(interpreter_.get()));
if (compute_settings_.tflite_settings().cpu_settings().num_threads() != -1) {
if (interpreter_->SetNumThreads(
compute_settings_.tflite_settings().cpu_settings().num_threads()) !=
@@ -229,25 +229,25 @@ absl::Status TfLiteInterpreterWrapper::InitializeDelegate() {
tflite::ConvertFromProto(compute_settings_, &flatbuffers_builder_);
if (which_delegate == Delegate::NNAPI) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
LoadDelegatePlugin("Nnapi", *compute_settings->tflite_settings()));
} else if (which_delegate == Delegate::HEXAGON) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
LoadDelegatePlugin("Hexagon", *compute_settings->tflite_settings()));
} else if (which_delegate == Delegate::GPU) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
LoadDelegatePlugin("Gpu", *compute_settings->tflite_settings()));
} else if (which_delegate == Delegate::EDGETPU) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
LoadDelegatePlugin("EdgeTpu", *compute_settings->tflite_settings()));
} else if (which_delegate == Delegate::EDGETPU_CORAL) {
- RETURN_IF_ERROR(LoadDelegatePlugin("EdgeTpuCoral",
+ TFLITE_RETURN_IF_ERROR(LoadDelegatePlugin("EdgeTpuCoral",
*compute_settings->tflite_settings()));
} else if (which_delegate == Delegate::XNNPACK) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
LoadDelegatePlugin("XNNPack", *compute_settings->tflite_settings()));
} else if (which_delegate == Delegate::CORE_ML) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
LoadDelegatePlugin("CoreML", *compute_settings->tflite_settings()));
}
}
@@ -257,7 +257,7 @@ absl::Status TfLiteInterpreterWrapper::InitializeDelegate() {
absl::Status TfLiteInterpreterWrapper::InvokeWithFallback(
const std::function<absl::Status(tflite::Interpreter* interpreter)>&
set_inputs) {
- RETURN_IF_ERROR(set_inputs(interpreter_.get()));
+ TFLITE_RETURN_IF_ERROR(set_inputs(interpreter_.get()));
if (cancel_flag_.Get()) {
cancel_flag_.Set(false);
return absl::CancelledError("cancelled before Invoke() was called");
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/audio_classifier.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/audio_classifier.cc
index 4b1439dcc0719..995ea7b101a7b 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/audio_classifier.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/audio_classifier.cc
@@ -77,16 +77,16 @@ CreatePostprocessor(TfLiteEngine* engine,
StatusOr<std::unique_ptr<AudioClassifier>> AudioClassifier::CreateFromOptions(
const AudioClassifierOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
// Copy options to ensure the ExternalFile outlives the constructed object.
auto options_copy = absl::make_unique<AudioClassifierOptions>(options);
- ASSIGN_OR_RETURN(auto audio_classifier,
+ TFLITE_ASSIGN_OR_RETURN(auto audio_classifier,
TaskAPIFactory::CreateFromBaseOptions<AudioClassifier>(
&options_copy->base_options(), std::move(resolver)));
- RETURN_IF_ERROR(audio_classifier->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(audio_classifier->Init(std::move(options_copy)));
return audio_classifier;
}
@@ -108,7 +108,7 @@ absl::Status AudioClassifier::Init(
options_ = std::move(options);
// Create preprocessor, assuming having only 1 input tensor.
- ASSIGN_OR_RETURN(preprocessor_, processor::AudioPreprocessor::Create(
+ TFLITE_ASSIGN_OR_RETURN(preprocessor_, processor::AudioPreprocessor::Create(
GetTfLiteEngine(), {0}));
// Assuming all output tensors share the same option. This is an limitation in
@@ -117,7 +117,7 @@ absl::Status AudioClassifier::Init(
GetTfLiteEngine()->OutputCount(GetTfLiteEngine()->interpreter());
postprocessors_.reserve(output_count);
for (int i = 0; i < output_count; i++) {
- ASSIGN_OR_RETURN(auto processor, CreatePostprocessor(GetTfLiteEngine(), {i},
+ TFLITE_ASSIGN_OR_RETURN(auto processor, CreatePostprocessor(GetTfLiteEngine(), {i},
options_.get()));
postprocessors_.emplace_back(std::move(processor));
}
@@ -140,7 +140,7 @@ AudioClassifier::Postprocess(
// ClassificationPostprocessor doesn't set head name for backward
// compatibility, so we set it here manually.
classification->set_head_name(processor->GetHeadName());
- RETURN_IF_ERROR(processor->Postprocess(classification));
+ TFLITE_RETURN_IF_ERROR(processor->Postprocess(classification));
}
return result;
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/audio_embedder.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/audio_embedder.cc
index 56acada352121..6c07586fd0c25 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/audio_embedder.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/audio_embedder.cc
@@ -37,14 +37,14 @@ tflite::support::StatusOr<double> AudioEmbedder::CosineSimilarity(
tflite::support::StatusOr<std::unique_ptr<AudioEmbedder>>
AudioEmbedder::CreateFromOptions(const AudioEmbedderOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
auto options_copy = absl::make_unique<AudioEmbedderOptions>(options);
- ASSIGN_OR_RETURN(auto audio_embedder,
+ TFLITE_ASSIGN_OR_RETURN(auto audio_embedder,
core::TaskAPIFactory::CreateFromBaseOptions<AudioEmbedder>(
&options_copy->base_options(), std::move(resolver)));
- RETURN_IF_ERROR(audio_embedder->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(audio_embedder->Init(std::move(options_copy)));
return audio_embedder;
}
@@ -65,7 +65,7 @@ absl::Status AudioEmbedder::Init(
options_ = std::move(options);
// Create preprocessor, assuming having only 1 input tensor.
- ASSIGN_OR_RETURN(preprocessor_,
+ TFLITE_ASSIGN_OR_RETURN(preprocessor_,
tflite::task::processor::AudioPreprocessor::Create(
GetTfLiteEngine(), {0}));
@@ -95,7 +95,7 @@ absl::Status AudioEmbedder::Init(
"number of output tensors.",
support::TfLiteSupportStatus::kInvalidArgumentError);
}
- ASSIGN_OR_RETURN(auto processor,
+ TFLITE_ASSIGN_OR_RETURN(auto processor,
processor::EmbeddingPostprocessor::Create(
GetTfLiteEngine(), {i}, std::move(option)));
postprocessors_.emplace_back(std::move(processor));
@@ -110,7 +110,7 @@ AudioEmbedder::Postprocess(
tflite::task::processor::EmbeddingResult result;
for (int i = 0; i < postprocessors_.size(); i++) {
auto processor = postprocessors_.at(i).get();
- RETURN_IF_ERROR(processor->Postprocess(result.add_embeddings()));
+ TFLITE_RETURN_IF_ERROR(processor->Postprocess(result.add_embeddings()));
}
return result;
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/audio_utils.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/audio_utils.cc
index c1eea28f730d8..9c3df0b501b8d 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/audio_utils.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/audio_utils.cc
@@ -28,7 +28,7 @@ tflite::support::StatusOr<AudioBuffer> LoadAudioBufferFromFile(
uint16_t decoded_channel_count;
uint32_t decoded_sample_rate;
- RETURN_IF_ERROR(DecodeLin16WaveAsFloatVector(
+ TFLITE_RETURN_IF_ERROR(DecodeLin16WaveAsFloatVector(
contents, wav_data, offset, &decoded_sample_count, &decoded_channel_count,
&decoded_sample_rate));
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/wav_io.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/wav_io.cc
index 0671bb57b123e..fcda04f29b938 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/wav_io.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/wav_io.cc
@@ -88,7 +88,7 @@ absl::Status IncrementOffset(uint32_t old_offset, size_t increment,
absl::Status ExpectText(const std::string& data,
const std::string& expected_text, uint32_t* offset) {
uint32_t new_offset;
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
IncrementOffset(*offset, expected_text.size(), data.size(), &new_offset));
const std::string found_text(data.begin() + *offset,
data.begin() + new_offset);
@@ -103,7 +103,7 @@ absl::Status ExpectText(const std::string& data,
absl::Status ReadString(const std::string& data, size_t expected_length,
std::string* value, uint32_t* offset) {
uint32_t new_offset;
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
IncrementOffset(*offset, expected_length, data.size(), &new_offset));
*value = std::string(data.begin() + *offset, data.begin() + new_offset);
*offset = new_offset;
@@ -116,42 +116,42 @@ absl::Status DecodeLin16WaveAsFloatVector(const std::string& wav_string,
uint32_t* sample_count,
uint16_t* channel_count,
uint32_t* sample_rate) {
- RETURN_IF_ERROR(ExpectText(wav_string, kRiffChunkId, offset));
+ TFLITE_RETURN_IF_ERROR(ExpectText(wav_string, kRiffChunkId, offset));
uint32_t total_file_size;
- RETURN_IF_ERROR(ReadValue<uint32_t>(wav_string, &total_file_size, offset));
- RETURN_IF_ERROR(ExpectText(wav_string, kRiffType, offset));
- RETURN_IF_ERROR(ExpectText(wav_string, kFormatChunkId, offset));
+ TFLITE_RETURN_IF_ERROR(ReadValue<uint32_t>(wav_string, &total_file_size, offset));
+ TFLITE_RETURN_IF_ERROR(ExpectText(wav_string, kRiffType, offset));
+ TFLITE_RETURN_IF_ERROR(ExpectText(wav_string, kFormatChunkId, offset));
uint32_t format_chunk_size;
- RETURN_IF_ERROR(ReadValue<uint32_t>(wav_string, &format_chunk_size, offset));
+ TFLITE_RETURN_IF_ERROR(ReadValue<uint32_t>(wav_string, &format_chunk_size, offset));
if ((format_chunk_size != 16) && (format_chunk_size != 18)) {
return absl::InvalidArgumentError(absl::StrFormat(
"Bad format chunk size for WAV: Expected 16 or 18, but got %" PRIu32,
format_chunk_size));
}
uint16_t audio_format;
- RETURN_IF_ERROR(ReadValue<uint16_t>(wav_string, &audio_format, offset));
+ TFLITE_RETURN_IF_ERROR(ReadValue<uint16_t>(wav_string, &audio_format, offset));
if (audio_format != 1) {
return absl::InvalidArgumentError(absl::StrFormat(
"Bad audio format for WAV: Expected 1 (PCM), but got %" PRIu16,
audio_format));
}
- RETURN_IF_ERROR(ReadValue<uint16_t>(wav_string, channel_count, offset));
+ TFLITE_RETURN_IF_ERROR(ReadValue<uint16_t>(wav_string, channel_count, offset));
if (*channel_count < 1) {
return absl::InvalidArgumentError(absl::StrFormat(
"Bad number of channels for WAV: Expected at least 1, but got %" PRIu16,
*channel_count));
}
- RETURN_IF_ERROR(ReadValue<uint32_t>(wav_string, sample_rate, offset));
+ TFLITE_RETURN_IF_ERROR(ReadValue<uint32_t>(wav_string, sample_rate, offset));
uint32_t bytes_per_second;
- RETURN_IF_ERROR(ReadValue<uint32_t>(wav_string, &bytes_per_second, offset));
+ TFLITE_RETURN_IF_ERROR(ReadValue<uint32_t>(wav_string, &bytes_per_second, offset));
uint16_t bytes_per_sample;
- RETURN_IF_ERROR(ReadValue<uint16_t>(wav_string, &bytes_per_sample, offset));
+ TFLITE_RETURN_IF_ERROR(ReadValue<uint16_t>(wav_string, &bytes_per_sample, offset));
// Confusingly, bits per sample is defined as holding the number of bits for
// one channel, unlike the definition of sample used elsewhere in the WAV
// spec. For example, bytes per sample is the memory needed for all channels
// for one point in time.
uint16_t bits_per_sample;
- RETURN_IF_ERROR(ReadValue<uint16_t>(wav_string, &bits_per_sample, offset));
+ TFLITE_RETURN_IF_ERROR(ReadValue<uint16_t>(wav_string, &bits_per_sample, offset));
if (bits_per_sample != 16) {
return absl::InvalidArgumentError(
absl::StrFormat("Can only read 16-bit WAV files, but received %" PRIu16,
@@ -182,9 +182,9 @@ absl::Status DecodeLin16WaveAsFloatVector(const std::string& wav_string,
bool was_data_found = false;
while (*offset < wav_string.size()) {
std::string chunk_id;
- RETURN_IF_ERROR(ReadString(wav_string, 4, &chunk_id, offset));
+ TFLITE_RETURN_IF_ERROR(ReadString(wav_string, 4, &chunk_id, offset));
uint32_t chunk_size;
- RETURN_IF_ERROR(ReadValue<uint32_t>(wav_string, &chunk_size, offset));
+ TFLITE_RETURN_IF_ERROR(ReadValue<uint32_t>(wav_string, &chunk_size, offset));
if (chunk_size > std::numeric_limits<int32_t>::max()) {
return absl::InvalidArgumentError(absl::StrFormat(
"WAV data chunk '%s' is too large: %" PRIu32
@@ -202,12 +202,12 @@ absl::Status DecodeLin16WaveAsFloatVector(const std::string& wav_string,
uint32_t unused_new_offset = 0;
// Validate that the data exists before allocating space for it
// (prevent easy OOM errors).
- RETURN_IF_ERROR(IncrementOffset(*offset, sizeof(int16_t) * data_count,
+ TFLITE_RETURN_IF_ERROR(IncrementOffset(*offset, sizeof(int16_t) * data_count,
wav_string.size(), &unused_new_offset));
float_values->resize(data_count);
for (int i = 0; i < data_count; ++i) {
int16_t single_channel_value = 0;
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
ReadValue<int16_t>(wav_string, &single_channel_value, offset));
(*float_values)[i] = Int16SampleToFloat(single_channel_value);
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/wav_io.h b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/wav_io.h
index fd3b779613f3d..07cb45ba39c1f 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/wav_io.h
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/audio/utils/wav_io.h
@@ -74,7 +74,7 @@ absl::Status IncrementOffset(uint32_t old_offset, size_t increment,
template <class T>
absl::Status ReadValue(const std::string& data, T* value, uint32_t* offset) {
uint32_t new_offset;
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
IncrementOffset(*offset, sizeof(T), data.size(), &new_offset));
if (port::kLittleEndian) {
memcpy(value, data.data() + *offset, sizeof(T));
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/base_task_api.h b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/base_task_api.h
index d743383734b42..e29cef20e0f43 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/base_task_api.h
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/base_task_api.h
@@ -123,7 +123,7 @@ class BaseTaskApi : public BaseUntypedTaskApi {
GetTfLiteEngine()->interpreter_wrapper();
// Note: AllocateTensors() is already performed by the interpreter wrapper
// at InitInterpreter time (see TfLiteEngine).
- RETURN_IF_ERROR(Preprocess(GetInputTensors(), args...));
+ TFLITE_RETURN_IF_ERROR(Preprocess(GetInputTensors(), args...));
absl::Status status = interpreter_wrapper->InvokeWithoutFallback();
if (!status.ok()) {
return status.GetPayload(tflite::support::kTfLiteSupportPayload)
@@ -143,7 +143,7 @@ class BaseTaskApi : public BaseUntypedTaskApi {
GetTfLiteEngine()->interpreter_wrapper();
// Note: AllocateTensors() is already performed by the interpreter wrapper
// at InitInterpreter time (see TfLiteEngine).
- RETURN_IF_ERROR(Preprocess(GetInputTensors(), args...));
+ TFLITE_RETURN_IF_ERROR(Preprocess(GetInputTensors(), args...));
auto set_inputs_nop =
[](tflite::task::core::TfLiteEngine::Interpreter* interpreter)
-> absl::Status {
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/classification_head.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/classification_head.cc
index fe2217620be1a..1945f0172099f 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/classification_head.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/classification_head.cc
@@ -44,7 +44,7 @@ StatusOr<ClassificationHead> BuildClassificationHead(
output_tensor_metadata,
tflite::AssociatedFileType_TENSOR_AXIS_LABELS);
if (!labels_filename.empty()) {
- ASSIGN_OR_RETURN(absl::string_view labels_file,
+ TFLITE_ASSIGN_OR_RETURN(absl::string_view labels_file,
metadata_extractor.GetAssociatedFile(labels_filename));
const std::string display_names_filename =
ModelMetadataExtractor::FindFirstAssociatedFileName(
@@ -53,15 +53,15 @@ StatusOr<ClassificationHead> BuildClassificationHead(
display_names_locale);
absl::string_view display_names_file;
if (!display_names_filename.empty()) {
- ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile(
+ TFLITE_ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile(
display_names_filename));
}
- ASSIGN_OR_RETURN(head.label_map_items,
+ TFLITE_ASSIGN_OR_RETURN(head.label_map_items,
BuildLabelMapFromFiles(labels_file, display_names_file));
}
// Set score threshold, if present.
- ASSIGN_OR_RETURN(const tflite::ProcessUnit* score_thresholding_process_unit,
+ TFLITE_ASSIGN_OR_RETURN(const tflite::ProcessUnit* score_thresholding_process_unit,
ModelMetadataExtractor::FindFirstProcessUnit(
output_tensor_metadata,
tflite::ProcessUnitOptions_ScoreThresholdingOptions));
@@ -72,7 +72,7 @@ StatusOr<ClassificationHead> BuildClassificationHead(
}
// Build score calibration parameters, if present.
- ASSIGN_OR_RETURN(const tflite::ProcessUnit* score_calibration_process_unit,
+ TFLITE_ASSIGN_OR_RETURN(const tflite::ProcessUnit* score_calibration_process_unit,
ModelMetadataExtractor::FindFirstProcessUnit(
output_tensor_metadata,
tflite::ProcessUnitOptions_ScoreCalibrationOptions));
@@ -95,10 +95,10 @@ StatusOr<ClassificationHead> BuildClassificationHead(
"parameters file with type TENSOR_AXIS_SCORE_CALIBRATION.",
TfLiteSupportStatus::kMetadataAssociatedFileNotFoundError);
}
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
absl::string_view score_calibration_file,
metadata_extractor.GetAssociatedFile(score_calibration_filename));
- ASSIGN_OR_RETURN(SigmoidCalibrationParameters sigmoid_params,
+ TFLITE_ASSIGN_OR_RETURN(SigmoidCalibrationParameters sigmoid_params,
BuildSigmoidCalibrationParams(
*score_calibration_process_unit
->options_as_ScoreCalibrationOptions(),
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/external_file_handler.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/external_file_handler.cc
index 680e65de03403..f969f4dcc05ab 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/external_file_handler.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/external_file_handler.cc
@@ -77,7 +77,7 @@ ExternalFileHandler::CreateFromExternalFile(const ExternalFile* external_file) {
std::unique_ptr<ExternalFileHandler> handler =
absl::WrapUnique(new ExternalFileHandler(external_file));
- RETURN_IF_ERROR(handler->MapExternalFile());
+ TFLITE_RETURN_IF_ERROR(handler->MapExternalFile());
return handler;
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/score_calibration.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/score_calibration.cc
index 818839a77e43d..a7080f25780e8 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/score_calibration.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/score_calibration.cc
@@ -211,7 +211,7 @@ StatusOr<SigmoidCalibrationParameters> BuildSigmoidCalibrationParams(
if (lines[i].empty()) {
continue;
}
- ASSIGN_OR_RETURN(Sigmoid sigmoid, SigmoidFromLabelAndLine(
+ TFLITE_ASSIGN_OR_RETURN(Sigmoid sigmoid, SigmoidFromLabelAndLine(
label_map_items[i].name, lines[i]));
sigmoid_vector.emplace_back(std::move(sigmoid));
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/task_api_factory.h b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/task_api_factory.h
index 11e0d1be54ea7..17d8d57468670 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/task_api_factory.h
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/task_api_factory.h
@@ -56,7 +56,7 @@ class TaskAPIFactory {
const tflite::proto::ComputeSettings& compute_settings =
tflite::proto::ComputeSettings()) {
auto engine = absl::make_unique<TfLiteEngine>(std::move(resolver));
- RETURN_IF_ERROR(engine->BuildModelFromFlatBuffer(buffer_data, buffer_size,
+ TFLITE_RETURN_IF_ERROR(engine->BuildModelFromFlatBuffer(buffer_data, buffer_size,
compute_settings));
return CreateFromTfLiteEngine<T>(std::move(engine), num_threads,
compute_settings);
@@ -74,7 +74,7 @@ class TaskAPIFactory {
const tflite::proto::ComputeSettings& compute_settings =
tflite::proto::ComputeSettings()) {
auto engine = absl::make_unique<TfLiteEngine>(std::move(resolver));
- RETURN_IF_ERROR(engine->BuildModelFromFile(file_name, compute_settings));
+ TFLITE_RETURN_IF_ERROR(engine->BuildModelFromFile(file_name, compute_settings));
return CreateFromTfLiteEngine<T>(std::move(engine), num_threads,
compute_settings);
}
@@ -91,7 +91,7 @@ class TaskAPIFactory {
const tflite::proto::ComputeSettings& compute_settings =
tflite::proto::ComputeSettings()) {
auto engine = absl::make_unique<TfLiteEngine>(std::move(resolver));
- RETURN_IF_ERROR(engine->BuildModelFromFileDescriptor(file_descriptor,
+ TFLITE_RETURN_IF_ERROR(engine->BuildModelFromFileDescriptor(file_descriptor,
compute_settings));
return CreateFromTfLiteEngine<T>(std::move(engine), num_threads,
compute_settings);
@@ -110,7 +110,7 @@ class TaskAPIFactory {
const tflite::proto::ComputeSettings& compute_settings =
tflite::proto::ComputeSettings()) {
auto engine = absl::make_unique<TfLiteEngine>(std::move(resolver));
- RETURN_IF_ERROR(engine->BuildModelFromExternalFileProto(external_file,
+ TFLITE_RETURN_IF_ERROR(engine->BuildModelFromExternalFileProto(external_file,
compute_settings));
return CreateFromTfLiteEngine<T>(std::move(engine), num_threads,
compute_settings);
@@ -146,10 +146,10 @@ class TaskAPIFactory {
tflite::proto::ComputeSettings compute_settings(
base_options->compute_settings());
if (compute_settings.has_settings_to_test_locally()) {
- RETURN_IF_ERROR(SetMiniBenchmarkFileNameFromBaseOptions(compute_settings,
+ TFLITE_RETURN_IF_ERROR(SetMiniBenchmarkFileNameFromBaseOptions(compute_settings,
base_options));
}
- RETURN_IF_ERROR(engine->BuildModelFromExternalFileProto(
+ TFLITE_RETURN_IF_ERROR(engine->BuildModelFromExternalFileProto(
&base_options->model_file(), compute_settings));
return CreateFromTfLiteEngine<T>(std::move(engine), compute_settings);
}
@@ -173,7 +173,7 @@ class TaskAPIFactory {
std::unique_ptr<TfLiteEngine> engine,
const tflite::proto::ComputeSettings& compute_settings =
tflite::proto::ComputeSettings()) {
- RETURN_IF_ERROR(engine->InitInterpreter(compute_settings));
+ TFLITE_RETURN_IF_ERROR(engine->InitInterpreter(compute_settings));
return absl::make_unique<T>(std::move(engine));
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/task_utils.h b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/task_utils.h
index 08b79b8dba5f2..a722c091651fc 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/task_utils.h
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/task_utils.h
@@ -71,7 +71,7 @@ template <typename T, typename = std::enable_if_t<
inline absl::Status PopulateTensor(const T* data, int num_elements,
TfLiteTensor* tensor) {
T* v;
- ASSIGN_OR_RETURN(v, AssertAndReturnTypedTensor<T>(tensor));
+ TFLITE_ASSIGN_OR_RETURN(v, AssertAndReturnTypedTensor<T>(tensor));
size_t bytes = num_elements * sizeof(T);
if (tensor->bytes != bytes) {
return tflite::support::CreateStatusWithPayload(
@@ -113,7 +113,7 @@ inline absl::Status PopulateTensor<std::string>(
template <typename T>
inline absl::Status PopulateTensor(const T& data, TfLiteTensor* tensor) {
T* v;
- ASSIGN_OR_RETURN(v, AssertAndReturnTypedTensor<T>(tensor));
+ TFLITE_ASSIGN_OR_RETURN(v, AssertAndReturnTypedTensor<T>(tensor));
*v = data;
return absl::OkStatus();
}
@@ -133,7 +133,7 @@ template <typename T>
inline absl::Status PopulateVector(const TfLiteTensor* tensor,
std::vector<T>* data) {
const T* v;
- ASSIGN_OR_RETURN(v, AssertAndReturnTypedTensor<T>(tensor));
+ TFLITE_ASSIGN_OR_RETURN(v, AssertAndReturnTypedTensor<T>(tensor));
size_t num = tensor->bytes / sizeof(tensor->type);
data->reserve(num);
for (size_t i = 0; i < num; i++) {
@@ -146,7 +146,7 @@ template <>
inline absl::Status PopulateVector<std::string>(
const TfLiteTensor* tensor, std::vector<std::string>* data) {
std::string* v;
- ASSIGN_OR_RETURN(v, AssertAndReturnTypedTensor<std::string>(tensor));
+ TFLITE_ASSIGN_OR_RETURN(v, AssertAndReturnTypedTensor<std::string>(tensor));
(void)v;
int num = GetStringCount(tensor);
data->reserve(num);
@@ -166,7 +166,7 @@ template <
inline absl::Status PopulateVectorToRepeated(const TfLiteTensor* tensor,
TRepeatedField* data) {
const T* v;
- ASSIGN_OR_RETURN(v, AssertAndReturnTypedTensor<T>(tensor));
+ TFLITE_ASSIGN_OR_RETURN(v, AssertAndReturnTypedTensor<T>(tensor));
size_t num = tensor->bytes / sizeof(tensor->type);
data->Resize(num, T());
T* pdata = data->mutable_data();
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/tflite_engine.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/tflite_engine.cc
index 0b9af59afdc79..44421ea1df463 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/tflite_engine.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/core/tflite_engine.cc
@@ -111,7 +111,7 @@ absl::Status TfLiteEngine::InitializeFromModelFileHandler(
}
}
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
model_metadata_extractor_,
tflite::metadata::ModelMetadataExtractor::CreateFromModelBuffer(
buffer_data, buffer_size));
@@ -128,7 +128,7 @@ absl::Status TfLiteEngine::BuildModelFromFlatBuffer(
}
external_file_ = std::make_unique<ExternalFile>();
external_file_->set_file_content(std::string(buffer_data, buffer_size));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
model_file_handler_,
ExternalFileHandler::CreateFromExternalFile(external_file_.get()));
return InitializeFromModelFileHandler(compute_settings);
@@ -145,7 +145,7 @@ absl::Status TfLiteEngine::BuildModelFromFile(
external_file_ = std::make_unique<ExternalFile>();
}
external_file_->set_file_name(file_name);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
model_file_handler_,
ExternalFileHandler::CreateFromExternalFile(external_file_.get()));
return InitializeFromModelFileHandler(compute_settings);
@@ -162,7 +162,7 @@ absl::Status TfLiteEngine::BuildModelFromFileDescriptor(
external_file_ = std::make_unique<ExternalFile>();
}
external_file_->mutable_file_descriptor_meta()->set_fd(file_descriptor);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
model_file_handler_,
ExternalFileHandler::CreateFromExternalFile(external_file_.get()));
return InitializeFromModelFileHandler(compute_settings);
@@ -175,7 +175,7 @@ absl::Status TfLiteEngine::BuildModelFromExternalFileProto(
return CreateStatusWithPayload(StatusCode::kInternal,
"Model already built");
}
- ASSIGN_OR_RETURN(model_file_handler_,
+ TFLITE_ASSIGN_OR_RETURN(model_file_handler_,
ExternalFileHandler::CreateFromExternalFile(external_file));
return InitializeFromModelFileHandler(compute_settings);
}
@@ -187,7 +187,7 @@ absl::Status TfLiteEngine::BuildModelFromExternalFileProto(
"Model already built");
}
external_file_ = std::move(external_file);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
model_file_handler_,
ExternalFileHandler::CreateFromExternalFile(external_file_.get()));
// Dummy proto. InitializeFromModelFileHandler doesn't use this proto.
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/audio_preprocessor.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/audio_preprocessor.cc
index e3ea2b134e3f4..4eaee87081750 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/audio_preprocessor.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/audio_preprocessor.cc
@@ -74,22 +74,22 @@ tflite::support::StatusOr<const AudioProperties*> GetAudioPropertiesSafe(
tflite::support::StatusOr<std::unique_ptr<AudioPreprocessor>>
AudioPreprocessor::Create(tflite::task::core::TfLiteEngine* engine,
const std::initializer_list<int> input_indices) {
- ASSIGN_OR_RETURN(auto processor,
+ TFLITE_ASSIGN_OR_RETURN(auto processor,
Processor::Create<AudioPreprocessor>(
/* num_expected_tensors = */ 1, engine, input_indices));
- RETURN_IF_ERROR(processor->Init());
+ TFLITE_RETURN_IF_ERROR(processor->Init());
return processor;
}
absl::Status AudioPreprocessor::Init() {
- RETURN_IF_ERROR(SetAudioFormatFromMetadata());
- RETURN_IF_ERROR(CheckAndSetInputs());
+ TFLITE_RETURN_IF_ERROR(SetAudioFormatFromMetadata());
+ TFLITE_RETURN_IF_ERROR(CheckAndSetInputs());
return absl::OkStatus();
}
absl::Status AudioPreprocessor::SetAudioFormatFromMetadata() {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const AudioProperties* props,
GetAudioPropertiesSafe(GetTensorMetadata(), tensor_indices_.at(0)));
audio_format_.channels = props->channels();
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/bert_preprocessor.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/bert_preprocessor.cc
index 76a7a534a27bd..62d5382fe5a17 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/bert_preprocessor.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/bert_preprocessor.cc
@@ -44,11 +44,11 @@ constexpr char kSeparator[] = "[SEP]";
StatusOr<std::unique_ptr<BertPreprocessor>> BertPreprocessor::Create(
tflite::task::core::TfLiteEngine* engine,
const std::initializer_list<int> input_tensor_indices) {
- ASSIGN_OR_RETURN(auto processor, Processor::Create<BertPreprocessor>(
+ TFLITE_ASSIGN_OR_RETURN(auto processor, Processor::Create<BertPreprocessor>(
/* num_expected_tensors = */ 3, engine,
input_tensor_indices,
/* requires_metadata = */ false));
- RETURN_IF_ERROR(processor->Init());
+ TFLITE_RETURN_IF_ERROR(processor->Init());
return processor;
}
@@ -58,7 +58,7 @@ absl::Status BertPreprocessor::Init() {
// BertTokenizer is packed in the processing unit SubgraphMetadata.
const tflite::ProcessUnit* tokenizer_metadata =
GetMetadataExtractor()->GetInputProcessUnit(kTokenizerProcessUnitIndex);
- ASSIGN_OR_RETURN(tokenizer_, CreateTokenizerFromProcessUnit(
+ TFLITE_ASSIGN_OR_RETURN(tokenizer_, CreateTokenizerFromProcessUnit(
tokenizer_metadata, GetMetadataExtractor()));
const auto& ids_tensor = *GetTensor(kIdsTensorIndex);
@@ -178,9 +178,9 @@ absl::Status BertPreprocessor::Preprocess(const std::string& input_text) {
// input_masks 1 1 1... 1 1 0 0... 0
// segment_ids 0 0 0... 0 0 0 0... 0
- RETURN_IF_ERROR(PopulateTensor(input_ids, ids_tensor));
- RETURN_IF_ERROR(PopulateTensor(input_mask, mask_tensor));
- RETURN_IF_ERROR(PopulateTensor(std::vector<int>(input_tensor_length, 0),
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(input_ids, ids_tensor));
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(input_mask, mask_tensor));
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(std::vector<int>(input_tensor_length, 0),
segment_ids_tensor));
return absl::OkStatus();
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/classification_postprocessor.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/classification_postprocessor.cc
index 9c11083c4f839..393992056faad 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/classification_postprocessor.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/classification_postprocessor.cc
@@ -44,11 +44,11 @@ tflite::support::StatusOr<std::unique_ptr<ClassificationPostprocessor>>
ClassificationPostprocessor::Create(
core::TfLiteEngine* engine, const std::initializer_list<int> output_indices,
std::unique_ptr<ClassificationOptions> options) {
- ASSIGN_OR_RETURN(auto processor,
+ TFLITE_ASSIGN_OR_RETURN(auto processor,
Processor::Create<ClassificationPostprocessor>(
/* num_expected_tensors = */ 1, engine, output_indices));
- RETURN_IF_ERROR(processor->Init(std::move(options)));
+ TFLITE_RETURN_IF_ERROR(processor->Init(std::move(options)));
return processor;
}
@@ -70,7 +70,7 @@ absl::Status ClassificationPostprocessor::Init(
TfLiteSupportStatus::kInvalidArgumentError);
}
- ASSIGN_OR_RETURN(classification_head_,
+ TFLITE_ASSIGN_OR_RETURN(classification_head_,
BuildClassificationHead(*engine_->metadata_extractor(),
*GetTensorMetadata(),
options->display_names_locale()));
@@ -199,7 +199,7 @@ absl::Status ClassificationPostprocessor::Init(
StatusCode::kInternal, "Could not create score calibration object.");
}
- RETURN_IF_ERROR(score_calibration_->InitializeFromParameters(
+ TFLITE_RETURN_IF_ERROR(score_calibration_->InitializeFromParameters(
classification_head_.calibration_params.value()));
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/classification_postprocessor.h b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/classification_postprocessor.h
index 517974c7370ea..c51812472c9c8 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/classification_postprocessor.h
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/classification_postprocessor.h
@@ -107,7 +107,7 @@ absl::Status ClassificationPostprocessor::Postprocess(T* classifications) {
const TfLiteTensor* output_tensor = GetTensor();
if (output_tensor->type == kTfLiteUInt8) {
- ASSIGN_OR_RETURN(const uint8* output_data,
+ TFLITE_ASSIGN_OR_RETURN(const uint8* output_data,
core::AssertAndReturnTypedTensor<uint8>(output_tensor));
for (int j = 0; j < head.label_map_items.size(); ++j) {
score_pairs.emplace_back(
@@ -115,7 +115,7 @@ absl::Status ClassificationPostprocessor::Postprocess(T* classifications) {
output_tensor->params.zero_point));
}
} else {
- ASSIGN_OR_RETURN(const float* output_data,
+ TFLITE_ASSIGN_OR_RETURN(const float* output_data,
core::AssertAndReturnTypedTensor<float>(output_tensor));
for (int j = 0; j < head.label_map_items.size(); ++j) {
score_pairs.emplace_back(j, output_data[j]);
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/embedding_postprocessor.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/embedding_postprocessor.cc
index 83b123fd5c873..8e4ad06902da8 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/embedding_postprocessor.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/embedding_postprocessor.cc
@@ -24,12 +24,12 @@ tflite::support::StatusOr<std::unique_ptr<EmbeddingPostprocessor>>
EmbeddingPostprocessor::Create(core::TfLiteEngine* engine,
const std::initializer_list<int> output_indices,
std::unique_ptr<EmbeddingOptions> options) {
- ASSIGN_OR_RETURN(auto processor,
+ TFLITE_ASSIGN_OR_RETURN(auto processor,
Processor::Create<EmbeddingPostprocessor>(
/* num_expected_tensors = */ 1, engine, output_indices,
/* requires_metadata = */ false));
- RETURN_IF_ERROR(processor->Init(std::move(options)));
+ TFLITE_RETURN_IF_ERROR(processor->Init(std::move(options)));
return processor;
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/embedding_searcher.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/embedding_searcher.cc
index 274d6ae8658b5..c9de0bcfaa990 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/embedding_searcher.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/embedding_searcher.cc
@@ -176,7 +176,7 @@ StatusOr<std::unique_ptr<EmbeddingSearcher>> EmbeddingSearcher::Create(
std::optional<absl::string_view> optional_index_file_content) {
auto embedding_searcher = std::make_unique<EmbeddingSearcher>();
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
embedding_searcher->Init(
std::move(search_options), optional_index_file_content));
return embedding_searcher;
@@ -185,7 +185,7 @@ StatusOr<std::unique_ptr<EmbeddingSearcher>> EmbeddingSearcher::Create(
StatusOr<SearchResult> EmbeddingSearcher::Search(const Embedding& embedding) {
// Convert embedding to Eigen matrix, as expected by ScaNN.
Eigen::MatrixXf query;
- RETURN_IF_ERROR(ConvertEmbeddingToEigenMatrix(embedding, &query));
+ TFLITE_RETURN_IF_ERROR(ConvertEmbeddingToEigenMatrix(embedding, &query));
// Identify partitions to search.
std::vector<std::vector<int>> leaves_to_search(
@@ -203,10 +203,10 @@ StatusOr<SearchResult> EmbeddingSearcher::Search(const Embedding& embedding) {
std::make_pair(std::numeric_limits<float>::max(), kNoNeighborId));
// Perform search.
if (quantizer_) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
QuantizedSearch(query, leaves_to_search[0], absl::MakeSpan(top_n)));
} else {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
LinearSearch(query, leaves_to_search[0], absl::MakeSpan(top_n)));
}
@@ -216,7 +216,7 @@ StatusOr<SearchResult> EmbeddingSearcher::Search(const Embedding& embedding) {
if (id == kNoNeighborId) {
break;
}
- ASSIGN_OR_RETURN(auto metadata, index_->GetMetadataAtIndex(id));
+ TFLITE_ASSIGN_OR_RETURN(auto metadata, index_->GetMetadataAtIndex(id));
NearestNeighbor* nearest_neighbor = search_result.add_nearest_neighbors();
nearest_neighbor->set_distance(distance);
nearest_neighbor->set_metadata(std::string(metadata));
@@ -231,13 +231,13 @@ StatusOr<absl::string_view> EmbeddingSearcher::GetUserInfo() {
absl::Status EmbeddingSearcher::Init(
std::unique_ptr<SearchOptions> options,
std::optional<absl::string_view> optional_index_file_content) {
- RETURN_IF_ERROR(SanityCheckOptions(*options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(*options));
options_ = std::move(options);
// Initialize index.
absl::string_view index_file_content;
if (options_->has_index_file()) {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
index_file_handler_,
ExternalFileHandler::CreateFromExternalFile(&options_->index_file()));
index_file_content = index_file_handler_->GetFileContent();
@@ -251,13 +251,13 @@ absl::Status EmbeddingSearcher::Init(
}
index_file_content = *optional_index_file_content;
}
- ASSIGN_OR_RETURN(index_,
+ TFLITE_ASSIGN_OR_RETURN(index_,
Index::CreateFromIndexBuffer(index_file_content.data(),
index_file_content.size()));
- ASSIGN_OR_RETURN(index_config_, index_->GetIndexConfig());
- RETURN_IF_ERROR(SanityCheckIndexConfig(index_config_));
+ TFLITE_ASSIGN_OR_RETURN(index_config_, index_->GetIndexConfig());
+ TFLITE_RETURN_IF_ERROR(SanityCheckIndexConfig(index_config_));
// Get distance measure once and for all.
- ASSIGN_OR_RETURN(distance_measure_,
+ TFLITE_ASSIGN_OR_RETURN(distance_measure_,
GetDistanceMeasure(index_config_.scann_config()));
// Initialize partitioner.
@@ -296,7 +296,7 @@ absl::Status EmbeddingSearcher::QuantizedSearch(
}
for (int leaf_id : leaves_to_search) {
// Load partition into Eigen matrix.
- ASSIGN_OR_RETURN(auto partition, index_->GetPartitionAtIndex(leaf_id));
+ TFLITE_ASSIGN_OR_RETURN(auto partition, index_->GetPartitionAtIndex(leaf_id));
int partition_size = partition.size() / dim;
Eigen::Map<const Matrix8u> database(
reinterpret_cast<const uint8_t*>(partition.data()), dim,
@@ -319,7 +319,7 @@ absl::Status EmbeddingSearcher::LinearSearch(Eigen::Ref<Eigen::MatrixXf> query,
int dim = index_config_.embedding_dim();
for (int leaf_id : leaves_to_search) {
// Load partition into Eigen matrix.
- ASSIGN_OR_RETURN(auto partition, index_->GetPartitionAtIndex(leaf_id));
+ TFLITE_ASSIGN_OR_RETURN(auto partition, index_->GetPartitionAtIndex(leaf_id));
int partition_size = partition.size() / (dim * sizeof(float));
Eigen::Map<const Eigen::MatrixXf> database(
reinterpret_cast<const float*>(partition.data()), dim, partition_size);
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/image_preprocessor.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/image_preprocessor.cc
index 7ad4ad4703789..05a6df3f1a380 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/image_preprocessor.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/image_preprocessor.cc
@@ -38,12 +38,12 @@ tflite::support::StatusOr<std::unique_ptr<ImagePreprocessor>>
ImagePreprocessor::Create(
core::TfLiteEngine* engine, const std::initializer_list<int> input_indices,
const vision::FrameBufferUtils::ProcessEngine& process_engine) {
- ASSIGN_OR_RETURN(auto processor,
+ TFLITE_ASSIGN_OR_RETURN(auto processor,
Processor::Create<ImagePreprocessor>(
/* num_expected_tensors = */ 1, engine, input_indices,
/* requires_metadata = */ false));
- RETURN_IF_ERROR(processor->Init(process_engine));
+ TFLITE_RETURN_IF_ERROR(processor->Init(process_engine));
return processor;
}
@@ -72,7 +72,7 @@ absl::Status ImagePreprocessor::Init(
const vision::FrameBufferUtils::ProcessEngine& process_engine) {
frame_buffer_utils_ = vision::FrameBufferUtils::Create(process_engine);
- ASSIGN_OR_RETURN(input_specs_, vision::BuildInputImageTensorSpecs(
+ TFLITE_ASSIGN_OR_RETURN(input_specs_, vision::BuildInputImageTensorSpecs(
*engine_->interpreter(),
*engine_->metadata_extractor()));
@@ -137,7 +137,7 @@ absl::Status ImagePreprocessor::Preprocess(const FrameBuffer& frame_buffer,
{preprocessed_plane}, to_buffer_dimension, FrameBuffer::Format::kRGB,
FrameBuffer::Orientation::kTopLeft);
- RETURN_IF_ERROR(frame_buffer_utils_->Preprocess(
+ TFLITE_RETURN_IF_ERROR(frame_buffer_utils_->Preprocess(
frame_buffer, roi, preprocessed_frame_buffer.get()));
} else {
// Input frame buffer already targets model requirements: skip image
@@ -165,7 +165,7 @@ absl::Status ImagePreprocessor::Preprocess(const FrameBuffer& frame_buffer,
"and input tensor.");
}
// No normalization required: directly populate data.
- RETURN_IF_ERROR(tflite::task::core::PopulateTensor(
+ TFLITE_RETURN_IF_ERROR(tflite::task::core::PopulateTensor(
input_data, input_data_byte_size / sizeof(uint8), GetTensor()));
break;
case kTfLiteFloat32: {
@@ -177,7 +177,7 @@ absl::Status ImagePreprocessor::Preprocess(const FrameBuffer& frame_buffer,
"and input tensor.");
}
// Normalize and populate.
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
float* normalized_input_data,
tflite::task::core::AssertAndReturnTypedTensor<float>(GetTensor()));
const tflite::task::vision::NormalizationOptions& normalization_options =
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/processor.h b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/processor.h
index e7417ba65bf09..7f76bec5c0f18 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/processor.h
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/processor.h
@@ -56,7 +56,7 @@ class Processor {
const std::initializer_list<int> tensor_indices,
bool requires_metadata = true) {
auto processor = absl::make_unique<T>(engine, tensor_indices);
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
processor->SanityCheck(num_expected_tensors, requires_metadata));
return processor;
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/regex_preprocessor.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/regex_preprocessor.cc
index af923b4d6f2c1..758514ca3ed6e 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/regex_preprocessor.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/regex_preprocessor.cc
@@ -45,7 +45,7 @@ StatusOr<absl::string_view> CheckAndLoadFirstAssociatedFile(
"Invalid vocab_file from input process unit.",
TfLiteSupportStatus::kMetadataInvalidTokenizerError);
}
- ASSIGN_OR_RETURN(absl::string_view vocab_buffer,
+ TFLITE_ASSIGN_OR_RETURN(absl::string_view vocab_buffer,
metadata_extractor->GetAssociatedFile(
associated_files->Get(0)->name()->str()));
return vocab_buffer;
@@ -56,11 +56,11 @@ StatusOr<absl::string_view> CheckAndLoadFirstAssociatedFile(
/* static */
StatusOr<std::unique_ptr<RegexPreprocessor>> RegexPreprocessor::Create(
tflite::task::core::TfLiteEngine* engine, int input_tensor_index) {
- ASSIGN_OR_RETURN(auto processor, Processor::Create<RegexPreprocessor>(
+ TFLITE_ASSIGN_OR_RETURN(auto processor, Processor::Create<RegexPreprocessor>(
/* num_expected_tensors = */ 1, engine,
{input_tensor_index},
/* requires_metadata = */ false));
- RETURN_IF_ERROR(processor->Init());
+ TFLITE_RETURN_IF_ERROR(processor->Init());
return processor;
}
@@ -70,10 +70,10 @@ absl::Status RegexPreprocessor::Init() {
return absl::OkStatus();
}
// Try if RegexTokenzier metadata can be found.
- ASSIGN_OR_RETURN(const auto tokenzier_metadata,
+ TFLITE_ASSIGN_OR_RETURN(const auto tokenzier_metadata,
TryFindRegexTokenizerMetadata());
- ASSIGN_OR_RETURN(tokenizer_, CreateTokenizerFromMetadata(
+ TFLITE_ASSIGN_OR_RETURN(tokenizer_, CreateTokenizerFromMetadata(
tokenzier_metadata, GetMetadataExtractor()));
return absl::OkStatus();
}
@@ -86,7 +86,7 @@ RegexPreprocessor::TryFindRegexTokenizerMetadata() {
return nullptr;
}
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto tokenizer_metadata,
GetMetadataExtractor()->FindFirstProcessUnit(
*tensor_metadata, ProcessUnitOptions_RegexTokenizerOptions));
@@ -120,7 +120,7 @@ RegexPreprocessor::CreateTokenizerFromMetadata(
ProcessUnitOptions_RegexTokenizerOptions) {
const tflite::RegexTokenizerOptions* options =
tokenizer_metadata->options_as<RegexTokenizerOptions>();
- ASSIGN_OR_RETURN(absl::string_view vocab_buffer,
+ TFLITE_ASSIGN_OR_RETURN(absl::string_view vocab_buffer,
CheckAndLoadFirstAssociatedFile(options->vocab_file(),
metadata_extractor));
if (options->delim_regex_pattern() == nullptr) {
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/search_postprocessor.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/search_postprocessor.cc
index fb34eea170ad8..e78351787229f 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/search_postprocessor.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/search_postprocessor.cc
@@ -91,16 +91,16 @@ StatusOr<std::unique_ptr<SearchPostprocessor>> SearchPostprocessor::Create(
TfLiteEngine* engine, int output_index,
std::unique_ptr<SearchOptions> search_options,
std::unique_ptr<EmbeddingOptions> embedding_options) {
- ASSIGN_OR_RETURN(auto embedding_postprocessor,
+ TFLITE_ASSIGN_OR_RETURN(auto embedding_postprocessor,
CreateEmbeddingPostprocessor(engine, {output_index},
std::move(embedding_options)));
- ASSIGN_OR_RETURN(auto search_processor,
+ TFLITE_ASSIGN_OR_RETURN(auto search_processor,
Processor::Create<SearchPostprocessor>(
/* num_expected_tensors =*/1, engine, {output_index},
/* requires_metadata =*/false));
- RETURN_IF_ERROR(search_processor->Init(std::move(embedding_postprocessor),
+ TFLITE_RETURN_IF_ERROR(search_processor->Init(std::move(embedding_postprocessor),
std::move(search_options)));
return search_processor;
}
@@ -108,10 +108,10 @@ StatusOr<std::unique_ptr<SearchPostprocessor>> SearchPostprocessor::Create(
StatusOr<SearchResult> SearchPostprocessor::Postprocess() {
// Extract embedding.
Embedding embedding;
- RETURN_IF_ERROR(embedding_postprocessor_->Postprocess(&embedding));
+ TFLITE_RETURN_IF_ERROR(embedding_postprocessor_->Postprocess(&embedding));
// Search the nearest-neighbor embedding.
- ASSIGN_OR_RETURN(SearchResult search_result,
+ TFLITE_ASSIGN_OR_RETURN(SearchResult search_result,
embedding_searcher_->Search(embedding));
return search_result;
}
@@ -126,14 +126,14 @@ absl::Status SearchPostprocessor::Init(
embedding_postprocessor_ = std::move(embedding_postprocessor);
if (options->has_index_file()) {
- ASSIGN_OR_RETURN(embedding_searcher_,
+ TFLITE_ASSIGN_OR_RETURN(embedding_searcher_,
EmbeddingSearcher::Create(std::move(options)));
} else {
// Index File is expected in the metadata if not provided in the options.
- ASSIGN_OR_RETURN(absl::string_view index_file_content,
+ TFLITE_ASSIGN_OR_RETURN(absl::string_view index_file_content,
GetIndexFileContentFromMetadata(*GetMetadataExtractor(),
*GetTensorMetadata()));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
embedding_searcher_,
EmbeddingSearcher::Create(std::move(options), index_file_content));
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/universal_sentence_encoder_preprocessor.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/universal_sentence_encoder_preprocessor.cc
index 54dc738e3ab26..7620ef6ce3041 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/universal_sentence_encoder_preprocessor.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/processor/universal_sentence_encoder_preprocessor.cc
@@ -55,10 +55,10 @@ absl::Status UniversalSentenceEncoderPreprocessor::Preprocess(
const std::string& text) {
// All input tensors must be populated, even though we're only using the
// response text input tensor.
- RETURN_IF_ERROR(PopulateTensor(std::string(""), GetTensor(kQueryTextIndex)));
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(std::string(""), GetTensor(kQueryTextIndex)));
+ TFLITE_RETURN_IF_ERROR(
PopulateTensor(std::string(""), GetTensor(kResponseContextIndex)));
- RETURN_IF_ERROR(PopulateTensor(text, GetTensor(kResponseTextIndex)));
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(text, GetTensor(kResponseTextIndex)));
return absl::OkStatus();
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_clu_annotator.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_clu_annotator.cc
index f60a556dbbe1b..71068366d96f5 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_clu_annotator.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_clu_annotator.cc
@@ -76,17 +76,17 @@ absl::StatusOr<int> FindTensorIdxByName(
BertCluAnnotator::CreateFromOptions(
const BertCluAnnotatorOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
// Copy options to ensure the ExternalFile outlives the duration of this
// created BertCluAnnotator object.
auto options_copy = std::make_unique<BertCluAnnotatorOptions>(options);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto bert_clu_annotator,
core::TaskAPIFactory::CreateFromBaseOptions<BertCluAnnotator>(
&options_copy->base_options(), std::move(resolver)));
- RETURN_IF_ERROR(bert_clu_annotator->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(bert_clu_annotator->Init(std::move(options_copy)));
return std::move(bert_clu_annotator);
}
@@ -102,7 +102,7 @@ absl::Status BertCluAnnotator::Init(
"No input process unit found from metadata.",
support::TfLiteSupportStatus::kMetadataInvalidTokenizerError);
}
- ASSIGN_OR_RETURN(tokenizer_,
+ TFLITE_ASSIGN_OR_RETURN(tokenizer_,
support::text::tokenizer::CreateTokenizerFromProcessUnit(
tokenizer_process_unit, GetMetadataExtractor()));
@@ -116,31 +116,31 @@ absl::Status BertCluAnnotator::Init(
tensor_index_map_ = std::make_unique<TensorIndexMap>();
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
tensor_index_map_->token_id_idx,
FindTensorIdxByName(input_tensors_metadata, kTokenIdTensorName));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
tensor_index_map_->token_mask_idx,
FindTensorIdxByName(input_tensors_metadata, kMaskTensorName));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
tensor_index_map_->token_type_id_idx,
FindTensorIdxByName(input_tensors_metadata, kTokenTypeIdTensorName));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
tensor_index_map_->domain_names_idx,
FindTensorIdxByName(output_tensors_metadata, kDomainTaskNamesTensorName));
- ASSIGN_OR_RETURN(tensor_index_map_->domain_scores_idx,
+ TFLITE_ASSIGN_OR_RETURN(tensor_index_map_->domain_scores_idx,
FindTensorIdxByName(output_tensors_metadata,
kDomainTaskScoresTensorName));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
tensor_index_map_->intent_names_idx,
FindTensorIdxByName(output_tensors_metadata, kIntentTaskNamesTensorName));
- ASSIGN_OR_RETURN(tensor_index_map_->intent_scores_idx,
+ TFLITE_ASSIGN_OR_RETURN(tensor_index_map_->intent_scores_idx,
FindTensorIdxByName(output_tensors_metadata,
kIntentTaskScoresTensorName));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
tensor_index_map_->slot_names_idx,
FindTensorIdxByName(output_tensors_metadata, kSlotTaskNamesTensorName));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
tensor_index_map_->slot_scores_idx,
FindTensorIdxByName(output_tensors_metadata, kSlotTaskScoresTensorName));
@@ -150,21 +150,21 @@ absl::Status BertCluAnnotator::Init(
interpreter, tensor_index_map_.get(), options_.get(),
static_cast<tflite::support::text::tokenizer::BertTokenizer*>(
tokenizer_.get()));
- RETURN_IF_ERROR(m.status());
+ TFLITE_RETURN_IF_ERROR(m.status());
modules_.emplace_back(*std::move(m));
// DomainModule.
m = DomainModule::Create(interpreter, tensor_index_map_.get(),
options_.get());
- RETURN_IF_ERROR(m.status());
+ TFLITE_RETURN_IF_ERROR(m.status());
modules_.emplace_back(*std::move(m));
// IntentModule.
m = IntentModule::Create(interpreter, tensor_index_map_.get(),
options_.get());
- RETURN_IF_ERROR(m.status());
+ TFLITE_RETURN_IF_ERROR(m.status());
modules_.emplace_back(*std::move(m));
// SlotModule.
m = SlotModule::Create(interpreter, tensor_index_map_.get(), options_.get());
- RETURN_IF_ERROR(m.status());
+ TFLITE_RETURN_IF_ERROR(m.status());
modules_.emplace_back(*std::move(m));
return absl::OkStatus();
@@ -181,7 +181,7 @@ absl::Status BertCluAnnotator::Preprocess(
artifacts_.Clear();
// Preprocess
for (const auto& module : modules_) {
- RETURN_IF_ERROR(module->Preprocess(request, &artifacts_));
+ TFLITE_RETURN_IF_ERROR(module->Preprocess(request, &artifacts_));
}
return absl::OkStatus();
}
@@ -191,7 +191,7 @@ tflite::support::StatusOr<CluResponse> BertCluAnnotator::Postprocess(
const CluRequest& request) {
CluResponse response;
for (const auto& module : modules_) {
- RETURN_IF_ERROR(module->Postprocess(&artifacts_, &response));
+ TFLITE_RETURN_IF_ERROR(module->Postprocess(&artifacts_, &response));
}
return response;
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_nl_classifier.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_nl_classifier.cc
index 52c898dacb9ca..6b85f361b6047 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_nl_classifier.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_nl_classifier.cc
@@ -83,15 +83,15 @@ StatusOr<std::vector<core::Category>> BertNLClassifier::Postprocess(
StatusOr<std::unique_ptr<BertNLClassifier>> BertNLClassifier::CreateFromOptions(
const BertNLClassifierOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
auto options_copy = absl::make_unique<BertNLClassifierOptions>(options);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto bert_nl_classifier,
core::TaskAPIFactory::CreateFromBaseOptions<BertNLClassifier>(
&options_copy->base_options(), std::move(resolver)));
- RETURN_IF_ERROR(bert_nl_classifier->Initialize(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(bert_nl_classifier->Initialize(std::move(options_copy)));
return std::move(bert_nl_classifier);
}
@@ -100,9 +100,9 @@ absl::Status BertNLClassifier::Initialize(
options_ = std::move(options);
// Create preprocessor.
- ASSIGN_OR_RETURN(auto input_indices,
+ TFLITE_ASSIGN_OR_RETURN(auto input_indices,
GetBertInputTensorIndices(GetTfLiteEngine()));
- ASSIGN_OR_RETURN(preprocessor_,
+ TFLITE_ASSIGN_OR_RETURN(preprocessor_,
processor::BertPreprocessor::Create(
GetTfLiteEngine(),
{input_indices[0], input_indices[1], input_indices[2]}));
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_question_answerer.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_question_answerer.cc
index 444724ae504b0..152be993e86b4 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_question_answerer.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/bert_question_answerer.cc
@@ -65,17 +65,17 @@ StatusOr<std::unique_ptr<QuestionAnswerer>>
BertQuestionAnswerer::CreateFromOptions(
const BertQuestionAnswererOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
// Copy options to ensure the ExternalFile outlives the duration of this
// created BertQuestionAnswerer object.
auto options_copy = absl::make_unique<BertQuestionAnswererOptions>(options);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto bert_question_answerer,
core::TaskAPIFactory::CreateFromBaseOptions<BertQuestionAnswerer>(
&options_copy->base_options(), std::move(resolver)));
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
bert_question_answerer->InitializeFromMetadata(std::move(options_copy)));
return std::move(bert_question_answerer);
}
@@ -113,7 +113,7 @@ StatusOr<std::unique_ptr<QuestionAnswerer>>
BertQuestionAnswerer::CreateBertQuestionAnswererFromFile(
const std::string& path_to_model, const std::string& path_to_vocab) {
std::unique_ptr<BertQuestionAnswerer> api_to_init;
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
api_to_init,
core::TaskAPIFactory::CreateFromFile<BertQuestionAnswerer>(
path_to_model,
@@ -128,7 +128,7 @@ BertQuestionAnswerer::CreateBertQuestionAnswererFromBuffer(
const char* model_buffer_data, size_t model_buffer_size,
const char* vocab_buffer_data, size_t vocab_buffer_size) {
std::unique_ptr<BertQuestionAnswerer> api_to_init;
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
api_to_init,
core::TaskAPIFactory::CreateFromBuffer<BertQuestionAnswerer>(
model_buffer_data, model_buffer_size,
@@ -143,7 +143,7 @@ StatusOr<std::unique_ptr<QuestionAnswerer>>
BertQuestionAnswerer::CreateAlbertQuestionAnswererFromFile(
const std::string& path_to_model, const std::string& path_to_spmodel) {
std::unique_ptr<BertQuestionAnswerer> api_to_init;
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
api_to_init,
core::TaskAPIFactory::CreateFromFile<BertQuestionAnswerer>(
path_to_model,
@@ -158,7 +158,7 @@ BertQuestionAnswerer::CreateAlbertQuestionAnswererFromBuffer(
const char* model_buffer_data, size_t model_buffer_size,
const char* spmodel_buffer_data, size_t spmodel_buffer_size) {
std::unique_ptr<BertQuestionAnswerer> api_to_init;
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
api_to_init,
core::TaskAPIFactory::CreateFromBuffer<BertQuestionAnswerer>(
model_buffer_data, model_buffer_size,
@@ -292,11 +292,11 @@ absl::Status BertQuestionAnswerer::Preprocess(
segment_ids.insert(segment_ids.end(), zeros_to_pad, 0);
// input_ids INT32[1, 384]
- RETURN_IF_ERROR(PopulateTensor(input_ids, ids_tensor));
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(input_ids, ids_tensor));
// input_mask INT32[1, 384]
- RETURN_IF_ERROR(PopulateTensor(input_mask, mask_tensor));
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(input_mask, mask_tensor));
// segment_ids INT32[1, 384]
- RETURN_IF_ERROR(PopulateTensor(segment_ids, segment_ids_tensor));
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(segment_ids, segment_ids_tensor));
return absl::OkStatus();
}
@@ -323,9 +323,9 @@ StatusOr<std::vector<QaAnswer>> BertQuestionAnswerer::Postprocess(
std::vector<float> start_logits;
// end_logits FLOAT[1, 384]
- RETURN_IF_ERROR(PopulateVector(end_logits_tensor, &end_logits));
+ TFLITE_RETURN_IF_ERROR(PopulateVector(end_logits_tensor, &end_logits));
// start_logits FLOAT[1, 384]
- RETURN_IF_ERROR(PopulateVector(start_logits_tensor, &start_logits));
+ TFLITE_RETURN_IF_ERROR(PopulateVector(start_logits_tensor, &start_logits));
auto start_indices = ReverseSortIndices(start_logits);
auto end_indices = ReverseSortIndices(end_logits);
@@ -380,7 +380,7 @@ absl::Status BertQuestionAnswerer::InitializeFromMetadata(
"No input process unit found from metadata.",
TfLiteSupportStatus::kMetadataInvalidTokenizerError);
}
- ASSIGN_OR_RETURN(tokenizer_,
+ TFLITE_ASSIGN_OR_RETURN(tokenizer_,
CreateTokenizerFromProcessUnit(tokenizer_process_unit,
GetMetadataExtractor()));
return absl::OkStatus();
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/slot_repr.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/slot_repr.cc
index 114a721ee40ef..917479e9f6041 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/slot_repr.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/slot_repr.cc
@@ -70,7 +70,7 @@ StatusOr<SlotRepr> SlotRepr::CreateFromIob(const absl::string_view repr) {
kSlotBTagPrefix, " or ",
kSlotITagPrefix, ": ", repr));
}
- ASSIGN_OR_RETURN(const auto domain_name_pair, SplitDomainAndName(full_name));
+ TFLITE_ASSIGN_OR_RETURN(const auto domain_name_pair, SplitDomainAndName(full_name));
ret.domain_ = std::string(std::get<0>(domain_name_pair));
ret.name_ = std::string(std::get<1>(domain_name_pair));
return ret;
@@ -117,12 +117,12 @@ absl::Status ResolveInconsistentIobTagSeq(std::vector<std::string>* tag_names) {
for (size_t i = 0; i < tag_names->size(); ++i) {
const auto& tag = tag_names->at(i);
if (SlotRepr::IsI(tag)) {
- ASSIGN_OR_RETURN(const SlotRepr repr, SlotRepr::CreateFromIob(tag));
+ TFLITE_ASSIGN_OR_RETURN(const SlotRepr repr, SlotRepr::CreateFromIob(tag));
if (SlotRepr::IsO(prev_tag)) {
// inconsistent case. eg. O I-time
(*tag_names)[i] = repr.BTag();
} else {
- ASSIGN_OR_RETURN(const SlotRepr prev_repr,
+ TFLITE_ASSIGN_OR_RETURN(const SlotRepr prev_repr,
SlotRepr::CreateFromIob(prev_tag));
if (prev_repr.FullName() != repr.FullName()) {
// inconsistent case. eg. B-time I-per I-time I-per
@@ -151,7 +151,7 @@ absl::StatusOr<std::vector<SlotMentionStruct>> DecodeSlotChunks(
// Make a copy since the input is constant while still modifications are
// needed.
std::vector<std::string> tag_names_fixed(tag_names.begin(), tag_names.end());
- RETURN_IF_ERROR(ResolveInconsistentIobTagSeq(&tag_names_fixed));
+ TFLITE_RETURN_IF_ERROR(ResolveInconsistentIobTagSeq(&tag_names_fixed));
std::vector<SlotMentionStruct> result;
// Compute slot tag spans
@@ -165,7 +165,7 @@ absl::StatusOr<std::vector<SlotMentionStruct>> DecodeSlotChunks(
// I tag
if (SlotRepr::IsI(tag_str_i)) {
SlotRepr slot_tag_i;
- ASSIGN_OR_RETURN(slot_tag_i, SlotRepr::CreateFromIob(tag_str_i));
+ TFLITE_ASSIGN_OR_RETURN(slot_tag_i, SlotRepr::CreateFromIob(tag_str_i));
if (cur_slot == slot_tag_i) {
cur_slot_exclusive_end = token_i + 1;
// Compute the phrase level confidence by taking min(tag confidences).
@@ -194,7 +194,7 @@ absl::StatusOr<std::vector<SlotMentionStruct>> DecodeSlotChunks(
cur_slot_start = token_i;
cur_slot_exclusive_end = token_i + 1;
cur_slot_confidence = tag_probs[token_i];
- ASSIGN_OR_RETURN(cur_slot, SlotRepr::CreateFromIob(tag_str_i));
+ TFLITE_ASSIGN_OR_RETURN(cur_slot, SlotRepr::CreateFromIob(tag_str_i));
} else {
// O tag
if (!SlotRepr::IsO(tag_str_i)) {
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/slot_tagging_output.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/slot_tagging_output.cc
index 6643cf6f4e95d..1c582721b6c1c 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/slot_tagging_output.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/slot_tagging_output.cc
@@ -64,7 +64,7 @@ DecodeSlotChunksPredictOnFirstSubword(
const int last_exclusive_end = whole_word_token_alignments.back().second;
whole_word_token_alignments.emplace_back(last_exclusive_end,
last_exclusive_end);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto slot_mentions,
DecodeSlotChunks(first_subword_tag_names, first_subword_tag_probs,
whole_word_token_alignments));
@@ -105,7 +105,7 @@ absl::Status SlotModulePopulateResponse(
// Prepare the data and decode slot chunks.
std::vector<SlotMentionStruct> cur_turn_slot_mentions;
// Decode slot chunks based on first subword tokens in the turn.
- ASSIGN_OR_RETURN(cur_turn_slot_mentions,
+ TFLITE_ASSIGN_OR_RETURN(cur_turn_slot_mentions,
DecodeSlotChunksPredictOnFirstSubword(
cur_turn_start, cur_turn_end, seq_len, tags_as_span,
confidences_as_span, token_alignments_as_span,
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/tflite_modules.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/tflite_modules.cc
index 634c10b21934b..d60c1b9b6b057 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/tflite_modules.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/clu_lib/tflite_modules.cc
@@ -65,7 +65,7 @@ absl::Status PopulateInputTextTensorForBERT(
std::vector<std::pair<int, int>> alignments;
std::vector<int> first_subword_indicators;
std::vector<int> segment_id_list;
- RETURN_IF_ERROR(BertPreprocessing(
+ TFLITE_RETURN_IF_ERROR(BertPreprocessing(
tokenizer, artifacts->reverse_utterance_list_to_encode, max_seq_len,
max_history_turns, &token_ids, &alignments, &first_subword_indicators,
&segment_id_list, &(artifacts->token_turn_ids)));
@@ -146,9 +146,9 @@ absl::StatusOr<std::unique_ptr<AbstractModule>> UtteranceSeqModule::Create(
const tflite::support::text::tokenizer::BertTokenizer* tokenizer) {
auto out = std::make_unique<UtteranceSeqModule>();
out->tensor_index_map_ = tensor_index_map;
- RETURN_IF_ERROR(out->Init(interpreter, options));
+ TFLITE_RETURN_IF_ERROR(out->Init(interpreter, options));
out->tokenizer_ = tokenizer;
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
out->max_seq_len_,
GetInputSeqDimSize(tensor_index_map->token_id_idx, interpreter));
out->max_history_turns_ = options->max_history_turns();
@@ -195,13 +195,13 @@ absl::StatusOr<std::unique_ptr<AbstractModule>> DomainModule::Create(
auto out = std::make_unique<DomainModule>();
out->tensor_index_map_ = tensor_index_map;
out->domain_threshold_ = options->domain_threshold();
- RETURN_IF_ERROR(out->Init(interpreter, options));
+ TFLITE_RETURN_IF_ERROR(out->Init(interpreter, options));
return out;
}
absl::Status DomainModule::Postprocess(Artifacts* artifacts,
CluResponse* response) const {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const auto t_output,
NamesAndConfidencesFromOutput(tensor_index_map_->domain_names_idx,
tensor_index_map_->domain_scores_idx));
@@ -225,20 +225,20 @@ absl::StatusOr<std::unique_ptr<AbstractModule>> IntentModule::Create(
out->tensor_index_map_ = tensor_index_map;
out->intent_threshold_ = options->intent_threshold();
out->categorical_slot_threshold_ = options->categorical_slot_threshold();
- RETURN_IF_ERROR(out->Init(interpreter, options));
+ TFLITE_RETURN_IF_ERROR(out->Init(interpreter, options));
return out;
}
absl::Status IntentModule::Postprocess(Artifacts* artifacts,
CluResponse* response) const {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const auto t_output,
NamesAndConfidencesFromOutput(tensor_index_map_->intent_names_idx,
tensor_index_map_->intent_scores_idx));
const auto& [names, confidences] = t_output;
for (int i = 0; i < names.size(); ++i) {
- ASSIGN_OR_RETURN(const auto name, IntentRepr::CreateFromFullName(names[i]));
+ TFLITE_ASSIGN_OR_RETURN(const auto name, IntentRepr::CreateFromFullName(names[i]));
// TODO(xysong): Differentiate categorical slots from intents.
std::vector<absl::string_view> parts = absl::StrSplit(name.Name(), '=');
if (parts.size() == 2) {
@@ -272,18 +272,18 @@ absl::StatusOr<std::unique_ptr<AbstractModule>> SlotModule::Create(
out->tensor_index_map_ = tensor_index_map;
out->mentioned_slot_threshold_ =
options->mentioned_slot_threshold();
- RETURN_IF_ERROR(out->Init(interpreter, options));
+ TFLITE_RETURN_IF_ERROR(out->Init(interpreter, options));
return out;
}
absl::Status SlotModule::Postprocess(Artifacts* artifacts,
CluResponse* response) const {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const auto t_output,
NamesAndConfidencesFromOutput(tensor_index_map_->slot_names_idx,
tensor_index_map_->slot_scores_idx));
const auto& [tags, confidences] = t_output;
- RETURN_IF_ERROR(SlotModulePopulateResponse(
+ TFLITE_RETURN_IF_ERROR(SlotModulePopulateResponse(
tags, confidences, artifacts->token_alignments, artifacts->token_turn_ids,
artifacts->first_subword_indicators, mentioned_slot_threshold_,
artifacts->reverse_utterance_list_to_encode, response));
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/nlclassifier/nl_classifier.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/nlclassifier/nl_classifier.cc
index c08abda57401f..181ed79e8a4bd 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/nlclassifier/nl_classifier.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/nlclassifier/nl_classifier.cc
@@ -191,7 +191,7 @@ absl::Status NLClassifier::Initialize(
std::unique_ptr<tflite::task::text::NLClassifierOptions> options) {
proto_options_ = std::move(options);
- RETURN_IF_ERROR(Initialize(NLClassifierOptions{
+ TFLITE_RETURN_IF_ERROR(Initialize(NLClassifierOptions{
/* input_tensor_index= */ proto_options_->input_tensor_index(),
/* output_score_tensor_index= */
proto_options_->output_score_tensor_index(),
@@ -222,7 +222,7 @@ absl::Status NLClassifier::Initialize(const NLClassifierOptions& options) {
}
// Create preprocessor.
- ASSIGN_OR_RETURN(preprocessor_, processor::RegexPreprocessor::Create(
+ TFLITE_ASSIGN_OR_RETURN(preprocessor_, processor::RegexPreprocessor::Create(
GetTfLiteEngine(), input_index));
// output score tensor should be type
@@ -295,16 +295,16 @@ absl::Status NLClassifier::Initialize(const NLClassifierOptions& options) {
StatusOr<std::unique_ptr<NLClassifier>> NLClassifier::CreateFromOptions(
const NLClassifierProtoOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
// Copy options to ensure the ExternalFile outlives the duration of this
// created NLClassifier object.
auto options_copy = absl::make_unique<NLClassifierProtoOptions>(options);
- ASSIGN_OR_RETURN(auto nl_classifier,
+ TFLITE_ASSIGN_OR_RETURN(auto nl_classifier,
TaskAPIFactory::CreateFromBaseOptions<NLClassifier>(
&options_copy->base_options(), std::move(resolver)));
- RETURN_IF_ERROR(nl_classifier->Initialize(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(nl_classifier->Initialize(std::move(options_copy)));
return nl_classifier;
}
@@ -315,11 +315,11 @@ NLClassifier::CreateFromBufferAndOptions(
const NLClassifierOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
std::unique_ptr<NLClassifier> nl_classifier;
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
nl_classifier,
core::TaskAPIFactory::CreateFromBuffer<NLClassifier>(
model_buffer_data, model_buffer_size, std::move(resolver)));
- RETURN_IF_ERROR(nl_classifier->Initialize(options));
+ TFLITE_RETURN_IF_ERROR(nl_classifier->Initialize(options));
return std::move(nl_classifier);
}
@@ -327,10 +327,10 @@ StatusOr<std::unique_ptr<NLClassifier>> NLClassifier::CreateFromFileAndOptions(
const std::string& path_to_model, const NLClassifierOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
std::unique_ptr<NLClassifier> nl_classifier;
- ASSIGN_OR_RETURN(nl_classifier,
+ TFLITE_ASSIGN_OR_RETURN(nl_classifier,
core::TaskAPIFactory::CreateFromFile<NLClassifier>(
path_to_model, std::move(resolver)));
- RETURN_IF_ERROR(nl_classifier->Initialize(options));
+ TFLITE_RETURN_IF_ERROR(nl_classifier->Initialize(options));
return std::move(nl_classifier);
}
@@ -338,10 +338,10 @@ StatusOr<std::unique_ptr<NLClassifier>> NLClassifier::CreateFromFdAndOptions(
int fd, const NLClassifierOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
std::unique_ptr<NLClassifier> nl_classifier;
- ASSIGN_OR_RETURN(nl_classifier,
+ TFLITE_ASSIGN_OR_RETURN(nl_classifier,
core::TaskAPIFactory::CreateFromFileDescriptor<NLClassifier>(
fd, std::move(resolver)));
- RETURN_IF_ERROR(nl_classifier->Initialize(options));
+ TFLITE_RETURN_IF_ERROR(nl_classifier->Initialize(options));
return std::move(nl_classifier);
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/text_embedder.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/text_embedder.cc
index 7363540797cf2..dc908baeb4026 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/text_embedder.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/text_embedder.cc
@@ -66,15 +66,15 @@ tflite::support::StatusOr<double> TextEmbedder::CosineSimilarity(
tflite::support::StatusOr<std::unique_ptr<TextEmbedder>>
TextEmbedder::CreateFromOptions(const TextEmbedderOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
// Copy options to ensure the ExternalFile-s outlive the constructed object.
auto options_copy = absl::make_unique<TextEmbedderOptions>(options);
- ASSIGN_OR_RETURN(auto text_embedder,
+ TFLITE_ASSIGN_OR_RETURN(auto text_embedder,
TaskAPIFactory::CreateFromBaseOptions<TextEmbedder>(
&options_copy->base_options(), std::move(resolver)));
- RETURN_IF_ERROR(text_embedder->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(text_embedder->Init(std::move(options_copy)));
return text_embedder;
}
@@ -87,7 +87,7 @@ absl::Status TextEmbedder::Init(std::unique_ptr<TextEmbedderOptions> options) {
std::vector<int> output_tensor_indices;
if (input_count == 1) {
// Assume Regex-based model.
- ASSIGN_OR_RETURN(preprocessor_, processor::RegexPreprocessor::Create(
+ TFLITE_ASSIGN_OR_RETURN(preprocessor_, processor::RegexPreprocessor::Create(
GetTfLiteEngine(), 0));
// All output tensors are assumed to be embeddings.
for (int i = 0; i < GetTfLiteEngine()->GetOutputs().size(); ++i) {
@@ -97,9 +97,9 @@ absl::Status TextEmbedder::Init(std::unique_ptr<TextEmbedderOptions> options) {
// Check if BertTokenizer is present.
if (GetMetadataExtractor()->GetInputProcessUnitsCount() > 0) {
// Assume Bert-based model.
- ASSIGN_OR_RETURN(auto input_indices,
+ TFLITE_ASSIGN_OR_RETURN(auto input_indices,
GetBertInputTensorIndices(GetTfLiteEngine()));
- ASSIGN_OR_RETURN(preprocessor_, processor::BertPreprocessor::Create(
+ TFLITE_ASSIGN_OR_RETURN(preprocessor_, processor::BertPreprocessor::Create(
GetTfLiteEngine(),
{input_indices[0], input_indices[1],
input_indices[2]}));
@@ -109,13 +109,13 @@ absl::Status TextEmbedder::Init(std::unique_ptr<TextEmbedderOptions> options) {
}
} else {
// Assume Universal Sentence Encoder-based model.
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto input_indices,
GetUniversalSentenceEncoderInputTensorIndices(GetTfLiteEngine()));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto output_indices,
GetUniversalSentenceEncoderOutputTensorIndices(GetTfLiteEngine()));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
preprocessor_,
processor::UniversalSentenceEncoderPreprocessor::Create(
GetTfLiteEngine(),
@@ -154,7 +154,7 @@ absl::Status TextEmbedder::Init(std::unique_ptr<TextEmbedderOptions> options) {
"number of output tensors.",
support::TfLiteSupportStatus::kInvalidArgumentError);
}
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto processor,
processor::EmbeddingPostprocessor::Create(
GetTfLiteEngine(), {output_tensor_indices[i]}, std::move(option)));
@@ -183,7 +183,7 @@ tflite::support::StatusOr<EmbeddingResult> TextEmbedder::Postprocess(
const std::string& input) {
EmbeddingResult result;
for (int i = 0; i < postprocessors_.size(); ++i) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
postprocessors_.at(i)->Postprocess(result.add_embeddings()));
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/text_searcher.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/text_searcher.cc
index fd9cbcb29adfd..9b8de0bb025d4 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/text_searcher.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/text_searcher.cc
@@ -65,10 +65,10 @@ StatusOr<std::unique_ptr<TextSearcher>> TextSearcher::CreateFromOptions(
// Copy options to ensure the ExternalFile-s outlive the constructed object.
auto options_copy = absl::make_unique<TextSearcherOptions>(options);
- ASSIGN_OR_RETURN(auto text_searcher,
+ TFLITE_ASSIGN_OR_RETURN(auto text_searcher,
TaskAPIFactory::CreateFromBaseOptions<TextSearcher>(
&options_copy->base_options(), std::move(resolver)));
- RETURN_IF_ERROR(text_searcher->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(text_searcher->Init(std::move(options_copy)));
return text_searcher;
}
@@ -87,7 +87,7 @@ absl::Status TextSearcher::Init(std::unique_ptr<TextSearcherOptions> options) {
absl::StrFormat("Expected exactly 1 output tensor, got %d.",
output_count));
}
- ASSIGN_OR_RETURN(preprocessor_, processor::RegexPreprocessor::Create(
+ TFLITE_ASSIGN_OR_RETURN(preprocessor_, processor::RegexPreprocessor::Create(
GetTfLiteEngine(), 0));
output_tensor_index = 0;
} else if (input_count == 3) {
@@ -100,22 +100,22 @@ absl::Status TextSearcher::Init(std::unique_ptr<TextSearcherOptions> options) {
absl::StrFormat("Expected exactly 1 output tensor, got %d.",
output_count));
}
- ASSIGN_OR_RETURN(auto input_indices,
+ TFLITE_ASSIGN_OR_RETURN(auto input_indices,
GetBertInputTensorIndices(GetTfLiteEngine()));
- ASSIGN_OR_RETURN(preprocessor_, processor::BertPreprocessor::Create(
+ TFLITE_ASSIGN_OR_RETURN(preprocessor_, processor::BertPreprocessor::Create(
GetTfLiteEngine(),
{input_indices[0], input_indices[1],
input_indices[2]}));
output_tensor_index = 0;
} else {
// Assume Universal Sentence Encoder-based model.
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto input_indices,
GetUniversalSentenceEncoderInputTensorIndices(GetTfLiteEngine()));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto output_indices,
GetUniversalSentenceEncoderOutputTensorIndices(GetTfLiteEngine()));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
preprocessor_,
processor::UniversalSentenceEncoderPreprocessor::Create(
GetTfLiteEngine(),
@@ -129,7 +129,7 @@ absl::Status TextSearcher::Init(std::unique_ptr<TextSearcherOptions> options) {
absl::StrFormat("Expected 1 or 3 input tensors, got %d.", input_count));
}
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
postprocessor_,
SearchPostprocessor::Create(
GetTfLiteEngine(), output_tensor_index,
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/universal_sentence_encoder_qa.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/universal_sentence_encoder_qa.cc
index 52b0041039acf..439dfa4873459 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/universal_sentence_encoder_qa.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/text/universal_sentence_encoder_qa.cc
@@ -119,13 +119,13 @@ StatusOr<RetrievalOutput> UniversalSentenceEncoderQA::Retrieve(
// Only encode query for the first time.
if (i == 0) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
CopyVector(out.query_encoding, output.mutable_query_encoding()));
}
// For each answer, set the response result.
auto r = output.mutable_response_results()->Add();
- RETURN_IF_ERROR(CopyVector(out.response_encoding, r->mutable_encoding()));
+ TFLITE_RETURN_IF_ERROR(CopyVector(out.response_encoding, r->mutable_encoding()));
} else {
// If response is already encoded, encode query only and keep response
// encoding.
@@ -164,7 +164,7 @@ StatusOr<FeatureVector> UniversalSentenceEncoderQA::EncodeQuery(
const auto& output = Run(query_text, "", "");
FeatureVector v;
- RETURN_IF_ERROR(CopyVector(output.query_encoding, &v));
+ TFLITE_RETURN_IF_ERROR(CopyVector(output.query_encoding, &v));
return v;
}
@@ -178,7 +178,7 @@ StatusOr<FeatureVector> UniversalSentenceEncoderQA::EncodeResponse(
const auto& output = Run("", response_text, response_context);
FeatureVector v;
- RETURN_IF_ERROR(CopyVector(output.response_encoding, &v));
+ TFLITE_RETURN_IF_ERROR(CopyVector(output.response_encoding, &v));
return v;
}
@@ -215,11 +215,11 @@ std::vector<size_t> UniversalSentenceEncoderQA::Top(
Status UniversalSentenceEncoderQA::Preprocess(
const std::vector<TfLiteTensor*>& input_tensors, const QAInput& input) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
PopulateTensor(input.query_text, input_tensors[input_indices_[0]]));
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
PopulateTensor(input.response_context, input_tensors[input_indices_[1]]));
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
PopulateTensor(input.response_text, input_tensors[input_indices_[2]]));
return absl::OkStatus();
@@ -248,10 +248,10 @@ absl::Status UniversalSentenceEncoderQA::Init(
std::unique_ptr<RetrievalOptions> options) {
options_ = std::move(options);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
input_indices_,
GetUniversalSentenceEncoderInputTensorIndices(GetTfLiteEngine()));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
output_indices_,
GetUniversalSentenceEncoderOutputTensorIndices(GetTfLiteEngine()));
@@ -262,18 +262,18 @@ StatusOr<std::unique_ptr<UniversalSentenceEncoderQA>>
UniversalSentenceEncoderQA::CreateFromOption(
const RetrievalOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
// Copy options to ensure the ExternalFile outlives the duration of this
// created object.
auto options_copy = absl::make_unique<RetrievalOptions>(options);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto encoder,
TaskAPIFactory::CreateFromBaseOptions<UniversalSentenceEncoderQA>(
&options_copy->base_options(), std::move(resolver)));
- RETURN_IF_ERROR(encoder->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(encoder->Init(std::move(options_copy)));
return encoder;
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/core/base_vision_task_api.h b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/core/base_vision_task_api.h
index 76a03671b54af..1d86136da193d 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/core/base_vision_task_api.h
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/core/base_vision_task_api.h
@@ -73,7 +73,7 @@ class BaseVisionTaskApi
// already successfully initialized before calling this method.
virtual absl::Status CheckAndSetInputs() {
// BaseTaskApi always assume having a single input.
- ASSIGN_OR_RETURN(preprocessor_,
+ TFLITE_ASSIGN_OR_RETURN(preprocessor_,
::tflite::task::processor::ImagePreprocessor::Create(
this->GetTfLiteEngine(), {0}, process_engine_));
return absl::OkStatus();
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/core/classification_head.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/core/classification_head.cc
index b5b57f21e939f..1bcaee2b86ddc 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/core/classification_head.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/core/classification_head.cc
@@ -44,7 +44,7 @@ StatusOr<ClassificationHead> BuildClassificationHead(
output_tensor_metadata,
tflite::AssociatedFileType_TENSOR_AXIS_LABELS);
if (!labels_filename.empty()) {
- ASSIGN_OR_RETURN(absl::string_view labels_file,
+ TFLITE_ASSIGN_OR_RETURN(absl::string_view labels_file,
metadata_extractor.GetAssociatedFile(labels_filename));
const std::string display_names_filename =
ModelMetadataExtractor::FindFirstAssociatedFileName(
@@ -53,15 +53,15 @@ StatusOr<ClassificationHead> BuildClassificationHead(
display_names_locale);
absl::string_view display_names_file;
if (!display_names_filename.empty()) {
- ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile(
+ TFLITE_ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile(
display_names_filename));
}
- ASSIGN_OR_RETURN(head.label_map_items,
+ TFLITE_ASSIGN_OR_RETURN(head.label_map_items,
BuildLabelMapFromFiles(labels_file, display_names_file));
}
// Set score threshold, if present.
- ASSIGN_OR_RETURN(const tflite::ProcessUnit* score_thresholding_process_unit,
+ TFLITE_ASSIGN_OR_RETURN(const tflite::ProcessUnit* score_thresholding_process_unit,
ModelMetadataExtractor::FindFirstProcessUnit(
output_tensor_metadata,
tflite::ProcessUnitOptions_ScoreThresholdingOptions));
@@ -72,7 +72,7 @@ StatusOr<ClassificationHead> BuildClassificationHead(
}
// Build score calibration parameters, if present.
- ASSIGN_OR_RETURN(const tflite::ProcessUnit* score_calibration_process_unit,
+ TFLITE_ASSIGN_OR_RETURN(const tflite::ProcessUnit* score_calibration_process_unit,
ModelMetadataExtractor::FindFirstProcessUnit(
output_tensor_metadata,
tflite::ProcessUnitOptions_ScoreCalibrationOptions));
@@ -95,10 +95,10 @@ StatusOr<ClassificationHead> BuildClassificationHead(
"parameters file with type TENSOR_AXIS_SCORE_CALIBRATION.",
TfLiteSupportStatus::kMetadataAssociatedFileNotFoundError);
}
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
absl::string_view score_calibration_file,
metadata_extractor.GetAssociatedFile(score_calibration_filename));
- ASSIGN_OR_RETURN(SigmoidCalibrationParameters sigmoid_params,
+ TFLITE_ASSIGN_OR_RETURN(SigmoidCalibrationParameters sigmoid_params,
BuildSigmoidCalibrationParams(
*score_calibration_process_unit
->options_as_ScoreCalibrationOptions(),
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_classifier.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_classifier.cc
index aa1e7707dd99b..60cd7abb83374 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_classifier.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_classifier.cc
@@ -52,20 +52,20 @@ using ::tflite::task::core::TfLiteEngine;
StatusOr<std::unique_ptr<ImageClassifier>> ImageClassifier::CreateFromOptions(
const ImageClassifierOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
// Copy options to ensure the ExternalFile outlives the constructed object.
auto options_copy = absl::make_unique<ImageClassifierOptions>(options);
std::unique_ptr<ImageClassifier> image_classifier;
if (options_copy->has_model_file_with_metadata()) {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
image_classifier,
TaskAPIFactory::CreateFromExternalFileProto<ImageClassifier>(
&options_copy->model_file_with_metadata(), std::move(resolver),
options_copy->num_threads(), options_copy->compute_settings()));
} else if (options_copy->base_options().has_model_file()) {
- ASSIGN_OR_RETURN(image_classifier,
+ TFLITE_ASSIGN_OR_RETURN(image_classifier,
TaskAPIFactory::CreateFromBaseOptions<ImageClassifier>(
&options_copy->base_options(), std::move(resolver)));
} else {
@@ -77,7 +77,7 @@ StatusOr<std::unique_ptr<ImageClassifier>> ImageClassifier::CreateFromOptions(
TfLiteSupportStatus::kInvalidArgumentError);
}
- RETURN_IF_ERROR(image_classifier->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(image_classifier->Init(std::move(options_copy)));
return image_classifier;
}
@@ -125,18 +125,18 @@ absl::Status ImageClassifier::Init(
// Perform pre-initialization actions (by default, sets the process engine for
// image pre-processing to kLibyuv as a sane default).
- RETURN_IF_ERROR(PreInit());
+ TFLITE_RETURN_IF_ERROR(PreInit());
// Sanity check and set inputs and outputs.
- RETURN_IF_ERROR(CheckAndSetInputs());
- RETURN_IF_ERROR(CheckAndSetOutputs());
+ TFLITE_RETURN_IF_ERROR(CheckAndSetInputs());
+ TFLITE_RETURN_IF_ERROR(CheckAndSetOutputs());
// Initialize class whitelisting/blacklisting, if any.
- RETURN_IF_ERROR(CheckAndSetClassNameSet());
+ TFLITE_RETURN_IF_ERROR(CheckAndSetClassNameSet());
// Perform final initialization (by default, initialize score calibration
// parameters, if any).
- RETURN_IF_ERROR(PostInit());
+ TFLITE_RETURN_IF_ERROR(PostInit());
return absl::OkStatus();
}
@@ -177,7 +177,7 @@ absl::Status ImageClassifier::CheckAndSetOutputs() {
const tflite::TensorMetadata* output_tensor =
output_tensor_metadata->Get(i);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
ClassificationHead head,
BuildClassificationHead(*metadata_extractor, *output_tensor,
options_->display_names_locale()));
@@ -364,7 +364,7 @@ absl::Status ImageClassifier::InitScoreCalibrations() {
StatusCode::kInternal, "Could not create score calibration object.");
}
- RETURN_IF_ERROR(score_calibrations_[i]->InitializeFromParameters(
+ TFLITE_RETURN_IF_ERROR(score_calibrations_[i]->InitializeFromParameters(
classification_heads_[i].calibration_params.value()));
}
@@ -407,7 +407,7 @@ StatusOr<ClassificationResult> ImageClassifier::Postprocess(
const TfLiteTensor* output_tensor = output_tensors[i];
if (has_uint8_outputs_) {
- ASSIGN_OR_RETURN(const uint8* output_data,
+ TFLITE_ASSIGN_OR_RETURN(const uint8* output_data,
AssertAndReturnTypedTensor<uint8>(output_tensor));
for (int j = 0; j < head.label_map_items.size(); ++j) {
score_pairs.emplace_back(j, output_tensor->params.scale *
@@ -415,7 +415,7 @@ StatusOr<ClassificationResult> ImageClassifier::Postprocess(
output_tensor->params.zero_point));
}
} else {
- ASSIGN_OR_RETURN(const float* output_data,
+ TFLITE_ASSIGN_OR_RETURN(const float* output_data,
AssertAndReturnTypedTensor<float>(output_tensor));
for (int j = 0; j < head.label_map_items.size(); ++j) {
score_pairs.emplace_back(j, output_data[j]);
@@ -501,7 +501,7 @@ StatusOr<ClassificationResult> ImageClassifier::Postprocess(
}
}
- RETURN_IF_ERROR(FillResultsFromLabelMaps(&result));
+ TFLITE_RETURN_IF_ERROR(FillResultsFromLabelMaps(&result));
return result;
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_embedder.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_embedder.cc
index 0ce46fb9f9806..d24daeac9b61d 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_embedder.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_embedder.cc
@@ -62,13 +62,13 @@ ImageEmbedder::CreateFromOptions(const ImageEmbedderOptions& options,
// Copy options to ensure the ExternalFile-s outlive the constructed object.
auto options_copy = absl::make_unique<ImageEmbedderOptions>(options);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto image_embedder,
TaskAPIFactory::CreateFromExternalFileProto<ImageEmbedder>(
&options_copy->model_file_with_metadata(), std::move(resolver),
options_copy->num_threads(), options_copy->compute_settings()));
- RETURN_IF_ERROR(image_embedder->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(image_embedder->Init(std::move(options_copy)));
return image_embedder;
}
@@ -89,19 +89,19 @@ absl::Status ImageEmbedder::Init(
options_ = std::move(options);
// Perform pre-initialization actions.
- RETURN_IF_ERROR(PreInit());
+ TFLITE_RETURN_IF_ERROR(PreInit());
// Sanity check and set inputs and outputs.
- RETURN_IF_ERROR(CheckAndSetInputs());
+ TFLITE_RETURN_IF_ERROR(CheckAndSetInputs());
// Perform post-initialization actions.
- RETURN_IF_ERROR(PostInit());
+ TFLITE_RETURN_IF_ERROR(PostInit());
// ImageEmbedder assumes that all output tensors share the same
// embedding option.
postprocessors_.reserve(GetTfLiteEngine()->interpreter()->outputs().size());
for (int i = 0; i < GetTfLiteEngine()->interpreter()->outputs().size(); i++) {
- ASSIGN_OR_RETURN(auto processor,
+ TFLITE_ASSIGN_OR_RETURN(auto processor,
CreatePostprocessor(GetTfLiteEngine(), {i}, *options_));
postprocessors_.emplace_back(std::move(processor));
}
@@ -127,7 +127,7 @@ tflite::support::StatusOr<EmbeddingResult> ImageEmbedder::Postprocess(
const FrameBuffer& /*frame_buffer*/, const BoundingBox& /*roi*/) {
EmbeddingResult result;
for (int i = 0; i < postprocessors_.size(); ++i) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
postprocessors_.at(i)->Postprocess(result.add_embeddings()));
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_searcher.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_searcher.cc
index fb8bdf4f36446..6b577cd961f61 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_searcher.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_searcher.cc
@@ -56,11 +56,11 @@ StatusOr<std::unique_ptr<ImageSearcher>> ImageSearcher::CreateFromOptions(
// Copy options to ensure the ExternalFile-s outlive the constructed object.
auto options_copy = absl::make_unique<ImageSearcherOptions>(options);
- ASSIGN_OR_RETURN(auto image_searcher,
+ TFLITE_ASSIGN_OR_RETURN(auto image_searcher,
TaskAPIFactory::CreateFromBaseOptions<ImageSearcher>(
&options_copy->base_options(), std::move(resolver)));
- RETURN_IF_ERROR(image_searcher->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(image_searcher->Init(std::move(options_copy)));
return image_searcher;
}
@@ -75,13 +75,13 @@ absl::Status ImageSearcher::Init(
options_ = std::move(options);
// Perform pre-initialization actions.
- RETURN_IF_ERROR(PreInit());
+ TFLITE_RETURN_IF_ERROR(PreInit());
// Sanity check and set inputs.
- RETURN_IF_ERROR(CheckAndSetInputs());
+ TFLITE_RETURN_IF_ERROR(CheckAndSetInputs());
// Create post-processor.
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
postprocessor_,
SearchPostprocessor::Create(GetTfLiteEngine(), 0,
std::make_unique<processor::SearchOptions>(
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_segmenter.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_segmenter.cc
index c9dad866f1a68..8bef73b9759ff 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_segmenter.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/image_segmenter.cc
@@ -117,7 +117,7 @@ StatusOr<std::vector<LabelMapItem>> GetLabelMapIfAny(
if (labels_filename.empty()) {
return std::vector<LabelMapItem>();
}
- ASSIGN_OR_RETURN(absl::string_view labels_file,
+ TFLITE_ASSIGN_OR_RETURN(absl::string_view labels_file,
metadata_extractor.GetAssociatedFile(labels_filename));
const std::string display_names_filename =
ModelMetadataExtractor::FindFirstAssociatedFileName(
@@ -125,7 +125,7 @@ StatusOr<std::vector<LabelMapItem>> GetLabelMapIfAny(
locale);
absl::string_view display_names_file = {};
if (!display_names_filename.empty()) {
- ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile(
+ TFLITE_ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile(
display_names_filename));
}
return BuildLabelMapFromFiles(labels_file, display_names_file);
@@ -164,20 +164,20 @@ absl::Status ImageSegmenter::SanityCheckOptions(
StatusOr<std::unique_ptr<ImageSegmenter>> ImageSegmenter::CreateFromOptions(
const ImageSegmenterOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
// Copy options to ensure the ExternalFile outlives the constructed object.
auto options_copy = absl::make_unique<ImageSegmenterOptions>(options);
std::unique_ptr<ImageSegmenter> image_segmenter;
if (options_copy->has_model_file_with_metadata()) {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
image_segmenter,
TaskAPIFactory::CreateFromExternalFileProto<ImageSegmenter>(
&options_copy->model_file_with_metadata(), std::move(resolver),
options_copy->num_threads(), options_copy->compute_settings()));
} else if (options_copy->base_options().has_model_file()) {
- ASSIGN_OR_RETURN(image_segmenter,
+ TFLITE_ASSIGN_OR_RETURN(image_segmenter,
TaskAPIFactory::CreateFromBaseOptions<ImageSegmenter>(
&options_copy->base_options(), std::move(resolver)));
} else {
@@ -189,7 +189,7 @@ StatusOr<std::unique_ptr<ImageSegmenter>> ImageSegmenter::CreateFromOptions(
TfLiteSupportStatus::kInvalidArgumentError);
}
- RETURN_IF_ERROR(image_segmenter->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(image_segmenter->Init(std::move(options_copy)));
return image_segmenter;
}
@@ -201,14 +201,14 @@ absl::Status ImageSegmenter::Init(
// Perform pre-initialization actions (by default, sets the process engine for
// image pre-processing to kLibyuv as a sane default).
- RETURN_IF_ERROR(PreInit());
+ TFLITE_RETURN_IF_ERROR(PreInit());
// Sanity check and set inputs and outputs.
- RETURN_IF_ERROR(CheckAndSetInputs());
- RETURN_IF_ERROR(CheckAndSetOutputs());
+ TFLITE_RETURN_IF_ERROR(CheckAndSetInputs());
+ TFLITE_RETURN_IF_ERROR(CheckAndSetOutputs());
// Initialize colored_labels_ once and for all.
- RETURN_IF_ERROR(InitColoredLabels());
+ TFLITE_RETURN_IF_ERROR(InitColoredLabels());
return absl::OkStatus();
}
@@ -288,7 +288,7 @@ absl::Status ImageSegmenter::CheckAndSetOutputs() {
output_tensor_metadata->size()),
TfLiteSupportStatus::kMetadataInconsistencyError);
}
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
label_map_,
GetLabelMapIfAny(*metadata_extractor, *output_tensor_metadata->Get(0),
options_->display_names_locale()));
@@ -391,7 +391,7 @@ StatusOr<SegmentationResult> ImageSegmenter::Postprocess(
int class_index = 0;
float max_confidence = 0.0f;
for (int d = 0; d < output_depth_; ++d) {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const float confidence,
GetOutputConfidence(*output_tensor, tensor_x, tensor_y, d));
if (confidence > max_confidence) {
@@ -419,7 +419,7 @@ StatusOr<SegmentationResult> ImageSegmenter::Postprocess(
/*to_x=*/&tensor_x,
/*to_y=*/&tensor_y);
for (int d = 0; d < output_depth_; ++d) {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
float confidence,
GetOutputConfidence(*output_tensor, tensor_x, tensor_y, d));
confidence_masks->mutable_confidence_mask(d)->add_value(confidence);
@@ -435,12 +435,12 @@ StatusOr<float> ImageSegmenter::GetOutputConfidence(
const TfLiteTensor& output_tensor, int x, int y, int depth) {
int index = output_width_ * output_depth_ * y + output_depth_ * x + depth;
if (has_uint8_outputs_) {
- ASSIGN_OR_RETURN(const uint8* data,
+ TFLITE_ASSIGN_OR_RETURN(const uint8* data,
AssertAndReturnTypedTensor<uint8>(&output_tensor));
return output_tensor.params.scale *
(static_cast<int>(data[index]) - output_tensor.params.zero_point);
} else {
- ASSIGN_OR_RETURN(const float* data,
+ TFLITE_ASSIGN_OR_RETURN(const float* data,
AssertAndReturnTypedTensor<float>(&output_tensor));
return data[index];
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/object_detector.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/object_detector.cc
index 0a4d5f7553ee9..a8c221a6dac64 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/object_detector.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/object_detector.cc
@@ -148,7 +148,7 @@ StatusOr<std::vector<LabelMapItem>> GetLabelMapIfAny(
if (labels_filename.empty()) {
return std::vector<LabelMapItem>();
}
- ASSIGN_OR_RETURN(absl::string_view labels_file,
+ TFLITE_ASSIGN_OR_RETURN(absl::string_view labels_file,
metadata_extractor.GetAssociatedFile(labels_filename));
const std::string display_names_filename =
ModelMetadataExtractor::FindFirstAssociatedFileName(
@@ -156,7 +156,7 @@ StatusOr<std::vector<LabelMapItem>> GetLabelMapIfAny(
locale);
absl::string_view display_names_file;
if (!display_names_filename.empty()) {
- ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile(
+ TFLITE_ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile(
display_names_filename));
}
return BuildLabelMapFromFiles(labels_file, display_names_file);
@@ -165,7 +165,7 @@ StatusOr<std::vector<LabelMapItem>> GetLabelMapIfAny(
StatusOr<float> GetScoreThreshold(
const ModelMetadataExtractor& metadata_extractor,
const TensorMetadata& tensor_metadata) {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const ProcessUnit* score_thresholding_process_unit,
metadata_extractor.FindFirstProcessUnit(
tensor_metadata, ProcessUnitOptions_ScoreThresholdingOptions));
@@ -225,7 +225,7 @@ absl::Status SanityCheckOutputTensors(
num_results_tensor->dims->data[0]));
}
- ASSIGN_OR_RETURN(float* num_results_data,
+ TFLITE_ASSIGN_OR_RETURN(float* num_results_data,
AssertAndReturnTypedTensor<float>(num_results_tensor));
int num_results = static_cast<int>(num_results_data[0]);
@@ -312,20 +312,20 @@ absl::Status ObjectDetector::SanityCheckOptions(
StatusOr<std::unique_ptr<ObjectDetector>> ObjectDetector::CreateFromOptions(
const ObjectDetectorOptions& options,
std::unique_ptr<tflite::OpResolver> resolver) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
// Copy options to ensure the ExternalFile outlives the constructed object.
auto options_copy = absl::make_unique<ObjectDetectorOptions>(options);
std::unique_ptr<ObjectDetector> object_detector;
if (options_copy->has_model_file_with_metadata()) {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
object_detector,
TaskAPIFactory::CreateFromExternalFileProto<ObjectDetector>(
&options_copy->model_file_with_metadata(), std::move(resolver),
options_copy->num_threads(), options_copy->compute_settings()));
} else if (options_copy->base_options().has_model_file()) {
- ASSIGN_OR_RETURN(object_detector,
+ TFLITE_ASSIGN_OR_RETURN(object_detector,
TaskAPIFactory::CreateFromBaseOptions<ObjectDetector>(
&options_copy->base_options(), std::move(resolver)));
} else {
@@ -337,7 +337,7 @@ StatusOr<std::unique_ptr<ObjectDetector>> ObjectDetector::CreateFromOptions(
TfLiteSupportStatus::kInvalidArgumentError);
}
- RETURN_IF_ERROR(object_detector->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(object_detector->Init(std::move(options_copy)));
return object_detector;
}
@@ -349,18 +349,18 @@ absl::Status ObjectDetector::Init(
// Perform pre-initialization actions (by default, sets the process engine for
// image pre-processing to kLibyuv as a sane default).
- RETURN_IF_ERROR(PreInit());
+ TFLITE_RETURN_IF_ERROR(PreInit());
// Sanity check and set inputs and outputs.
- RETURN_IF_ERROR(CheckAndSetInputs());
- RETURN_IF_ERROR(CheckAndSetOutputs());
+ TFLITE_RETURN_IF_ERROR(CheckAndSetInputs());
+ TFLITE_RETURN_IF_ERROR(CheckAndSetOutputs());
// Initialize class whitelisting/blacklisting, if any.
- RETURN_IF_ERROR(CheckAndSetClassIndexSet());
+ TFLITE_RETURN_IF_ERROR(CheckAndSetClassIndexSet());
// Perform final initialization (by default, initialize score calibration
// parameters, if any).
- RETURN_IF_ERROR(PostInit());
+ TFLITE_RETURN_IF_ERROR(PostInit());
return absl::OkStatus();
}
@@ -386,7 +386,7 @@ StatusOr<SigmoidCalibrationParameters> BuildCalibrationParametersIfAny(
// that does sanity checks and builds sigmoid calibration params in:
// https://github.com/tensorflow/tflite-support/blob/64e044408f3d3654de7fc10bca401ed900649ca3/tensorflow_lite_support/cc/task/vision/core/classification_head.cc#L75-L107
// Consider to refactor it and reuse the same function.
- ASSIGN_OR_RETURN(const tflite::ProcessUnit* score_calibration_process_unit,
+ TFLITE_ASSIGN_OR_RETURN(const tflite::ProcessUnit* score_calibration_process_unit,
ModelMetadataExtractor::FindFirstProcessUnit(
output_tensor_metadata,
tflite::ProcessUnitOptions_ScoreCalibrationOptions));
@@ -395,12 +395,12 @@ StatusOr<SigmoidCalibrationParameters> BuildCalibrationParametersIfAny(
ModelMetadataExtractor::FindFirstAssociatedFileName(
output_tensor_metadata,
tflite::AssociatedFileType_TENSOR_AXIS_SCORE_CALIBRATION);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
absl::string_view score_calibration_file,
metadata_extractor.GetAssociatedFile(score_calibration_filename));
// Set has_score_calibration to true, only if sigmoid_params is built.
- ASSIGN_OR_RETURN(sigmoid_params,
+ TFLITE_ASSIGN_OR_RETURN(sigmoid_params,
BuildSigmoidCalibrationParams(
*score_calibration_process_unit
->options_as_ScoreCalibrationOptions(),
@@ -421,7 +421,7 @@ absl::Status ObjectDetector::InitScoreCalibrations() {
output_tensor_metadata = metadata_extractor->GetOutputTensorMetadata();
const tflite::TensorMetadata* output_tensor =
output_tensor_metadata->Get(kDefaultScoresIndex);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto calibration_params,
BuildCalibrationParametersIfAny(*metadata_extractor, *output_tensor,
label_map_, &has_score_calibration));
@@ -436,7 +436,7 @@ absl::Status ObjectDetector::InitScoreCalibrations() {
return CreateStatusWithPayload(
StatusCode::kInternal, "Could not create score calibration object.");
}
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
score_calibration_->InitializeFromParameters(calibration_params));
return absl::OkStatus();
}
@@ -485,7 +485,7 @@ absl::Status ObjectDetector::CheckAndSetOutputs() {
// Extract mandatory BoundingBoxProperties for easier access at
// post-processing time, performing sanity checks on the fly.
- ASSIGN_OR_RETURN(const BoundingBoxProperties* bounding_box_properties,
+ TFLITE_ASSIGN_OR_RETURN(const BoundingBoxProperties* bounding_box_properties,
GetBoundingBoxProperties(
*output_tensors_metadata->Get(output_indices_[0])));
if (bounding_box_properties->index() == nullptr) {
@@ -501,7 +501,7 @@ absl::Status ObjectDetector::CheckAndSetOutputs() {
}
// Build label map (if available) from metadata.
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
label_map_,
GetLabelMapIfAny(*metadata_extractor,
*output_tensors_metadata->Get(output_indices_[1]),
@@ -511,7 +511,7 @@ absl::Status ObjectDetector::CheckAndSetOutputs() {
if (options_->has_score_threshold()) {
score_threshold_ = options_->score_threshold();
} else {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
score_threshold_,
GetScoreThreshold(*metadata_extractor,
*output_tensors_metadata->Get(output_indices_[2])));
@@ -603,10 +603,10 @@ StatusOr<DetectionResult> ObjectDetector::Postprocess(
// Most of the checks here should never happen, as outputs have been validated
// at construction time. Checking nonetheless and returning internal errors if
// something bad happens.
- RETURN_IF_ERROR(SanityCheckOutputTensors(output_tensors, output_indices_));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOutputTensors(output_tensors, output_indices_));
// Get number of available results.
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
float* num_results_data,
AssertAndReturnTypedTensor<float>(output_tensors[output_indices_[3]]));
const int num_results = static_cast<int>(num_results_data[0]);
@@ -623,13 +623,13 @@ StatusOr<DetectionResult> ObjectDetector::Postprocess(
upright_input_frame_dimensions.Swap();
}
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const float* locations,
AssertAndReturnTypedTensor<float>(output_tensors[output_indices_[0]]));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const float* classes,
AssertAndReturnTypedTensor<float>(output_tensors[output_indices_[1]]));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const float* scores,
AssertAndReturnTypedTensor<float>(output_tensors[output_indices_[2]]));
DetectionResult results;
@@ -671,7 +671,7 @@ StatusOr<DetectionResult> ObjectDetector::Postprocess(
}
if (!label_map_.empty()) {
- RETURN_IF_ERROR(FillResultsFromLabelMap(&results));
+ TFLITE_RETURN_IF_ERROR(FillResultsFromLabelMap(&results));
}
return results;
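
Note for reviewers: the definitions of the renamed macros are not reproduced in these hunks. As orientation only, here is a minimal sketch of the usual pattern behind such status macros, assuming an absl::Status / StatusOr-based implementation; the real definitions may differ in helper names and edge-case handling.

    // Illustrative sketch only -- not the actual tflite_support definitions.
    #define TFLITE_RETURN_IF_ERROR(expr)       \
      do {                                     \
        const absl::Status _status = (expr);   \
        if (!_status.ok()) return _status;     \
      } while (0)

    // A unique temporary is generated per use so the macro can appear
    // several times in the same scope.
    #define TFLITE_STATUS_MACROS_CONCAT_INNER(x, y) x##y
    #define TFLITE_STATUS_MACROS_CONCAT(x, y) \
      TFLITE_STATUS_MACROS_CONCAT_INNER(x, y)
    #define TFLITE_ASSIGN_OR_RETURN(lhs, rexpr) \
      TFLITE_ASSIGN_OR_RETURN_IMPL(             \
          TFLITE_STATUS_MACROS_CONCAT(_status_or_value_, __LINE__), lhs, rexpr)
    #define TFLITE_ASSIGN_OR_RETURN_IMPL(statusor, lhs, rexpr) \
      auto statusor = (rexpr);                                 \
      if (!statusor.ok()) return statusor.status();            \
      lhs = std::move(statusor).value();

With that shape in mind, callers such as ObjectDetector::CreateFromOptions above read exactly as before the rename, only with the TFLITE_ prefix.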
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/frame_buffer_common_utils.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/frame_buffer_common_utils.cc
index 1854cf546d599..e12700079f601 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/frame_buffer_common_utils.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/frame_buffer_common_utils.cc
@@ -120,7 +120,7 @@ StatusOr<const uint8*> GetUvRawBuffer(const FrameBuffer& buffer) {
return absl::InvalidArgumentError(
"Only support getting biplanar UV buffer from NV12/NV21 frame buffer.");
}
- ASSIGN_OR_RETURN(FrameBuffer::YuvData yuv_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData yuv_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
const uint8* uv_buffer = buffer.format() == FrameBuffer::Format::kNV12
? yuv_data.u_buffer
@@ -192,8 +192,8 @@ absl::Status ValidateBufferFormat(const FrameBuffer& buffer) {
absl::Status ValidateBufferFormats(const FrameBuffer& buffer1,
const FrameBuffer& buffer2) {
- RETURN_IF_ERROR(ValidateBufferFormat(buffer1));
- RETURN_IF_ERROR(ValidateBufferFormat(buffer2));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferFormat(buffer1));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferFormat(buffer2));
return absl::OkStatus();
}
@@ -391,7 +391,7 @@ StatusOr<std::unique_ptr<FrameBuffer>> CreateFromRawBuffer(
return CreateFromOnePlaneNVRawBuffer(buffer, dimension, target_format,
orientation, timestamp);
case FrameBuffer::Format::kYV12: {
- ASSIGN_OR_RETURN(const FrameBuffer::Dimension uv_dimension,
+ TFLITE_ASSIGN_OR_RETURN(const FrameBuffer::Dimension uv_dimension,
GetUvPlaneDimension(dimension, target_format));
return CreateFromYuvRawBuffer(
/*y_plane=*/buffer,
@@ -401,7 +401,7 @@ StatusOr<std::unique_ptr<FrameBuffer>> CreateFromRawBuffer(
/*pixel_stride_uv=*/1, orientation, timestamp);
}
case FrameBuffer::Format::kYV21: {
- ASSIGN_OR_RETURN(const FrameBuffer::Dimension uv_dimension,
+ TFLITE_ASSIGN_OR_RETURN(const FrameBuffer::Dimension uv_dimension,
GetUvPlaneDimension(dimension, target_format));
return CreateFromYuvRawBuffer(
/*y_plane=*/buffer, /*u_plane=*/buffer + dimension.Size(),
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/frame_buffer_utils.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/frame_buffer_utils.cc
index 9298c60395286..653ff01f331ef 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/frame_buffer_utils.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/frame_buffer_utils.cc
@@ -398,22 +398,22 @@ absl::Status FrameBufferUtils::Execute(const FrameBuffer& buffer,
FrameBuffer* output_buffer) {
if (absl::holds_alternative<CropResizeOperation>(operation)) {
const auto& params = absl::get<CropResizeOperation>(operation);
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
Crop(buffer, params.crop_origin_x, params.crop_origin_y,
(params.crop_dimension.width + params.crop_origin_x - 1),
(params.crop_dimension.height + params.crop_origin_y - 1),
output_buffer));
} else if (absl::holds_alternative<UniformCropResizeOperation>(operation)) {
const auto& params = absl::get<UniformCropResizeOperation>(operation);
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
Crop(buffer, params.crop_origin_x, params.crop_origin_y,
(params.crop_dimension.width + params.crop_origin_x - 1),
(params.crop_dimension.height + params.crop_origin_y - 1),
output_buffer));
} else if (absl::holds_alternative<ConvertOperation>(operation)) {
- RETURN_IF_ERROR(Convert(buffer, output_buffer));
+ TFLITE_RETURN_IF_ERROR(Convert(buffer, output_buffer));
} else if (absl::holds_alternative<OrientOperation>(operation)) {
- RETURN_IF_ERROR(Orient(buffer, output_buffer));
+ TFLITE_RETURN_IF_ERROR(Orient(buffer, output_buffer));
} else {
return absl::UnimplementedError(absl::StrFormat(
"FrameBufferOperation %i is not supported.", operation.index()));
@@ -494,7 +494,7 @@ absl::Status FrameBufferUtils::Orient(const FrameBuffer& buffer,
output_buffer->format()),
output_buffer->dimension(), buffer.format(), buffer.orientation());
- RETURN_IF_ERROR(utils_->Rotate(buffer, params.rotation_angle_deg,
+ TFLITE_RETURN_IF_ERROR(utils_->Rotate(buffer, params.rotation_angle_deg,
tmp_frame_buffer.get()));
if (params.flip == OrientParams::FlipType::kHorizontal) {
return utils_->FlipHorizontally(*tmp_frame_buffer, output_buffer);
@@ -578,7 +578,7 @@ absl::Status FrameBufferUtils::Execute(
temp_frame_buffer = FrameBuffer(planes, new_size, new_format,
new_orientation, buffer.timestamp());
}
- RETURN_IF_ERROR(Execute(input_frame_buffer, operation, &temp_frame_buffer));
+ TFLITE_RETURN_IF_ERROR(Execute(input_frame_buffer, operation, &temp_frame_buffer));
}
return absl::OkStatus();
}
@@ -652,9 +652,9 @@ absl::Status FrameBufferUtils::Preprocess(
// Execute the processing pipeline.
if (frame_buffer_operations.empty()) {
// Using resize to perform copy.
- RETURN_IF_ERROR(Resize(buffer, output_buffer));
+ TFLITE_RETURN_IF_ERROR(Resize(buffer, output_buffer));
} else {
- RETURN_IF_ERROR(Execute(buffer, frame_buffer_operations, output_buffer));
+ TFLITE_RETURN_IF_ERROR(Execute(buffer, frame_buffer_operations, output_buffer));
}
return absl::OkStatus();
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/image_tensor_specs.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/image_tensor_specs.cc
index 392df5e40c662..b0355a2b69e26 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/image_tensor_specs.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/image_tensor_specs.cc
@@ -90,7 +90,7 @@ StatusOr<const ImageProperties*> GetImagePropertiesIfAny(
StatusOr<absl::optional<NormalizationOptions>> GetNormalizationOptionsIfAny(
const TensorMetadata& tensor_metadata) {
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const tflite::ProcessUnit* normalization_process_unit,
ModelMetadataExtractor::FindFirstProcessUnit(
tensor_metadata, tflite::ProcessUnitOptions_NormalizationOptions));
@@ -140,14 +140,14 @@ StatusOr<absl::optional<NormalizationOptions>> GetNormalizationOptionsIfAny(
StatusOr<ImageTensorSpecs> BuildInputImageTensorSpecs(
const TfLiteEngine::Interpreter& interpreter,
const tflite::metadata::ModelMetadataExtractor& metadata_extractor) {
- ASSIGN_OR_RETURN(const TensorMetadata* metadata,
+ TFLITE_ASSIGN_OR_RETURN(const TensorMetadata* metadata,
GetInputTensorMetadataIfAny(metadata_extractor));
const ImageProperties* props = nullptr;
absl::optional<NormalizationOptions> normalization_options;
if (metadata != nullptr) {
- ASSIGN_OR_RETURN(props, GetImagePropertiesIfAny(*metadata));
- ASSIGN_OR_RETURN(normalization_options,
+ TFLITE_ASSIGN_OR_RETURN(props, GetImagePropertiesIfAny(*metadata));
+ TFLITE_ASSIGN_OR_RETURN(normalization_options,
GetNormalizationOptionsIfAny(*metadata));
}
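
A hypothetical caller of BuildInputImageTensorSpecs, sketched only to show the renamed macro from the consumer side. GetInputSpecs is made up for the example; it assumes TfLiteEngine exposes interpreter() and metadata_extractor() accessors (the latter appears in the test hunks further below).

    // Hypothetical sketch, not part of the patch.
    tflite::support::StatusOr<ImageTensorSpecs> GetInputSpecs(
        tflite::task::core::TfLiteEngine& engine) {
      TFLITE_ASSIGN_OR_RETURN(
          ImageTensorSpecs specs,
          BuildInputImageTensorSpecs(*engine.interpreter(),
                                     *engine.metadata_extractor()));
      return specs;
    }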
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/image_utils.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/image_utils.cc
index 0a0b1b93423fe..c9910c0073d05 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/image_utils.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/image_utils.cc
@@ -63,7 +63,7 @@ StatusOr<ImageData> DecodeImageFromFile(const std::string& file_name) {
image_data.pixel_data = stbi_load(file_name.c_str(), &image_data.width,
&image_data.height, &image_data.channels,
/*desired_channels=*/0);
- RETURN_IF_ERROR(CheckImageData(image_data));
+ TFLITE_RETURN_IF_ERROR(CheckImageData(image_data));
return image_data;
}
@@ -73,7 +73,7 @@ tflite::support::StatusOr<ImageData> DecodeImageFromBuffer(
image_data.pixel_data = stbi_load_from_memory(
buffer, len, &image_data.width, &image_data.height, &image_data.channels,
/*desired_channels=*/0);
- RETURN_IF_ERROR(CheckImageData(image_data));
+ TFLITE_RETURN_IF_ERROR(CheckImageData(image_data));
return image_data;
}
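
A hypothetical caller of the decode helpers above, just to show the renamed macro in client code. LoadRgbImage is made up for the example; DecodeImageFromFile and the ImageData fields are the ones visible in the hunks.

    // Hypothetical sketch, not part of the patch. In real code the decoded
    // pixel data would also need to be freed before returning the error.
    tflite::support::StatusOr<ImageData> LoadRgbImage(
        const std::string& file_name) {
      TFLITE_ASSIGN_OR_RETURN(ImageData image, DecodeImageFromFile(file_name));
      if (image.channels != 3) {
        return absl::InvalidArgumentError("Expected a 3-channel RGB image.");
      }
      return image;
    }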
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/libyuv_frame_buffer_utils.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/libyuv_frame_buffer_utils.cc
index 623326d5cba49..f697ab800cb12 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/libyuv_frame_buffer_utils.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/libyuv_frame_buffer_utils.cc
@@ -47,7 +47,7 @@ namespace {
// Supported output format includes RGB24 and YV21.
absl::Status ConvertFromNv12(const FrameBuffer& buffer,
FrameBuffer* output_buffer) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData yuv_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData yuv_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
switch (output_buffer->format()) {
case FrameBuffer::Format::kRGB: {
@@ -83,7 +83,7 @@ absl::Status ConvertFromNv12(const FrameBuffer& buffer,
}
case FrameBuffer::Format::kYV12:
case FrameBuffer::Format::kYV21: {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
int ret = libyuv::NV12ToI420(
yuv_data.y_buffer, yuv_data.y_row_stride, yuv_data.u_buffer,
@@ -100,13 +100,13 @@ absl::Status ConvertFromNv12(const FrameBuffer& buffer,
break;
}
case FrameBuffer::Format::kNV21: {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
libyuv::CopyPlane(yuv_data.y_buffer, yuv_data.y_row_stride,
const_cast<uint8*>(output_data.y_buffer),
output_data.y_row_stride, buffer.dimension().width,
buffer.dimension().height);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const FrameBuffer::Dimension uv_plane_dimension,
GetUvPlaneDimension(buffer.dimension(), buffer.format()));
libyuv::SwapUVPlane(yuv_data.u_buffer, yuv_data.uv_row_stride,
@@ -134,7 +134,7 @@ absl::Status ConvertFromNv12(const FrameBuffer& buffer,
// Supported output format includes RGB24 and YV21.
absl::Status ConvertFromNv21(const FrameBuffer& buffer,
FrameBuffer* output_buffer) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData yuv_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData yuv_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
switch (output_buffer->format()) {
case FrameBuffer::Format::kRGB: {
@@ -170,7 +170,7 @@ absl::Status ConvertFromNv21(const FrameBuffer& buffer,
}
case FrameBuffer::Format::kYV12:
case FrameBuffer::Format::kYV21: {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
int ret = libyuv::NV21ToI420(
yuv_data.y_buffer, yuv_data.y_row_stride, yuv_data.v_buffer,
@@ -187,13 +187,13 @@ absl::Status ConvertFromNv21(const FrameBuffer& buffer,
break;
}
case FrameBuffer::Format::kNV12: {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
libyuv::CopyPlane(yuv_data.y_buffer, yuv_data.y_row_stride,
const_cast<uint8*>(output_data.y_buffer),
output_data.y_row_stride, buffer.dimension().width,
buffer.dimension().height);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const FrameBuffer::Dimension uv_plane_dimension,
GetUvPlaneDimension(buffer.dimension(), buffer.format()));
libyuv::SwapUVPlane(yuv_data.v_buffer, yuv_data.uv_row_stride,
@@ -224,7 +224,7 @@ absl::Status ConvertFromNv21(const FrameBuffer& buffer,
// Supported output format includes RGB24, NV12, and NV21.
absl::Status ConvertFromYv(const FrameBuffer& buffer,
FrameBuffer* output_buffer) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData yuv_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData yuv_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
switch (output_buffer->format()) {
case FrameBuffer::Format::kRGB: {
@@ -259,7 +259,7 @@ absl::Status ConvertFromYv(const FrameBuffer& buffer,
break;
}
case FrameBuffer::Format::kNV12: {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
int ret = libyuv::I420ToNV12(
yuv_data.y_buffer, yuv_data.y_row_stride, yuv_data.u_buffer,
@@ -275,7 +275,7 @@ absl::Status ConvertFromYv(const FrameBuffer& buffer,
break;
}
case FrameBuffer::Format::kNV21: {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
int ret = libyuv::I420ToNV21(
yuv_data.y_buffer, yuv_data.y_row_stride, yuv_data.u_buffer,
@@ -300,9 +300,9 @@ absl::Status ConvertFromYv(const FrameBuffer& buffer,
}
case FrameBuffer::Format::kYV12:
case FrameBuffer::Format::kYV21: {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_yuv_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_yuv_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
const FrameBuffer::Dimension uv_plane_dimension,
GetUvPlaneDimension(buffer.dimension(), buffer.format()));
libyuv::CopyPlane(yuv_data.y_buffer, yuv_data.y_row_stride,
@@ -333,9 +333,9 @@ absl::Status ConvertFromYv(const FrameBuffer& buffer,
absl::Status ResizeYv(
const FrameBuffer& buffer, FrameBuffer* output_buffer,
libyuv::FilterMode interpolation = libyuv::FilterMode::kFilterBilinear) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
// TODO(b/151217096): Choose the optimal image resizing filter to optimize
// the model inference performance.
@@ -360,9 +360,9 @@ absl::Status ResizeYv(
absl::Status ResizeNv(
const FrameBuffer& buffer, FrameBuffer* output_buffer,
libyuv::FilterMode interpolation = libyuv::FilterMode::kFilterBilinear) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
const uint8* src_uv = input_data.u_buffer;
const uint8* dst_uv = output_data.u_buffer;
@@ -391,7 +391,7 @@ absl::Status ResizeNv(
// in `dest_argb`.
absl::Status ConvertRgbToArgb(const FrameBuffer& buffer, uint8* dest_argb,
int dest_stride_argb) {
- RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
if (buffer.format() != FrameBuffer::Format::kRGB) {
return CreateStatusWithPayload(StatusCode::kInternal,
"RGB input format is expected.",
@@ -428,7 +428,7 @@ absl::Status ConvertRgbToArgb(const FrameBuffer& buffer, uint8* dest_argb,
// stores the conversion result in `output_buffer`.
absl::Status ConvertArgbToRgb(uint8* src_argb, int src_stride_argb,
FrameBuffer* output_buffer) {
- RETURN_IF_ERROR(ValidateBufferPlaneMetadata(*output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferPlaneMetadata(*output_buffer));
if (output_buffer->format() != FrameBuffer::Format::kRGB) {
return absl::InternalError("RGB input format is expected.");
}
@@ -464,7 +464,7 @@ absl::Status ConvertArgbToRgb(uint8* src_argb, int src_stride_argb,
// memory) format and stores the conversion result in `dest_argb`.
absl::Status ConvertRgbaToArgb(const FrameBuffer& buffer, uint8* dest_argb,
int dest_stride_argb) {
- RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
if (buffer.format() != FrameBuffer::Format::kRGBA) {
return CreateStatusWithPayload(
StatusCode::kInternal, "RGBA input format is expected.",
@@ -529,15 +529,15 @@ absl::Status ConvertFromRgb(const FrameBuffer& buffer,
output_buffer->format() == FrameBuffer::Format::kNV21) {
tmp_yuv_buffer = absl::make_unique<uint8[]>(
GetFrameBufferByteSize(buffer.dimension(), output_buffer->format()));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
yuv_frame_buffer,
CreateFromRawBuffer(tmp_yuv_buffer.get(), buffer.dimension(),
FrameBuffer::Format::kYV21,
output_buffer->orientation()));
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
yuv_data, FrameBuffer::GetYuvDataFromFrameBuffer(*yuv_frame_buffer));
} else {
- ASSIGN_OR_RETURN(yuv_data,
+ TFLITE_ASSIGN_OR_RETURN(yuv_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
}
int ret = libyuv::RAWToI420(
@@ -592,7 +592,7 @@ absl::Status ConvertFromRgba(const FrameBuffer& buffer,
FrameBuffer::Format::kRGBA);
auto argb_buffer = absl::make_unique<uint8[]>(argb_buffer_size);
const int argb_row_bytes = buffer.dimension().width * kRgbaPixelBytes;
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
ConvertRgbaToArgb(buffer, argb_buffer.get(), argb_row_bytes));
// Convert ARGB to kGRAY
@@ -609,7 +609,7 @@ absl::Status ConvertFromRgba(const FrameBuffer& buffer,
break;
}
case FrameBuffer::Format::kNV12: {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
int ret = libyuv::ABGRToNV12(
buffer.plane(0).buffer, buffer.plane(0).stride.row_stride_bytes,
@@ -624,7 +624,7 @@ absl::Status ConvertFromRgba(const FrameBuffer& buffer,
break;
}
case FrameBuffer::Format::kNV21: {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
int ret = libyuv::ABGRToNV21(
buffer.plane(0).buffer, buffer.plane(0).stride.row_stride_bytes,
@@ -640,7 +640,7 @@ absl::Status ConvertFromRgba(const FrameBuffer& buffer,
}
case FrameBuffer::Format::kYV12:
case FrameBuffer::Format::kYV21: {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
int ret = libyuv::ABGRToI420(
buffer.plane(0).buffer, buffer.plane(0).stride.row_stride_bytes,
@@ -729,7 +729,7 @@ absl::Status RotateRgb(const FrameBuffer& buffer, int angle_deg,
GetFrameBufferByteSize(buffer.dimension(), FrameBuffer::Format::kRGBA);
auto argb_buffer = absl::make_unique<uint8[]>(argb_buffer_size);
const int argb_row_bytes = buffer.dimension().width * kRgbaPixelBytes;
- RETURN_IF_ERROR(ConvertRgbToArgb(buffer, argb_buffer.get(), argb_row_bytes));
+ TFLITE_RETURN_IF_ERROR(ConvertRgbToArgb(buffer, argb_buffer.get(), argb_row_bytes));
// Rotate ARGB
auto argb_rotated_buffer = absl::make_unique<uint8[]>(argb_buffer_size);
@@ -776,9 +776,9 @@ absl::Status RotateGray(const FrameBuffer& buffer, int angle_deg,
// Rotates YV12/YV21 frame buffer.
absl::Status RotateYv(const FrameBuffer& buffer, int angle_deg,
FrameBuffer* output_buffer) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
int ret = libyuv::I420Rotate(
input_data.y_buffer, input_data.y_row_stride, input_data.u_buffer,
@@ -807,19 +807,19 @@ absl::Status RotateNv(const FrameBuffer& buffer, int angle_deg,
"kNV12 or kNV21 input formats are expected.",
TfLiteSupportStatus::kImageProcessingError);
}
- ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
const int rotated_buffer_size = GetFrameBufferByteSize(
output_buffer->dimension(), FrameBuffer::Format::kYV21);
auto rotated_yuv_raw_buffer = absl::make_unique<uint8[]>(rotated_buffer_size);
- ASSIGN_OR_RETURN(std::unique_ptr<FrameBuffer> rotated_yuv_buffer,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<FrameBuffer> rotated_yuv_buffer,
CreateFromRawBuffer(
rotated_yuv_raw_buffer.get(), output_buffer->dimension(),
/*target_format=*/FrameBuffer::Format::kYV21,
output_buffer->orientation()));
- ASSIGN_OR_RETURN(FrameBuffer::YuvData rotated_yuv_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData rotated_yuv_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*rotated_yuv_buffer));
// Get the first chroma plane and use it as the u plane. This is a workaround
// for optimizing NV21 rotation. For NV12, the implementation is logical
@@ -875,7 +875,7 @@ absl::Status FlipPlaneVertically(const FrameBuffer& buffer,
TfLiteSupportStatus::kImageProcessingError);
}
- ASSIGN_OR_RETURN(int pixel_stride, GetPixelStrides(buffer.format()));
+ TFLITE_ASSIGN_OR_RETURN(int pixel_stride, GetPixelStrides(buffer.format()));
// Flip vertically is achieved by passing in negative height.
libyuv::CopyPlane(buffer.plane(0).buffer,
@@ -899,7 +899,7 @@ absl::Status CropPlane(const FrameBuffer& buffer, int x0, int y0, int x1,
TfLiteSupportStatus::kImageProcessingError);
}
- ASSIGN_OR_RETURN(int pixel_stride, GetPixelStrides(buffer.format()));
+ TFLITE_ASSIGN_OR_RETURN(int pixel_stride, GetPixelStrides(buffer.format()));
FrameBuffer::Dimension crop_dimension = GetCropDimension(x0, x1, y0, y1);
// Cropping is achieved by adjusting origin to (x0, y0).
@@ -919,9 +919,9 @@ absl::Status CropPlane(const FrameBuffer& buffer, int x0, int y0, int x1,
// position (x0, y0) and the bottom right pixel position (x1, y1).
absl::Status CropNv(const FrameBuffer& buffer, int x0, int y0, int x1, int y1,
FrameBuffer* output_buffer) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
// Crop Y plane by copying the buffer with the origin offset to (x0, y0).
int crop_offset_y = input_data.y_row_stride * y0 + x0;
@@ -937,8 +937,8 @@ absl::Status CropNv(const FrameBuffer& buffer, int x0, int y0, int x1, int y1,
// bounding box with odd X or Y starting positions.
int crop_offset_chroma = input_data.uv_row_stride * (y0 / 2) +
input_data.uv_pixel_stride * (x0 / 2);
- ASSIGN_OR_RETURN(const uint8* input_chroma_buffer, GetUvRawBuffer(buffer));
- ASSIGN_OR_RETURN(const uint8* output_chroma_buffer,
+ TFLITE_ASSIGN_OR_RETURN(const uint8* input_chroma_buffer, GetUvRawBuffer(buffer));
+ TFLITE_ASSIGN_OR_RETURN(const uint8* output_chroma_buffer,
GetUvRawBuffer(*output_buffer));
libyuv::CopyPlane(
input_chroma_buffer + crop_offset_chroma, input_data.uv_row_stride,
@@ -951,9 +951,9 @@ absl::Status CropNv(const FrameBuffer& buffer, int x0, int y0, int x1, int y1,
// position (x0, y0) and the bottom right pixel position (x1, y1).
absl::Status CropYv(const FrameBuffer& buffer, int x0, int y0, int x1, int y1,
FrameBuffer* output_buffer) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
// Crop Y plane by copying the buffer with the origin offset to (x0, y0).
int crop_offset_y = input_data.y_row_stride * y0 + x0;
@@ -964,7 +964,7 @@ absl::Status CropYv(const FrameBuffer& buffer, int x0, int y0, int x1, int y1,
crop_dimension.width, crop_dimension.height);
// Crop U plane by copying the buffer with the origin offset to
// (x0 / 2, y0 / 2).
- ASSIGN_OR_RETURN(const FrameBuffer::Dimension crop_uv_dimension,
+ TFLITE_ASSIGN_OR_RETURN(const FrameBuffer::Dimension crop_uv_dimension,
GetUvPlaneDimension(crop_dimension, buffer.format()));
// TODO(b/152629712): Investigate the impact of color shifting caused by the
// bounding box with odd X or Y starting positions.
@@ -1002,7 +1002,7 @@ absl::Status CropResizeYuv(const FrameBuffer& buffer, int x0, int y0, int x1,
TfLiteSupportStatus::kImageProcessingError);
}
}
- ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
// Cropping YUV planes by offsetting the origins of each plane.
// TODO(b/152629712): Investigate the impact of color shifting caused by the
@@ -1119,7 +1119,7 @@ absl::Status ResizeRgb(
GetFrameBufferByteSize(buffer.dimension(), FrameBuffer::Format::kRGBA);
auto argb_buffer = absl::make_unique<uint8[]>(argb_buffer_size);
const int argb_row_bytes = buffer.dimension().width * kRgbaPixelBytes;
- RETURN_IF_ERROR(ConvertRgbToArgb(buffer, argb_buffer.get(), argb_row_bytes));
+ TFLITE_RETURN_IF_ERROR(ConvertRgbToArgb(buffer, argb_buffer.get(), argb_row_bytes));
// Resize ARGB
int resized_argb_buffer_size = GetFrameBufferByteSize(
@@ -1201,12 +1201,12 @@ absl::Status ResizeRgba(
// Flips NV12/NV21 FrameBuffer horizontally.
absl::Status FlipHorizontallyNv(const FrameBuffer& buffer,
FrameBuffer* output_buffer) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
- ASSIGN_OR_RETURN(const uint8* input_chroma_buffer, GetUvRawBuffer(buffer));
- ASSIGN_OR_RETURN(const uint8* output_chroma_buffer,
+ TFLITE_ASSIGN_OR_RETURN(const uint8* input_chroma_buffer, GetUvRawBuffer(buffer));
+ TFLITE_ASSIGN_OR_RETURN(const uint8* output_chroma_buffer,
GetUvRawBuffer(*output_buffer));
int ret = libyuv::NV12Mirror(
@@ -1228,9 +1228,9 @@ absl::Status FlipHorizontallyNv(const FrameBuffer& buffer,
// Flips YV12/YV21 FrameBuffer horizontally.
absl::Status FlipHorizontallyYv(const FrameBuffer& buffer,
FrameBuffer* output_buffer) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
int ret = libyuv::I420Mirror(
input_data.y_buffer, input_data.y_row_stride, input_data.u_buffer,
@@ -1251,9 +1251,9 @@ absl::Status FlipHorizontallyYv(const FrameBuffer& buffer,
// Flips NV12/NV21 FrameBuffer vertically.
absl::Status FlipVerticallyNv(const FrameBuffer& buffer,
FrameBuffer* output_buffer) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
// Flip Y plane vertically by passing a negative height.
libyuv::CopyPlane(input_data.y_buffer, input_data.y_row_stride,
@@ -1261,10 +1261,10 @@ absl::Status FlipVerticallyNv(const FrameBuffer& buffer,
output_data.y_row_stride, buffer.dimension().width,
-output_buffer->dimension().height);
// Flip UV plane vertically by passing a negative height.
- ASSIGN_OR_RETURN(const uint8* input_chroma_buffer, GetUvRawBuffer(buffer));
- ASSIGN_OR_RETURN(const uint8* output_chroma_buffer,
+ TFLITE_ASSIGN_OR_RETURN(const uint8* input_chroma_buffer, GetUvRawBuffer(buffer));
+ TFLITE_ASSIGN_OR_RETURN(const uint8* output_chroma_buffer,
GetUvRawBuffer(*output_buffer));
- ASSIGN_OR_RETURN(const FrameBuffer::Dimension uv_plane_dimension,
+ TFLITE_ASSIGN_OR_RETURN(const FrameBuffer::Dimension uv_plane_dimension,
GetUvPlaneDimension(buffer.dimension(), buffer.format()));
libyuv::CopyPlane(
input_chroma_buffer, input_data.uv_row_stride,
@@ -1276,9 +1276,9 @@ absl::Status FlipVerticallyNv(const FrameBuffer& buffer,
// Flips NV12/NV21 FrameBuffer vertically.
absl::Status FlipVerticallyYv(const FrameBuffer& buffer,
FrameBuffer* output_buffer) {
- ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData input_data,
FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
- ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
+ TFLITE_ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
// Flip buffer vertically by passing a negative height.
int ret = libyuv::I420Copy(
@@ -1326,7 +1326,7 @@ absl::Status CropResize(const FrameBuffer& buffer, int x0, int y0, int x1,
return CropPlane(buffer, x0, y0, x1, y1, output_buffer);
}
- ASSIGN_OR_RETURN(int pixel_stride, GetPixelStrides(buffer.format()));
+ TFLITE_ASSIGN_OR_RETURN(int pixel_stride, GetPixelStrides(buffer.format()));
// Cropping is achieved by adjusting origin to (x0, y0).
int adjusted_offset =
buffer.plane(0).stride.row_stride_bytes * y0 + x0 * pixel_stride;
@@ -1357,11 +1357,11 @@ absl::Status CropResize(const FrameBuffer& buffer, int x0, int y0, int x1,
absl::Status LibyuvFrameBufferUtils::Crop(const FrameBuffer& buffer, int x0,
int y0, int x1, int y1,
FrameBuffer* output_buffer) {
- RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
- RETURN_IF_ERROR(ValidateBufferPlaneMetadata(*output_buffer));
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferPlaneMetadata(*output_buffer));
+ TFLITE_RETURN_IF_ERROR(
ValidateCropBufferInputs(buffer, *output_buffer, x0, y0, x1, y1));
- RETURN_IF_ERROR(ValidateBufferFormats(buffer, *output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferFormats(buffer, *output_buffer));
switch (buffer.format()) {
case FrameBuffer::Format::kRGBA:
@@ -1383,7 +1383,7 @@ absl::Status LibyuvFrameBufferUtils::Crop(const FrameBuffer& buffer, int x0,
absl::Status LibyuvFrameBufferUtils::Resize(const FrameBuffer& buffer,
FrameBuffer* output_buffer) {
- RETURN_IF_ERROR(ValidateResizeBufferInputs(buffer, *output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateResizeBufferInputs(buffer, *output_buffer));
switch (buffer.format()) {
case FrameBuffer::Format::kYV12:
case FrameBuffer::Format::kYV21:
@@ -1407,7 +1407,7 @@ absl::Status LibyuvFrameBufferUtils::Resize(const FrameBuffer& buffer,
absl::Status LibyuvFrameBufferUtils::ResizeNearestNeighbor(
const FrameBuffer& buffer, FrameBuffer* output_buffer) {
- RETURN_IF_ERROR(ValidateResizeBufferInputs(buffer, *output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateResizeBufferInputs(buffer, *output_buffer));
switch (buffer.format()) {
case FrameBuffer::Format::kYV12:
case FrameBuffer::Format::kYV21:
@@ -1432,11 +1432,11 @@ absl::Status LibyuvFrameBufferUtils::ResizeNearestNeighbor(
absl::Status LibyuvFrameBufferUtils::Rotate(const FrameBuffer& buffer,
int angle_deg,
FrameBuffer* output_buffer) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
ValidateRotateBufferInputs(buffer, *output_buffer, angle_deg));
- RETURN_IF_ERROR(ValidateBufferFormats(buffer, *output_buffer));
- RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
- RETURN_IF_ERROR(ValidateBufferPlaneMetadata(*output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferFormats(buffer, *output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferPlaneMetadata(*output_buffer));
switch (buffer.format()) {
case FrameBuffer::Format::kGRAY:
@@ -1461,10 +1461,10 @@ absl::Status LibyuvFrameBufferUtils::Rotate(const FrameBuffer& buffer,
absl::Status LibyuvFrameBufferUtils::FlipHorizontally(
const FrameBuffer& buffer, FrameBuffer* output_buffer) {
- RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
- RETURN_IF_ERROR(ValidateBufferPlaneMetadata(*output_buffer));
- RETURN_IF_ERROR(ValidateFlipBufferInputs(buffer, *output_buffer));
- RETURN_IF_ERROR(ValidateBufferFormats(buffer, *output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferPlaneMetadata(*output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateFlipBufferInputs(buffer, *output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferFormats(buffer, *output_buffer));
switch (buffer.format()) {
case FrameBuffer::Format::kRGBA:
@@ -1489,10 +1489,10 @@ absl::Status LibyuvFrameBufferUtils::FlipHorizontally(
absl::Status LibyuvFrameBufferUtils::FlipVertically(
const FrameBuffer& buffer, FrameBuffer* output_buffer) {
- RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
- RETURN_IF_ERROR(ValidateBufferPlaneMetadata(*output_buffer));
- RETURN_IF_ERROR(ValidateFlipBufferInputs(buffer, *output_buffer));
- RETURN_IF_ERROR(ValidateBufferFormats(buffer, *output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferPlaneMetadata(buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferPlaneMetadata(*output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateFlipBufferInputs(buffer, *output_buffer));
+ TFLITE_RETURN_IF_ERROR(ValidateBufferFormats(buffer, *output_buffer));
switch (buffer.format()) {
case FrameBuffer::Format::kRGBA:
@@ -1515,7 +1515,7 @@ absl::Status LibyuvFrameBufferUtils::FlipVertically(
absl::Status LibyuvFrameBufferUtils::Convert(const FrameBuffer& buffer,
FrameBuffer* output_buffer) {
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
ValidateConvertFormats(buffer.format(), output_buffer->format()));
switch (buffer.format()) {
case FrameBuffer::Format::kNV12:
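
A hypothetical caller of LibyuvFrameBufferUtils, sketched only to show how the renamed macro chains the validation-heavy operations above. CropThenResize and its output buffers are made up; Crop() and Resize() use the signatures visible in the hunks.

    // Hypothetical sketch, not part of the patch.
    absl::Status CropThenResize(LibyuvFrameBufferUtils* utils,
                                const FrameBuffer& input, int x0, int y0,
                                int x1, int y1, FrameBuffer* cropped,
                                FrameBuffer* resized) {
      TFLITE_RETURN_IF_ERROR(utils->Crop(input, x0, y0, x1, y1, cropped));
      TFLITE_RETURN_IF_ERROR(utils->Resize(*cropped, resized));
      return absl::OkStatus();
    }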
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/score_calibration.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/score_calibration.cc
index f3cfc495ceb10..9e8bd33934867 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/score_calibration.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/task/vision/utils/score_calibration.cc
@@ -231,7 +231,7 @@ StatusOr<SigmoidCalibrationParameters> BuildSigmoidCalibrationParams(
if (lines[i].empty()) {
continue;
}
- ASSIGN_OR_RETURN(Sigmoid sigmoid, SigmoidFromLabelAndLine(
+ TFLITE_ASSIGN_OR_RETURN(Sigmoid sigmoid, SigmoidFromLabelAndLine(
label_map_items[i].name, lines[i]));
sigmoid_vector.emplace_back(std::move(sigmoid));
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/processor/embedding_searcher_test.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/processor/embedding_searcher_test.cc
index 720cc00f60323..a70b5100a2ff3 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/processor/embedding_searcher_test.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/processor/embedding_searcher_test.cc
@@ -84,13 +84,13 @@ StatusOr<absl::string_view> GetIndexFileContentFromMetadata(
StatusOr<std::string> GetIndexFileContentFromModelFile(
const std::string& model_path) {
auto engine = std::make_unique<core::TfLiteEngine>();
- RETURN_IF_ERROR(engine->BuildModelFromFile(model_path));
+ TFLITE_RETURN_IF_ERROR(engine->BuildModelFromFile(model_path));
const tflite::metadata::ModelMetadataExtractor* metadata_extractor =
engine->metadata_extractor();
const TensorMetadata* tensor_metadata =
metadata_extractor->GetOutputTensorMetadata(0);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
absl::string_view index_file_content,
GetIndexFileContentFromMetadata(*metadata_extractor, *tensor_metadata));
@@ -103,7 +103,7 @@ StatusOr<std::string> GetIndexFileContentFromModelFile(
StatusOr<std::string> GetFileContent(const std::string& file_path) {
tflite::task::core::ExternalFile external_file;
external_file.set_file_name(file_path);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto handler,
core::ExternalFileHandler::CreateFromExternalFile(&external_file));
absl::string_view file_content = handler->GetFileContent();
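
In the same spirit, a hypothetical sibling of GetFileContent above. GetFileSize is made up for the example, but ExternalFileHandler::CreateFromExternalFile and GetFileContent() are the calls shown in this hunk.

    // Hypothetical sketch, not part of the patch.
    tflite::support::StatusOr<size_t> GetFileSize(const std::string& file_path) {
      tflite::task::core::ExternalFile external_file;
      external_file.set_file_name(file_path);
      TFLITE_ASSIGN_OR_RETURN(
          auto handler,
          tflite::task::core::ExternalFileHandler::CreateFromExternalFile(
              &external_file));
      return handler->GetFileContent().size();
    }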
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/image_classifier_test.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/image_classifier_test.cc
index 2ea2c578f0d40..8cc04ca191fdc 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/image_classifier_test.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/image_classifier_test.cc
@@ -556,16 +556,16 @@ class PostprocessTest : public tflite::testing::Test {
static StatusOr<std::unique_ptr<TestImageClassifier>> CreateFromOptions(
const ImageClassifierOptions& options) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
auto options_copy = absl::make_unique<ImageClassifierOptions>(options);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto image_classifier,
TaskAPIFactory::CreateFromExternalFileProto<TestImageClassifier>(
&options_copy->model_file_with_metadata()));
- RETURN_IF_ERROR(image_classifier->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(image_classifier->Init(std::move(options_copy)));
return image_classifier;
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/image_segmenter_test.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/image_segmenter_test.cc
index 712c9a3dead7e..1c49b3630f91c 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/image_segmenter_test.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/image_segmenter_test.cc
@@ -386,16 +386,16 @@ class PostprocessTest : public tflite::testing::Test {
static StatusOr<std::unique_ptr<TestImageSegmenter>> CreateFromOptions(
const ImageSegmenterOptions& options) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
auto options_copy = absl::make_unique<ImageSegmenterOptions>(options);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto image_segmenter,
TaskAPIFactory::CreateFromExternalFileProto<TestImageSegmenter>(
&options_copy->model_file_with_metadata()));
- RETURN_IF_ERROR(image_segmenter->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(image_segmenter->Init(std::move(options_copy)));
return image_segmenter;
}
@@ -429,7 +429,7 @@ class PostprocessTest : public tflite::testing::Test {
confidence_scores.resize(/*width*/ 257 *
/*height*/ 257 *
/*classes*/ 21);
- RETURN_IF_ERROR(PopulateTensor(confidence_scores, output_tensor));
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(confidence_scores, output_tensor));
return output_tensor;
}
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/object_detector_test.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/object_detector_test.cc
index 5bae0eefb4341..511b4ba338774 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/object_detector_test.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/test/task/vision/object_detector_test.cc
@@ -399,16 +399,16 @@ class PostprocessTest : public tflite::testing::Test {
static StatusOr<std::unique_ptr<TestObjectDetector>> CreateFromOptions(
const ObjectDetectorOptions& options) {
- RETURN_IF_ERROR(SanityCheckOptions(options));
+ TFLITE_RETURN_IF_ERROR(SanityCheckOptions(options));
auto options_copy = absl::make_unique<ObjectDetectorOptions>(options);
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
auto object_detector,
TaskAPIFactory::CreateFromExternalFileProto<TestObjectDetector>(
&options_copy->model_file_with_metadata()));
- RETURN_IF_ERROR(object_detector->Init(std::move(options_copy)));
+ TFLITE_RETURN_IF_ERROR(object_detector->Init(std::move(options_copy)));
return object_detector;
}
@@ -459,7 +459,7 @@ class PostprocessTest : public tflite::testing::Test {
/*left=*/0.2, /*top=*/0.4, /*right=*/0.4, /*bottom=*/0.8};
// Pad with zeros to fill the 10 locations.
locations_data.resize(4 * 10);
- RETURN_IF_ERROR(PopulateTensor(locations_data, locations));
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(locations_data, locations));
result.push_back(locations);
TfLiteTensor* classes = output_tensors[1];
@@ -467,19 +467,19 @@ class PostprocessTest : public tflite::testing::Test {
/*motorcycle*/ 3};
// Pad with zeros to fill the 10 classes.
classes_data.resize(10);
- RETURN_IF_ERROR(PopulateTensor(classes_data, classes));
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(classes_data, classes));
result.push_back(classes);
TfLiteTensor* scores = output_tensors[2];
std::vector<float> scores_data = {0.8, 0.6, 0.4};
// Pad with zeros to fill the 10 scores.
scores_data.resize(10);
- RETURN_IF_ERROR(PopulateTensor(scores_data, scores));
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(scores_data, scores));
result.push_back(scores);
TfLiteTensor* num_results = output_tensors[3];
std::vector<float> num_results_data = {10};
- RETURN_IF_ERROR(PopulateTensor(num_results_data, num_results));
+ TFLITE_RETURN_IF_ERROR(PopulateTensor(num_results_data, num_results));
result.push_back(num_results);
return result;
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/text/tokenizers/tokenizer_utils.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/text/tokenizers/tokenizer_utils.cc
index bd4f18fca7f08..135de8e18f525 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/text/tokenizers/tokenizer_utils.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/text/tokenizers/tokenizer_utils.cc
@@ -48,7 +48,7 @@ StatusOr<absl::string_view> CheckAndLoadFirstAssociatedFile(
"Invalid vocab_file from input process unit.",
TfLiteSupportStatus::kMetadataInvalidTokenizerError);
}
- ASSIGN_OR_RETURN(absl::string_view vocab_buffer,
+ TFLITE_ASSIGN_OR_RETURN(absl::string_view vocab_buffer,
metadata_extractor->GetAssociatedFile(
associated_files->Get(0)->name()->str()));
return vocab_buffer;
@@ -68,7 +68,7 @@ StatusOr<std::unique_ptr<Tokenizer>> CreateTokenizerFromProcessUnit(
case ProcessUnitOptions_BertTokenizerOptions: {
const tflite::BertTokenizerOptions* options =
tokenizer_process_unit->options_as<tflite::BertTokenizerOptions>();
- ASSIGN_OR_RETURN(absl::string_view vocab_buffer,
+ TFLITE_ASSIGN_OR_RETURN(absl::string_view vocab_buffer,
CheckAndLoadFirstAssociatedFile(options->vocab_file(),
metadata_extractor));
return absl::make_unique<BertTokenizer>(vocab_buffer.data(),
@@ -83,7 +83,7 @@ StatusOr<std::unique_ptr<Tokenizer>> CreateTokenizerFromProcessUnit(
case ProcessUnitOptions_RegexTokenizerOptions: {
const tflite::RegexTokenizerOptions* options =
tokenizer_process_unit->options_as<RegexTokenizerOptions>();
- ASSIGN_OR_RETURN(absl::string_view vocab_buffer,
+ TFLITE_ASSIGN_OR_RETURN(absl::string_view vocab_buffer,
CheckAndLoadFirstAssociatedFile(options->vocab_file(),
metadata_extractor));
if (options->delim_regex_pattern() == nullptr) {
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/cc/utils/jni_utils.cc b/third_party/tflite_support/src/tensorflow_lite_support/cc/utils/jni_utils.cc
index 8508f89a5cd71..8bbc4892c1706 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/cc/utils/jni_utils.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/cc/utils/jni_utils.cc
@@ -99,7 +99,7 @@ tflite::support::StatusOr<Delegate> ConvertToProtoDelegate(jint delegate) {
case 1:
return Delegate::NNAPI;
case 2:
- RETURN_IF_ERROR(loadDelegatePluginLibrary("gpu"));
+ TFLITE_RETURN_IF_ERROR(loadDelegatePluginLibrary("gpu"));
return Delegate::GPU;
default:
break;
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/custom_ops/kernel/sentencepiece/sentencepiece_detokenizer_op.cc b/third_party/tflite_support/src/tensorflow_lite_support/custom_ops/kernel/sentencepiece/sentencepiece_detokenizer_op.cc
index 273ccc8506811..a2939b0e95e4a 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/custom_ops/kernel/sentencepiece/sentencepiece_detokenizer_op.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/custom_ops/kernel/sentencepiece/sentencepiece_detokenizer_op.cc
@@ -31,11 +31,11 @@ REGISTER_OP("TFSentencepieceDetokenizeOp")
.Output("output: string")
.SetShapeFn([](tensorflow::shape_inference::InferenceContext* c) {
shape_inference::ShapeHandle unused;
- TF_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &unused));
- TF_RETURN_IF_ERROR(c->WithRank(c->input(2), 1, &unused));
+ TF_TFLITE_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &unused));
+ TF_TFLITE_RETURN_IF_ERROR(c->WithRank(c->input(2), 1, &unused));
shape_inference::DimensionHandle dim;
- TF_RETURN_IF_ERROR(c->Subtract(c->NumElements(c->input(2)), 1, &dim));
+ TF_TFLITE_RETURN_IF_ERROR(c->Subtract(c->NumElements(c->input(2)), 1, &dim));
c->set_output(0, c->Vector(dim));
return OkStatus();
});
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/custom_ops/kernel/sentencepiece/sentencepiece_tokenizer_op.cc b/third_party/tflite_support/src/tensorflow_lite_support/custom_ops/kernel/sentencepiece/sentencepiece_tokenizer_op.cc
index f922efa883775..7fd3442d9a791 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/custom_ops/kernel/sentencepiece/sentencepiece_tokenizer_op.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/custom_ops/kernel/sentencepiece/sentencepiece_tokenizer_op.cc
@@ -42,19 +42,19 @@ REGISTER_OP("TFSentencepieceTokenizeOp")
.Output("output_splits: Tsplits")
.SetShapeFn([](tensorflow::shape_inference::InferenceContext* c) {
tensorflow::shape_inference::ShapeHandle unused;
- TF_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &unused));
- TF_RETURN_IF_ERROR(c->WithRank(c->input(2), 0, &unused));
- TF_RETURN_IF_ERROR(c->WithRank(c->input(3), 0, &unused));
- TF_RETURN_IF_ERROR(c->WithRank(c->input(4), 0, &unused));
- TF_RETURN_IF_ERROR(c->WithRank(c->input(5), 0, &unused));
- TF_RETURN_IF_ERROR(c->WithRank(c->input(6), 0, &unused));
+ TF_TFLITE_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &unused));
+ TF_TFLITE_RETURN_IF_ERROR(c->WithRank(c->input(2), 0, &unused));
+ TF_TFLITE_RETURN_IF_ERROR(c->WithRank(c->input(3), 0, &unused));
+ TF_TFLITE_RETURN_IF_ERROR(c->WithRank(c->input(4), 0, &unused));
+ TF_TFLITE_RETURN_IF_ERROR(c->WithRank(c->input(5), 0, &unused));
+ TF_TFLITE_RETURN_IF_ERROR(c->WithRank(c->input(6), 0, &unused));
c->set_output(
0, c->Vector(
tensorflow::shape_inference::InferenceContext::kUnknownDim));
tensorflow::shape_inference::DimensionHandle num_splits;
- TF_RETURN_IF_ERROR(c->Add(c->NumElements(c->input(1)), 1, &num_splits));
+ TF_TFLITE_RETURN_IF_ERROR(c->Add(c->NumElements(c->input(1)), 1, &num_splits));
c->set_output(1, c->Vector(num_splits));
return tensorflow::OkStatus();
});
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/audio/desktop/audio_classifier_lib.cc b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/audio/desktop/audio_classifier_lib.cc
index d739cb55f26a5..8ed0ea3d6955b 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/audio/desktop/audio_classifier_lib.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/audio/desktop/audio_classifier_lib.cc
@@ -41,7 +41,7 @@ tflite::support::StatusOr<AudioBuffer> LoadAudioBufferFromFile(
uint32 decoded_sample_count;
uint16 decoded_channel_count;
uint32 decoded_sample_rate;
- RETURN_IF_ERROR(DecodeLin16WaveAsFloatVector(
+ TFLITE_RETURN_IF_ERROR(DecodeLin16WaveAsFloatVector(
contents, wav_data, offset, &decoded_sample_count, &decoded_channel_count,
&decoded_sample_rate));
@@ -66,19 +66,19 @@ tflite::support::StatusOr<ClassificationResult> Classify(
->mutable_tflite_settings()
->set_delegate(::tflite::proto::Delegate::EDGETPU_CORAL);
}
- ASSIGN_OR_RETURN(std::unique_ptr<AudioClassifier> classifier,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<AudioClassifier> classifier,
AudioClassifier::CreateFromOptions(options));
// `wav_data` holds data loaded from the file and needs to outlive `buffer`.
std::vector<float> wav_data;
uint32_t offset = 0;
uint32_t buffer_size = classifier->GetRequiredInputBufferSize();
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
AudioBuffer buffer,
LoadAudioBufferFromFile(wav_file, &buffer_size, &offset, &wav_data));
auto start_classify = std::chrono::steady_clock::now();
- ASSIGN_OR_RETURN(ClassificationResult result, classifier->Classify(buffer));
+ TFLITE_ASSIGN_OR_RETURN(ClassificationResult result, classifier->Classify(buffer));
auto end_classify = std::chrono::steady_clock::now();
std::string delegate = use_coral ? "Coral Edge TPU" : "CPU";
const auto duration_ms =
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/bert_nl_classifier_demo.cc b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/bert_nl_classifier_demo.cc
index 02eed2332b2e4..e2d67030fe996 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/bert_nl_classifier_demo.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/bert_nl_classifier_demo.cc
@@ -50,7 +50,7 @@ absl::Status Classify() {
->set_delegate(::tflite::proto::Delegate::EDGETPU_CORAL);
}
- ASSIGN_OR_RETURN(std::unique_ptr<BertNLClassifier> classifier,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<BertNLClassifier> classifier,
BertNLClassifier::CreateFromOptions(options));
auto start_classify = steady_clock::now();
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/bert_question_answerer_demo.cc b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/bert_question_answerer_demo.cc
index 4eaa2bbbdd9f5..9cc347764364f 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/bert_question_answerer_demo.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/bert_question_answerer_demo.cc
@@ -51,7 +51,7 @@ absl::Status Answer() {
->set_delegate(::tflite::proto::Delegate::EDGETPU_CORAL);
}
- ASSIGN_OR_RETURN(std::unique_ptr<QuestionAnswerer> answerer,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<QuestionAnswerer> answerer,
BertQuestionAnswerer::CreateFromOptions(options));
auto start_answer = steady_clock::now();
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/nl_classifier_demo.cc b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/nl_classifier_demo.cc
index 49f233ce1e74c..72ee5b656a18f 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/nl_classifier_demo.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/nl_classifier_demo.cc
@@ -51,7 +51,7 @@ absl::Status Classify() {
->set_delegate(::tflite::proto::Delegate::EDGETPU_CORAL);
}
- ASSIGN_OR_RETURN(std::unique_ptr<NLClassifier> classifier,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<NLClassifier> classifier,
NLClassifier::CreateFromOptions(options));
auto start_classify = steady_clock::now();
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/text_embedder_demo.cc b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/text_embedder_demo.cc
index e28e5fe6a804b..7f3a11cf72795 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/text_embedder_demo.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/text_embedder_demo.cc
@@ -80,13 +80,13 @@ TextEmbedderOptions BuildOptions() {
absl::Status ComputeCosineSimilarity() {
// Build TextEmbedder.
const TextEmbedderOptions options = BuildOptions();
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
std::unique_ptr<TextEmbedder> text_embedder,
TextEmbedder::CreateFromOptions(options, CreateTextOpResolver()));
// Run search and display results.
auto start_embed = steady_clock::now();
- ASSIGN_OR_RETURN(processor::EmbeddingResult first_embedding,
+ TFLITE_ASSIGN_OR_RETURN(processor::EmbeddingResult first_embedding,
text_embedder->Embed(absl::GetFlag(FLAGS_first_sentence)));
auto end_embed = steady_clock::now();
std::string delegate =
@@ -97,10 +97,10 @@ absl::Status ComputeCosineSimilarity() {
.count()
<< " ms" << std::endl;
- ASSIGN_OR_RETURN(processor::EmbeddingResult second_embedding,
+ TFLITE_ASSIGN_OR_RETURN(processor::EmbeddingResult second_embedding,
text_embedder->Embed(absl::GetFlag(FLAGS_second_sentence)));
// Compute cosine similarity.
- ASSIGN_OR_RETURN(double cosine_similarity,
+ TFLITE_ASSIGN_OR_RETURN(double cosine_similarity,
TextEmbedder::CosineSimilarity(
first_embedding.embeddings(0).feature_vector(),
second_embedding.embeddings(0).feature_vector()));
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/text_searcher_demo.cc b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/text_searcher_demo.cc
index 68c347c2639bb..92b9ffc8adf2b 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/text_searcher_demo.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/text/desktop/text_searcher_demo.cc
@@ -101,13 +101,13 @@ void DisplayResults(const processor::SearchResult& result) {
absl::Status Search() {
// Build TextSearcher.
const TextSearcherOptions options = BuildOptions();
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
std::unique_ptr<TextSearcher> text_searcher,
TextSearcher::CreateFromOptions(options, CreateTextOpResolver()));
// Run search and display results.
auto start_search = steady_clock::now();
- ASSIGN_OR_RETURN(processor::SearchResult result,
+ TFLITE_ASSIGN_OR_RETURN(processor::SearchResult result,
text_searcher->Search(absl::GetFlag(FLAGS_input_sentence)));
auto end_search = steady_clock::now();
std::string delegate =
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_classifier_demo.cc b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_classifier_demo.cc
index f29bd2de9c535..2650607b13191 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_classifier_demo.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_classifier_demo.cc
@@ -125,11 +125,11 @@ void DisplayResult(const ClassificationResult& result) {
absl::Status Classify() {
// Build ImageClassifier.
const ImageClassifierOptions& options = BuildOptions();
- ASSIGN_OR_RETURN(std::unique_ptr<ImageClassifier> image_classifier,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<ImageClassifier> image_classifier,
ImageClassifier::CreateFromOptions(options));
// Load image in a FrameBuffer.
- ASSIGN_OR_RETURN(ImageData image,
+ TFLITE_ASSIGN_OR_RETURN(ImageData image,
DecodeImageFromFile(absl::GetFlag(FLAGS_image_path)));
std::unique_ptr<FrameBuffer> frame_buffer;
if (image.channels == 3) {
@@ -146,7 +146,7 @@ absl::Status Classify() {
// Run classification and display results.
auto start_classify = steady_clock::now();
- ASSIGN_OR_RETURN(ClassificationResult result,
+ TFLITE_ASSIGN_OR_RETURN(ClassificationResult result,
image_classifier->Classify(*frame_buffer));
auto end_classify = steady_clock::now();
std::string delegate =
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_embedder_demo.cc b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_embedder_demo.cc
index 50d615a486751..d5f0cb1233457 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_embedder_demo.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_embedder_demo.cc
@@ -106,22 +106,22 @@ StatusOr<std::unique_ptr<FrameBuffer>> BuildFrameBufferFromImageData(
absl::Status ComputeCosineSimilarity() {
// Build ImageEmbedder.
const ImageEmbedderOptions& options = BuildOptions();
- ASSIGN_OR_RETURN(std::unique_ptr<ImageEmbedder> image_embedder,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<ImageEmbedder> image_embedder,
ImageEmbedder::CreateFromOptions(options));
// Load images into FrameBuffer objects.
- ASSIGN_OR_RETURN(ImageData first_image,
+ TFLITE_ASSIGN_OR_RETURN(ImageData first_image,
DecodeImageFromFile(absl::GetFlag(FLAGS_first_image_path)));
- ASSIGN_OR_RETURN(std::unique_ptr<FrameBuffer> first_frame_buffer,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<FrameBuffer> first_frame_buffer,
BuildFrameBufferFromImageData(first_image));
- ASSIGN_OR_RETURN(ImageData second_image,
+ TFLITE_ASSIGN_OR_RETURN(ImageData second_image,
DecodeImageFromFile(absl::GetFlag(FLAGS_second_image_path)));
- ASSIGN_OR_RETURN(std::unique_ptr<FrameBuffer> second_frame_buffer,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<FrameBuffer> second_frame_buffer,
BuildFrameBufferFromImageData(second_image));
// Extract feature vectors.
auto start_embed = steady_clock::now();
- ASSIGN_OR_RETURN(const EmbeddingResult& first_embedding_result,
+ TFLITE_ASSIGN_OR_RETURN(const EmbeddingResult& first_embedding_result,
image_embedder->Embed(*first_frame_buffer));
auto end_embed = steady_clock::now();
std::string delegate =
@@ -131,10 +131,10 @@ absl::Status ComputeCosineSimilarity() {
.count()
<< " ms" << std::endl;
- ASSIGN_OR_RETURN(const EmbeddingResult& second_embedding_result,
+ TFLITE_ASSIGN_OR_RETURN(const EmbeddingResult& second_embedding_result,
image_embedder->Embed(*second_frame_buffer));
// Compute cosine similarity.
- ASSIGN_OR_RETURN(
+ TFLITE_ASSIGN_OR_RETURN(
double cosine_similarity,
ImageEmbedder::CosineSimilarity(
image_embedder->GetEmbeddingByIndex(first_embedding_result, 0)
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_searcher_demo.cc b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_searcher_demo.cc
index b661447614bc7..0ef54df645447 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_searcher_demo.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_searcher_demo.cc
@@ -106,11 +106,11 @@ void DisplayResult(const processor::SearchResult& result) {
absl::Status Search() {
// Build ImageSearcher.
const ImageSearcherOptions options = BuildOptions();
- ASSIGN_OR_RETURN(std::unique_ptr<ImageSearcher> image_searcher,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<ImageSearcher> image_searcher,
ImageSearcher::CreateFromOptions(options));
// Load image in a FrameBuffer.
- ASSIGN_OR_RETURN(ImageData image,
+ TFLITE_ASSIGN_OR_RETURN(ImageData image,
DecodeImageFromFile(absl::GetFlag(FLAGS_image_path)));
std::unique_ptr<FrameBuffer> frame_buffer;
if (image.channels == 3) {
@@ -127,7 +127,7 @@ absl::Status Search() {
// Run search and display results.
auto start_search = steady_clock::now();
- ASSIGN_OR_RETURN(processor::SearchResult result,
+ TFLITE_ASSIGN_OR_RETURN(processor::SearchResult result,
image_searcher->Search(*frame_buffer));
auto end_search = steady_clock::now();
std::string delegate =
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_segmenter_demo.cc b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_segmenter_demo.cc
index 5a566ecbcf921..bebbd44431c90 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_segmenter_demo.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/image_segmenter_demo.cc
@@ -104,7 +104,7 @@ absl::Status EncodeMaskToPngFile(const SegmentationResult& result) {
}
// Encode mask as PNG.
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
EncodeImageToPngFile(mask, absl::GetFlag(FLAGS_output_mask_png)));
std::cout << absl::StrFormat("Category mask saved to: %s\n",
absl::GetFlag(FLAGS_output_mask_png));
@@ -149,11 +149,11 @@ absl::Status DisplayColorLegend(const SegmentationResult& result) {
absl::Status Segment() {
// Build ImageClassifier.
const ImageSegmenterOptions& options = BuildOptions();
- ASSIGN_OR_RETURN(std::unique_ptr<ImageSegmenter> image_segmenter,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<ImageSegmenter> image_segmenter,
ImageSegmenter::CreateFromOptions(options));
// Load image in a FrameBuffer.
- ASSIGN_OR_RETURN(ImageData image,
+ TFLITE_ASSIGN_OR_RETURN(ImageData image,
DecodeImageFromFile(absl::GetFlag(FLAGS_image_path)));
std::unique_ptr<FrameBuffer> frame_buffer;
if (image.channels == 3) {
@@ -170,7 +170,7 @@ absl::Status Segment() {
// Run segmentation and save category mask.
auto start_segment = steady_clock::now();
- ASSIGN_OR_RETURN(SegmentationResult result,
+ TFLITE_ASSIGN_OR_RETURN(SegmentationResult result,
image_segmenter->Segment(*frame_buffer));
auto end_segment = steady_clock::now();
std::string delegate =
@@ -181,10 +181,10 @@ absl::Status Segment() {
.count()
<< " ms" << std::endl;
- RETURN_IF_ERROR(EncodeMaskToPngFile(result));
+ TFLITE_RETURN_IF_ERROR(EncodeMaskToPngFile(result));
// Display the legend.
- RETURN_IF_ERROR(DisplayColorLegend(result));
+ TFLITE_RETURN_IF_ERROR(DisplayColorLegend(result));
// Cleanup and return.
ImageDataFree(&image);
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/object_detector_demo.cc b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/object_detector_demo.cc
index 20f7403207c2e..28d9243e32dd8 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/object_detector_demo.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/examples/task/vision/desktop/object_detector_demo.cc
@@ -151,7 +151,7 @@ absl::Status EncodeResultToPngFile(const DetectionResult& result,
}
}
// Encode to PNG and return.
- RETURN_IF_ERROR(
+ TFLITE_RETURN_IF_ERROR(
EncodeImageToPngFile(*image, absl::GetFlag(FLAGS_output_png)));
std::cout << absl::StrFormat("Results saved to: %s\n",
absl::GetFlag(FLAGS_output_png));
@@ -192,11 +192,11 @@ void DisplayResult(const DetectionResult& result) {
absl::Status Detect() {
// Build ObjectDetector.
const ObjectDetectorOptions& options = BuildOptions();
- ASSIGN_OR_RETURN(std::unique_ptr<ObjectDetector> object_detector,
+ TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<ObjectDetector> object_detector,
ObjectDetector::CreateFromOptions(options));
// Load image in a FrameBuffer.
- ASSIGN_OR_RETURN(ImageData image,
+ TFLITE_ASSIGN_OR_RETURN(ImageData image,
DecodeImageFromFile(absl::GetFlag(FLAGS_image_path)));
std::unique_ptr<FrameBuffer> frame_buffer;
if (image.channels == 3) {
@@ -213,7 +213,7 @@ absl::Status Detect() {
// Run object detection and draw results on input image.
auto start_detect = steady_clock::now();
- ASSIGN_OR_RETURN(DetectionResult result,
+ TFLITE_ASSIGN_OR_RETURN(DetectionResult result,
object_detector->Detect(*frame_buffer));
auto end_detect = steady_clock::now();
std::string delegate =
@@ -224,7 +224,7 @@ absl::Status Detect() {
.count()
<< " ms" << std::endl;
- RETURN_IF_ERROR(EncodeResultToPngFile(result, &image));
+ TFLITE_RETURN_IF_ERROR(EncodeResultToPngFile(result, &image));
// Display results as text.
DisplayResult(result);
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/java/src/native/task/vision/jni_utils.cc b/third_party/tflite_support/src/tensorflow_lite_support/java/src/native/task/vision/jni_utils.cc
index 1b08e56ed509b..44254c23a83d4 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/java/src/native/task/vision/jni_utils.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/java/src/native/task/vision/jni_utils.cc
@@ -179,7 +179,7 @@ StatusOr<std::unique_ptr<FrameBuffer>> CreateFrameBufferFromYuvPlanes(
reinterpret_cast<const uint8*>(GetMappedFileBuffer(env, jv_plane).data());
FrameBuffer::Format format;
- ASSIGN_OR_RETURN(format,
+ TFLITE_ASSIGN_OR_RETURN(format,
GetYUVImageFormat(u_plane, v_plane, pixel_stride_uv));
return CreateFromYuvRawBuffer(
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/metadata/cc/metadata_extractor.cc b/third_party/tflite_support/src/tensorflow_lite_support/metadata/cc/metadata_extractor.cc
index d208da4e03780..4b3f816183c8f 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/metadata/cc/metadata_extractor.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/metadata/cc/metadata_extractor.cc
@@ -79,7 +79,7 @@ tflite::support::StatusOr<ZipFileInfo> GetCurrentZipFileInfo(
const unzFile& zf) {
// Open file in raw mode, as data is expected to be uncompressed.
int method;
- RETURN_IF_ERROR(UnzipErrorToStatus(
+ TFLITE_RETURN_IF_ERROR(UnzipErrorToStatus(
unzOpenCurrentFile2(zf, &method, /*level=*/nullptr, /*raw=*/1)));
if (method != Z_NO_COMPRESSION) {
return CreateStatusWithPayload(
@@ -89,7 +89,7 @@ tflite::support::StatusOr<ZipFileInfo> GetCurrentZipFileInfo(
// Get file info a first time to get filename size.
unz_file_info64 file_info;
- RETURN_IF_ERROR(UnzipErrorToStatus(unzGetCurrentFileInfo64(
+ TFLITE_RETURN_IF_ERROR(UnzipErrorToStatus(unzGetCurrentFileInfo64(
zf, &file_info, /*szFileName=*/nullptr, /*szFileNameBufferSize=*/0,
/*extraField=*/nullptr, /*extraFieldBufferSize=*/0,
/*szComment=*/nullptr, /*szCommentBufferSize=*/0)));
@@ -97,7 +97,7 @@ tflite::support::StatusOr<ZipFileInfo> GetCurrentZipFileInfo(
// Second call to get file name.
auto file_name_size = file_info.size_filename;
char* c_file_name = (char*)malloc(file_name_size);
- RETURN_IF_ERROR(UnzipErrorToStatus(unzGetCurrentFileInfo64(
+ TFLITE_RETURN_IF_ERROR(UnzipErrorToStatus(unzGetCurrentFileInfo64(
zf, &file_info, c_file_name, file_name_size,
/*extraField=*/nullptr, /*extraFieldBufferSize=*/0,
/*szComment=*/nullptr, /*szCommentBufferSize=*/0)));
@@ -113,7 +113,7 @@ tflite::support::StatusOr<ZipFileInfo> GetCurrentZipFileInfo(
}
// Close file and return.
- RETURN_IF_ERROR(UnzipErrorToStatus(unzCloseCurrentFile(zf)));
+ TFLITE_RETURN_IF_ERROR(UnzipErrorToStatus(unzCloseCurrentFile(zf)));
ZipFileInfo result{};
result.name = file_name;
@@ -131,7 +131,7 @@ ModelMetadataExtractor::CreateFromModelBuffer(const char* buffer_data,
// https://abseil.io/tips/126.
std::unique_ptr<ModelMetadataExtractor> extractor =
absl::WrapUnique(new ModelMetadataExtractor());
- RETURN_IF_ERROR(extractor->InitFromModelBuffer(buffer_data, buffer_size));
+ TFLITE_RETURN_IF_ERROR(extractor->InitFromModelBuffer(buffer_data, buffer_size));
return extractor;
}
@@ -261,7 +261,7 @@ absl::Status ModelMetadataExtractor::ExtractAssociatedFiles(
if (global_info.number_entry > 0) {
int error = unzGoToFirstFile(zf);
while (error == UNZ_OK) {
- ASSIGN_OR_RETURN(auto zip_file_info, GetCurrentZipFileInfo(zf));
+ TFLITE_ASSIGN_OR_RETURN(auto zip_file_info, GetCurrentZipFileInfo(zf));
// Store result in map.
associated_files_[zip_file_info.name] = absl::string_view(
buffer_data + zip_file_info.position, zip_file_info.size);
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/scann_ondevice/cc/index.cc b/third_party/tflite_support/src/tensorflow_lite_support/scann_ondevice/cc/index.cc
index 8e45119d7364d..2c2006725ec25 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/scann_ondevice/cc/index.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/scann_ondevice/cc/index.cc
@@ -64,13 +64,13 @@ absl::StatusOr<std::unique_ptr<Index>> Index::CreateFromIndexBuffer(
// Use absl::WrapUnique() to call private constructor:
// https://abseil.io/tips/126.
std::unique_ptr<Index> index = absl::WrapUnique(new Index());
- RETURN_IF_ERROR(index->InitFromBuffer(buffer_data, buffer_size));
+ TFLITE_RETURN_IF_ERROR(index->InitFromBuffer(buffer_data, buffer_size));
return index;
}
absl::StatusOr<IndexConfig> Index::GetIndexConfig() const {
std::string key(kIndexConfigKey);
- ASSIGN_OR_RETURN(absl::string_view value,
+ TFLITE_ASSIGN_OR_RETURN(absl::string_view value,
GetValueForKey(config_iterator_.get(), key));
IndexConfig config;
if (!config.ParseFromString(std::string(value))) {
diff --git a/third_party/tflite_support/src/tensorflow_lite_support/scann_ondevice/cc/index_builder.cc b/third_party/tflite_support/src/tensorflow_lite_support/scann_ondevice/cc/index_builder.cc
index fe5d1ef1175e4..94c85334f0574 100644
--- a/third_party/tflite_support/src/tensorflow_lite_support/scann_ondevice/cc/index_builder.cc
+++ b/third_party/tflite_support/src/tensorflow_lite_support/scann_ondevice/cc/index_builder.cc
@@ -104,7 +104,7 @@ absl::StatusOr<std::string> CreateIndexBufferImpl(
}
std::string buffer;
- ASSIGN_OR_RETURN(auto mem_writable_file, MemWritableFile::Create(&buffer));
+ TFLITE_ASSIGN_OR_RETURN(auto mem_writable_file, MemWritableFile::Create(&buffer));
leveldb::Options options;
options.compression =
--
2.42.0.515.g380fc7ccd1-goog
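
For reference, below is a minimal, self-contained C++ sketch of how ASSIGN_OR_RETURN / RETURN_IF_ERROR-style macros over absl::Status and absl::StatusOr behave at the call sites touched above. It is an illustration only: the macro bodies and the Widget/CreateWidget helpers are hypothetical and simplified, not the definitions shipped in tflite_support.

// Illustrative sketch only. These are simplified stand-ins for the
// TFLITE_-prefixed status macros; the real definitions differ.
#include <iostream>
#include <memory>
#include <string>
#include <utility>

#include "absl/status/status.h"
#include "absl/status/statusor.h"

// Return early from the enclosing function if `expr` yields a non-OK status.
#define TFLITE_RETURN_IF_ERROR(expr)       \
  do {                                     \
    const absl::Status _status = (expr);   \
    if (!_status.ok()) return _status;     \
  } while (0)

// Evaluate a StatusOr<T> expression; on success move its value into `lhs`,
// otherwise return the error status. __LINE__ keeps the temporary unique so
// the macro can be used several times in one scope.
#define TFLITE_MACRO_CONCAT_INNER(x, y) x##y
#define TFLITE_MACRO_CONCAT(x, y) TFLITE_MACRO_CONCAT_INNER(x, y)
#define TFLITE_ASSIGN_OR_RETURN(lhs, rexpr)                                    \
  TFLITE_ASSIGN_OR_RETURN_IMPL(TFLITE_MACRO_CONCAT(_status_or, __LINE__), lhs, \
                               rexpr)
#define TFLITE_ASSIGN_OR_RETURN_IMPL(statusor, lhs, rexpr) \
  auto statusor = (rexpr);                                 \
  if (!statusor.ok()) return statusor.status();            \
  lhs = std::move(statusor).value()

// Hypothetical factory mirroring the CreateFromOptions() pattern.
struct Widget {
  std::string name;
};

absl::StatusOr<std::unique_ptr<Widget>> CreateWidget(const std::string& name) {
  if (name.empty()) return absl::InvalidArgumentError("name must not be empty");
  return std::make_unique<Widget>(Widget{name});
}

absl::Status Run() {
  // On failure, both macros return the error status to the caller of Run().
  TFLITE_ASSIGN_OR_RETURN(std::unique_ptr<Widget> widget, CreateWidget("demo"));
  TFLITE_RETURN_IF_ERROR(widget ? absl::OkStatus()
                                : absl::InternalError("missing widget"));
  std::cout << "created: " << widget->name << std::endl;
  return absl::OkStatus();
}

int main() {
  const absl::Status status = Run();
  if (!status.ok()) std::cerr << status << std::endl;
  return status.ok() ? 0 : 1;
}

At call sites such as Classify(), Search(), and Segment() above, only the macro name gains the TFLITE_ prefix; the early-return behavior at each call site is unchanged.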