author    Sayanna Chandula <sayanna@google.com>    2023-12-05 19:06:05 +0000
committer Android (Google) Code Review <android-gerrit@google.com>    2023-12-05 19:06:05 +0000
commit    0db59c4c6ef9cb2b2498f0fff6edb8cf89033531 (patch)
tree      fc5d3ad555db454c7ab11d87a8e04e200168e0b7
parent    da596ccc273f371da8880e1c84b1faa7c3772f06 (diff)
parent    c40dc03d06fc861b6d864783de264b770afbc934 (diff)
download  pixel-0db59c4c6ef9cb2b2498f0fff6edb8cf89033531.tar.gz
Merge "thermal: add support for vt estimator test app" into main
-rw-r--r--  thermal/Android.bp                                              35
-rw-r--r--  thermal/virtualtemp_estimator/virtualtemp_estimator_test.cpp  411
2 files changed, 446 insertions, 0 deletions
diff --git a/thermal/Android.bp b/thermal/Android.bp
index 51f98d66..a7ad5d87 100644
--- a/thermal/Android.bp
+++ b/thermal/Android.bp
@@ -121,3 +121,38 @@ sh_binary {
"pixel-thermal-symlinks.rc",
],
}
+
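+// On-device test binary for the Pixel virtual temperature estimator
+// (vendor partition).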
+cc_binary {
+ name: "virtualtemp_estimator_test",
+ srcs: [
+ "virtualtemp_estimator/virtualtemp_estimator.cpp",
+ "virtualtemp_estimator/virtualtemp_estimator_test.cpp"
+ ],
+ shared_libs: [
+ "libc",
+ "liblog",
+ "libcutils",
+ "libbinder",
+ "libhidlbase",
+ "libutils",
+        "libjsoncpp",
+    ],
+ vendor: true,
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ "-Wunused",
+ ],
+ tidy: true,
+ tidy_checks: [
+ "android-*",
+ "cert-*",
+ "clang-analyzer-security*",
+ ],
+ tidy_checks_as_errors: [
+ "android-*",
+ "clang-analyzer-security*",
+ "cert-*",
+ ],
+}
diff --git a/thermal/virtualtemp_estimator/virtualtemp_estimator_test.cpp b/thermal/virtualtemp_estimator/virtualtemp_estimator_test.cpp
new file mode 100644
index 00000000..73dacde0
--- /dev/null
+++ b/thermal/virtualtemp_estimator/virtualtemp_estimator_test.cpp
@@ -0,0 +1,411 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/**
+ * @file virtualtemp_estimator_test.cpp
+ *
+ * Test application to run and verify the virtual temperature estimator
+ * interface.
+ */
+
+#include "virtualtemp_estimator.h"
+
+#include <android-base/file.h>
+#include <android-base/logging.h>
+#include <android-base/parsedouble.h>
+#include <android-base/properties.h>
+#include <android-base/strings.h>
+#include <cutils/properties.h>
+#include <cutils/trace.h>
+#include <json/reader.h>
+#include <json/value.h>
+#include <json/writer.h>
+#include <log/log.h>
+#include <malloc.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/time.h>
+#include <unistd.h>
+
+#include <algorithm>
+#include <climits>
+#include <fstream>
+#include <iostream>
+#include <memory>
+#include <string>
+#include <vector>
+
+static char default_model[] = "/vendor/etc/vt_estimation_model.tflite";
+static char default_thermal_config[] = "/vendor/etc/thermal_info_config.json";
+constexpr int kmillion = 1000000;
+constexpr int klog_interval_usec = 10 * kmillion;
+
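+// Returns the wall-clock time elapsed from start to end in microseconds.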
+static inline unsigned long get_elapsed_time_usec(struct timeval start, struct timeval end) {
+ unsigned long elapsed_time = (end.tv_sec - start.tv_sec) * kmillion;
+ elapsed_time += (end.tv_usec - start.tv_usec);
+
+ return elapsed_time;
+}
+
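+// Parses the thermal config JSON and returns the "Combination" input list of
+// the VIRTUAL-SKIN-MODEL sensor, i.e. the thermistor names the estimator
+// takes as inputs. Returns an empty vector on read or parse failure.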
+static std::vector<std::string> get_input_combination(const char *thermal_config_path) {
+ std::vector<std::string> result;
+ std::string json_doc;
+ if (!android::base::ReadFileToString(thermal_config_path, &json_doc)) {
+        std::cout << "Failed to read JSON config from " << thermal_config_path << std::endl;
+ return result;
+ }
+
+ Json::Value root;
+ Json::CharReaderBuilder reader_builder;
+ std::unique_ptr<Json::CharReader> reader(reader_builder.newCharReader());
+ std::string errorMessage;
+
+    if (!reader->parse(json_doc.data(), json_doc.data() + json_doc.size(), &root, &errorMessage)) {
+        std::cout << "Failed to parse JSON config: " << errorMessage << std::endl;
+ return result;
+ }
+
+ Json::Value sensors = root["Sensors"];
+ std::cout << "Sensors size: " << sensors.size() << std::endl;
+
+ for (Json::Value::ArrayIndex i = 0; i < sensors.size(); ++i) {
+ const std::string &name = sensors[i]["Name"].asString();
+ if (name == "VIRTUAL-SKIN-MODEL") {
+ Json::Value values = sensors[i]["Combination"];
+ if (values.size() == 0) {
+ return result;
+ }
+
+ std::cout << "Combination : [";
+ for (Json::Value::ArrayIndex j = 0; j < values.size(); ++j) {
+ result.push_back(values[j].asString());
+ std::cout << result.back() << ", ";
+ }
+ std::cout << "]" << std::endl;
+ }
+ }
+
+ return result;
+}
+
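+// Mode 2: repeatedly runs the estimator on randomly generated inputs until
+// min_inference_count inferences have completed, logging latency statistics
+// (min/max/avg and p50/p90) along the way.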
+static int run_random_input_inference(char *model_path, const char *thermal_config_path,
+ int min_inference_count, int inference_delay_sec) {
+ float output;
+ unsigned long prev_log_time = 0;
+ thermal::vtestimator::VtEstimatorStatus ret;
+ std::vector<std::string> input_combination = get_input_combination(thermal_config_path);
+ int input_size = input_combination.size();
+ thermal::vtestimator::VirtualTempEstimator vt_estimator_(input_size);
+
+ std::cout << "Initialize estimator\n";
+ ret = vt_estimator_.Initialize(model_path);
+ if (ret != thermal::vtestimator::kVtEstimatorOk) {
+ std::cout << "Failed to Initialize estimator (ret: " << ret << ")\n";
+ return -1;
+ }
+
+ struct timeval start_loop_time;
+ int inference_count = 0;
+ unsigned long max_inference_time = 0, min_inference_time = ULONG_MAX;
+ unsigned long sum_inference_time = 0;
+ float avg_inference_time = 0;
+ std::vector<unsigned long> inference_times;
+
+ gettimeofday(&start_loop_time, nullptr);
+ do {
+ struct timeval begin, end;
+ std::vector<float> thermistors;
+
+        // prepare random inputs with base temperatures between 20C and 40C
+        int r = 20 + std::rand() % 20;
+ for (int i = 0; i < input_size; ++i) {
+ thermistors.push_back(r + i);
+ }
+
+ gettimeofday(&begin, nullptr);
+ ret = vt_estimator_.Estimate(thermistors, &output);
+ gettimeofday(&end, nullptr);
+ if (ret != thermal::vtestimator::kVtEstimatorOk) {
+ std::cout << "Failed to run estimator (ret: " << ret << ")\n";
+ return -1;
+ }
+
+ unsigned long inference_time_usec = get_elapsed_time_usec(begin, end);
+
+ inference_count++;
+ max_inference_time = std::max(max_inference_time, inference_time_usec);
+ min_inference_time = std::min(min_inference_time, inference_time_usec);
+ sum_inference_time += inference_time_usec;
+        avg_inference_time = static_cast<float>(sum_inference_time) / inference_count;
+ inference_times.push_back(inference_time_usec);
+
+ unsigned long elapsed_time = get_elapsed_time_usec(start_loop_time, end);
+ if (elapsed_time - prev_log_time >= klog_interval_usec) {
+ std::cout << "elapsed_time_sec: " << elapsed_time / kmillion
+ << " inference_count: " << inference_count
+ << " min_inference_time: " << min_inference_time
+ << " max_inference_time: " << max_inference_time
+ << " avg_inference_time: " << avg_inference_time << std::endl;
+ prev_log_time = elapsed_time;
+ }
+
+        if (inference_delay_sec) {
+            sleep(inference_delay_sec);
+        }
+ } while (inference_count < min_inference_count);
+
+ std::cout << "\n\ntotal inference count: " << inference_count << std::endl;
+ std::cout << "total inference time: " << sum_inference_time << std::endl;
+ std::cout << "avg_inference_time: " << avg_inference_time << std::endl;
+ std::cout << "min_inference_time: " << min_inference_time << std::endl;
+ std::cout << "max_inference_time: " << max_inference_time << std::endl;
+
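+    // Sort the recorded latencies to report p50/p90 percentiles.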
+ std::sort(inference_times.begin(), inference_times.end());
+ std::cout << "\n\n";
+ std::cout << "p50: " << inference_times[inference_count * 0.5] << std::endl;
+ std::cout << "p90: " << inference_times[inference_count * 0.9] << std::endl;
+
+ return 0;
+}
+
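+// Mode 0: runs a single inference on the space-separated thermistor values
+// passed via -i, e.g. -i "30.0 31.5 32.0".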
+static int run_single_inference(char *model_path, char *input) {
+ if (!input) {
+ std::cout << "input is nullptr" << std::endl;
+ return -1;
+ }
+
+ std::vector<float> thermistors;
+ char *ip = input;
+ char *saveptr;
+
+ std::cout << "Parsing thermistors from input string: ";
+ ip = strtok_r(ip, " ", &saveptr);
+ while (ip) {
+        float thermistor_value;
+
+        if (sscanf(ip, "%f", &thermistor_value) != 1) {
+            std::cout << "failed to parse input token: " << ip << std::endl;
+            return -1;
+        }
+
+ std::cout << thermistor_value << " ";
+ thermistors.push_back(thermistor_value);
+
+ ip = strtok_r(NULL, " ", &saveptr);
+ }
+ std::cout << std::endl;
+ std::cout << "thermistors.size(): " << thermistors.size() << std::endl;
+
+ float output;
+ thermal::vtestimator::VtEstimatorStatus ret;
+ thermal::vtestimator::VirtualTempEstimator vt_estimator_(thermistors.size());
+
+ std::cout << "Initialize estimator\n";
+ ret = vt_estimator_.Initialize(model_path);
+ if (ret != thermal::vtestimator::kVtEstimatorOk) {
+ std::cout << "Failed to Initialize estimator (ret: " << ret << ")\n";
+ return -1;
+ }
+
+ std::cout << "run estimator\n";
+ ret = vt_estimator_.Estimate(thermistors, &output);
+ if (ret != thermal::vtestimator::kVtEstimatorOk) {
+ std::cout << "Failed to run estimator (ret: " << ret << ")\n";
+ return -1;
+ }
+
+ std::cout << "output: " << output << std::endl;
+ return 0;
+}
+
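+// Mode 1: runs the estimator over every testcase in the JSON input file and
+// writes the same JSON back out with a "model_vt" member added per testcase.
+// Expected input shape, inferred from the parsing below (names illustrative):
+//   {
+//     "Metadata" : { ... },
+//     "testcase_0" : {
+//       "sensor_a" : { "0" : "30.1", "1" : "30.4" },
+//       "sensor_b" : { "0" : "29.8", "1" : "30.0" }
+//     }
+//   }
+// Per-testcase keys must match the VIRTUAL-SKIN-MODEL "Combination" entries
+// in the thermal config; sample values are indexed by stringified integers.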
+static int run_batch_process(const char *model_path, const char *thermal_config_path,
+ const char *input_file, const char *output_file) {
+ if (!input_file || !output_file) {
+ std::cout << "input and output files required for batch process\n";
+ return -1;
+ }
+
+ std::cout << "get_input_combination(): ";
+ std::vector<std::string> input_combination = get_input_combination(thermal_config_path);
+ if (input_combination.size() == 0) {
+ LOG(ERROR) << "Invalid input_combination";
+ return -1;
+ }
+
+ thermal::vtestimator::VtEstimatorStatus ret;
+ thermal::vtestimator::VirtualTempEstimator vt_estimator_(input_combination.size());
+
+ std::cout << "Initialize estimator\n";
+ ret = vt_estimator_.Initialize(model_path);
+ if (ret != thermal::vtestimator::kVtEstimatorOk) {
+ std::cout << "Failed to Initialize estimator (ret: " << ret << ")\n";
+ return -1;
+ }
+
+ std::string json_doc;
+ if (!android::base::ReadFileToString(input_file, &json_doc)) {
+ LOG(ERROR) << "Failed to read JSON config from " << input_file;
+ return -1;
+ }
+ Json::Value root;
+ Json::CharReaderBuilder reader_builder;
+ std::unique_ptr<Json::CharReader> reader(reader_builder.newCharReader());
+ std::string errorMessage;
+
+    if (!reader->parse(json_doc.data(), json_doc.data() + json_doc.size(), &root, &errorMessage)) {
+ LOG(ERROR) << "Failed to parse JSON config: " << errorMessage;
+ return -1;
+ }
+
+ std::cout << "Number of testcases " << root.size() << std::endl;
+
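+    // Run the estimator on each testcase (skipping "Metadata") and collect
+    // per-entry outputs under a new "model_vt" member.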
+ for (auto const &testcase_name : root.getMemberNames()) {
+ if (testcase_name == "Metadata") {
+ continue;
+ }
+
+ Json::Value testcase = root[testcase_name];
+ Json::Value model_vt_outputs;
+ int loop_count = testcase[input_combination[0]].size();
+
+ std::cout << "tc: " << testcase_name << " count: " << loop_count << std::endl;
+ for (int i = 0; i < loop_count; ++i) {
+ std::vector<float> model_inputs;
+ float model_output;
+ int num_inputs = input_combination.size();
+
+ for (int j = 0; j < num_inputs; ++j) {
+ std::string input_name = input_combination[j];
+ std::string value_str = testcase[input_name][std::to_string(i)].asString();
+
+ std::cout << "tc[" << testcase_name << "] entry[" << i << "] input[" << input_name
+ << "] value_str[" << value_str << "]\n";
+
+                float value;
+                if (!android::base::ParseFloat(value_str, &value)) {
+                    std::cout << "Failed to parse value_str : " << value_str << " to float\n";
+                    return -1;
+                }
+
+ model_inputs.push_back(value);
+ }
+
+ ret = vt_estimator_.Estimate(model_inputs, &model_output);
+ if (ret != thermal::vtestimator::kVtEstimatorOk) {
+ std::cout << "Failed to run estimator (ret: " << ret << ")\n";
+ return -1;
+ }
+
+ model_vt_outputs[std::to_string(i)] = std::to_string(model_output);
+ }
+
+ testcase["model_vt"] = model_vt_outputs;
+ root[testcase_name] = testcase;
+ std::cout << "completed testcase_name: " << testcase_name << std::endl;
+ }
+
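+    // Serialize the augmented JSON (input testcases plus "model_vt" results)
+    // to the output file.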
+ Json::StreamWriterBuilder writer_builder;
+ writer_builder["indentation"] = "";
+ std::unique_ptr<Json::StreamWriter> writer(writer_builder.newStreamWriter());
+ std::ofstream output_stream(output_file, std::ofstream::out);
+ writer->write(root, &output_stream);
+
+ return 0;
+}
+
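+// Example invocations (paths and values illustrative; the default model and
+// thermal config are used unless -p/-t are given):
+//   virtualtemp_estimator_test -m 0 -i "30.0 31.5 32.0 29.8"
+//   virtualtemp_estimator_test -m 1 -i /data/local/tmp/in.json -o /data/local/tmp/out.json
+//   virtualtemp_estimator_test -m 2 -c 1000 -d 1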
+static void print_usage() {
+ std::string message = "usage: \n";
+ message += "-m : input mode (";
+ message += "0: single inference ";
+ message += "1: json input file ";
+ message += "2: generate random inputs) \n";
+ message += "-p : path to model file \n";
+ message += "-t : path to thermal config file \n";
+ message += "-i : input samples (mode 0), path to input file (mode 1) \n";
+ message += "-o : output file (mode 1) \n";
+ message += "-d : delay between inferences in seconds (mode 2) \n";
+ message += "-c : inference count (mode 2)";
+
+ std::cout << message << std::endl;
+}
+
+int main(int argc, char *argv[]) {
+ int c, mode = -1;
+ char *input = nullptr, *output = nullptr;
+ char *model_path = nullptr, *thermal_config_path = nullptr;
+ int min_inference_count = -1;
+ int inference_delay_sec = 0;
+
+ while ((c = getopt(argc, argv, "hm:p:i:c:o:d:t:")) != -1) switch (c) {
+ case 'm':
+ mode = atoi(optarg);
+ std::cout << "mode: " << mode << std::endl;
+ break;
+ case 'p':
+ model_path = optarg;
+ std::cout << "model_path: " << model_path << std::endl;
+ break;
+ case 't':
+ thermal_config_path = optarg;
+ std::cout << "thermal_config_path: " << thermal_config_path << std::endl;
+ break;
+ case 'i':
+ input = optarg;
+ std::cout << "input: " << input << std::endl;
+ break;
+ case 'o':
+ output = optarg;
+ std::cout << "output: " << output << std::endl;
+ break;
+ case 'c':
+ min_inference_count = atoi(optarg);
+ std::cout << "min_inference_count: " << min_inference_count << std::endl;
+ break;
+ case 'd':
+ inference_delay_sec = atoi(optarg);
+ std::cout << "inference_delay_sec : " << inference_delay_sec << std::endl;
+ break;
+ case 'h':
+ print_usage();
+ return 0;
+ default:
+            std::cout << "unsupported option " << static_cast<char>(c) << std::endl;
+ abort();
+ }
+
+ if (!model_path) {
+ model_path = default_model;
+ std::cout << "Using default model_path: " << model_path << std::endl;
+ }
+
+ if (!thermal_config_path) {
+ thermal_config_path = default_thermal_config;
+ std::cout << "Using default thermal config: " << thermal_config_path << std::endl;
+ }
+
+ int ret = -1;
+ switch (mode) {
+ case 0:
+ ret = run_single_inference(model_path, input);
+ break;
+ case 1:
+ ret = run_batch_process(model_path, thermal_config_path, input, output);
+ break;
+ case 2:
+ ret = run_random_input_inference(model_path, thermal_config_path, min_inference_count,
+ inference_delay_sec);
+ break;
+ default:
+ std::cout << "unsupported mode" << std::endl;
+ print_usage();
+ break;
+ }
+
+ std::cout << "Exiting" << std::endl;
+ fflush(stdout);
+ return ret;
+}