aboutsummaryrefslogtreecommitdiff
path: root/benchmarks/perf_profile_benchmarks_target.sh
blob: e4ab8bda7ff3b148a6f78c9ead582e58ab0b91c2 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
#!/bin/bash
#
# Copyright (c) 2020, Linaro Ltd.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This is an executable script for gathering benchmark performance data on Android devices using
# perf and adb. The performance data is gathered on the device using perf record and analyzed
# locally in order to generate annotations of the hotspots in the perf data.

# Directory containing this script; used to locate the shared utility libraries.
readonly local_path=$(dirname "$0")
source "${local_path}/../utils/utils.sh"
source "${local_path}/../utils/utils_test.sh"
source "${local_path}/../utils/utils_android.sh"
source "${local_path}/../utils/utils_android_root.sh"
source "${local_path}/../utils/utils_benchmarks.sh"

# Label passed to start_test/end_test to time the whole run.
readonly timer_name="Perf Target Benchmarks"
readonly LOG_DIRECTORY="$(get_workspace)"
readonly timestamp=$(date +%Y%m%d-%H%M)
# Per-run output directory for the recorded perf data (one subdir per run).
readonly perf_out="${LOG_DIRECTORY}/perf-out/${timestamp}"
readonly target_virtual_work_dir="$(get_target_virtual_work_dir)"
readonly bench_name="bench"
# Host-side benchmark APK and its destination path inside the target chroot.
readonly bench_apk="$(get_workspace)/benchmarks/out/build/${bench_name}.apk"
readonly target_bench_apk="${target_virtual_work_dir}/${bench_name}.apk"
readonly benchmarks_resources_file="$(get_workspace)/benchmarks/out/build/resources.tar"
readonly simpleperf_home="$(get_simpleperf_home)"

# benchmarks: names of the benchmarks to run; filled in by parse_arguments.
# options: parsed command line options; defaults come from init_options.
declare -a benchmarks
declare -A options=()

# As profiling is run in the chroot mode, it has a virtual working directory.
# This is the actual physical working directory corresponding to the virtual
# directory. The actual physical working directory is needed for accessing files before and
# after profiling.
# It is initialized in the main function.
target_physical_work_dir=""

# The value of the property 'ro.product.device'.
target_device=""

# Validate the parsed option values. Each value is checked by its dedicated
# validator, which is expected to abort the script on an invalid value.
validate_options() {
  local -r selected_mode="${options[mode]}"
  validate_mode_option "${selected_mode}"

  local -r selected_cpu="${options[cpu]}"
  validate_cpu_option "${selected_cpu}"
}

# Print the help text line by line through the logging helper, then exit
# successfully.
usage() {
  local -a help_text=(
    "Usage: $0 [OPTION]... [BENCHMARK]..."
    "Use this script to gather performance data for benchmarks."
    "This script must be run from the root directory of the android source tree."
    "This script expects a device to be connected."
    "With no BENCHMARK, all available benchmarks are run."
    ""
    " -h|--help                       - help"
    " -v|--verbose                    - verbose"
    "-------------------------------------------"
    " --cpu <big|little>              - CPU cores."
    "                                  \"big\": Run benchmarks on big cores."
    "                                  \"little\": Run benchmarks on little cores."
    "                                   (default: little)"
    " --no-calibration                - Run benchmarks without calibration. Benchmarks are run"
    "                                   directly using their main() method."
    "                                   (default: use calibration)"
    " --single-event <pmu_event>      - Record only the specified event."
    "                                   (default: record all events)"
    "-------------------------------------------"
  )
  local line
  for line in "${help_text[@]}"; do
    log I "${line}"
  done
  exit 0
}

# Populate the global 'options' associative array with the default values;
# command line parsing may override them afterwards.
init_options() {
  options=(
    [cpu]="little"
    # TODO: Currently only the 64-bit mode is supported. The 32-bit mode needs
    # investigation whether it can be profiled in the chroot mode.
    [mode]="64"
    [calibration]="true"
    [single-event]=""
  )
}

# Parse command line options and the optional list of benchmark names.
# Globals:
#   options (written): parsed option values; made readonly and validated.
#   benchmarks (written): benchmarks to run; defaults to every benchmark
#     reported by "benchmarks/build.sh -l".
# Arguments:
#   All command line arguments ("$@").
parse_arguments() {
  init_options
  while [[ $# -gt 0 ]]; do
    local option=$1
    case "${option}" in
      -v|--verbose)
        enable_verbose
        ;;
      -h|--help)
        usage
        exit 0
        ;;
      --cpu|--single-event)
        shift
        # Guard against a trailing option with no value; "$1" would otherwise
        # be unbound (or silently empty) when passed to set_option.
        if [[ $# -eq 0 ]]; then
          log E "Missing value for option: ${option}"
          exit 1
        fi
        set_option "${option}" "$1"
        ;;
      --no-calibration)
        options["calibration"]="false"
        ;;
      --*)
        log E "Invalid option: $1"
        exit 1
        ;;
      *)
        # First non-option argument: the rest are benchmark names.
        break
        ;;
    esac
    shift
  done
  readonly options
  validate_options
  if [[ $# -gt 0 ]]; then
    # Quote "$@" so benchmark names are neither word-split nor glob-expanded.
    benchmarks=("$@")
  else
    # Default: run every available benchmark. The unquoted command
    # substitution intentionally word-splits the build script's output.
    local bench
    for bench in $("$(get_workspace)/benchmarks/build.sh" -l); do
      benchmarks+=("${bench}")
    done
  fi
  readonly benchmarks
  validate_benchmarks_names "${benchmarks[@]}"
}

# Get a list of hardware events supported by simpleperf on the target.
# The "events:" header line of the simpleperf output is filtered out so only
# event names are emitted.
get_simpleperf_hw_events() {
  local hw_event_listing
  hw_event_listing="$(adb_shell simpleperf list hw)"
  grep -v "events:" <<< "${hw_event_listing}"
}

# Run the calibration script for one benchmark on the selected CPU cores.
# Args:
#   $1: a benchmark name.
run_calibrate_benchmark() {
  local -r calibrate_script="$(get_workspace)/scripts/benchmarks/calibrate_benchmark_target.sh"
  "${calibrate_script}" --cpu "${options["cpu"]}" --classpath "${target_bench_apk}" "$1"
}

# Get the name of the file which is used to store a calibration result of the
# specified benchmark. The name is keyed on the md5sum of the benchmark source
# file, so editing a benchmark invalidates its cached calibration result.
# Args:
#   $1: a benchmark name.
# Outputs:
#   Writes the calibration result file path to stdout.
get_calibration_result_file_name() {
  local -r bench=$1
  # Split declaration from assignment so a failing command substitution is
  # not masked by the exit status of 'local'.
  local bench_file bench_file_md5sum
  bench_file="$(get_benchmark_file_name "${bench}")"
  bench_file_md5sum="$(md5sum "${bench_file}")"
  # md5sum prints "<hash>  <file>"; keep only the hash. Trimming at the first
  # whitespace is more robust than matching the trailing file name, which
  # would misbehave if the path contained glob metacharacters.
  bench_file_md5sum="${bench_file_md5sum%%[[:space:]]*}"
  echo "${perf_out}/../${target_device}/${options["cpu"]}/${bench_file_md5sum}"
}

# Calibrate the benchmark to get its specification.
# The result is stored in a file. If the file exists the result from it is
# reused, so repeated runs of an unmodified benchmark skip calibration.
# Args:
#   $1: a benchmark name.
calibrate_benchmark() {
  local -r bench="$1"

  # The name of the file which is used to store a calibration result of the
  # specified benchmark. Declaration and assignment are split so a failure of
  # the command substitution is not masked by 'local'.
  local calibration_result_file_name
  calibration_result_file_name="$(get_calibration_result_file_name "${bench}")"
  readonly calibration_result_file_name

  # Check whether there is a calibration result for the previous run.
  if [[ ! -f "${calibration_result_file_name}" ]]; then
    # No previous calibration result, need to run calibration.
    local calibration_run_output
    calibration_run_output="$(run_calibrate_benchmark "${bench}")"
    # Forward the calibration output to stdout for logging; printf is safe
    # for arbitrary data where echo would misparse a leading dash.
    printf '%s\n' "${calibration_run_output}"
    # Extract the benchmark specification: the text between
    # "CALIBRATION_RESULT:" and the following "INFO:" log line.
    local bench_specification="${calibration_run_output#INFO:*CALIBRATION_RESULT:}"
    bench_specification="${bench_specification%%[[:space:]]INFO:*}"
    mkdir -p "$(dirname "${calibration_result_file_name}")"
    # Write without a trailing newline so the file content is exactly the
    # specification string.
    printf '%s' "${bench_specification}" > "${calibration_result_file_name}"
  fi
}

# Convert a benchmark name to its class name by replacing every '/' with '.',
# e.g. "benchmarks/micro/Foo" -> "benchmarks.micro.Foo".
# Args:
#   $1: a benchmark name.
convert_benchmark_name_to_class_name() {
  local class_name="$1"
  class_name="${class_name//\//.}"
  echo "${class_name}"
}

# Record perf data for every selected benchmark and event.
# For each benchmark, each event in the event list is recorded into
# ${perf_out}/<bench_dir>/<event>.perf.data.
# Globals (read): options, benchmarks, perf_out, target_bench_apk.
profile_benchmarks() {
  local -r single_event="${options[single-event]}"

  # Construct the list of events that will be recorded.
  local event_list
  if [[ -n "${single_event}" ]]; then
    event_list="${single_event}"
  else
    event_list=$(get_simpleperf_hw_events)
  fi

  local bench
  for bench in "${benchmarks[@]}"; do
    # Flatten the benchmark path into a single directory name,
    # e.g. "benchmarks/micro/Foo" -> "micro_Foo".
    local bench_dir="${bench#benchmarks\/}"
    bench_dir="${bench_dir//\//_}"
    mkdir -p "${perf_out}/${bench_dir}"

    local bench_cmd
    if ${options["calibration"]}; then
      calibrate_benchmark "${bench}"

      # The name of the file which is used to store a calibration result of
      # the specified benchmark.
      # NOTE: these must not be 'local -r': this loop body runs once per
      # benchmark, and re-assigning a readonly local on the second iteration
      # would abort with "readonly variable".
      local calibration_result_file_name bench_specification
      calibration_result_file_name="$(get_calibration_result_file_name "${bench}")"
      bench_specification="$(cat "${calibration_result_file_name}")"
      bench_cmd="org.linaro.bench.RunBench --noverify ${bench_specification}"
    else
      bench_cmd=$(convert_benchmark_name_to_class_name "${bench}")
    fi

    log I "Benchmark command: ${bench_cmd}"

    local event
    for event in ${event_list}; do
      # NOTE(review): bench_cmd is a plain string, so "${bench_cmd[@]}"
      # expands to a single word; presumably sperf_target_dalvikvm.sh
      # re-splits it on the target — confirm before changing.
      safe "$(get_workspace)/scripts/perf/sperf_target_dalvikvm.sh" --cpu "${options["cpu"]}" \
        --classpath "${target_bench_apk}" --event "${event}" "${bench_cmd[@]}"
      # We don't move it because the last created perf.data will be used to
      # build the binary cache.
      cp "$(get_workspace)/perf.data" "${perf_out}/${bench_dir}/${event}.perf.data"
    done
    ln -s "${perf_out}/binary_cache" "${perf_out}/${bench_dir}/binary_cache"
  done
}

# Copy the benchmark APK, and the benchmark resources when present, to the
# target's working directory; the resources tarball is unpacked in place.
copy_benchmarks_to_target() {
  safe adb push "${bench_apk}" "${target_physical_work_dir}"

  # Resources are optional; nothing more to do when the tarball is absent.
  [[ -f "${benchmarks_resources_file}" ]] || return 0

  local -r resources_tar="$(basename "${benchmarks_resources_file}")"
  safe adb push "${benchmarks_resources_file}" "${target_physical_work_dir}"
  safe adb_shell "tar xfv ${target_physical_work_dir}/${resources_tar} \
    -C ${target_physical_work_dir}"
}

# As simpleperf does not depend on system libraries, the prebuilt binary from
# the AOSP master is pushed to the target's chroot.
copy_simpleperf_to_target() {
  local -r simpleperf_binary="${simpleperf_home}/scripts/bin/android/arm64/simpleperf"
  safe adb push "${simpleperf_binary}" "${ART_TEST_CHROOT}/system/bin"
}

# Compile benchmarks with dex2oat on a target.
# Args:
#   $1: the bitness (32 or 64) — currently unused; compilation is hard-coded
#       to arm64 (see the 32-bit TODO in init_options).
target_compile_benchmarks() {
  copy_benchmarks_to_target

  local -r dex2oat_cmd="$(get_target_dex2oat_cmd "arm64" "${target_bench_apk}") \
    $(get_target_art_test_bootclasspath_for_dex2oat)"

  # Wrapped in 'safe' for consistency with the other device commands, so a
  # failed directory creation aborts the run instead of surfacing later as a
  # confusing dex2oat error.
  safe adb_shell "mkdir -p ${target_physical_work_dir}/oat/arm64"
  safe adb_shell "chroot ${ART_TEST_CHROOT} ${dex2oat_cmd}"
}

# Build binary cache consisting of ART libraries and the compiled benchmarks
# OAT file. It is needed for generating reports. The finished cache is moved
# into the run's perf output directory.
build_binary_cache() {
  local -r binary_cache_dir="$(get_workspace)/binary_cache"

  # Start from a clean cache directory.
  if [[ -d "${binary_cache_dir}" ]]; then
    rm -rf "${binary_cache_dir}"
  fi
  safe mkdir -p "${binary_cache_dir}/system/bin"
  safe cp "${simpleperf_home}/scripts/bin/android/arm64/simpleperf" \
    "${binary_cache_dir}/system/bin"

  local -r target_physical_oat_dir="${target_physical_work_dir}/oat/arm64"
  local -r host_binary_cache_oat_dir="${binary_cache_dir}${target_virtual_work_dir}/oat/arm64"
  safe mkdir -p "${host_binary_cache_oat_dir}"

  # Pull the compiled benchmark artifacts from the device.
  local oat_ext
  for oat_ext in odex vdex art; do
    safe adb pull "${target_physical_oat_dir}/${bench_name}.${oat_ext}" \
      "${host_binary_cache_oat_dir}"
  done

  safe "${simpleperf_home}/scripts/binary_cache_builder.py" --log info -lib "${ANDROID_PRODUCT_OUT}"
  safe mv "${binary_cache_dir}/" "${perf_out}"
}

# Entry point: parse arguments, prepare the device and build environment, then
# for each enabled bitness build, compile, profile the benchmarks and
# post-process the perf data.
main() {
  parse_arguments "$@"
  dump_options

  # This script sources its own Android environment below
  # (set_environment_target / source_android_environment_default), so it must
  # start from a clean shell.
  if android_build_already_setup; then
    log E "This test does not support environment targets. Please re-run in a clean environment."
    exit 1
  fi

  start_test "${timer_name}"

  set_environment_target
  prepare_adb
  target_device="$(retrieve_target_product_name)"
  readonly target_device
  select_android_target "arm" "${target_device}"

  # Resolve the chroot's virtual working directory into the physical path
  # used by adb push/pull.
  target_physical_work_dir="$(get_target_physical_dir "${target_virtual_work_dir}")"
  readonly target_physical_work_dir

  # NOTE: options[mode] is currently fixed to "64" (see init_options), so the
  # 32-bit iteration is always skipped.
  local -r mode="${options[mode]}"
  local bits
  for bits in 32 64; do
    if [[ "${mode}" != "all" && "${mode}" != "${bits}" ]]; then
      log I "Skipping ${bits}bit benchmarks."
      continue
    fi
    log I "Starting ${bits}bit benchmarks."

    source_android_environment_default
    build_target "${bits}"
    buildbot_device_prepare "${bits}"
    build_benchmarks "${benchmarks[@]}"
    target_compile_benchmarks "${bits}"
    copy_simpleperf_to_target
    profile_benchmarks
    build_binary_cache
  done

  echo ""
  log I "Perf data of your run is in: ${perf_out}"
  echo ""

  end_test "${timer_name}"
}

main "$@"