#!/bin/bash
# perf script task-analyzer tests (exclusive)
# SPDX-License-Identifier: GPL-2.0
# Scratch directory for per-test command output; removed by cleanup().
tmpdir=$(mktemp -d /tmp/perf-script-task-analyzer-XXXXX)
# Overall exit status of this test script; report() latches it to 1
# on the first failing check.
err=0
# set PERF_EXEC_PATH to find scripts in the source directory
perfdir=$(dirname "$0")/../..
if [ -e "$perfdir/scripts/python/Perf-Trace-Util" ]; then
export PERF_EXEC_PATH=$perfdir
fi
# Disable lsan to avoid warnings about python memory leaks.
export ASAN_OPTIONS=detect_leaks=0
cleanup() {
  # Drop every artifact a test run can leave behind: the perf.data
  # pair in the CWD, the two CSV exports, and the scratch directory.
  rm -f perf.data perf.data.old csv csvsummary
  rm -rf "$tmpdir"
  # Restore default trap handling so a later exit is not re-handled.
  trap - exit term int
}
# Abnormal-termination handler: tidy up, then report failure (1).
trap_cleanup() {
  cleanup
  exit 1
}
# Run it on normal exit as well as on TERM/INT so no artifacts survive.
trap trap_cleanup EXIT TERM INT
report() {
  # Emit one PASS/FAIL line for test $2. $1 is the check status
  # (0 = pass) and $3 an optional error detail shown on failure.
  # Any failure latches the script-global err to 1.
  local status="$1" name="$2" detail="$3"
  if [ "$status" = 0 ]; then
    printf 'PASS: "%s"\n' "$name"
  else
    printf 'FAIL: "%s" Error message: "%s"\n' "$name" "$detail"
    err=1
  fi
}
check_exec_0() {
  # Must be invoked directly after the command under test: on entry
  # $? still holds that command's exit status, which we snapshot.
  local rc=$?
  [ "$rc" -eq 0 ] || report 1 "invocation of $1 command failed"
}
find_str_or_fail() {
  # grep file $2 for pattern $1 and report the outcome under the
  # test name passed in $3.
  if grep -q "$1" "$2"; then
    report 0 "$3"
  else
    report 1 "$3" "Failed to find required string:'${1}'."
  fi
}
# check if perf is compiled with libtraceevent support
skip_no_probe_record_support() {
  # Returns 0 when the feature is available, 2 (the perf-test skip
  # code) otherwise.
  if perf check feature -q libtraceevent; then
    return 0
  fi
  return 2
}
prepare_perf_data() {
  # 1s should be sufficient to catch at least some switches
  perf record -e sched:sched_switch -a -- sleep 1 > /dev/null 2>&1
  # Recording must have produced perf.data in the CWD to proceed.
  [ -e perf.data ] && return 0
  printf "FAIL: perf record failed to create \"perf.data\" \n"
  return 1
}
# check standard invocation with no arguments
test_basic() {
# Plain run must print the table, whose header includes "Comm".
# NB: check_exec_0 reads $? and must directly follow the perf command.
out="$tmpdir/perf.out"
perf script report task-analyzer > "$out"
check_exec_0 "perf script report task-analyzer"
find_str_or_fail "Comm" "$out" "${FUNCNAME[0]}"
}
# Nanosecond timestamps plus renaming of tid 0; output must still be
# the normal table (header contains "Comm").
# NB: check_exec_0 reads $? and must directly follow the perf command.
test_ns_rename(){
out="$tmpdir/perf.out"
perf script report task-analyzer --ns --rename-comms-by-tids 0:random > "$out"
check_exec_0 "perf script report task-analyzer --ns --rename-comms-by-tids 0:random"
find_str_or_fail "Comm" "$out" "${FUNCNAME[0]}"
}
# Millisecond timestamps combined with task filtering/highlighting on
# "perf"; output must still be the normal table (header has "Comm").
# NB: check_exec_0 reads $? and must directly follow the perf command.
test_ms_filtertasks_highlight(){
out="$tmpdir/perf.out"
perf script report task-analyzer --ms --filter-tasks perf --highlight-tasks perf \
> "$out"
check_exec_0 "perf script report task-analyzer --ms --filter-tasks perf --highlight-tasks perf"
find_str_or_fail "Comm" "$out" "${FUNCNAME[0]}"
}
# --extended-times adds extra timing columns (e.g. "Out-Out"), here
# combined with an upper time limit and a task restriction.
# NB: check_exec_0 reads $? and must directly follow the perf command.
test_extended_times_timelimit_limittasks() {
out="$tmpdir/perf.out"
perf script report task-analyzer --extended-times --time-limit :99999 \
--limit-to-tasks perf > "$out"
check_exec_0 "perf script report task-analyzer --extended-times --time-limit :99999 --limit-to-tasks perf"
find_str_or_fail "Out-Out" "$out" "${FUNCNAME[0]}"
}
# --summary must append a section whose output contains "Summary".
# NB: check_exec_0 reads $? and must directly follow the perf command.
test_summary() {
out="$tmpdir/perf.out"
perf script report task-analyzer --summary > "$out"
check_exec_0 "perf script report task-analyzer --summary"
find_str_or_fail "Summary" "$out" "${FUNCNAME[0]}"
}
# --summary-extended must add the "Inter Task Times" section.
# NB: check_exec_0 reads $? and must directly follow the perf command.
test_summaryextended() {
out="$tmpdir/perf.out"
perf script report task-analyzer --summary-extended > "$out"
check_exec_0 "perf script report task-analyzer --summary-extended"
find_str_or_fail "Inter Task Times" "$out" "${FUNCNAME[0]}"
}
# --summary-only suppresses the per-event table but must still print
# the "Summary" section.
# NB: check_exec_0 reads $? and must directly follow the perf command.
test_summaryonly() {
out="$tmpdir/perf.out"
perf script report task-analyzer --summary-only > "$out"
check_exec_0 "perf script report task-analyzer --summary-only"
find_str_or_fail "Summary" "$out" "${FUNCNAME[0]}"
}
# Combination run: extended time columns ("Out-Out"), a summary
# section ("Summary") and nanosecond resolution all at once.
# NB: check_exec_0 reads $? and must directly follow the perf command.
test_extended_times_summary_ns() {
out="$tmpdir/perf.out"
perf script report task-analyzer --extended-times --summary --ns > "$out"
check_exec_0 "perf script report task-analyzer --extended-times --summary --ns"
find_str_or_fail "Out-Out" "$out" "${FUNCNAME[0]}"
find_str_or_fail "Summary" "$out" "${FUNCNAME[0]}"
}
# --csv writes a semicolon-separated table into file "csv"; verify
# the header by looking for "Comm;".
# NB: check_exec_0 reads $? and must directly follow the perf command.
test_csv() {
perf script report task-analyzer --csv csv > /dev/null
check_exec_0 "perf script report task-analyzer --csv csv"
find_str_or_fail "Comm;" csv "${FUNCNAME[0]}"
}
# CSV export with --extended-times must include the extra "Out-Out;"
# column in the file "csv".
# NB: check_exec_0 reads $? and must directly follow the perf command.
test_csv_extended_times() {
perf script report task-analyzer --csv csv --extended-times > /dev/null
check_exec_0 "perf script report task-analyzer --csv csv --extended-times"
find_str_or_fail "Out-Out;" csv "${FUNCNAME[0]}"
}
# --csv-summary writes the summary as CSV into "csvsummary"; verify
# the header by looking for "Comm;".
# NB: check_exec_0 reads $? and must directly follow the perf command.
test_csvsummary() {
perf script report task-analyzer --csv-summary csvsummary > /dev/null
check_exec_0 "perf script report task-analyzer --csv-summary csvsummary"
find_str_or_fail "Comm;" csvsummary "${FUNCNAME[0]}"
}
# CSV summary with --summary-extended must include the extra
# "Out-Out;" column in "csvsummary".
# NB: check_exec_0 reads $? and must directly follow the perf command.
test_csvsummary_extended() {
perf script report task-analyzer --csv-summary csvsummary --summary-extended \
>/dev/null
check_exec_0 "perf script report task-analyzer --csv-summary csvsummary --summary-extended"
find_str_or_fail "Out-Out;" csvsummary "${FUNCNAME[0]}"
}
# Skip (status 2) when perf lacks libtraceevent support: recording the
# sched:sched_switch tracepoint would be impossible.
skip_no_probe_record_support
err=$?
if [ $err -ne 0 ]; then
  echo "WARN: Skipping tests. No libtraceevent support"
  cleanup
  exit $err
fi
prepare_perf_data
# Run every scenario in its original order; each one prints its own
# PASS/FAIL line via report() and latches $err on failure.
for scenario in \
  test_basic \
  test_ns_rename \
  test_ms_filtertasks_highlight \
  test_extended_times_timelimit_limittasks \
  test_summary \
  test_summaryextended \
  test_summaryonly \
  test_extended_times_summary_ns \
  test_csv \
  test_csvsummary \
  test_csv_extended_times \
  test_csvsummary_extended
do
  "$scenario"
done
cleanup
exit $err