diff --git a/examples/androidtests/video_quality_loopback_test.py b/examples/androidtests/video_quality_loopback_test.py
index bd4e94e4f3..a47fbe7e25 100755
--- a/examples/androidtests/video_quality_loopback_test.py
+++ b/examples/androidtests/video_quality_loopback_test.py
@@ -86,6 +86,8 @@ def _ParseArgs():
   parser.add_argument('--temp_dir',
       help='A temporary directory to put the output.')
   parser.add_argument('--adb-path', help='Path to adb binary.', default='adb')
+  parser.add_argument('--chartjson-result-file',
+      help='Where to store perf results in chartjson format.', default=None)
 
   args = parser.parse_args()
   return args
@@ -148,7 +150,8 @@ def SetUpTools(android_device, temp_dir, processes):
       '8089']))
 
 
-def RunTest(android_device, adb_path, build_dir, temp_dir):
+def RunTest(android_device, adb_path, build_dir, temp_dir,
+            chartjson_result_file):
   ffmpeg_path = os.path.join(TOOLCHAIN_DIR, 'ffmpeg')
   def ConvertVideo(input_video, output_video):
     _RunCommand([ffmpeg_path, '-y', '-i', input_video, output_video])
@@ -181,8 +184,7 @@ def RunTest(android_device, adb_path, build_dir, temp_dir):
 
   stats_file_ref = os.path.join(temp_dir, 'stats_ref.txt')
   stats_file_test = os.path.join(temp_dir, 'stats_test.txt')
-  _RunCommand([
-      sys.executable, compare_script,
+  args = [
       '--ref_video', reference_video_yuv,
       '--test_video', test_video_yuv,
       '--yuv_frame_width', '640',
@@ -191,7 +193,12 @@ def RunTest(android_device, adb_path, build_dir, temp_dir):
       '--stats_file_test', stats_file_test,
       '--frame_analyzer', frame_analyzer,
       '--ffmpeg_path', ffmpeg_path,
-      '--zxing_path', zxing_path])
+      '--zxing_path', zxing_path,
+  ]
+  if chartjson_result_file:
+    args.extend(['--chartjson_result_file', chartjson_result_file])
+
+  _RunCommand([sys.executable, compare_script] + args)
 
 
 def main():
@@ -208,7 +215,8 @@ def main():
   try:
     android_device = SelectAndroidDevice(adb_path)
     SetUpTools(android_device, temp_dir, processes)
-    RunTest(android_device, adb_path, build_dir, temp_dir)
+    RunTest(android_device, adb_path, build_dir, temp_dir,
+            args.chartjson_result_file)
   finally:
     for process in processes:
       if process:
diff --git a/rtc_tools/BUILD.gn b/rtc_tools/BUILD.gn
index 0b1c631ede..976fa06838 100644
--- a/rtc_tools/BUILD.gn
+++ b/rtc_tools/BUILD.gn
@@ -66,6 +66,7 @@ rtc_static_library("video_quality_analysis") {
   ]
   deps = [
     "../common_video",
+    "../test:perf_test",
    "//third_party/libyuv",
   ]
 }
@@ -79,6 +80,7 @@ rtc_executable("frame_analyzer") {
   deps = [
     ":command_line_parser",
     ":video_quality_analysis",
+    "../test:perf_test",
     "//build/win:default_exe_manifest",
   ]
 }
diff --git a/rtc_tools/compare_videos.py b/rtc_tools/compare_videos.py
index ce5bff30c1..40a2aabb32 100755
--- a/rtc_tools/compare_videos.py
+++ b/rtc_tools/compare_videos.py
@@ -62,6 +62,8 @@ def _ParseArgs():
                     help='Width of the YUV file\'s frames. Default: %default')
   parser.add_option('--yuv_frame_height', type='int', default=480,
                     help='Height of the YUV file\'s frames. Default: %default')
+  parser.add_option('--chartjson_result_file', type='str', default=None,
+                    help='Where to store perf results in chartjson format.')
   options, _ = parser.parse_args()
 
   if options.stats_file:
@@ -161,6 +163,8 @@ def main():
     '--width=%d' % options.yuv_frame_width,
     '--height=%d' % options.yuv_frame_height,
   ]
+  if options.chartjson_result_file:
+    cmd.append('--chartjson_result_file=%s' % options.chartjson_result_file)
   frame_analyzer = subprocess.Popen(cmd, stdin=_DevNull(),
                                     stdout=sys.stdout, stderr=sys.stderr)
   frame_analyzer.wait()
diff --git a/rtc_tools/frame_analyzer/frame_analyzer.cc b/rtc_tools/frame_analyzer/frame_analyzer.cc
index d7851e6b12..b3d9b94b0a 100644
--- a/rtc_tools/frame_analyzer/frame_analyzer.cc
+++ b/rtc_tools/frame_analyzer/frame_analyzer.cc
@@ -17,6 +17,7 @@
 
 #include "rtc_tools/frame_analyzer/video_quality_analysis.h"
 #include "rtc_tools/simple_command_line_parser.h"
+#include "test/testsupport/perf_test.h"
 
 /*
  * A command line tool running PSNR and SSIM on a reference video and a test
@@ -62,7 +63,10 @@ int main(int argc, char* argv[]) {
       " - reference_file(string): The reference YUV file to compare against."
       " Default: ref.yuv\n"
       " - test_file(string): The test YUV file to run the analysis for."
-      " Default: test_file.yuv\n";
+      " Default: test_file.yuv\n"
+      " - chartjson_result_file: Where to store perf result in chartjson"
+      " format. If not present, no perf result will be stored."
+      " Default: None\n";
 
   webrtc::test::CommandLineParser parser;
 
@@ -77,6 +81,7 @@ int main(int argc, char* argv[]) {
   parser.SetFlag("stats_file_test", "stats_test.txt");
   parser.SetFlag("reference_file", "ref.yuv");
   parser.SetFlag("test_file", "test.yuv");
+  parser.SetFlag("chartjson_result_file", "");
   parser.SetFlag("help", "false");
 
   parser.ProcessFlags();
@@ -101,11 +106,16 @@ int main(int argc, char* argv[]) {
                             parser.GetFlag("stats_file_ref").c_str(),
                             parser.GetFlag("stats_file_test").c_str(), width,
                             height, &results);
+  webrtc::test::GetMaxRepeatedAndSkippedFrames(
+      parser.GetFlag("stats_file_ref"), parser.GetFlag("stats_file_test"),
+      &results);
+
+  webrtc::test::PrintAnalysisResults(parser.GetFlag("label"), &results);
+
+  std::string chartjson_result_file = parser.GetFlag("chartjson_result_file");
+  if (!chartjson_result_file.empty()) {
+    webrtc::test::WritePerfResults(chartjson_result_file);
+  }
-  std::string label = parser.GetFlag("label");
-  webrtc::test::PrintAnalysisResults(label, &results);
-  webrtc::test::PrintMaxRepeatedAndSkippedFrames(
-      label, parser.GetFlag("stats_file_ref"),
-      parser.GetFlag("stats_file_test"));
 
   return 0;
 }
diff --git a/rtc_tools/frame_analyzer/video_quality_analysis.cc b/rtc_tools/frame_analyzer/video_quality_analysis.cc
index 7dce734498..502ac82601 100644
--- a/rtc_tools/frame_analyzer/video_quality_analysis.cc
+++ b/rtc_tools/frame_analyzer/video_quality_analysis.cc
@@ -18,6 +18,8 @@
 #include
 #include
 
+#include "test/testsupport/perf_test.h"
+
 #define STATS_LINE_LENGTH 32
 #define Y4M_FILE_HEADER_MAX_SIZE 200
 #define Y4M_FRAME_DELIMITER "FRAME"
@@ -318,13 +320,6 @@ void RunAnalysis(const char* reference_file_name,
   delete[] reference_frame;
 }
 
-void PrintMaxRepeatedAndSkippedFrames(const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name) {
-  PrintMaxRepeatedAndSkippedFrames(stdout, label, stats_file_ref_name,
-                                   stats_file_test_name);
-}
-
 std::vector<std::pair<int, int> > CalculateFrameClusters(
     FILE* file,
     int* num_decode_errors) {
@@ -359,10 +354,9 @@ std::vector<std::pair<int, int> > CalculateFrameClusters(
   return frame_cnt;
 }
 
-void PrintMaxRepeatedAndSkippedFrames(FILE* output,
-                                      const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name) {
+void GetMaxRepeatedAndSkippedFrames(const std::string& stats_file_ref_name,
+                                    const std::string& stats_file_test_name,
+                                    ResultsContainer* results) {
   FILE* stats_file_ref = fopen(stats_file_ref_name.c_str(), "r");
   FILE* stats_file_test = fopen(stats_file_test_name.c_str(), "r");
   if (stats_file_ref == NULL) {
@@ -460,22 +454,17 @@ void PrintMaxRepeatedAndSkippedFrames(FILE* output,
       }
       continue;
     }
-    fprintf(output,
+    fprintf(stdout,
         "Found barcode %d in test video, which is not in reference video\n",
         it_test->first);
     break;
   }
 
-  fprintf(output, "RESULT Max_repeated: %s= %d\n", label.c_str(),
-          max_repeated_frames);
-  fprintf(output, "RESULT Max_skipped: %s= %d\n", label.c_str(),
-          max_skipped_frames);
-  fprintf(output, "RESULT Total_skipped: %s= %d\n", label.c_str(),
-          total_skipped_frames);
-  fprintf(output, "RESULT Decode_errors_reference: %s= %d\n", label.c_str(),
-          decode_errors_ref);
-  fprintf(output, "RESULT Decode_errors_test: %s= %d\n", label.c_str(),
-          decode_errors_test);
+  results->max_repeated_frames = max_repeated_frames;
+  results->max_skipped_frames = max_skipped_frames;
+  results->total_skipped_frames = total_skipped_frames;
+  results->decode_errors_ref = decode_errors_ref;
+  results->decode_errors_test = decode_errors_test;
 }
 
 void PrintAnalysisResults(const std::string& label, ResultsContainer* results) {
@@ -484,26 +473,32 @@ void PrintAnalysisResults(FILE* output, const std::string& label,
                           ResultsContainer* results) {
-  std::vector<AnalysisResult>::iterator iter;
-
-  fprintf(output, "RESULT Unique_frames_count: %s= %u score\n", label.c_str(),
-          static_cast<unsigned int>(results->frames.size()));
+  SetPerfResultsOutput(output);
 
   if (results->frames.size() > 0u) {
-    fprintf(output, "RESULT PSNR: %s= [", label.c_str());
-    for (iter = results->frames.begin(); iter != results->frames.end() - 1;
-         ++iter) {
-      fprintf(output, "%f,", iter->psnr_value);
-    }
-    fprintf(output, "%f] dB\n", iter->psnr_value);
+    PrintResult("Unique_frames_count", "", label, results->frames.size(),
+                "score", false);
 
-    fprintf(output, "RESULT SSIM: %s= [", label.c_str());
-    for (iter = results->frames.begin(); iter != results->frames.end() - 1;
-         ++iter) {
-      fprintf(output, "%f,", iter->ssim_value);
+    std::vector<double> psnr_values;
+    std::vector<double> ssim_values;
+    for (const auto& frame : results->frames) {
+      psnr_values.push_back(frame.psnr_value);
+      ssim_values.push_back(frame.ssim_value);
     }
-    fprintf(output, "%f] score\n", iter->ssim_value);
+
+    PrintResultList("PSNR", "", label, psnr_values, "dB", false);
+    PrintResultList("SSIM", "", label, ssim_values, "score", false);
   }
+
+  PrintResult("Max_repeated", "", label, results->max_repeated_frames, "",
+              false);
+  PrintResult("Max_skipped", "", label, results->max_skipped_frames, "", false);
+  PrintResult("Total_skipped", "", label, results->total_skipped_frames, "",
+              false);
+  PrintResult("Decode_errors_reference", "", label, results->decode_errors_ref,
+              "", false);
+  PrintResult("Decode_errors_test", "", label, results->decode_errors_test, "",
+              false);
 }
 
 }  // namespace test
diff --git a/rtc_tools/frame_analyzer/video_quality_analysis.h b/rtc_tools/frame_analyzer/video_quality_analysis.h
index f6651f60a1..92228fcd08 100644
--- a/rtc_tools/frame_analyzer/video_quality_analysis.h
+++ b/rtc_tools/frame_analyzer/video_quality_analysis.h
@@ -37,6 +37,11 @@ struct ResultsContainer {
   ~ResultsContainer();
 
   std::vector<AnalysisResult> frames;
+  int max_repeated_frames;
+  int max_skipped_frames;
+  int total_skipped_frames;
+  int decode_errors_ref;
+  int decode_errors_test;
 };
 
 enum VideoAnalysisMetricsType {kPSNR, kSSIM};
@@ -102,15 +107,9 @@ std::vector<std::pair<int, int> > CalculateFrameClusters(
 
 // Calculates max repeated and skipped frames and prints them to stdout in a
 // format that is compatible with Chromium performance numbers.
-void PrintMaxRepeatedAndSkippedFrames(const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name);
-
-// Similar to the above, but will print to the specified file handle.
-void PrintMaxRepeatedAndSkippedFrames(FILE* output,
-                                      const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name);
+void GetMaxRepeatedAndSkippedFrames(const std::string& stats_file_ref_name,
+                                    const std::string& stats_file_test_name,
+                                    ResultsContainer* results);
 
 // Gets the next line from an open stats file.
 bool GetNextStatsLine(FILE* stats_file, char* line);
diff --git a/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc b/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc
index 0b1258a23e..6143c314d5 100644
--- a/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc
+++ b/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc
@@ -86,24 +86,25 @@ TEST_F(VideoQualityAnalysisTest, PrintAnalysisResultsThreeFrames) {
   PrintAnalysisResults(logfile_, "ThreeFrames", &result);
 }
 
-TEST_F(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesInvalidFile) {
+TEST_F(VideoQualityAnalysisTest, GetMaxRepeatedAndSkippedFramesInvalidFile) {
+  ResultsContainer result;
   remove(stats_filename_.c_str());
-  PrintMaxRepeatedAndSkippedFrames(logfile_, "NonExistingStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
 }
 
 TEST_F(VideoQualityAnalysisTest,
-       PrintMaxRepeatedAndSkippedFramesEmptyStatsFile) {
+       GetMaxRepeatedAndSkippedFramesEmptyStatsFile) {
+  ResultsContainer result;
   std::ofstream stats_file;
   stats_file.open(stats_filename_ref_.c_str());
   stats_file.close();
   stats_file.open(stats_filename_.c_str());
   stats_file.close();
-  PrintMaxRepeatedAndSkippedFrames(logfile_, "EmptyStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
 }
 
-TEST_F(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesNormalFile) {
+TEST_F(VideoQualityAnalysisTest, GetMaxRepeatedAndSkippedFramesNormalFile) {
+  ResultsContainer result;
   std::ofstream stats_file;
 
   stats_file.open(stats_filename_ref_.c_str());
@@ -123,8 +124,7 @@ TEST_F(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesNormalFile) {
   stats_file << "frame_0004 0106\n";
   stats_file.close();
 
-  PrintMaxRepeatedAndSkippedFrames(logfile_, "NormalStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
 }
 
 namespace {
@@ -143,6 +143,7 @@ void VerifyLogOutput(const std::string& log_filename,
 
 TEST_F(VideoQualityAnalysisTest,
        PrintMaxRepeatedAndSkippedFramesSkippedFrames) {
+  ResultsContainer result;
   std::ofstream stats_file;
 
   std::string log_filename =
@@ -171,21 +172,22 @@ TEST_F(VideoQualityAnalysisTest,
   stats_file << "frame_0006 0112\n";
   stats_file.close();
 
-  PrintMaxRepeatedAndSkippedFrames(logfile, "NormalStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
+  PrintAnalysisResults(logfile, "NormalStatsFile", &result);
   ASSERT_EQ(0, fclose(logfile));
 
   std::vector<std::string> expected_out = {
-      "RESULT Max_repeated: NormalStatsFile= 2",
-      "RESULT Max_skipped: NormalStatsFile= 2",
-      "RESULT Total_skipped: NormalStatsFile= 3",
-      "RESULT Decode_errors_reference: NormalStatsFile= 0",
-      "RESULT Decode_errors_test: NormalStatsFile= 0"};
+      "RESULT Max_repeated: NormalStatsFile= 2 ",
+      "RESULT Max_skipped: NormalStatsFile= 2 ",
+      "RESULT Total_skipped: NormalStatsFile= 3 ",
+      "RESULT Decode_errors_reference: NormalStatsFile= 0 ",
+      "RESULT Decode_errors_test: NormalStatsFile= 0 "};
   VerifyLogOutput(log_filename, expected_out);
 }
 
 TEST_F(VideoQualityAnalysisTest,
        PrintMaxRepeatedAndSkippedFramesDecodeErrorInTest) {
+  ResultsContainer result;
   std::ofstream stats_file;
 
   std::string log_filename =
@@ -214,16 +216,16 @@ TEST_F(VideoQualityAnalysisTest,
   stats_file << "frame_0006 0110\n";
   stats_file.close();
 
-  PrintMaxRepeatedAndSkippedFrames(logfile, "NormalStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
+  PrintAnalysisResults(logfile, "NormalStatsFile", &result);
   ASSERT_EQ(0, fclose(logfile));
 
   std::vector<std::string> expected_out = {
-      "RESULT Max_repeated: NormalStatsFile= 1",
-      "RESULT Max_skipped: NormalStatsFile= 0",
-      "RESULT Total_skipped: NormalStatsFile= 0",
-      "RESULT Decode_errors_reference: NormalStatsFile= 0",
-      "RESULT Decode_errors_test: NormalStatsFile= 3"};
+      "RESULT Max_repeated: NormalStatsFile= 1 ",
+      "RESULT Max_skipped: NormalStatsFile= 0 ",
+      "RESULT Total_skipped: NormalStatsFile= 0 ",
+      "RESULT Decode_errors_reference: NormalStatsFile= 0 ",
+      "RESULT Decode_errors_test: NormalStatsFile= 3 "};
   VerifyLogOutput(log_filename, expected_out);
 }
 
diff --git a/test/BUILD.gn b/test/BUILD.gn
index 5f6150ec55..bfc521de1f 100644
--- a/test/BUILD.gn
+++ b/test/BUILD.gn
@@ -122,7 +122,6 @@ rtc_source_set("field_trial") {
 
 rtc_source_set("perf_test") {
   visibility = [ "*" ]
-  testonly = true
   sources = [
     "testsupport/perf_test.cc",
     "testsupport/perf_test.h",
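
A minimal sketch of how the new flag can be exercised end to end once this lands, outside the loopback test. The output path, the frame_analyzer build path, and the trailing JSON dump are illustrative assumptions only, and the remaining compare_videos.py flags (stats files, ffmpeg/zxing paths) are omitted for brevity:

# Illustrative driver only: invokes the updated compare_videos.py with the new
# --chartjson_result_file flag and prints whatever the frame analyzer wrote.
# All paths below are placeholders, not values taken from this change.
import json
import subprocess
import sys

CHARTJSON_PATH = '/tmp/webrtc_perf_results.json'  # assumed output location

cmd = [
    sys.executable, 'rtc_tools/compare_videos.py',
    '--ref_video', 'ref.yuv',
    '--test_video', 'test.yuv',
    '--yuv_frame_width', '640',
    '--yuv_frame_height', '480',
    '--frame_analyzer', 'out/Default/frame_analyzer',  # assumed build output
    # Stats files and ffmpeg/zxing paths omitted here for brevity.
    '--chartjson_result_file', CHARTJSON_PATH,
]
subprocess.check_call(cmd)

# The chartjson file is only produced when the flag is passed; its exact
# schema is whatever test/testsupport/perf_test.h emits via WritePerfResults.
with open(CHARTJSON_PATH) as f:
    print(json.dumps(json.load(f), indent=2))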