Migrate video_quality_analysis to the new perf metrics logger API
Bug: b/246095034
Change-Id: Ibdaae04ccd874024ce8db5c1f015479c713264a0
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/276624
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Tomas Gunnarsson <tommi@webrtc.org>
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#38198}
This commit is contained in:
Parent: da6297dc53
Commit: 5e7e411119

5 changed files with 188 additions and 117 deletions
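At a glance, the change swaps the stdout-based `PrintResult`/`PrintResultList` helpers from `test/testsupport/perf_test.h` for the structured `MetricsLogger` API from `api/test/metrics/`. A minimal sketch of the before/after calling convention, using only calls that appear in this diff (the label and value are placeholders):

```cpp
#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_logger.h"

// Old style: format a "RESULT ..." line and print it to stdout, where a
// perf harness scrapes it back out of the log:
//   PrintResult("Max_skipped", "", "my_label", 2, "", false);
//
// New style: hand the value to a MetricsLogger, which stores it as
// structured data (name, test case, unit, improvement direction).
void LogExample(webrtc::test::MetricsLogger& logger) {
  logger.LogSingleValueMetric(
      "Max_skipped", /*test_case_name=*/"my_label", /*value=*/2,
      webrtc::test::Unit::kUnitless,
      webrtc::test::ImprovementDirection::kNeitherIsBetter);
}
```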
rtc_tools/BUILD.gn

@@ -110,12 +110,14 @@ rtc_library("video_quality_analysis") {
     "../api:array_view",
     "../api:make_ref_counted",
     "../api:scoped_refptr",
+    "../api/numerics",
+    "../api/test/metrics:metric",
+    "../api/test/metrics:metrics_logger",
     "../api/video:video_frame",
     "../api/video:video_rtp_headers",
     "../common_video",
     "../rtc_base:checks",
     "../rtc_base:logging",
-    "../test:perf_test",
     "//third_party/libyuv",
   ]
   absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
@@ -138,6 +140,7 @@ rtc_executable("frame_analyzer") {
     ":video_quality_analysis",
     "../api:make_ref_counted",
     "../api:scoped_refptr",
+    "../api/test/metrics:global_metrics_logger_and_exporter",
     "../rtc_base:stringutils",
     "../test:perf_test",
     "//third_party/abseil-cpp/absl/flags:flag",
@@ -526,6 +529,8 @@ if (rtc_include_tests) {
       ":video_file_writer",
       ":video_quality_analysis",
       "../api:scoped_refptr",
+      "../api/test/metrics:metric",
+      "../api/test/metrics:metrics_logger",
       "../api/video:video_frame",
       "../api/video:video_rtp_headers",
       "../common_video",
@@ -533,6 +538,7 @@
       "../rtc_base:checks",
       "../rtc_base:null_socket_server",
       "../rtc_base:threading",
+      "../system_wrappers",
       "../test:fileutils",
       "../test:test_main",
       "../test:test_support",
rtc_tools/frame_analyzer/frame_analyzer.cc

@@ -19,6 +19,7 @@
 #include "absl/flags/parse.h"
 #include "absl/strings/match.h"
 #include "api/scoped_refptr.h"
+#include "api/test/metrics/global_metrics_logger_and_exporter.h"
 #include "rtc_base/strings/string_builder.h"
 #include "rtc_tools/frame_analyzer/video_color_aligner.h"
 #include "rtc_tools/frame_analyzer/video_geometry_aligner.h"
@@ -159,7 +160,8 @@ int main(int argc, char* argv[]) {
   results.decode_errors_ref = 0;
   results.decode_errors_test = 0;
 
-  webrtc::test::PrintAnalysisResults(absl::GetFlag(FLAGS_label), &results);
+  webrtc::test::PrintAnalysisResults(absl::GetFlag(FLAGS_label), results,
+                                     *webrtc::test::GetGlobalMetricsLogger());
 
   std::string chartjson_result_file =
       absl::GetFlag(FLAGS_chartjson_result_file);
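The frame_analyzer binary now reports through the process-wide logger rather than writing to stdout. A small usage sketch of that call-site pattern (`ReportResults` and the label are hypothetical; the logger accessor and the new `PrintAnalysisResults` signature are the ones introduced in this diff):

```cpp
#include "api/test/metrics/global_metrics_logger_and_exporter.h"
#include "rtc_tools/frame_analyzer/video_quality_analysis.h"

// Hypothetical helper mirroring the call-site change above: results are
// accumulated in the global MetricsLogger instead of printed to stdout,
// so the harness can export them in a structured format later.
void ReportResults(webrtc::test::ResultsContainer& results) {
  webrtc::test::PrintAnalysisResults(
      "example_label", results, *webrtc::test::GetGlobalMetricsLogger());
}
```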
rtc_tools/frame_analyzer/video_quality_analysis.cc

@@ -14,9 +14,10 @@
 #include <array>
 #include <cstddef>
 
+#include "api/numerics/samples_stats_counter.h"
+#include "api/test/metrics/metric.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/logging.h"
-#include "test/testsupport/perf_test.h"
 #include "third_party/libyuv/include/libyuv/compare.h"
 
 namespace webrtc {
@@ -117,39 +118,42 @@ int GetTotalNumberOfSkippedFrames(const std::vector<Cluster>& clusters) {
   return static_cast<int>(number_ref_frames - clusters.size());
 }
 
-void PrintAnalysisResults(const std::string& label, ResultsContainer* results) {
-  PrintAnalysisResults(stdout, label, results);
-}
-
-void PrintAnalysisResults(FILE* output,
-                          const std::string& label,
-                          ResultsContainer* results) {
-  SetPerfResultsOutput(output);
-
-  if (results->frames.size() > 0u) {
-    PrintResult("Unique_frames_count", "", label, results->frames.size(),
-                "score", false);
+void PrintAnalysisResults(const std::string& label,
+                          ResultsContainer& results,
+                          MetricsLogger& logger) {
+  if (results.frames.size() > 0u) {
+    logger.LogSingleValueMetric("Unique_frames_count", label,
+                                results.frames.size(), Unit::kUnitless,
+                                ImprovementDirection::kNeitherIsBetter);
 
-    std::vector<double> psnr_values;
-    std::vector<double> ssim_values;
-    for (const auto& frame : results->frames) {
-      psnr_values.push_back(frame.psnr_value);
-      ssim_values.push_back(frame.ssim_value);
+    SamplesStatsCounter psnr_values;
+    SamplesStatsCounter ssim_values;
+    for (const auto& frame : results.frames) {
+      psnr_values.AddSample(frame.psnr_value);
+      ssim_values.AddSample(frame.ssim_value);
     }
 
-    PrintResultList("PSNR", "", label, psnr_values, "dB", false);
-    PrintResultList("SSIM", "", label, ssim_values, "score", false);
+    logger.LogMetric("PSNR_dB", label, psnr_values, Unit::kUnitless,
+                     ImprovementDirection::kNeitherIsBetter);
+    logger.LogMetric("SSIM", label, ssim_values, Unit::kUnitless,
+                     ImprovementDirection::kNeitherIsBetter);
   }
 
-  PrintResult("Max_repeated", "", label, results->max_repeated_frames, "",
-              false);
-  PrintResult("Max_skipped", "", label, results->max_skipped_frames, "", false);
-  PrintResult("Total_skipped", "", label, results->total_skipped_frames, "",
-              false);
-  PrintResult("Decode_errors_reference", "", label, results->decode_errors_ref,
-              "", false);
-  PrintResult("Decode_errors_test", "", label, results->decode_errors_test, "",
-              false);
+  logger.LogSingleValueMetric("Max_repeated", label,
+                              results.max_repeated_frames, Unit::kUnitless,
+                              ImprovementDirection::kNeitherIsBetter);
+  logger.LogSingleValueMetric("Max_skipped", label, results.max_skipped_frames,
+                              Unit::kUnitless,
+                              ImprovementDirection::kNeitherIsBetter);
+  logger.LogSingleValueMetric("Total_skipped", label,
+                              results.total_skipped_frames, Unit::kUnitless,
+                              ImprovementDirection::kNeitherIsBetter);
+  logger.LogSingleValueMetric("Decode_errors_reference", label,
+                              results.decode_errors_ref, Unit::kUnitless,
+                              ImprovementDirection::kNeitherIsBetter);
+  logger.LogSingleValueMetric("Decode_errors_test", label,
+                              results.decode_errors_test, Unit::kUnitless,
+                              ImprovementDirection::kNeitherIsBetter);
 }
 
 }  // namespace test
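The per-frame PSNR/SSIM series also change representation: raw `std::vector<double>` lists become `SamplesStatsCounter`s, so the logger receives a whole distribution rather than pre-printed values. A minimal sketch of that data structure, with assumed sample values:

```cpp
#include "api/numerics/samples_stats_counter.h"

// Samples are accumulated one by one; the counter keeps the series and
// exposes summary statistics that the metrics logger can attach to the
// logged metric. The values below are placeholders.
void SamplesStatsCounterExample() {
  webrtc::SamplesStatsCounter psnr;
  psnr.AddSample(35.0);
  psnr.AddSample(34.0);
  psnr.AddSample(33.0);
  double mean = psnr.GetAverage();  // 34.0 for these samples.
  (void)mean;  // Silence the unused-variable warning in this sketch.
}
```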
rtc_tools/frame_analyzer/video_quality_analysis.h

@@ -17,6 +17,7 @@
 #include <vector>
 
 #include "api/scoped_refptr.h"
+#include "api/test/metrics/metrics_logger.h"
 #include "api/video/video_frame_buffer.h"
 #include "rtc_tools/video_file_reader.h"
 
@@ -69,12 +70,9 @@ double Ssim(const rtc::scoped_refptr<I420BufferInterface>& ref_buffer,
 // Prints the result from the analysis in Chromium performance
 // numbers compatible format to stdout. If the results object contains no frames
 // no output will be written.
-void PrintAnalysisResults(const std::string& label, ResultsContainer* results);
-
-// Similar to the above, but will print to the specified file handle.
-void PrintAnalysisResults(FILE* output,
-                          const std::string& label,
-                          ResultsContainer* results);
+void PrintAnalysisResults(const std::string& label,
+                          ResultsContainer& results,
+                          MetricsLogger& logger);
 
 struct Cluster {
   // Corresponding reference frame index for this cluster.
rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc

@@ -7,102 +7,133 @@
  * in the file PATENTS.  All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-// This test doesn't actually verify the output since it's just printed
-// to stdout by void functions, but it's still useful as it executes the code.
-
 #include "rtc_tools/frame_analyzer/video_quality_analysis.h"
 
 #include <stdio.h>
 
 #include <cstddef>
-#include <fstream>
 #include <string>
 #include <vector>
 
+#include "api/test/metrics/metric.h"
+#include "api/test/metrics/metrics_logger.h"
+#include "system_wrappers/include/clock.h"
+#include "test/gmock.h"
 #include "test/gtest.h"
 #include "test/testsupport/file_utils.h"
 
 namespace webrtc {
 namespace test {
 
 namespace {
 
-void VerifyLogOutput(const std::string& log_filename,
-                     const std::vector<std::string>& expected_out) {
-  std::ifstream logf(log_filename);
-  std::string line;
-
-  std::size_t i;
-  for (i = 0; i < expected_out.size() && getline(logf, line); ++i) {
-    ASSERT_EQ(expected_out.at(i), line);
-  }
-  ASSERT_TRUE(i == expected_out.size()) << "Not enough input data";
-}
-
-}  // namespace
-
-// Setup a log file to write the output to instead of stdout because we don't
-// want those numbers to be picked up as perf numbers.
-class VideoQualityAnalysisTest : public ::testing::Test {
- protected:
-  void SetUp() {
-    std::string log_filename = TempFilename(webrtc::test::OutputPath(),
-                                            "VideoQualityAnalysisTest.log");
-    logfile_ = fopen(log_filename.c_str(), "w");
-    ASSERT_TRUE(logfile_ != NULL);
-  }
-  void TearDown() { ASSERT_EQ(0, fclose(logfile_)); }
-  FILE* logfile_;
-};
+using ::testing::IsSupersetOf;
 
-TEST_F(VideoQualityAnalysisTest, PrintAnalysisResultsEmpty) {
-  ResultsContainer result;
-  PrintAnalysisResults(logfile_, "Empty", &result);
+// Metric fields to assert on
+struct MetricValidationInfo {
+  std::string test_case;
+  std::string name;
+  Unit unit;
+  ImprovementDirection improvement_direction;
+  double mean;
+};
+
+bool operator==(const MetricValidationInfo& a, const MetricValidationInfo& b) {
+  return a.name == b.name && a.test_case == b.test_case && a.unit == b.unit &&
+         a.improvement_direction == b.improvement_direction;
 }
 
-TEST_F(VideoQualityAnalysisTest, PrintAnalysisResultsOneFrame) {
+std::ostream& operator<<(std::ostream& os, const MetricValidationInfo& m) {
+  os << "{ test_case=" << m.test_case << "; name=" << m.name
+     << "; unit=" << test::ToString(m.unit)
+     << "; improvement_direction=" << test::ToString(m.improvement_direction)
+     << " }";
+  return os;
+}
+
+std::vector<MetricValidationInfo> ToValidationInfo(
+    const std::vector<Metric>& metrics) {
+  std::vector<MetricValidationInfo> out;
+  for (const Metric& m : metrics) {
+    out.push_back(
+        MetricValidationInfo{.test_case = m.test_case,
+                             .name = m.name,
+                             .unit = m.unit,
+                             .improvement_direction = m.improvement_direction,
+                             .mean = *m.stats.mean});
+  }
+  return out;
+}
+
+TEST(VideoQualityAnalysisTest, PrintAnalysisResultsEmpty) {
+  ResultsContainer result;
+  DefaultMetricsLogger logger(Clock::GetRealTimeClock());
+  PrintAnalysisResults("Empty", result, logger);
+}
+
+TEST(VideoQualityAnalysisTest, PrintAnalysisResultsOneFrame) {
   ResultsContainer result;
   result.frames.push_back(AnalysisResult(0, 35.0, 0.9));
-  PrintAnalysisResults(logfile_, "OneFrame", &result);
+  DefaultMetricsLogger logger(Clock::GetRealTimeClock());
+  PrintAnalysisResults("OneFrame", result, logger);
 }
 
-TEST_F(VideoQualityAnalysisTest, PrintAnalysisResultsThreeFrames) {
+TEST(VideoQualityAnalysisTest, PrintAnalysisResultsThreeFrames) {
   ResultsContainer result;
   result.frames.push_back(AnalysisResult(0, 35.0, 0.9));
   result.frames.push_back(AnalysisResult(1, 34.0, 0.8));
   result.frames.push_back(AnalysisResult(2, 33.0, 0.7));
-  PrintAnalysisResults(logfile_, "ThreeFrames", &result);
+  DefaultMetricsLogger logger(Clock::GetRealTimeClock());
+  PrintAnalysisResults("ThreeFrames", result, logger);
 }
 
-TEST_F(VideoQualityAnalysisTest,
-       PrintMaxRepeatedAndSkippedFramesSkippedFrames) {
+TEST(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesSkippedFrames) {
   ResultsContainer result;
 
   std::string log_filename =
       TempFilename(webrtc::test::OutputPath(), "log.log");
   FILE* logfile = fopen(log_filename.c_str(), "w");
   ASSERT_TRUE(logfile != NULL);
 
   result.max_repeated_frames = 2;
   result.max_skipped_frames = 2;
   result.total_skipped_frames = 3;
   result.decode_errors_ref = 0;
   result.decode_errors_test = 0;
 
-  PrintAnalysisResults(logfile, "NormalStatsFile", &result);
-  ASSERT_EQ(0, fclose(logfile));
-
-  std::vector<std::string> expected_out = {
-      "RESULT Max_repeated: NormalStatsFile= 2 ",
-      "RESULT Max_skipped: NormalStatsFile= 2 ",
-      "RESULT Total_skipped: NormalStatsFile= 3 ",
-      "RESULT Decode_errors_reference: NormalStatsFile= 0 ",
-      "RESULT Decode_errors_test: NormalStatsFile= 0 "};
-  VerifyLogOutput(log_filename, expected_out);
+  DefaultMetricsLogger logger(Clock::GetRealTimeClock());
+  PrintAnalysisResults("NormalStatsFile", result, logger);
+
+  std::vector<MetricValidationInfo> metrics =
+      ToValidationInfo(logger.GetCollectedMetrics());
+  EXPECT_THAT(
+      metrics,
+      IsSupersetOf(
+          {MetricValidationInfo{
+               .test_case = "NormalStatsFile",
+               .name = "Max_repeated",
+               .unit = Unit::kUnitless,
+               .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+               .mean = 2},
+           MetricValidationInfo{
+               .test_case = "NormalStatsFile",
+               .name = "Max_skipped",
+               .unit = Unit::kUnitless,
+               .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+               .mean = 2},
+           MetricValidationInfo{
+               .test_case = "NormalStatsFile",
+               .name = "Total_skipped",
+               .unit = Unit::kUnitless,
+               .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+               .mean = 3},
+           MetricValidationInfo{
+               .test_case = "NormalStatsFile",
+               .name = "Decode_errors_reference",
+               .unit = Unit::kUnitless,
+               .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+               .mean = 0},
+           MetricValidationInfo{
+               .test_case = "NormalStatsFile",
+               .name = "Decode_errors_test",
+               .unit = Unit::kUnitless,
+               .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+               .mean = 0}}));
 }
 
-TEST_F(VideoQualityAnalysisTest,
-       PrintMaxRepeatedAndSkippedFramesDecodeErrorInTest) {
+TEST(VideoQualityAnalysisTest,
+     PrintMaxRepeatedAndSkippedFramesDecodeErrorInTest) {
   ResultsContainer result;
 
   std::string log_filename =
@@ -115,38 +146,67 @@ TEST_F(VideoQualityAnalysisTest,
   result.total_skipped_frames = 0;
   result.decode_errors_ref = 0;
   result.decode_errors_test = 3;
-  PrintAnalysisResults(logfile, "NormalStatsFile", &result);
-  ASSERT_EQ(0, fclose(logfile));
-
-  std::vector<std::string> expected_out = {
-      "RESULT Max_repeated: NormalStatsFile= 1 ",
-      "RESULT Max_skipped: NormalStatsFile= 0 ",
-      "RESULT Total_skipped: NormalStatsFile= 0 ",
-      "RESULT Decode_errors_reference: NormalStatsFile= 0 ",
-      "RESULT Decode_errors_test: NormalStatsFile= 3 "};
-  VerifyLogOutput(log_filename, expected_out);
+  DefaultMetricsLogger logger(Clock::GetRealTimeClock());
+  PrintAnalysisResults("NormalStatsFile", result, logger);
+
+  std::vector<MetricValidationInfo> metrics =
+      ToValidationInfo(logger.GetCollectedMetrics());
+  EXPECT_THAT(
+      metrics,
+      IsSupersetOf(
+          {MetricValidationInfo{
+               .test_case = "NormalStatsFile",
+               .name = "Max_repeated",
+               .unit = Unit::kUnitless,
+               .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+               .mean = 1},
+           MetricValidationInfo{
+               .test_case = "NormalStatsFile",
+               .name = "Max_skipped",
+               .unit = Unit::kUnitless,
+               .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+               .mean = 0},
+           MetricValidationInfo{
+               .test_case = "NormalStatsFile",
+               .name = "Total_skipped",
+               .unit = Unit::kUnitless,
+               .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+               .mean = 0},
+           MetricValidationInfo{
+               .test_case = "NormalStatsFile",
+               .name = "Decode_errors_reference",
+               .unit = Unit::kUnitless,
+               .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+               .mean = 0},
+           MetricValidationInfo{
+               .test_case = "NormalStatsFile",
+               .name = "Decode_errors_test",
+               .unit = Unit::kUnitless,
+               .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+               .mean = 3}}));
 }
 
-TEST_F(VideoQualityAnalysisTest, CalculateFrameClustersOneValue) {
+TEST(VideoQualityAnalysisTest, CalculateFrameClustersOneValue) {
   const std::vector<Cluster> result = CalculateFrameClusters({1});
   EXPECT_EQ(1u, result.size());
   EXPECT_EQ(1u, result[0].index);
   EXPECT_EQ(1, result[0].number_of_repeated_frames);
 }
 
-TEST_F(VideoQualityAnalysisTest, GetMaxRepeatedFramesOneValue) {
+TEST(VideoQualityAnalysisTest, GetMaxRepeatedFramesOneValue) {
   EXPECT_EQ(1, GetMaxRepeatedFrames(CalculateFrameClusters({1})));
 }
 
-TEST_F(VideoQualityAnalysisTest, GetMaxSkippedFramesOneValue) {
+TEST(VideoQualityAnalysisTest, GetMaxSkippedFramesOneValue) {
   EXPECT_EQ(0, GetMaxSkippedFrames(CalculateFrameClusters({1})));
 }
 
-TEST_F(VideoQualityAnalysisTest, GetTotalNumberOfSkippedFramesOneValue) {
+TEST(VideoQualityAnalysisTest, GetTotalNumberOfSkippedFramesOneValue) {
   EXPECT_EQ(0, GetTotalNumberOfSkippedFrames(CalculateFrameClusters({1})));
 }
 
-TEST_F(VideoQualityAnalysisTest, CalculateFrameClustersOneOneTwo) {
+TEST(VideoQualityAnalysisTest, CalculateFrameClustersOneOneTwo) {
   const std::vector<Cluster> result = CalculateFrameClusters({1, 1, 2});
   EXPECT_EQ(2u, result.size());
   EXPECT_EQ(1u, result[0].index);
@@ -155,34 +215,35 @@ TEST_F(VideoQualityAnalysisTest, CalculateFrameClustersOneOneTwo) {
   EXPECT_EQ(1, result[1].number_of_repeated_frames);
 }
 
-TEST_F(VideoQualityAnalysisTest, GetMaxRepeatedFramesOneOneTwo) {
+TEST(VideoQualityAnalysisTest, GetMaxRepeatedFramesOneOneTwo) {
   EXPECT_EQ(2, GetMaxRepeatedFrames(CalculateFrameClusters({1, 1, 2})));
 }
 
-TEST_F(VideoQualityAnalysisTest, GetMaxSkippedFramesOneOneTwo) {
+TEST(VideoQualityAnalysisTest, GetMaxSkippedFramesOneOneTwo) {
   EXPECT_EQ(0, GetMaxSkippedFrames(CalculateFrameClusters({1, 1, 2})));
 }
 
-TEST_F(VideoQualityAnalysisTest, GetTotalNumberOfSkippedFramesOneOneTwo) {
+TEST(VideoQualityAnalysisTest, GetTotalNumberOfSkippedFramesOneOneTwo) {
   EXPECT_EQ(0,
             GetTotalNumberOfSkippedFrames(CalculateFrameClusters({1, 1, 2})));
 }
 
-TEST_F(VideoQualityAnalysisTest, CalculateFrameClustersEmpty) {
+TEST(VideoQualityAnalysisTest, CalculateFrameClustersEmpty) {
   EXPECT_TRUE(CalculateFrameClusters({}).empty());
 }
 
-TEST_F(VideoQualityAnalysisTest, GetMaxRepeatedFramesEmpty) {
+TEST(VideoQualityAnalysisTest, GetMaxRepeatedFramesEmpty) {
   EXPECT_EQ(0, GetMaxRepeatedFrames({}));
 }
 
-TEST_F(VideoQualityAnalysisTest, GetMaxSkippedFramesEmpty) {
+TEST(VideoQualityAnalysisTest, GetMaxSkippedFramesEmpty) {
   EXPECT_EQ(0, GetMaxSkippedFrames({}));
 }
 
-TEST_F(VideoQualityAnalysisTest, GetTotalNumberOfSkippedFramesEmpty) {
+TEST(VideoQualityAnalysisTest, GetTotalNumberOfSkippedFramesEmpty) {
   EXPECT_EQ(0, GetTotalNumberOfSkippedFrames({}));
 }
 
+}  // namespace
 }  // namespace test
 }  // namespace webrtc
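The net effect on testing is visible in the last hunks: instead of scraping "RESULT ..." lines from a log file, the tests read the collected metrics back as data. A condensed sketch of that verification pattern, using the same classes the new tests use (the metric name and values are placeholders):

```cpp
#include <vector>

#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_logger.h"
#include "system_wrappers/include/clock.h"

// Log one value, then read it back as a structured Metric instead of
// parsing tool output.
void VerifySketch() {
  webrtc::test::DefaultMetricsLogger logger(
      webrtc::Clock::GetRealTimeClock());
  logger.LogSingleValueMetric(
      "Max_skipped", "example_case", 2, webrtc::test::Unit::kUnitless,
      webrtc::test::ImprovementDirection::kNeitherIsBetter);
  std::vector<webrtc::test::Metric> metrics = logger.GetCollectedMetrics();
  // Expected: metrics[0].name == "Max_skipped" and
  // *metrics[0].stats.mean == 2.0.
}
```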