Remove VideoEngine interfaces.

Removes ViE interfaces, _impl.cc files, managers (such as
ViEChannelManager and ViEInputManager) as well as ViESharedData.

Interfaces necessary to implement observers have been moved to a
corresponding header (such as vie_channel.h).

BUG=1695, 4491
R=mflodman@webrtc.org, solenberg@webrtc.org
TBR=pthatcher@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/55379004

Cr-Commit-Position: refs/heads/master@{#9179}
This commit is contained in:
Peter Boström 2015-05-12 16:51:11 +02:00
parent 8171735b0c
commit 300eeb68f5
168 changed files with 201 additions and 24400 deletions
WATCHLISTS
talk
webrtc
engine_configurations.h
test/channel_transport
video
video_engine
BUILD.gn
include
overuse_frame_detector.hoveruse_frame_detector_unittest.cc
test
auto_test
OWNERS
android
automated
interface
primitives
source
vie_auto_test.gypivie_auto_test.isolate
libvietest

View file

@ -24,7 +24,6 @@
},
'documented_interfaces': {
'filepath': 'webrtc/[^/]*\.h$|'\
'webrtc/video_engine/include/.*|'\
'webrtc/voice_engine/include/.*',
},
'build_files': {

View file

@ -78,7 +78,6 @@
#include "webrtc/base/stringutils.h"
#include "webrtc/system_wrappers/interface/field_trial_default.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/voice_engine/include/voe_base.h"
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)

View file

@ -494,7 +494,6 @@
'media/webrtc/webrtcvideoframe.h',
'media/webrtc/webrtcvideoframefactory.cc',
'media/webrtc/webrtcvideoframefactory.h',
'media/webrtc/webrtcvie.h',
'media/webrtc/webrtcvoe.h',
'media/webrtc/webrtcvoiceengine.cc',
'media/webrtc/webrtcvoiceengine.h',

View file

@ -36,12 +36,14 @@
#include "talk/media/webrtc/fakewebrtccommon.h"
#include "talk/media/webrtc/webrtcvideodecoderfactory.h"
#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
#include "talk/media/webrtc/webrtcvie.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/video_coding/codecs/interface/video_error_codes.h"
#include "webrtc/video_decoder.h"
#include "webrtc/video_encoder.h"
namespace cricket {

View file

@ -1027,9 +1027,6 @@ class FakeWebRtcVoiceEngine
unsigned short payloadSize));
WEBRTC_STUB(GetLastRemoteTimeStamp, (int channel,
uint32_t* lastRemoteTimeStamp));
WEBRTC_STUB(SetVideoEngineBWETarget, (int channel,
webrtc::ViENetwork* vie_network,
int video_channel));
// webrtc::VoEVideoSync
WEBRTC_STUB(GetPlayoutBufferSize, (int& bufferMs));

View file

@ -1,150 +0,0 @@
/*
* libjingle
* Copyright 2004 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TALK_MEDIA_WEBRTCVIE_H_
#define TALK_MEDIA_WEBRTCVIE_H_
#include "talk/media/webrtc/webrtccommon.h"
#include "webrtc/base/common.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/video_capture/include/video_capture.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/modules/video_render/include/video_render.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/include/vie_external_codec.h"
#include "webrtc/video_engine/include/vie_image_process.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
namespace cricket {
// all tracing macros should go to a common file
// automatically handles lifetime of VideoEngine
class scoped_vie_engine {
public:
explicit scoped_vie_engine(webrtc::VideoEngine* e) : ptr(e) {}
// VERIFY, to ensure that there are no leaks at shutdown
~scoped_vie_engine() {
if (ptr) {
webrtc::VideoEngine::Delete(ptr);
}
}
webrtc::VideoEngine* get() const { return ptr; }
private:
webrtc::VideoEngine* ptr;
};
// scoped_ptr class to handle obtaining and releasing VideoEngine
// interface pointers
template<class T> class scoped_vie_ptr {
public:
explicit scoped_vie_ptr(const scoped_vie_engine& e)
: ptr(T::GetInterface(e.get())) {}
explicit scoped_vie_ptr(T* p) : ptr(p) {}
~scoped_vie_ptr() { if (ptr) ptr->Release(); }
T* operator->() const { return ptr; }
T* get() const { return ptr; }
private:
T* ptr;
};
// Utility class for aggregating the various WebRTC interface.
// Fake implementations can also be injected for testing.
class ViEWrapper {
public:
ViEWrapper()
: engine_(webrtc::VideoEngine::Create()),
base_(engine_), codec_(engine_), capture_(engine_),
network_(engine_), render_(engine_), rtp_(engine_),
image_(engine_), ext_codec_(engine_) {
}
ViEWrapper(webrtc::ViEBase* base, webrtc::ViECodec* codec,
webrtc::ViECapture* capture, webrtc::ViENetwork* network,
webrtc::ViERender* render, webrtc::ViERTP_RTCP* rtp,
webrtc::ViEImageProcess* image,
webrtc::ViEExternalCodec* ext_codec)
: engine_(NULL),
base_(base),
codec_(codec),
capture_(capture),
network_(network),
render_(render),
rtp_(rtp),
image_(image),
ext_codec_(ext_codec) {
}
virtual ~ViEWrapper() {}
webrtc::VideoEngine* engine() { return engine_.get(); }
webrtc::ViEBase* base() { return base_.get(); }
webrtc::ViECodec* codec() { return codec_.get(); }
webrtc::ViECapture* capture() { return capture_.get(); }
webrtc::ViENetwork* network() { return network_.get(); }
webrtc::ViERender* render() { return render_.get(); }
webrtc::ViERTP_RTCP* rtp() { return rtp_.get(); }
webrtc::ViEImageProcess* image() { return image_.get(); }
webrtc::ViEExternalCodec* ext_codec() { return ext_codec_.get(); }
int error() { return base_->LastError(); }
private:
scoped_vie_engine engine_;
scoped_vie_ptr<webrtc::ViEBase> base_;
scoped_vie_ptr<webrtc::ViECodec> codec_;
scoped_vie_ptr<webrtc::ViECapture> capture_;
scoped_vie_ptr<webrtc::ViENetwork> network_;
scoped_vie_ptr<webrtc::ViERender> render_;
scoped_vie_ptr<webrtc::ViERTP_RTCP> rtp_;
scoped_vie_ptr<webrtc::ViEImageProcess> image_;
scoped_vie_ptr<webrtc::ViEExternalCodec> ext_codec_;
};
// Adds indirection to static WebRtc functions, allowing them to be mocked.
class ViETraceWrapper {
public:
virtual ~ViETraceWrapper() {}
virtual int SetTraceFilter(const unsigned int filter) {
return webrtc::VideoEngine::SetTraceFilter(filter);
}
virtual int SetTraceFile(const char* fileNameUTF8) {
return webrtc::VideoEngine::SetTraceFile(fileNameUTF8);
}
virtual int SetTraceCallback(webrtc::TraceCallback* callback) {
return webrtc::VideoEngine::SetTraceCallback(callback);
}
};
} // namespace cricket
#endif // TALK_MEDIA_WEBRTCVIE_H_

View file

@ -53,7 +53,6 @@
#include "webrtc/base/stringutils.h"
#include "webrtc/common.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/video_engine/include/vie_network.h"
#ifdef WIN32
#include <objbase.h> // NOLINT

View file

@ -39,7 +39,6 @@
#include "talk/media/base/fakertp.h"
#include "talk/media/webrtc/fakewebrtccall.h"
#include "talk/media/webrtc/fakewebrtcvoiceengine.h"
#include "talk/media/webrtc/webrtcvie.h"
#include "talk/media/webrtc/webrtcvoiceengine.h"
#include "webrtc/p2p/base/fakesession.h"
#include "talk/session/media/channel.h"

View file

@ -85,27 +85,6 @@
#define WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
#define WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
// ============================================================================
// VideoEngine
// ============================================================================
// ----------------------------------------------------------------------------
// Settings for special VideoEngine configurations
// ----------------------------------------------------------------------------
// ----------------------------------------------------------------------------
// VideoEngine sub-API:s
// ----------------------------------------------------------------------------
#define WEBRTC_VIDEO_ENGINE_CAPTURE_API
#define WEBRTC_VIDEO_ENGINE_CODEC_API
#define WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
#define WEBRTC_VIDEO_ENGINE_RENDER_API
#define WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
#define WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
// Now handled by gyp:
// WEBRTC_VIDEO_ENGINE_FILE_API
// ============================================================================
// Platform specific configurations
// ============================================================================

View file

@ -16,7 +16,6 @@
#include "testing/gtest/include/gtest/gtest.h"
#endif
#include "webrtc/test/channel_transport/udp_transport.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/voice_engine/include/voe_network.h"
@ -80,58 +79,5 @@ int VoiceChannelTransport::SetSendDestination(const char* ip_address,
return socket_transport_->InitializeSendSockets(ip_address, rtp_port);
}
VideoChannelTransport::VideoChannelTransport(ViENetwork* vie_network,
int channel)
: channel_(channel),
vie_network_(vie_network) {
uint8_t socket_threads = 1;
socket_transport_ = UdpTransport::Create(channel, socket_threads);
int registered = vie_network_->RegisterSendTransport(channel,
*socket_transport_);
#if !defined(WEBRTC_ANDROID) && !defined(WEBRTC_IOS)
EXPECT_EQ(0, registered);
#else
assert(registered == 0);
#endif
}
VideoChannelTransport::~VideoChannelTransport() {
vie_network_->DeregisterSendTransport(channel_);
UdpTransport::Destroy(socket_transport_);
}
void VideoChannelTransport::IncomingRTPPacket(
const int8_t* incoming_rtp_packet,
const size_t packet_length,
const char* /*from_ip*/,
const uint16_t /*from_port*/) {
vie_network_->ReceivedRTPPacket(
channel_, incoming_rtp_packet, packet_length, PacketTime());
}
void VideoChannelTransport::IncomingRTCPPacket(
const int8_t* incoming_rtcp_packet,
const size_t packet_length,
const char* /*from_ip*/,
const uint16_t /*from_port*/) {
vie_network_->ReceivedRTCPPacket(channel_, incoming_rtcp_packet,
packet_length);
}
int VideoChannelTransport::SetLocalReceiver(uint16_t rtp_port) {
int return_value = socket_transport_->InitializeReceiveSockets(this,
rtp_port);
if (return_value == 0) {
return socket_transport_->StartReceiving(kViENumReceiveSocketBuffers);
}
return return_value;
}
int VideoChannelTransport::SetSendDestination(const char* ip_address,
uint16_t rtp_port) {
return socket_transport_->InitializeSendSockets(ip_address, rtp_port);
}
} // namespace test
} // namespace webrtc

View file

@ -15,7 +15,6 @@
namespace webrtc {
class ViENetwork;
class VoENetwork;
namespace test {
@ -51,37 +50,6 @@ class VoiceChannelTransport : public UdpTransportData {
UdpTransport* socket_transport_;
};
// Helper class for VideoEngine tests.
class VideoChannelTransport : public UdpTransportData {
public:
VideoChannelTransport(ViENetwork* vie_network, int channel);
virtual ~VideoChannelTransport();
// Start implementation of UdpTransportData.
void IncomingRTPPacket(const int8_t* incoming_rtp_packet,
const size_t packet_length,
const char* /*from_ip*/,
const uint16_t /*from_port*/) override;
void IncomingRTCPPacket(const int8_t* incoming_rtcp_packet,
const size_t packet_length,
const char* /*from_ip*/,
const uint16_t /*from_port*/) override;
// End implementation of UdpTransportData.
// Specifies the ports to receive RTP packets on.
int SetLocalReceiver(uint16_t rtp_port);
// Specifies the destination port and IP address for a specified channel.
int SetSendDestination(const char* ip_address, uint16_t rtp_port);
private:
int channel_;
ViENetwork* vie_network_;
UdpTransport* socket_transport_;
};
} // namespace test
} // namespace webrtc

View file

@ -19,8 +19,8 @@
#include "webrtc/frame_callback.h"
#include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h"
#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/report_block_stats.h"
#include "webrtc/video_engine/vie_channel.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_renderer.h"

View file

@ -21,8 +21,8 @@
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/overuse_frame_detector.h"
#include "webrtc/video_engine/vie_encoder.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {

View file

@ -171,16 +171,14 @@ VideoSendStream::VideoSendStream(
ConfigureSsrcs();
char rtcp_cname[ViERTP_RTCP::KMaxRTCPCNameLength];
DCHECK_LT(config_.rtp.c_name.length(),
static_cast<size_t>(ViERTP_RTCP::KMaxRTCPCNameLength));
char rtcp_cname[RTCP_CNAME_SIZE];
DCHECK_LT(config_.rtp.c_name.length(), sizeof(rtcp_cname));
strncpy(rtcp_cname, config_.rtp.c_name.c_str(), sizeof(rtcp_cname) - 1);
rtcp_cname[sizeof(rtcp_cname) - 1] = '\0';
vie_channel_->SetRTCPCName(rtcp_cname);
vie_capturer_ = ViECapturer::CreateViECapturer(module_process_thread_);
CHECK_EQ(0, vie_capturer_->RegisterFrameCallback(channel_id_, vie_encoder_));
vie_capturer_ = new ViECapturer(module_process_thread_, vie_encoder_);
vie_channel_->RegisterSendTransport(&transport_adapter_);
// 28 to match packet overhead in ModuleRtpRtcpImpl.
@ -243,7 +241,6 @@ VideoSendStream::~VideoSendStream() {
vie_channel_->DeregisterSendTransport();
vie_capturer_->RegisterCpuOveruseObserver(nullptr);
vie_capturer_->DeregisterFrameCallback(vie_encoder_);
delete vie_capturer_;
vie_encoder_->DeRegisterExternalEncoder(

View file

@ -26,6 +26,7 @@
namespace webrtc {
class ChannelGroup;
class CpuOveruseObserver;
class ProcessThread;
class ViECapturer;

View file

@ -35,57 +35,23 @@ source_set("video_engine_core") {
"report_block_stats.h",
"stream_synchronization.cc",
"stream_synchronization.h",
"vie_base_impl.cc",
"vie_base_impl.h",
"vie_capture_impl.cc",
"vie_capture_impl.h",
"vie_capturer.cc",
"vie_capturer.h",
"vie_channel.cc",
"vie_channel_group.cc",
"vie_channel_group.h",
"vie_channel.h",
"vie_channel_manager.cc",
"vie_channel_manager.h",
"vie_codec_impl.cc",
"vie_codec_impl.h",
"vie_defines.h",
"vie_encoder.cc",
"vie_encoder.h",
"vie_external_codec_impl.cc",
"vie_external_codec_impl.h",
"vie_file_image.cc",
"vie_file_image.h",
"vie_frame_provider_base.cc",
"vie_frame_provider_base.h",
"vie_image_process_impl.cc",
"vie_image_process_impl.h",
"vie_impl.cc",
"vie_impl.h",
"vie_input_manager.cc",
"vie_input_manager.h",
"vie_manager_base.cc",
"vie_manager_base.h",
"vie_network_impl.cc",
"vie_network_impl.h",
"vie_receiver.cc",
"vie_receiver.h",
"vie_ref_count.cc",
"vie_ref_count.h",
"vie_remb.cc",
"vie_remb.h",
"vie_renderer.cc",
"vie_renderer.h",
"vie_render_impl.cc",
"vie_render_impl.h",
"vie_render_manager.cc",
"vie_render_manager.h",
"vie_rtp_rtcp_impl.cc",
"vie_rtp_rtcp_impl.h",
"vie_sender.cc",
"vie_sender.h",
"vie_shared_data.cc",
"vie_shared_data.h",
"vie_sync_module.cc",
"vie_sync_module.h",
]

View file

@ -1,281 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
//
// - Creating and deleting VideoEngine instances.
// - Creating and deleting channels.
// - Connect a video channel with a corresponding voice channel for audio/video
// synchronization.
// - Start and stop sending and receiving.
#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_BASE_H_
#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_BASE_H_
#include "webrtc/common_types.h"
namespace webrtc {
class Config;
class VoiceEngine;
class ReceiveStatisticsProxy;
class SendStatisticsProxy;
// Internal-class forward declarations, used to break out implementations for
// the new video API to remove interface dependencies to the VideoEngine API
// See webrtc:1695.
class ChannelGroup;
class ViEChannel;
class ViEEncoder;
class ViESharedData;
// CpuOveruseObserver is called when a system overuse is detected and
// VideoEngine cannot keep up the encoding frequency.
class CpuOveruseObserver {
public:
// Called as soon as an overuse is detected.
virtual void OveruseDetected() = 0;
// Called periodically when the system is not overused any longer.
virtual void NormalUsage() = 0;
protected:
virtual ~CpuOveruseObserver() {}
};
struct CpuOveruseOptions {
CpuOveruseOptions()
: enable_capture_jitter_method(false),
low_capture_jitter_threshold_ms(20.0f),
high_capture_jitter_threshold_ms(30.0f),
enable_encode_usage_method(true),
low_encode_usage_threshold_percent(55),
high_encode_usage_threshold_percent(85),
low_encode_time_rsd_threshold(-1),
high_encode_time_rsd_threshold(-1),
enable_extended_processing_usage(true),
frame_timeout_interval_ms(1500),
min_frame_samples(120),
min_process_count(3),
high_threshold_consecutive_count(2) {}
// Method based on inter-arrival jitter of captured frames.
bool enable_capture_jitter_method;
float low_capture_jitter_threshold_ms; // Threshold for triggering underuse.
float high_capture_jitter_threshold_ms; // Threshold for triggering overuse.
// Method based on encode time of frames.
bool enable_encode_usage_method;
int low_encode_usage_threshold_percent; // Threshold for triggering underuse.
int high_encode_usage_threshold_percent; // Threshold for triggering overuse.
// TODO(asapersson): Remove options, not used.
int low_encode_time_rsd_threshold; // Additional threshold for triggering
// underuse (used in addition to
// threshold above if configured).
int high_encode_time_rsd_threshold; // Additional threshold for triggering
// overuse (used in addition to
// threshold above if configured).
bool enable_extended_processing_usage; // Include a larger time span (in
// addition to encode time) for
// measuring the processing time of a
// frame.
// General settings.
int frame_timeout_interval_ms; // The maximum allowed interval between two
// frames before resetting estimations.
int min_frame_samples; // The minimum number of frames required.
int min_process_count; // The number of initial process times required before
// triggering an overuse/underuse.
int high_threshold_consecutive_count; // The number of consecutive checks
// above the high threshold before
// triggering an overuse.
bool Equals(const CpuOveruseOptions& o) const {
return enable_capture_jitter_method == o.enable_capture_jitter_method &&
low_capture_jitter_threshold_ms == o.low_capture_jitter_threshold_ms &&
high_capture_jitter_threshold_ms ==
o.high_capture_jitter_threshold_ms &&
enable_encode_usage_method == o.enable_encode_usage_method &&
low_encode_usage_threshold_percent ==
o.low_encode_usage_threshold_percent &&
high_encode_usage_threshold_percent ==
o.high_encode_usage_threshold_percent &&
low_encode_time_rsd_threshold == o.low_encode_time_rsd_threshold &&
high_encode_time_rsd_threshold == o.high_encode_time_rsd_threshold &&
enable_extended_processing_usage ==
o.enable_extended_processing_usage &&
frame_timeout_interval_ms == o.frame_timeout_interval_ms &&
min_frame_samples == o.min_frame_samples &&
min_process_count == o.min_process_count &&
high_threshold_consecutive_count == o.high_threshold_consecutive_count;
}
};
struct CpuOveruseMetrics {
CpuOveruseMetrics()
: capture_jitter_ms(-1),
avg_encode_time_ms(-1),
encode_usage_percent(-1),
capture_queue_delay_ms_per_s(-1) {}
int capture_jitter_ms; // The current estimated jitter in ms based on
// incoming captured frames.
int avg_encode_time_ms; // The average encode time in ms.
int encode_usage_percent; // The average encode time divided by the average
// time difference between incoming captured frames.
int capture_queue_delay_ms_per_s; // The current time delay between an
// incoming captured frame until the frame
// is being processed. The delay is
// expressed in ms delay per second.
};
class CpuOveruseMetricsObserver {
public:
virtual ~CpuOveruseMetricsObserver() {}
virtual void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) = 0;
};
class WEBRTC_DLLEXPORT VideoEngine {
public:
// Creates a VideoEngine object, which can then be used to acquire subAPIs.
static VideoEngine* Create();
static VideoEngine* Create(const Config& config);
// Deletes a VideoEngine instance.
static bool Delete(VideoEngine*& video_engine);
// Specifies the amount and type of trace information, which will be created
// by the VideoEngine.
static int SetTraceFilter(const unsigned int filter);
// Sets the name of the trace file and enables nonencrypted trace messages.
static int SetTraceFile(const char* file_nameUTF8,
const bool add_file_counter = false);
// Installs the TraceCallback implementation to ensure that the VideoEngine
// user receives callbacks for generated trace messages.
static int SetTraceCallback(TraceCallback* callback);
protected:
VideoEngine() {}
virtual ~VideoEngine() {}
};
class WEBRTC_DLLEXPORT ViEBase {
public:
// Factory for the ViEBase subAPI and increases an internal reference
// counter if successful. Returns NULL if the API is not supported or if
// construction fails.
static ViEBase* GetInterface(VideoEngine* video_engine);
// Releases the ViEBase sub-API and decreases an internal reference counter.
// Returns the new reference count. This value should be zero
// for all sub-API:s before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// Initiates all common parts of the VideoEngine.
virtual int Init() = 0;
// Connects a VideoEngine instance to a VoiceEngine instance for audio video
// synchronization.
virtual int SetVoiceEngine(VoiceEngine* voice_engine) = 0;
// Creates a new channel.
virtual int CreateChannel(int& video_channel) = 0;
// Creates a new channel grouped together with |original_channel|. The channel
// can both send and receive video. It is assumed the channel is sending
// and/or receiving video to the same end-point.
// Note: |CreateReceiveChannel| will give better performance and network
// properties for receive only channels.
virtual int CreateChannel(int& video_channel,
int original_channel) = 0;
virtual int CreateChannelWithoutDefaultEncoder(int& video_channel,
int original_channel) = 0;
virtual ChannelGroup* GetChannelGroup(int channel_id) = 0;
virtual ViEChannel* GetChannel(int channel_id) = 0;
virtual ViEEncoder* GetEncoder(int channel_id) = 0;
// Creates a new channel grouped together with |original_channel|. The channel
// can only receive video and it is assumed the remote end-point is the same
// as for |original_channel|.
virtual int CreateReceiveChannel(int& video_channel,
int original_channel) = 0;
// Deletes an existing channel and releases the utilized resources.
virtual int DeleteChannel(const int video_channel) = 0;
// Registers an observer to be called when an overuse is detected, see
// 'CpuOveruseObserver' for details.
// NOTE: This is still very experimental functionality.
virtual int RegisterCpuOveruseObserver(int channel,
CpuOveruseObserver* observer) = 0;
// Sets options for cpu overuse detector.
virtual int SetCpuOveruseOptions(int channel,
const CpuOveruseOptions& options) = 0;
// Gets cpu overuse measures.
virtual int GetCpuOveruseMetrics(int channel, CpuOveruseMetrics* metrics) = 0;
virtual void RegisterCpuOveruseMetricsObserver(
int channel,
CpuOveruseMetricsObserver* observer) = 0;
// Registers a callback which is called when send-side delay statistics has
// been updated.
// TODO(holmer): Remove the default implementation when fakevideoengine.h has
// been updated.
virtual void RegisterSendSideDelayObserver(
int channel, SendSideDelayObserver* observer) {}
// Specifies the VoiceEngine and VideoEngine channel pair to use for
// audio/video synchronization.
virtual int ConnectAudioChannel(const int video_channel,
const int audio_channel) = 0;
// Disconnects a previously paired VideoEngine and VoiceEngine channel pair.
virtual int DisconnectAudioChannel(const int video_channel) = 0;
// Starts sending packets to an already specified IP address and port number
// for a specified channel.
virtual int StartSend(const int video_channel) = 0;
// Stops packets from being sent for a specified channel.
virtual int StopSend(const int video_channel) = 0;
// Prepares VideoEngine for receiving packets on the specified channel.
virtual int StartReceive(const int video_channel) = 0;
// Stops receiving incoming RTP and RTCP packets on the specified channel.
virtual int StopReceive(const int video_channel) = 0;
// Retrieves the version information for VideoEngine and its components.
virtual int GetVersion(char version[1024]) = 0;
// Returns the last VideoEngine error code.
virtual int LastError() = 0;
virtual void RegisterSendStatisticsProxy(
int channel,
SendStatisticsProxy* send_statistics_proxy) = 0;
virtual void RegisterReceiveStatisticsProxy(
int channel,
ReceiveStatisticsProxy* receive_statistics_proxy) = 0;
virtual ViESharedData* shared_data() = 0;
protected:
ViEBase() {}
virtual ~ViEBase() {}
};
} // namespace webrtc
#endif // #define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_BASE_H_

View file

@ -1,215 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
//
// - Allocating capture devices.
// - Connect a capture device with one or more channels.
// - Start and stop capture devices.
// - Getting capture device capabilities.
#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CAPTURE_H_
#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CAPTURE_H_
#include "webrtc/common_types.h"
#include "webrtc/common_video/rotation.h"
#include "webrtc/video_frame.h"
namespace webrtc {
class VideoEngine;
class VideoCaptureModule;
// This structure describes one set of the supported capabilities for a capture
// device.
struct CaptureCapability {
unsigned int width;
unsigned int height;
unsigned int maxFPS;
RawVideoType rawType;
VideoCodecType codecType;
unsigned int expectedCaptureDelay;
bool interlaced;
CaptureCapability() {
width = 0;
height = 0;
maxFPS = 0;
rawType = kVideoI420;
codecType = kVideoCodecUnknown;
expectedCaptureDelay = 0;
interlaced = false;
}
};
// This enumerator tells the current brightness alarm mode.
enum Brightness {
Normal = 0,
Bright = 1,
Dark = 2
};
// This enumerator describes the capture alarm mode.
enum CaptureAlarm {
AlarmRaised = 0,
AlarmCleared = 1
};
// This class declares an abstract interface to be used when implementing
// a user-defined capture device. This interface is not meant to be
// implemented by the user. Instead, the user should call AllocateCaptureDevice
// in the ViECapture interface, which will create a suitable implementation.
// The user should then call IncomingFrame in this interface to deliver
// captured frames to the system.
class WEBRTC_DLLEXPORT ViEExternalCapture {
public:
ViEExternalCapture() {}
virtual ~ViEExternalCapture() {}
// This method is called by the user to deliver a new captured frame to
// VideoEngine.
virtual void IncomingFrame(const I420VideoFrame& frame) = 0;
};
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterObserver() and
// deregistered using DeregisterObserver().
class WEBRTC_DLLEXPORT ViECaptureObserver {
public:
// This method is called if a bright or dark captured image is detected.
virtual void BrightnessAlarm(const int capture_id,
const Brightness brightness) = 0;
// This method is called periodically telling the capture device frame rate.
virtual void CapturedFrameRate(const int capture_id,
const unsigned char frame_rate) = 0;
// This method is called if the capture device stops delivering images to
// VideoEngine.
virtual void NoPictureAlarm(const int capture_id,
const CaptureAlarm alarm) = 0;
protected:
virtual ~ViECaptureObserver() {}
};
class WEBRTC_DLLEXPORT ViECapture {
public:
// Factory for the ViECapture subAPI and increases an internal reference
// counter if successful. Returns NULL if the API is not supported or if
// construction fails.
static ViECapture* GetInterface(VideoEngine* video_engine);
// Releases the ViECapture sub-API and decreases an internal reference
// counter.
// Returns the new reference count. This value should be zero
// for all sub-API:s before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// Gets the number of available capture devices.
virtual int NumberOfCaptureDevices() = 0;
// Gets the name and unique id of a capture device.
virtual int GetCaptureDevice(unsigned int list_number,
char* device_nameUTF8,
const unsigned int device_nameUTF8Length,
char* unique_idUTF8,
const unsigned int unique_idUTF8Length) = 0;
// Allocates a capture device to be used in VideoEngine.
virtual int AllocateCaptureDevice(const char* unique_idUTF8,
const unsigned int unique_idUTF8Length,
int& capture_id) = 0;
// Registers an external capture device to be used in VideoEngine
virtual int AllocateExternalCaptureDevice(
int& capture_id,
ViEExternalCapture *&external_capture) = 0;
// Use capture device using external capture module.
virtual int AllocateCaptureDevice(VideoCaptureModule& capture_module,
int& capture_id) = 0;
// Releases a capture device and makes it available for other applications.
virtual int ReleaseCaptureDevice(const int capture_id) = 0;
// This function connects a capture device with a channel. Multiple channels
// can be connected to the same capture device.
virtual int ConnectCaptureDevice(const int capture_id,
const int video_channel) = 0;
// Disconnects a capture device as input for a specified channel.
virtual int DisconnectCaptureDevice(const int video_channel) = 0;
// Makes a capture device start capturing video frames.
virtual int StartCapture(
const int capture_id,
const CaptureCapability& capture_capability = CaptureCapability()) = 0;
// Stops a started capture device from capturing video frames.
virtual int StopCapture(const int capture_id) = 0;
// Rotates captured frames before encoding and sending.
// Used on mobile devices with rotates cameras.
virtual int SetVideoRotation(const int capture_id,
const VideoRotation rotation) = 0;
// This function sets the expected delay from when a video frame is captured
// to when that frame is delivered to VideoEngine.
virtual int SetCaptureDelay(const int capture_id,
const unsigned int capture_delay_ms) = 0;
// Returns the number of sets of capture capabilities the capture device
// supports.
virtual int NumberOfCapabilities(
const char* unique_id_utf8,
const unsigned int unique_id_utf8_length) = 0;
// Gets a set of capture capabilities for a specified capture device.
virtual int GetCaptureCapability(const char* unique_id_utf8,
const unsigned int unique_id_utf8_length,
const unsigned int capability_number,
CaptureCapability& capability) = 0;
// Displays the capture device property dialog box for the specified capture
// device. Windows only.
virtual int ShowCaptureSettingsDialogBox(
const char* unique_idUTF8,
const unsigned int unique_id_utf8_length,
const char* dialog_title,
void* parent_window = NULL,
const unsigned int x = 200,
const unsigned int y = 200) = 0;
// Gets the clockwise angle the frames from the camera must be rotated in
// order to display the frames correctly if the display is rotated in its
// natural orientation.
virtual int GetOrientation(const char* unique_id_utf8,
VideoRotation& orientation) = 0;
// Enables brightness alarm detection and the brightness alarm callback.
virtual int EnableBrightnessAlarm(const int capture_id,
const bool enable) = 0;
// Registers an instance of a user implementation of the ViECaptureObserver.
virtual int RegisterObserver(const int capture_id,
ViECaptureObserver& observer) = 0;
// Removes an already registered instance of ViECaptureObserver.
virtual int DeregisterObserver(const int capture_id) = 0;
protected:
ViECapture() {}
virtual ~ViECapture() {}
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CAPTURE_H_

View file

@ -1,220 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
// - Setting send and receive codecs.
// - Codec specific settings.
// - Key frame signaling.
// - Stream management settings.
#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CODEC_H_
#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CODEC_H_
#include "webrtc/common_types.h"
namespace webrtc {
class VideoEngine;
struct VideoCodec;
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterEncoderObserver()
// and deregistered using DeregisterEncoderObserver().
class WEBRTC_DLLEXPORT ViEEncoderObserver {
 public:
  // This method is called once per second with the current encoded frame rate
  // and bit rate for the channel identified by |video_channel|.
  virtual void OutgoingRate(const int video_channel,
                            const unsigned int framerate,
                            const unsigned int bitrate) = 0;
  // This method is called whenever the state of the SuspendBelowMinBitrate
  // changes, i.e., when |is_suspended| toggles.
  virtual void SuspendChange(int video_channel, bool is_suspended) = 0;
 protected:
  // Protected non-public destructor: the observer is never owned or deleted
  // through this interface.
  virtual ~ViEEncoderObserver() {}
};
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterDecoderObserver()
// and deregistered using DeregisterDecoderObserver().
class WEBRTC_DLLEXPORT ViEDecoderObserver {
 public:
  // This method is called when a new incoming stream is detected, normally
  // triggered by a new incoming SSRC or payload type.
  virtual void IncomingCodecChanged(const int video_channel,
                                    const VideoCodec& video_codec) = 0;
  // This method is called once per second containing the frame rate and bit
  // rate for the incoming stream
  virtual void IncomingRate(const int video_channel,
                            const unsigned int framerate,
                            const unsigned int bitrate) = 0;
  // Called periodically with decoder timing information. All values are
  // "current" snapshots unless decorated with a min_/max_ prefix.
  // All parameters are in milliseconds, per their _ms suffixes.
  virtual void DecoderTiming(int decode_ms,
                             int max_decode_ms,
                             int current_delay_ms,
                             int target_delay_ms,
                             int jitter_buffer_ms,
                             int min_playout_delay_ms,
                             int render_delay_ms) = 0;
  // This method is called when the decoder needs a new key frame from encoder
  // on the sender.
  virtual void RequestNewKeyFrame(const int video_channel) = 0;
 protected:
  // Protected non-public destructor: the observer is never owned or deleted
  // through this interface.
  virtual ~ViEDecoderObserver() {}
};
// Sub-API for codec configuration: send/receive codec selection, codec
// statistics, key-frame signaling and codec observers. All methods return an
// int status unless otherwise noted (0 on success by the file's convention —
// TODO(review): confirm against the implementation).
class WEBRTC_DLLEXPORT ViECodec {
 public:
  // Factory for the ViECodec subAPI and increases an internal reference
  // counter if successful. Returns NULL if the API is not supported or if
  // construction fails.
  static ViECodec* GetInterface(VideoEngine* video_engine);
  // Releases the ViECodec sub-API and decreases an internal reference
  // counter.
  // Returns the new reference count. This value should be zero
  // for all sub-API:s before the VideoEngine object can be safely deleted.
  virtual int Release() = 0;
  // Gets the number of available codecs for the VideoEngine build.
  virtual int NumberOfCodecs() const = 0;
  // Gets a VideoCodec struct for a codec containing the default configuration
  // for that codec type. |list_number| indexes into the list whose size is
  // returned by NumberOfCodecs().
  virtual int GetCodec(const unsigned char list_number,
                       VideoCodec& video_codec) const = 0;
  // Sets the send codec to use for a specified channel.
  virtual int SetSendCodec(const int video_channel,
                           const VideoCodec& video_codec) = 0;
  // Gets the current send codec settings.
  virtual int GetSendCodec(const int video_channel,
                           VideoCodec& video_codec) const = 0;
  // Prepares VideoEngine to receive a certain codec type and setting for a
  // specified payload type.
  virtual int SetReceiveCodec(const int video_channel,
                              const VideoCodec& video_codec) = 0;
  // Gets the current receive codec.
  virtual int GetReceiveCodec(const int video_channel,
                              VideoCodec& video_codec) const = 0;
  // This function is used to get codec configuration parameters to be
  // signaled from the encoder to the decoder in the call setup.
  virtual int GetCodecConfigParameters(
      const int video_channel,
      unsigned char config_parameters[kConfigParameterSize],
      unsigned char& config_parameters_size) const = 0;
  // Enables advanced scaling of the captured video stream if the stream
  // differs from the send codec settings.
  virtual int SetImageScaleStatus(const int video_channel,
                                  const bool enable) = 0;
  // Gets the number of sent key frames and number of sent delta frames.
  // NOTE: the "Stastistics" misspelling is part of the published API name and
  // must be preserved for source compatibility.
  virtual int GetSendCodecStastistics(const int video_channel,
                                      unsigned int& key_frames,
                                      unsigned int& delta_frames) const = 0;
  // Gets the number of decoded key frames and number of decoded delta frames.
  // NOTE: same intentional misspelling as GetSendCodecStastistics().
  virtual int GetReceiveCodecStastistics(const int video_channel,
                                         unsigned int& key_frames,
                                         unsigned int& delta_frames) const = 0;
  // Estimate of the min required buffer time from the expected arrival time
  // until rendering to get smooth playback.
  virtual int GetReceiveSideDelay(const int video_channel,
                                  int* delay_ms) const = 0;
  // Current target bitrate for this channel, in bits per second (per the
  // Bps suffix).
  virtual uint32_t GetLastObservedBitrateBps(int video_channel) const = 0;
  // Gets the bitrate targeted by the video codec rate control in kbit/s.
  virtual int GetCodecTargetBitrate(const int video_channel,
                                    unsigned int* bitrate) const = 0;
  // Gets the number of packets discarded by the jitter buffer because they
  // arrived too late.
  // TODO(asapersson): Remove default implementation.
  virtual int GetNumDiscardedPackets(int video_channel) const { return -1; }
  // TODO(asapersson): Remove once the api has been removed from
  // fakewebrtcvideoengine.h.
  virtual unsigned int GetDiscardedPackets(
      const int video_channel) const { return 0; }
  // Enables key frame request callback in ViEDecoderObserver.
  virtual int SetKeyFrameRequestCallbackStatus(const int video_channel,
                                               const bool enable) = 0;
  // Enables key frame requests for detected lost packets.
  virtual int SetSignalKeyPacketLossStatus(
      const int video_channel,
      const bool enable,
      const bool only_key_frames = false) = 0;
  // Registers an instance of a user implementation of the ViEEncoderObserver.
  virtual int RegisterEncoderObserver(const int video_channel,
                                      ViEEncoderObserver& observer) = 0;
  // Removes an already registered instance of ViEEncoderObserver.
  virtual int DeregisterEncoderObserver(const int video_channel) = 0;
  // Registers an instance of a user implementation of the ViEDecoderObserver.
  virtual int RegisterDecoderObserver(const int video_channel,
                                      ViEDecoderObserver& observer) = 0;
  // Removes an already registered instance of ViEDecoderObserver.
  virtual int DeregisterDecoderObserver(const int video_channel) = 0;
  // This function forces the next encoded frame to be a key frame. This is
  // normally used when the remote endpoint only supports outband key frame
  // request.
  virtual int SendKeyFrame(const int video_channel) = 0;
  // This function makes the decoder wait for a key frame before starting to
  // decode the incoming video stream.
  virtual int WaitForFirstKeyFrame(const int video_channel,
                                   const bool wait) = 0;
  // Enables recording of debugging information to |file_name_utf8|.
  virtual int StartDebugRecording(int video_channel,
                                  const char* file_name_utf8) = 0;
  // Disables recording of debugging information.
  virtual int StopDebugRecording(int video_channel) = 0;
  // Lets the sender suspend video when the rate drops below
  // |threshold_bps|, and turns back on when the rate goes back up above
  // |threshold_bps| + |window_bps|.
  // This is under development; not tested.
  virtual void SuspendBelowMinBitrate(int video_channel) = 0;
  // TODO(holmer): Remove this default implementation when possible.
  virtual bool GetSendSideDelay(int video_channel, int* avg_delay_ms,
                                int* max_delay_ms) const { return false; }
 protected:
  // Instances are created via GetInterface() and destroyed via Release();
  // direct construction/deletion is disallowed.
  ViECodec() {}
  virtual ~ViECodec() {}
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CODEC_H_

View file

@ -1,111 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ERRORS_H_
#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ERRORS_H_
// Error codes returned by the VideoEngine sub-APIs. Each sub-API owns its own
// 12x00-based numeric range (ViEBase = 12000, ViECodec = 12100, ...); the
// per-enumerator comments name the calls that can produce each code. The
// numeric values are part of the public API and must not be renumbered.
enum ViEErrors {
  // ViEBase.
  kViENotInitialized = 12000,        // Init has not been called successfully.
  kViEBaseVoEFailure,                // SetVoiceEngine. ViE failed to use VE instance. Check VE instance pointer.ConnectAudioChannel failed to set voice channel. Have SetVoiceEngine been called? Is the voice channel correct.
  kViEBaseChannelCreationFailed,     // CreateChannel.
  kViEBaseInvalidChannelId,          // The channel does not exist.
  kViEAPIDoesNotExist,               // Release called on Interface that has not been created.
  kViEBaseInvalidArgument,
  kViEBaseAlreadySending,            // StartSend called on channel that is already sending.
  kViEBaseNotSending,                // StopSend called on channel that is not sending.
  kViEBaseReceiveOnlyChannel,        // Can't send on a receive only channel.
  kViEBaseAlreadyReceiving,          // StartReceive called on channel that is already receiving.
  kViEBaseObserverAlreadyRegistered, // RegisterObserver- an observer has already been set.
  kViEBaseObserverNotRegistered,     // DeregisterObserver - no observer has been registered.
  kViEBaseUnknownError,              // An unknown error has occurred. Check the log file.

  // ViECodec.
  kViECodecInvalidArgument = 12100,    // Wrong input parameter to function.
  kViECodecObserverAlreadyRegistered,  // RegisterEncoderObserver, RegisterDecoderObserver.
  kViECodecObserverNotRegistered,      // DeregisterEncoderObserver, DeregisterDecoderObserver.
  kViECodecInvalidCodec,               // SetSendCodec,SetReceiveCodec- The codec structure is invalid.
  kViECodecInvalidChannelId,           // The channel does not exist.
  kViECodecInUse,                      // SetSendCodec- Can't change codec size or type when multiple channels use the same encoder.
  kViECodecReceiveOnlyChannel,         // SetSendCodec, can't change receive only channel.
  kViECodecUnknownError,               // An unknown error has occurred. Check the log file.

  // ViERender.
  kViERenderInvalidRenderId = 12200,  // No renderer with the ID exist. In AddRenderer - The render ID is invalid. No capture device, channel or file is allocated with that id.
  kViERenderAlreadyExists,            // AddRenderer: the renderer already exist.
  kViERenderInvalidFrameFormat,       // AddRender (external renderer). The user has requested a frame format that we don't support.
  kViERenderUnknownError,             // An unknown error has occurred. Check the log file.

  // ViECapture.
  kViECaptureDeviceAlreadyConnected = 12300,  // ConnectCaptureDevice - A capture device has already been connected to this video channel.
  kViECaptureDeviceDoesNotExist,              // No capture device exist with the provided capture id or unique name.
  kViECaptureDeviceInvalidChannelId,          // ConnectCaptureDevice, DisconnectCaptureDevice- No Channel exist with the provided channel id.
  kViECaptureDeviceNotConnected,              // DisconnectCaptureDevice- No capture device is connected to the channel.
  kViECaptureDeviceNotStarted,                // Stop- The capture device is not started.
  kViECaptureDeviceAlreadyStarted,            // Start- The capture device is already started.
  kViECaptureDeviceAlreadyAllocated,          // AllocateCaptureDevice The device is already allocated.
  kViECaptureDeviceMaxNoDevicesAllocated,     // AllocateCaptureDevice Max number of devices already allocated.
  kViECaptureObserverAlreadyRegistered,       // RegisterObserver- An observer is already registered. Need to deregister first.
  kViECaptureDeviceObserverNotRegistered,     // DeregisterObserver- No observer is registered.
  kViECaptureDeviceUnknownError,              // An unknown error has occurred. Check the log file.
  kViECaptureDeviceMacQtkitNotSupported,      // QTKit handles the capture devices automatically. Thus querying capture capabilities is not supported.

  // ViEFile.
  kViEFileInvalidChannelId = 12400,  // No Channel exist with the provided channel id.
  kViEFileInvalidArgument,           // Incorrect input argument
  kViEFileAlreadyRecording,          // StartRecordOutgoingVideo - already recording channel
  kViEFileVoENotSet,                 // StartRecordOutgoingVideo. Failed to access voice engine. Has SetVoiceEngine been called?
  kViEFileNotRecording,              // StopRecordOutgoingVideo
  kViEFileMaxNoOfFilesOpened,        // StartPlayFile
  kViEFileNotPlaying,                // StopPlayFile. The file with the provided id is not playing.
  kViEFileObserverAlreadyRegistered, // RegisterObserver
  kViEFileObserverNotRegistered,     // DeregisterObserver
  kViEFileInputAlreadyConnected,     // SendFileOnChannel- the video channel already have a connected input.
  kViEFileNotConnected,              // StopSendFileOnChannel- No file is being sent on the channel.
  kViEFileVoEFailure,                // SendFileOnChannel,StartPlayAudioLocally - failed to play audio stream
  kViEFileInvalidRenderId,           // SetRenderTimeoutImage and SetRenderStartImage: Renderer with the provided render id does not exist.
  kViEFileInvalidFile,               // Can't open the file with provided filename. Is the path and file format correct?
  kViEFileInvalidCapture,            // Can't use ViEPicture. Is the object correct?
  kViEFileSetRenderTimeoutError,     // SetRenderTimeoutImage- Please see log file.
  kViEFileSetStartImageError,        // SetRenderStartImage error. Please see log file.
  kViEFileUnknownError,              // An unknown error has occurred. Check the log file.

  // ViENetwork.
  kViENetworkInvalidChannelId = 12500,  // No Channel exist with the provided channel id.
  kViENetworkAlreadyReceiving,          // SetLocalReceiver: Can not change ports while receiving.
  kViENetworkLocalReceiverNotSet,       // GetLocalReceiver: SetLocalReceiver not called.
  kViENetworkAlreadySending,            // SetSendDestination
  kViENetworkDestinationNotSet,         // GetSendDestination
  kViENetworkInvalidArgument,           // GetLocalIP- Check function arguments.
  kViENetworkSendCodecNotSet,           // SetSendGQoS- Need to set the send codec first.
  kViENetworkServiceTypeNotSupported,   // SetSendGQoS
  kViENetworkNotSupported,              // SetSendGQoS Not supported on this OS.
  kViENetworkUnknownError,              // An unknown error has occurred. Check the log file.

  // ViERTP_RTCP.
  kViERtpRtcpInvalidChannelId = 12600,   // No Channel exist with the provided channel id.
  kViERtpRtcpAlreadySending,             // The channel is already sending. Need to stop send before calling this API.
  kViERtpRtcpNotSending,                 // The channel needs to be sending in order for this function to work.
  kViERtpRtcpRtcpDisabled,               // Functions failed because RTCP is disabled.
  kViERtpRtcpObserverAlreadyRegistered,  // An observer is already registered. Need to deregister the old first.
  kViERtpRtcpObserverNotRegistered,      // No observer registered.
  kViERtpRtcpUnknownError,               // An unknown error has occurred. Check the log file.

  // ViEImageProcess.
  kViEImageProcessInvalidChannelId = 12800,  // No Channel exist with the provided channel id.
  kViEImageProcessInvalidCaptureId,          // No capture device exist with the provided capture id.
  kViEImageProcessFilterExists,              // RegisterCaptureEffectFilter,RegisterSendEffectFilter,RegisterRenderEffectFilter - Effect filter already registered.
  kViEImageProcessFilterDoesNotExist,        // DeRegisterCaptureEffectFilter,DeRegisterSendEffectFilter,DeRegisterRenderEffectFilter - Effect filter not registered.
  kViEImageProcessAlreadyEnabled,            // EnableDeflickering,EnableColorEnhancement- Function already enabled.
  kViEImageProcessAlreadyDisabled,           // EnableDeflickering,EnableColorEnhancement- Function already disabled.
  kViEImageProcessUnknownError               // An unknown error has occurred. Check the log file.
};
#endif // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ERRORS_H_

View file

@ -1,52 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_EXTERNAL_CODEC_H_
#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_EXTERNAL_CODEC_H_
#include "webrtc/common_types.h"
namespace webrtc {
class VideoDecoder;
class VideoEncoder;
class VideoEngine;
// Sub-API for plugging externally implemented encoders/decoders into a video
// channel, keyed by RTP payload type.
class WEBRTC_DLLEXPORT ViEExternalCodec {
 public:
  // Factory method; returns NULL on failure (matching the GetInterface
  // convention used by the other ViE sub-APIs in this directory).
  static ViEExternalCodec* GetInterface(VideoEngine* video_engine);
  // Releases the sub-API and decreases the internal reference counter;
  // returns the new reference count.
  virtual int Release() = 0;
  // Registers |encoder| as the send codec for payload type |pl_type| on
  // |video_channel|. |internal_source| presumably indicates the encoder
  // produces its own frames rather than being fed by a capturer —
  // TODO(review): confirm against the implementation.
  virtual int RegisterExternalSendCodec(const int video_channel,
                                        const unsigned char pl_type,
                                        VideoEncoder* encoder,
                                        bool internal_source) = 0;
  // Removes a previously registered external send codec for |pl_type|.
  virtual int DeRegisterExternalSendCodec(const int video_channel,
                                          const unsigned char pl_type) = 0;
  // Registers |decoder| for payload type |pl_type| on |video_channel|.
  // |decoder_render| and |render_delay| relate to decoder-side rendering —
  // TODO(review): confirm exact semantics against the implementation.
  virtual int RegisterExternalReceiveCodec(const int video_channel,
                                           const unsigned char pl_type,
                                           VideoDecoder* decoder,
                                           bool decoder_render = false,
                                           int render_delay = 0) = 0;
  // Removes a previously registered external receive codec for |pl_type|.
  virtual int DeRegisterExternalReceiveCodec(const int video_channel,
                                             const unsigned char pl_type) = 0;
 protected:
  // Instances are created via GetInterface() and destroyed via Release().
  ViEExternalCodec() {}
  virtual ~ViEExternalCodec() {}
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_EXTERNAL_CODEC_H_

View file

@ -1,104 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
// - Effect filters
// - Deflickering
// - Color enhancement
#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_
#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_
#include "webrtc/common_types.h"
namespace webrtc {
class EncodedImageCallback;
class I420FrameCallback;
class VideoEngine;
// This class declares an abstract interface for a user defined effect filter.
// The effect filter is registered using RegisterCaptureEffectFilter(),
// RegisterSendEffectFilter() or RegisterRenderEffectFilter() and deregistered
// with the corresponding deregister function.
class WEBRTC_DLLEXPORT ViEEffectFilter {
 public:
  // This method is called with an I420 video frame allowing the user to
  // modify the video frame. |frame_buffer| holds |size| bytes of pixel data
  // for a |width| x |height| frame; modifications are made in place.
  virtual int Transform(size_t size,
                        unsigned char* frame_buffer,
                        int64_t ntp_time_ms,
                        unsigned int timestamp,
                        unsigned int width,
                        unsigned int height) = 0;
 protected:
  // Filters are never owned or deleted through this interface.
  ViEEffectFilter() {}
  virtual ~ViEEffectFilter() {}
};
// Sub-API for per-capture-device and per-channel image processing: effect
// filters, deflickering and color enhancement.
class WEBRTC_DLLEXPORT ViEImageProcess {
 public:
  // Factory for the ViEImageProcess subAPI and increases an internal
  // reference counter if successful. Returns NULL if the API is not supported
  // or if construction fails.
  static ViEImageProcess* GetInterface(VideoEngine* video_engine);
  // Releases the ViEImageProcess sub-API and decreases an internal reference
  // counter. Returns the new reference count. This value should be zero
  // for all sub-API:s before the VideoEngine object can be safely deleted.
  virtual int Release() = 0;
  // This function registers a EffectFilter to use for a specified capture
  // device.
  virtual int RegisterCaptureEffectFilter(const int capture_id,
                                          ViEEffectFilter& capture_filter) = 0;
  // This function deregisters a EffectFilter for a specified capture device.
  virtual int DeregisterCaptureEffectFilter(const int capture_id) = 0;
  // This function registers an EffectFilter to use for a specified channel.
  virtual int RegisterSendEffectFilter(const int video_channel,
                                       ViEEffectFilter& send_filter) = 0;
  // This function deregisters a send effect filter for a specified channel.
  virtual int DeregisterSendEffectFilter(const int video_channel) = 0;
  // This function registers a EffectFilter to use for the rendered video
  // stream on an incoming channel.
  virtual int RegisterRenderEffectFilter(const int video_channel,
                                         ViEEffectFilter& render_filter) = 0;
  // This function deregisters a render effect filter for a specified channel.
  virtual int DeregisterRenderEffectFilter(const int video_channel) = 0;
  // All cameras run the risk of getting in almost perfect sync with
  // florescent lamps, which will result in a very annoying flickering of the
  // image. Most cameras have some type of filter to protect against this but
  // not all of them succeed. Enabling this function will remove the flicker.
  virtual int EnableDeflickering(const int capture_id, const bool enable) = 0;
  // TODO(pbos): Remove this function when removed from fakewebrtcvideoengine.h.
  // Deprecated no-op kept only for source compatibility; always returns -1.
  virtual int EnableDenoising(const int capture_id, const bool enable) {
    return -1;
  }
  // This function enhances the colors on the decoded video stream, enabled by
  // default.
  virtual int EnableColorEnhancement(const int video_channel,
                                     const bool enable) = 0;
 protected:
  // Instances are created via GetInterface() and destroyed via Release().
  ViEImageProcess() {}
  virtual ~ViEImageProcess() {}
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_

View file

@ -1,101 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_NETWORK_H_
#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_NETWORK_H_
// This sub-API supports the following functionalities:
// - Configuring send and receive addresses.
// - External transport support.
// - Port and address filters.
// - Windows GQoS functions and ToS functions.
// - Packet timeout notification.
// - DeadorAlive connection observations.
#include "webrtc/common_types.h"
namespace webrtc {
class Transport;
class VideoEngine;
// This enumerator describes VideoEngine packet timeout states: whether any
// packet has been received within the observation period.
enum ViEPacketTimeout {
  NoPacket = 0,
  PacketReceived = 1
};
// Sub-API for network configuration of a video channel: external transport
// registration, incoming packet injection, bitrate bounds and MTU.
class WEBRTC_DLLEXPORT ViENetwork {
 public:
  // Default values.
  enum { KDefaultSampleTimeSeconds = 2 };
  // Factory for the ViENetwork subAPI and increases an internal reference
  // counter if successful. Returns NULL if the API is not supported or if
  // construction fails.
  static ViENetwork* GetInterface(VideoEngine* video_engine);
  // Releases the ViENetwork sub-API and decreases an internal reference
  // counter.Returns the new reference count. This value should be zero
  // for all sub-API:s before the VideoEngine object can be safely deleted.
  virtual int Release() = 0;
  // Sets the min/start/max bitrate bounds for the channel, in bits per
  // second (per the _bps parameter suffixes).
  virtual void SetBitrateConfig(int video_channel,
                                int min_bitrate_bps,
                                int start_bitrate_bps,
                                int max_bitrate_bps) = 0;
  // Inform the engine about if the network adapter is currently transmitting
  // packets or not.
  virtual void SetNetworkTransmissionState(const int video_channel,
                                           const bool is_transmitting) = 0;
  // This function registers a user implementation of Transport to use for
  // sending RTP and RTCP packets on this channel.
  virtual int RegisterSendTransport(const int video_channel,
                                    Transport& transport) = 0;
  // This function deregisters a used Transport for a specified channel.
  virtual int DeregisterSendTransport(const int video_channel) = 0;
  // When using external transport for a channel, received RTP packets should
  // be passed to VideoEngine using this function. The input should contain
  // the RTP header and payload.
  virtual int ReceivedRTPPacket(const int video_channel,
                                const void* data,
                                const size_t length,
                                const PacketTime& packet_time) = 0;
  // When using external transport for a channel, received RTCP packets should
  // be passed to VideoEngine using this function.
  virtual int ReceivedRTCPPacket(const int video_channel,
                                 const void* data,
                                 const size_t length) = 0;
  // This function sets the Maximum Transition Unit (MTU) for a channel. The
  // RTP packet will be packetized based on this MTU to optimize performance
  // over the network.
  virtual int SetMTU(int video_channel, unsigned int mtu) = 0;
  // Forward (audio) packet to bandwidth estimator for the given video channel,
  // for aggregated audio+video BWE. Default implementation is a no-op so
  // existing subclasses need not override it.
  virtual int ReceivedBWEPacket(const int video_channel,
      int64_t arrival_time_ms, size_t payload_size, const RTPHeader& header) {
    return 0;
  }
 protected:
  // Instances are created via GetInterface() and destroyed via Release().
  ViENetwork() {}
  virtual ~ViENetwork() {}
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_NETWORK_H_

View file

@ -1,123 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
// - Specify render destinations for incoming video streams, capture devices
// and files.
// - Configuring render streams.
#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RENDER_H_
#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RENDER_H_
#include "webrtc/common_types.h"
namespace webrtc {
class I420VideoFrame;
class VideoEngine;
class VideoRender;
class VideoRenderCallback;
// This class declares an abstract interface to be used for external renderers.
// The user implemented derived class is registered using AddRenderer().
class ExternalRenderer {
 public:
  // This method will be called when the stream to be rendered changes in
  // resolution or number of streams mixed in the image.
  virtual int FrameSizeChange(unsigned int width,
                              unsigned int height,
                              unsigned int number_of_streams) = 0;
  // This method is called when a new frame should be rendered.
  virtual int DeliverFrame(unsigned char* buffer,
                           size_t buffer_size,
                           // RTP timestamp in 90kHz.
                           uint32_t timestamp,
                           // NTP time of the capture time in local timebase
                           // in milliseconds.
                           int64_t ntp_time_ms,
                           // Wallclock render time in milliseconds.
                           int64_t render_time_ms,
                           // Handle of the underlying video frame.
                           void* handle) = 0;
  // Alternative interface for I420 frames.
  virtual int DeliverI420Frame(const I420VideoFrame& webrtc_frame) = 0;
  // Returns true if the renderer supports textures. DeliverFrame can be called
  // with NULL |buffer| and non-NULL |handle|.
  virtual bool IsTextureSupported() = 0;
 protected:
  // Renderers are never owned or deleted through this interface.
  virtual ~ExternalRenderer() {}
};
// Sub-API for attaching render destinations (native windows or external
// renderers) to capture devices, channels and files, and for configuring
// render streams.
class ViERender {
 public:
  // Factory for the ViERender subAPI and increases an internal reference
  // counter if successful. Returns NULL if the API is not supported or if
  // construction fails.
  static ViERender* GetInterface(VideoEngine* video_engine);
  // Releases the ViERender sub-API and decreases an internal reference
  // counter. Returns the new reference count. This value should be zero
  // for all sub-API:s before the VideoEngine object can be safely deleted.
  virtual int Release() = 0;
  // Registers render module.
  virtual int RegisterVideoRenderModule(VideoRender& render_module) = 0;
  // Deregisters render module.
  virtual int DeRegisterVideoRenderModule(VideoRender& render_module) = 0;
  // Sets the render destination for a given render ID. |left|, |top|,
  // |right| and |bottom| describe the destination rectangle and |z_order|
  // its stacking order within |window|.
  virtual int AddRenderer(const int render_id,
                          void* window,
                          const unsigned int z_order,
                          const float left,
                          const float top,
                          const float right,
                          const float bottom) = 0;
  // Removes the renderer for a stream.
  virtual int RemoveRenderer(const int render_id) = 0;
  // Starts rendering a render stream.
  virtual int StartRender(const int render_id) = 0;
  // Stops rendering a render stream.
  virtual int StopRender(const int render_id) = 0;
  // Set expected render time needed by graphics card or external renderer, i.e.
  // the number of ms a frame will be sent to rendering before the actual render
  // time.
  virtual int SetExpectedRenderDelay(int render_id, int render_delay) = 0;
  // Configures an already added render stream.
  virtual int ConfigureRender(int render_id,
                              const unsigned int z_order,
                              const float left,
                              const float top,
                              const float right,
                              const float bottom) = 0;
  // External render. Overload of AddRenderer that delivers frames to a
  // user-supplied ExternalRenderer in |video_input_format| instead of
  // drawing into a native window.
  virtual int AddRenderer(const int render_id,
                          RawVideoType video_input_format,
                          ExternalRenderer* renderer) = 0;
 protected:
  // Instances are created via GetInterface() and destroyed via Release().
  ViERender() {}
  virtual ~ViERender() {}
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RENDER_H_

View file

@ -1,469 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This sub-API supports the following functionalities:
// - Callbacks for RTP and RTCP events such as modified SSRC or CSRC.
// - SSRC handling.
// - Transmission of RTCP reports.
// - Obtaining RTCP data from incoming RTCP sender reports.
// - RTP and RTCP statistics (jitter, packet loss, RTT etc.).
// - Forward Error Correction (FEC).
// - Writing RTP and RTCP packets to binary files for offline analysis of the
// call quality.
// - Inserting extra RTP packets into active audio stream.
#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RTP_RTCP_H_
#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RTP_RTCP_H_
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
namespace webrtc {
class VideoEngine;
struct ReceiveBandwidthEstimatorStats;
// This enumerator sets the RTCP mode: disabled, compound packets per
// RFC 4585, or non-compound (reduced-size) packets per RFC 5506.
enum ViERTCPMode {
  kRtcpNone = 0,
  kRtcpCompound_RFC4585 = 1,
  kRtcpNonCompound_RFC5506 = 2
};
// This enumerator describes the key frame request mode: none, RTCP PLI,
// FIR carried in RTP, or FIR carried in RTCP (per the enumerator names).
enum ViEKeyFrameRequestMethod {
  kViEKeyFrameRequestNone = 0,
  kViEKeyFrameRequestPliRtcp = 1,
  kViEKeyFrameRequestFirRtp = 2,
  kViEKeyFrameRequestFirRtcp = 3
};
// Distinguishes the primary media stream from its retransmission (RTX) stream.
enum StreamType {
  kViEStreamTypeNormal = 0,  // Normal media stream
  kViEStreamTypeRtx = 1      // Retransmission media stream
};
// This class declares an abstract interface for a user defined observer. It is
// up to the VideoEngine user to implement a derived class which implements the
// observer class. The observer is registered using RegisterRTPObserver() and
// deregistered using DeregisterRTPObserver().
class WEBRTC_DLLEXPORT ViERTPObserver {
 public:
  // Called when the SSRC of the incoming stream on |video_channel| changes.
  virtual void IncomingSSRCChanged(const int video_channel,
                                   const unsigned int SSRC) = 0;
  // Called when a CSRC entry changes, or when the number of CSRCs changes.
  // |added| is true when |CSRC| was added and false when it was removed.
  virtual void IncomingCSRCChanged(const int video_channel,
                                   const unsigned int CSRC,
                                   const bool added) = 0;

 protected:
  // Protected non-public destructor: the engine never deletes observers
  // through this interface; the implementer owns the observer's lifetime.
  virtual ~ViERTPObserver() {}
};
// ViERTP_RTCP is the RTP/RTCP sub-API of VideoEngine. It covers SSRC/CSRC
// handling, RTCP mode and CNAME configuration, packet-loss protection
// (NACK/FEC), RTP header extensions, send/receive statistics and RTP dump
// recording. Most methods are pure virtual; the inline default
// implementations were kept only so that older libjingle builds keep
// compiling (see the TODO comments on each of them).
class WEBRTC_DLLEXPORT ViERTP_RTCP {
 public:
  enum { KDefaultDeltaTransmitTimeSeconds = 15 };
  enum { KMaxRTCPCNameLength = 256 };
  // Factory for the ViERTP_RTCP subAPI and increases an internal reference
  // counter if successful. Returns NULL if the API is not supported or if
  // construction fails.
  static ViERTP_RTCP* GetInterface(VideoEngine* video_engine);
  // Releases the ViERTP_RTCP sub-API and decreases an internal reference
  // counter. Returns the new reference count. This value should be zero
  // for all sub-API:s before the VideoEngine object can be safely deleted.
  virtual int Release() = 0;
  // This function enables you to specify the RTP synchronization source
  // identifier (SSRC) explicitly.
  virtual int SetLocalSSRC(const int video_channel,
                           const unsigned int SSRC,
                           const StreamType usage = kViEStreamTypeNormal,
                           const unsigned char simulcast_idx = 0) = 0;
  // This function gets the SSRC for the outgoing RTP stream for the specified
  // channel.
  virtual int GetLocalSSRC(const int video_channel,
                           unsigned int& SSRC) const = 0;
  // This function maps an incoming SSRC to a StreamType so that the engine
  // can know which is the normal stream and which is the RTX stream.
  virtual int SetRemoteSSRCType(const int video_channel,
                                const StreamType usage,
                                const unsigned int SSRC) const = 0;
  // This function gets the SSRC for the incoming RTP stream for the specified
  // channel.
  virtual int GetRemoteSSRC(const int video_channel,
                            unsigned int& SSRC) const = 0;
  // This function returns the CSRCs of the incoming RTP packets.
  virtual int GetRemoteCSRCs(const int video_channel,
                             unsigned int CSRCs[kRtpCsrcSize]) const = 0;
  // This sets a specific payload type for the RTX stream. Note that this
  // doesn't enable RTX, SetLocalSSRC must still be called to enable RTX.
  virtual int SetRtxSendPayloadType(const int video_channel,
                                    const uint8_t payload_type,
                                    const uint8_t associated_payload_type) = 0;
  virtual int SetRtxReceivePayloadType(
      const int video_channel,
      const uint8_t payload_type,
      const uint8_t associated_payload_type) = 0;
  // This function enables manual initialization of the sequence number. The
  // start sequence number is normally a random number.
  virtual int SetStartSequenceNumber(const int video_channel,
                                     unsigned short sequence_number) = 0;
  // TODO(pbos): Remove default implementation once this has been implemented
  // in libjingle.
  virtual void SetRtpStateForSsrc(int video_channel,
                                  uint32_t ssrc,
                                  const RtpState& rtp_state) {}
  // TODO(pbos): Remove default implementation once this has been implemented
  // in libjingle.
  virtual RtpState GetRtpStateForSsrc(int video_channel, uint32_t ssrc) {
    return RtpState();
  }
  // This function sets the RTCP status for the specified channel.
  // Default mode is kRtcpCompound_RFC4585.
  virtual int SetRTCPStatus(const int video_channel,
                            const ViERTCPMode rtcp_mode) = 0;
  // This function gets the RTCP status for the specified channel.
  virtual int GetRTCPStatus(const int video_channel,
                            ViERTCPMode& rtcp_mode) const = 0;
  // This function sets the RTCP canonical name (CNAME) for the RTCP reports
  // on a specific channel.
  virtual int SetRTCPCName(const int video_channel,
                           const char rtcp_cname[KMaxRTCPCNameLength]) = 0;
  // TODO(holmer): Remove this API once it has been removed from
  // fakewebrtcvideoengine.h.
  virtual int GetRTCPCName(const int video_channel,
                           char rtcp_cname[KMaxRTCPCNameLength]) const {
    return -1;
  }
  // This function gets the RTCP canonical name (CNAME) for the RTCP reports
  // received on the specified channel.
  virtual int GetRemoteRTCPCName(
      const int video_channel,
      char rtcp_cname[KMaxRTCPCNameLength]) const = 0;
  // This function sends an RTCP APP packet on a specific channel.
  virtual int SendApplicationDefinedRTCPPacket(
      const int video_channel,
      const unsigned char sub_type,
      unsigned int name,
      const char* data,
      unsigned short data_length_in_bytes) = 0;
  // This function enables Negative Acknowledgment (NACK) using RTCP,
  // implemented based on RFC 4585. NACK retransmits RTP packets if lost on
  // the network. This creates a lossless transport at the expense of delay.
  // If using NACK, NACK should be enabled on both endpoints in a call.
  virtual int SetNACKStatus(const int video_channel, const bool enable) = 0;
  // This function enables Forward Error Correction (FEC) using RTCP,
  // implemented based on RFC 5109, to improve packet loss robustness. Extra
  // FEC packets are sent together with the usual media packets, hence
  // part of the bitrate will be used for FEC packets.
  virtual int SetFECStatus(const int video_channel,
                           const bool enable,
                           const unsigned char payload_typeRED,
                           const unsigned char payload_typeFEC) = 0;
  // This function enables hybrid Negative Acknowledgment using RTCP
  // and Forward Error Correction (FEC) implemented based on RFC 5109,
  // to improve packet loss robustness. Extra
  // FEC packets are sent together with the usual media packets, hence
  // part of the bitrate will be used for FEC packets.
  // The hybrid mode will choose between nack only, fec only and both based on
  // network conditions. When both are applied, only packets that were not
  // recovered by the FEC will be nacked.
  virtual int SetHybridNACKFECStatus(const int video_channel,
                                     const bool enable,
                                     const unsigned char payload_typeRED,
                                     const unsigned char payload_typeFEC) = 0;
  // Sets send side support for delayed video buffering (actual delay will
  // be exhibited on the receiver side).
  // Target delay should be set to zero for real-time mode.
  virtual int SetSenderBufferingMode(int video_channel,
                                     int target_delay_ms) = 0;
  // Sets receive side support for delayed video buffering. Target delay should
  // be set to zero for real-time mode.
  virtual int SetReceiverBufferingMode(int video_channel,
                                       int target_delay_ms) = 0;
  // This function enables RTCP key frame requests.
  virtual int SetKeyFrameRequestMethod(
      const int video_channel, const ViEKeyFrameRequestMethod method) = 0;
  // This function enables signaling of temporary bitrate constraints using
  // RTCP, implemented based on RFC4585.
  virtual int SetTMMBRStatus(const int video_channel, const bool enable) = 0;
  // Enables and disables REMB packets for this channel. |sender| indicates
  // this channel is encoding, |receiver| tells the bitrate estimate for
  // this channel should be included in the REMB packet.
  virtual int SetRembStatus(int video_channel,
                            bool sender,
                            bool receiver) = 0;
  // Enables RTP timestamp extension offset described in RFC 5450. This call
  // must be done before ViECodec::SetSendCodec is called.
  virtual int SetSendTimestampOffsetStatus(int video_channel,
                                           bool enable,
                                           int id) = 0;
  virtual int SetReceiveTimestampOffsetStatus(int video_channel,
                                              bool enable,
                                              int id) = 0;
  // Enables RTP absolute send time header extension. This call must be done
  // before ViECodec::SetSendCodec is called.
  virtual int SetSendAbsoluteSendTimeStatus(int video_channel,
                                            bool enable,
                                            int id) = 0;
  // When enabled for a channel, *all* channels on the same transport will be
  // expected to include the absolute send time header extension.
  virtual int SetReceiveAbsoluteSendTimeStatus(int video_channel,
                                               bool enable,
                                               int id) = 0;
  virtual int SetSendVideoRotationStatus(int video_channel,
                                         bool enable,
                                         int id) = 0;
  virtual int SetReceiveVideoRotationStatus(int video_channel,
                                            bool enable,
                                            int id) = 0;
  // Enables/disables RTCP Receiver Reference Time Report Block extension/
  // DLRR Report Block extension (RFC 3611).
  virtual int SetRtcpXrRrtrStatus(int video_channel, bool enable) = 0;
  // Enables transmission smoothening, i.e. packets belonging to the same frame
  // will be sent over a longer period of time instead of sending them
  // back-to-back.
  virtual int SetTransmissionSmoothingStatus(int video_channel,
                                             bool enable) = 0;
  // Sets a minimal bitrate which will be padded to when the encoder doesn't
  // produce enough bitrate.
  // TODO(pbos): Remove default implementation when libjingle's
  // FakeWebRtcVideoEngine is updated.
  virtual int SetMinTransmitBitrate(int video_channel,
                                    int min_transmit_bitrate_kbps) {
    return -1;
  }
  // Set a constant amount to deduct from received bitrate estimates before
  // using it to allocate capacity among outgoing video streams.
  virtual int SetReservedTransmitBitrate(
      int video_channel, unsigned int reserved_transmit_bitrate_bps) {
    return 0;
  }
  // This function returns our locally created statistics of the received RTP
  // stream.
  virtual int GetReceiveChannelRtcpStatistics(const int video_channel,
                                              RtcpStatistics& basic_stats,
                                              int64_t& rtt_ms) const = 0;
  // This function returns statistics reported by the remote client in RTCP
  // report blocks. If several streams are reported, the statistics will be
  // aggregated.
  // If statistics are aggregated, extended_max_sequence_number is not reported,
  // and will always be set to 0.
  virtual int GetSendChannelRtcpStatistics(const int video_channel,
                                           RtcpStatistics& basic_stats,
                                           int64_t& rtt_ms) const = 0;
  // TODO(sprang): Temporary hacks to prevent libjingle build from failing,
  // remove when libjingle has been lifted to support webrtc issue 2589
  virtual int GetReceivedRTCPStatistics(const int video_channel,
                                        unsigned short& fraction_lost,
                                        unsigned int& cumulative_lost,
                                        unsigned int& extended_max,
                                        unsigned int& jitter,
                                        int64_t& rtt_ms) const {
    RtcpStatistics stats;
    int ret_code = GetReceiveChannelRtcpStatistics(video_channel,
                                                   stats,
                                                   rtt_ms);
    fraction_lost = stats.fraction_lost;
    cumulative_lost = stats.cumulative_lost;
    extended_max = stats.extended_max_sequence_number;
    jitter = stats.jitter;
    return ret_code;
  }
  virtual int GetSentRTCPStatistics(const int video_channel,
                                    unsigned short& fraction_lost,
                                    unsigned int& cumulative_lost,
                                    unsigned int& extended_max,
                                    unsigned int& jitter,
                                    int64_t& rtt_ms) const {
    RtcpStatistics stats;
    int ret_code = GetSendChannelRtcpStatistics(video_channel,
                                                stats,
                                                rtt_ms);
    fraction_lost = stats.fraction_lost;
    cumulative_lost = stats.cumulative_lost;
    extended_max = stats.extended_max_sequence_number;
    jitter = stats.jitter;
    return ret_code;
  }
  virtual int RegisterSendChannelRtcpStatisticsCallback(
      int video_channel, RtcpStatisticsCallback* callback) = 0;
  virtual int DeregisterSendChannelRtcpStatisticsCallback(
      int video_channel, RtcpStatisticsCallback* callback) = 0;
  virtual int RegisterReceiveChannelRtcpStatisticsCallback(
      int video_channel, RtcpStatisticsCallback* callback) = 0;
  virtual int DeregisterReceiveChannelRtcpStatisticsCallback(
      int video_channel, RtcpStatisticsCallback* callback) = 0;
  // The function gets statistics from the sent and received RTP streams.
  virtual int GetRtpStatistics(const int video_channel,
                               StreamDataCounters& sent,
                               StreamDataCounters& received) const = 0;
  // TODO(sprang): Temporary hacks to prevent libjingle build from failing,
  // remove when libjingle has been lifted to support webrtc issue 2589
  virtual int GetRTPStatistics(const int video_channel,
                               size_t& bytes_sent,
                               unsigned int& packets_sent,
                               size_t& bytes_received,
                               unsigned int& packets_received) const {
    StreamDataCounters sent;
    StreamDataCounters received;
    int ret_code = GetRtpStatistics(video_channel, sent, received);
    bytes_sent = sent.transmitted.payload_bytes;
    packets_sent = sent.transmitted.packets;
    bytes_received = received.transmitted.payload_bytes;
    packets_received = received.transmitted.packets;
    return ret_code;
  }
  virtual int RegisterSendChannelRtpStatisticsCallback(
      int video_channel, StreamDataCountersCallback* callback) = 0;
  virtual int DeregisterSendChannelRtpStatisticsCallback(
      int video_channel, StreamDataCountersCallback* callback) = 0;
  virtual int RegisterReceiveChannelRtpStatisticsCallback(
      int video_channel, StreamDataCountersCallback* callback) = 0;
  virtual int DeregisterReceiveChannelRtpStatisticsCallback(
      int video_channel, StreamDataCountersCallback* callback) = 0;
  // Gets RTCP packet type statistics from a sent/received stream.
  virtual int GetSendRtcpPacketTypeCounter(
      int video_channel,
      RtcpPacketTypeCounter* packet_counter) const = 0;
  virtual int GetReceiveRtcpPacketTypeCounter(
      int video_channel,
      RtcpPacketTypeCounter* packet_counter) const = 0;
  // The function gets bandwidth usage statistics from the sent RTP streams in
  // bits/s.
  virtual int GetBandwidthUsage(const int video_channel,
                                unsigned int& total_bitrate_sent,
                                unsigned int& video_bitrate_sent,
                                unsigned int& fec_bitrate_sent,
                                unsigned int& nackBitrateSent) const = 0;
  // (De)Register an observer, called whenever the send bitrate is updated
  virtual int RegisterSendBitrateObserver(
      int video_channel,
      BitrateStatisticsObserver* observer) = 0;
  virtual int DeregisterSendBitrateObserver(
      int video_channel,
      BitrateStatisticsObserver* observer) = 0;
  // This function gets the send-side estimated bandwidth available for video,
  // including overhead, in bits/s.
  virtual int GetEstimatedSendBandwidth(
      const int video_channel,
      unsigned int* estimated_bandwidth) const = 0;
  // This function gets the receive-side estimated bandwidth available for
  // video, including overhead, in bits/s. |estimated_bandwidth| is 0 if there
  // is no valid estimate.
  virtual int GetEstimatedReceiveBandwidth(
      const int video_channel,
      unsigned int* estimated_bandwidth) const = 0;
  // This function gets the PacedSender queuing delay for the last sent frame.
  // TODO(jiayl): remove the default impl when libjingle is updated.
  virtual int GetPacerQueuingDelayMs(
      const int video_channel, int64_t* delay_ms) const {
    return -1;
  }
  // This function enables capturing of RTP packets to a binary file on a
  // specific channel and for a given direction. The file can later be
  // replayed using e.g. RTP Tools rtpplay since the binary file format is
  // compatible with the rtpdump format.
  virtual int StartRTPDump(const int video_channel,
                           const char file_nameUTF8[1024],
                           RTPDirections direction) = 0;
  // This function disables capturing of RTP packets to a binary file on a
  // specific channel and for a given direction.
  virtual int StopRTPDump(const int video_channel,
                          RTPDirections direction) = 0;
  // Registers an instance of a user implementation of the ViERTPObserver.
  virtual int RegisterRTPObserver(const int video_channel,
                                  ViERTPObserver& observer) = 0;
  // Removes a registered instance of ViERTPObserver.
  virtual int DeregisterRTPObserver(const int video_channel) = 0;
  // Registers an instance of a user implementation of ViEFrameCountObserver.
  virtual int RegisterSendFrameCountObserver(
      int video_channel, FrameCountObserver* observer) = 0;
  // Removes a registered instance of a ViEFrameCountObserver.
  virtual int DeregisterSendFrameCountObserver(
      int video_channel, FrameCountObserver* observer) = 0;
  // Called when RTCP packet type counters might have been changed. User has to
  // filter on SSRCs to determine whether it's status sent or received.
  virtual int RegisterRtcpPacketTypeCounterObserver(
      int video_channel,
      RtcpPacketTypeCounterObserver* observer) = 0;

 protected:
  // Destroy through Release(), never directly through the interface pointer.
  virtual ~ViERTP_RTCP() {}
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RTP_RTCP_H_

View file

@ -18,12 +18,113 @@
#include "webrtc/base/thread_annotations.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/interface/module.h"
#include "webrtc/video_engine/include/vie_base.h"
namespace webrtc {
class Clock;
class CpuOveruseObserver;
// CpuOveruseObserver is called when a system overuse is detected and
// VideoEngine cannot keep up the encoding frequency.
class CpuOveruseObserver {
 public:
  // Called as soon as an overuse is detected.
  virtual void OveruseDetected() = 0;
  // Called periodically when the system is not overused any longer.
  virtual void NormalUsage() = 0;

 protected:
  // Protected non-public destructor: the detector never deletes the observer;
  // the implementer owns the observer's lifetime.
  virtual ~CpuOveruseObserver() {}
};
// Tuning knobs for the CPU overuse detector. Two detection methods exist: one
// based on inter-arrival jitter of captured frames and one based on the encode
// time of frames. Each method has its own enable flag and low/high thresholds;
// the general settings below apply to both.
struct CpuOveruseOptions {
  // Defaults: jitter method off, encode-usage method on with 55%/85%
  // thresholds, RSD thresholds disabled (-1).
  CpuOveruseOptions()
      : enable_capture_jitter_method(false),
        low_capture_jitter_threshold_ms(20.0f),
        high_capture_jitter_threshold_ms(30.0f),
        enable_encode_usage_method(true),
        low_encode_usage_threshold_percent(55),
        high_encode_usage_threshold_percent(85),
        low_encode_time_rsd_threshold(-1),
        high_encode_time_rsd_threshold(-1),
        enable_extended_processing_usage(true),
        frame_timeout_interval_ms(1500),
        min_frame_samples(120),
        min_process_count(3),
        high_threshold_consecutive_count(2) {}
  // Method based on inter-arrival jitter of captured frames.
  bool enable_capture_jitter_method;
  float low_capture_jitter_threshold_ms;  // Threshold for triggering underuse.
  float high_capture_jitter_threshold_ms;  // Threshold for triggering overuse.
  // Method based on encode time of frames.
  bool enable_encode_usage_method;
  int low_encode_usage_threshold_percent;  // Threshold for triggering underuse.
  int high_encode_usage_threshold_percent;  // Threshold for triggering overuse.
  // TODO(asapersson): Remove options, not used.
  int low_encode_time_rsd_threshold;   // Additional threshold for triggering
                                       // underuse (used in addition to
                                       // threshold above if configured).
  int high_encode_time_rsd_threshold;  // Additional threshold for triggering
                                       // overuse (used in addition to
                                       // threshold above if configured).
  bool enable_extended_processing_usage;  // Include a larger time span (in
                                          // addition to encode time) for
                                          // measuring the processing time of a
                                          // frame.
  // General settings.
  int frame_timeout_interval_ms;  // The maximum allowed interval between two
                                  // frames before resetting estimations.
  int min_frame_samples;  // The minimum number of frames required.
  int min_process_count;  // The number of initial process times required before
                          // triggering an overuse/underuse.
  int high_threshold_consecutive_count;  // The number of consecutive checks
                                         // above the high threshold before
                                         // triggering an overuse.

  // Field-by-field equality; used to detect whether a new configuration
  // actually differs from the current one.
  bool Equals(const CpuOveruseOptions& o) const {
    return enable_capture_jitter_method == o.enable_capture_jitter_method &&
        low_capture_jitter_threshold_ms == o.low_capture_jitter_threshold_ms &&
        high_capture_jitter_threshold_ms ==
        o.high_capture_jitter_threshold_ms &&
        enable_encode_usage_method == o.enable_encode_usage_method &&
        low_encode_usage_threshold_percent ==
        o.low_encode_usage_threshold_percent &&
        high_encode_usage_threshold_percent ==
        o.high_encode_usage_threshold_percent &&
        low_encode_time_rsd_threshold == o.low_encode_time_rsd_threshold &&
        high_encode_time_rsd_threshold == o.high_encode_time_rsd_threshold &&
        enable_extended_processing_usage ==
        o.enable_extended_processing_usage &&
        frame_timeout_interval_ms == o.frame_timeout_interval_ms &&
        min_frame_samples == o.min_frame_samples &&
        min_process_count == o.min_process_count &&
        high_threshold_consecutive_count == o.high_threshold_consecutive_count;
  }
};
// Snapshot of the overuse detector's current measurements. A value of -1
// means "no estimate yet" (the default-constructed state).
struct CpuOveruseMetrics {
  CpuOveruseMetrics()
      : capture_jitter_ms(-1),
        avg_encode_time_ms(-1),
        encode_usage_percent(-1),
        capture_queue_delay_ms_per_s(-1) {}

  int capture_jitter_ms;  // The current estimated jitter in ms based on
                          // incoming captured frames.
  int avg_encode_time_ms;  // The average encode time in ms.
  int encode_usage_percent;  // The average encode time divided by the average
                             // time difference between incoming captured frames.
  int capture_queue_delay_ms_per_s;  // The current time delay between an
                                     // incoming captured frame until the frame
                                     // is being processed. The delay is
                                     // expressed in ms delay per second.
};
// Callback interface for receiving updated CpuOveruseMetrics snapshots.
class CpuOveruseMetricsObserver {
 public:
  virtual ~CpuOveruseMetricsObserver() {}
  // Invoked with the latest metrics whenever the detector updates them.
  virtual void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) = 0;
};
// TODO(pbos): Move this somewhere appropriate.
class Statistics {

View file

@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/overuse_frame_detector.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/overuse_frame_detector.h"
namespace webrtc {
namespace {

View file

@ -1,6 +0,0 @@
per-file *.isolate=kjellander@webrtc.org
# These are for the common case of adding or renaming files. If you're doing
# structural changes, please get a review from a reviewer in this file.
per-file *.gyp=*
per-file *.gypi=*

View file

@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="lib" path="libs/VideoEngine_android_java.jar"/>
<classpathentry kind="lib" path="libs/VoiceEngine_android_java.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View file

@ -1,33 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>ViEAutotest</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.ApkBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>com.android.ide.eclipse.adt.AndroidNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

View file

@ -1,26 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
android:versionCode="1"
android:versionName="1.0" package="org.webrtc.vieautotest">
<application android:label="@string/app_name"
android:debuggable="true"
android:icon="@drawable/logo">
<activity android:label="@string/app_name"
android:name="ViEAutotest">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-sdk android:minSdkVersion="3" android:targetSdkVersion="8" />
<uses-permission android:name="android.permission.CAMERA"></uses-permission>
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
</manifest>

View file

@ -1,11 +0,0 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
# Project target.
target=android-9

View file

@ -1,37 +0,0 @@
/* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* aapt tool from the resource data it found. It
* should not be modified by hand.
*/
package org.webrtc.vieautotest;
public final class R {
public static final class array {
public static final int subtest_array=0x7f050001;
public static final int test_array=0x7f050000;
}
public static final class attr {
}
public static final class drawable {
public static final int logo=0x7f020000;
}
public static final class id {
public static final int Button01=0x7f060004;
public static final int LocalView=0x7f060001;
public static final int RemoteView=0x7f060000;
public static final int subtestSpinner=0x7f060003;
public static final int testSpinner=0x7f060002;
}
public static final class layout {
public static final int main=0x7f030000;
}
public static final class string {
public static final int SpinnerSubtest=0x7f040004;
public static final int SpinnerTitle=0x7f040003;
public static final int TitleName=0x7f040001;
public static final int app_name=0x7f040000;
public static final int run_button=0x7f040002;
}
}

View file

@ -1,34 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class org_webrtc_vieautotest_ViEAutotest */
#ifndef _Included_org_webrtc_vieautotest_ViEAutotest
#define _Included_org_webrtc_vieautotest_ViEAutotest
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: org_webrtc_vieautotest_ViEAutotest
* Method: RunTest
* Signature: (IILandroid/view/SurfaceView;Landroid/view/SurfaceView;)I
*/
JNIEXPORT jint JNICALL
Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_view_SurfaceView_2Landroid_view_SurfaceView_2
(JNIEnv *, jobject, jint, jint, jobject, jobject);
#ifdef __cplusplus
}
#endif
#endif

View file

@ -1,137 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <android/log.h>
#include <pthread.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include "webrtc/video_engine/test/auto_test/android/jni/org_webrtc_vieautotest_vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_android.h"
#define WEBRTC_LOG_TAG "*WEBRTCN*"
// VideoEngine data struct
typedef struct
{
JavaVM* jvm;
} VideoEngineData;
// Global variables
JavaVM* webrtcGlobalVM;
// Global variables visible in this file
static VideoEngineData vieData;
// "Local" functions (i.e. not Java accessible)
#define WEBRTC_TRACE_MAX_MESSAGE_SIZE 1024
static bool GetSubAPIs(VideoEngineData& vieData);
static bool ReleaseSubAPIs(VideoEngineData& vieData);
//
// General functions
//
// JNI_OnLoad
// Called by the Android runtime when this shared library is loaded. Validates
// the VM pointer, checks that JNI 1.4 is available, and stashes the JavaVM in
// the global vieData so native code can attach threads later. Returns the
// required JNI version, or -1 on failure (which aborts the library load).
jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
  if (vm == NULL) {
    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
                        "JNI_OnLoad did not receive a valid VM pointer");
    return -1;
  }
  JNIEnv* jni_env = NULL;
  const jint env_status =
      vm->GetEnv(reinterpret_cast<void**>(&jni_env), JNI_VERSION_1_4);
  if (env_status != JNI_OK) {
    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
                        "JNI_OnLoad could not get JNI env");
    return -1;
  }
  // Init ViE data: remember the VM for later use by the test runner.
  vieData.jvm = vm;
  return JNI_VERSION_1_4;
}
// Class: org_webrtc_vieautotest_ViEAutotest
// Method: RunTest
// Signature: (IILandroid/opengl/GLSurfaceView;Landroid/opengl/GLSurfaceView;)I
// JNI entry point for ViEAutotest.RunTest taking two GLSurfaceView objects.
// Forwards to the native auto-test runner and returns its error count.
JNIEXPORT jint JNICALL
Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_opengl_GLSurfaceView_2Landroid_opengl_GLSurfaceView_2(
    JNIEnv* env,
    jobject context,
    jint testType,
    jint subtestType,
    jobject glView1,
    jobject glView2)
{
  // The runner's return value is the number of errors encountered.
  return ViEAutoTestAndroid::RunAutotest(testType, subtestType, glView1,
                                         glView2, vieData.jvm, env, context);
}
// Class: org_webrtc_vieautotest_ViEAutotest
// Method: RunTest
// Signature: (IILandroid/view/SurfaceView;Landroid/view/SurfaceView;)I
// JNI entry point for ViEAutotest.RunTest taking two SurfaceView objects.
// Forwards to the native auto-test runner and returns its error count.
JNIEXPORT jint JNICALL
Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_view_SurfaceView_2Landroid_view_SurfaceView_2(
    JNIEnv* env,
    jobject context,
    jint testType,
    jint subtestType,
    jobject surfaceHolder1,
    jobject surfaceHolder2)
{
  // The runner's return value is the number of errors encountered.
  return ViEAutoTestAndroid::RunAutotest(testType, subtestType,
                                         surfaceHolder1, surfaceHolder2,
                                         vieData.jvm, env, context);
}
//
//local function
//
// Stubbed-out helper that used to acquire the ViE sub-API interfaces. The
// actual interface lookup is commented out, so the bare block below always
// executes: as written this function always logs "Could not get Base API"
// and always returns false.
bool GetSubAPIs(VideoEngineData& vieData) {
  bool retVal = true;
  //vieData.base = ViEBase::GetInterface(vieData.vie);
  //if (vieData.base == NULL)
  {
    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
                        "Could not get Base API");
    retVal = false;
  }
  return retVal;
}
// Stubbed-out counterpart to GetSubAPIs. The release calls are commented out
// and the failure branch is guarded by `if (false)`, so as written this
// function never logs and always returns true.
bool ReleaseSubAPIs(VideoEngineData& vieData) {
  bool releaseOk = true;
  //if (vieData.base)
  {
    //if (vieData.base->Release() != 0)
    if (false) {  // Dead branch: kept to preserve the original release logic.
      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
                          "Release base sub-API failed");
      releaseOk = false;
    }
    else {
      //vieData.base = NULL;
    }
  }
  return releaseOk;
}

Binary file not shown.

Before

(image error) Size: 409 B

View file

@ -1,64 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<RelativeLayout
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<LinearLayout
android:id="@+id/RemoteView"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_weight="1">
<!-- log instead of video
<ImageView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:scaleType="fitXY"
android:src="@drawable/logo" /> -->
</LinearLayout>
<LinearLayout
android:id="@+id/LocalView"
android:layout_width="120dip"
android:layout_height="120dip"
android:layout_weight="1">
<!-- <ImageView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:scaleType="fitXY"
android:src="@drawable/logo" /> -->
</LinearLayout>
<LinearLayout
android:orientation="horizontal"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true">
<LinearLayout
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true">
<Spinner
android:id="@+id/testSpinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:prompt="@string/SpinnerTitle"
/>
<Spinner
android:id="@+id/subtestSpinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:prompt="@string/SpinnerSubtest"
/>
<Button
android:text="@string/run_button"
android:id="@+id/Button01"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
</Button>
</LinearLayout>
</LinearLayout>
</RelativeLayout>
</FrameLayout>

View file

@ -1,30 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">ViEAutotest</string>
<string name="TitleName">ViEAutotest</string>
<string name="run_button">Run Test</string>
<string name="SpinnerTitle">Test type...</string>
<string-array name="test_array">
<item>Standard</item>
<item>API</item>
<item>Extended</item>
<item>Loopback</item>
<item>Custom</item>
</string-array>
<string name="SpinnerSubtest">Run...</string>
<string-array name="subtest_array">
<item>All</item>
<item>Base</item>
<item>Capture</item>
<item>Codec</item>
<item>Mix</item>
<item>External Codec</item>
<item>File</item>
<item>Image Process</item>
<item>Network</item>
<item>Render</item>
<item>RTP/RTCP</item>
</string-array>
</resources>

View file

@ -1,162 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.vieautotest;
import org.webrtc.vieautotest.R;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.widget.Button;
import android.view.SurfaceView;
import android.view.View;
import android.view.SurfaceHolder;
import android.widget.LinearLayout;
import android.opengl.GLSurfaceView;
import android.widget.Spinner;
import android.widget.ArrayAdapter;
import android.widget.AdapterView;
/**
* Test-runner activity for the ViE auto tests. Two spinners pick the test
* type and sub test, and the Run button executes the selection on a
* background thread via the native RunTest() entry point. Two SurfaceViews
* (local preview overlaid on remote) are handed to the native code.
*/
public class ViEAutotest extends Activity
implements
AdapterView.OnItemSelectedListener,
View.OnClickListener {
// NOTE(review): testThread and the two GLSurfaceView members are never
// used in this class (tests run on anonymous threads created in
// onClick()). Candidates for removal - confirm no JNI/native use first.
private Thread testThread;
// Spinner listing the test types (entries from R.array.test_array).
private Spinner testSpinner;
// Spinner listing the sub tests (entries from R.array.subtest_array).
private Spinner subtestSpinner;
// Spinner positions currently selected; passed straight to RunTest().
private int testSelection;
private int subTestSelection;
// View for remote video
private LinearLayout remoteSurface = null;
private GLSurfaceView glSurfaceView = null;
private SurfaceView surfaceView = null;
// Container and view for the local preview, rendered as a media overlay.
private LinearLayout localSurface = null;
private GLSurfaceView glLocalSurfaceView = null;
private SurfaceView localSurfaceView = null;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.d("*WEBRTC*", "onCreate called");
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// Set the Start button action
final Button buttonStart = (Button) findViewById(R.id.Button01);
buttonStart.setOnClickListener(this);
// Set test spinner
testSpinner = (Spinner) findViewById(R.id.testSpinner);
ArrayAdapter<CharSequence> adapter =
ArrayAdapter.createFromResource(this, R.array.test_array,
android.R.layout.simple_spinner_item);
int resource = android.R.layout.simple_spinner_dropdown_item;
adapter.setDropDownViewResource(resource);
testSpinner.setAdapter(adapter);
testSpinner.setOnItemSelectedListener(this);
// Set sub test spinner
subtestSpinner = (Spinner) findViewById(R.id.subtestSpinner);
ArrayAdapter<CharSequence> subtestAdapter =
ArrayAdapter.createFromResource(this, R.array.subtest_array,
android.R.layout.simple_spinner_item);
subtestAdapter.setDropDownViewResource(resource);
subtestSpinner.setAdapter(subtestAdapter);
subtestSpinner.setOnItemSelectedListener(this);
// Attach the remote view, then the local view; the local view is marked
// as a media overlay before being added so it draws on top.
remoteSurface = (LinearLayout) findViewById(R.id.RemoteView);
surfaceView = new SurfaceView(this);
remoteSurface.addView(surfaceView);
localSurface = (LinearLayout) findViewById(R.id.LocalView);
localSurfaceView = new SurfaceView(this);
localSurfaceView.setZOrderMediaOverlay(true);
localSurface.addView(localSurfaceView);
// Set members
testSelection = 0;
subTestSelection = 0;
}
// Run button handler: executes the selected test off the UI thread so
// the UI stays responsive while the native test runs.
public void onClick(View v) {
Log.d("*WEBRTC*", "Button clicked...");
switch (v.getId()) {
case R.id.Button01:
new Thread(new Runnable() {
public void run() {
Log.d("*WEBRTC*", "Calling RunTest...");
RunTest(testSelection, subTestSelection,
localSurfaceView, surfaceView);
Log.d("*WEBRTC*", "RunTest done");
}
}).start();
}
}
// Records the selection of whichever spinner fired the event.
public void onItemSelected(AdapterView<?> parent, View v,
int position, long id) {
if (parent == (Spinner) findViewById(R.id.testSpinner)) {
testSelection = position;
} else {
subTestSelection = position;
}
}
public void onNothingSelected(AdapterView<?> parent) {
}
// Lifecycle overrides below only forward to the superclass.
@Override
protected void onStart() {
super.onStart();
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onStop() {
super.onStop();
}
@Override
protected void onDestroy() {
super.onDestroy();
}
// C++ function performing the chosen test
// private native int RunTest(int testSelection, int subtestSelection,
// GLSurfaceView window1, GLSurfaceView window2);
private native int RunTest(int testSelection, int subtestSelection,
SurfaceView window1, SurfaceView window2);
// this is used to load the 'ViEAutotestJNIAPI' library on application
// startup.
static {
Log.d("*WEBRTC*", "Loading ViEAutotest...");
System.loadLibrary("webrtc-video-autotest-jni");
}
}

View file

@ -1,28 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/automated/legacy_fixture.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
// Opens the two render windows via the base fixture, then builds the
// ViEAutoTest instance (owned by this fixture) that the legacy tests use.
void LegacyFixture::SetUpTestCase() {
TwoWindowsFixture::SetUpTestCase();
// Create the test cases
tests_ = new ViEAutoTest(window_1_, window_2_);
}
// Destroys the ViEAutoTest instance created in SetUpTestCase, then lets the
// base fixture close the two render windows.
void LegacyFixture::TearDownTestCase() {
  delete tests_;
  // Fix: reset the pointer so a repeated SetUpTestCase/TearDownTestCase
  // cycle in the same process can never observe a dangling pointer.
  tests_ = NULL;
  TwoWindowsFixture::TearDownTestCase();
}

// Storage for the static fixture pointer.
ViEAutoTest* LegacyFixture::tests_ = NULL;

View file

@ -1,29 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_AUTOMATED_VIE_LEGACY_FIXTURE_H_
#define WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_AUTOMATED_VIE_LEGACY_FIXTURE_H_
#include "webrtc/video_engine/test/auto_test/automated/two_windows_fixture.h"
// Inherited by old-style standard integration tests based on ViEAutoTest.
class LegacyFixture : public TwoWindowsFixture {
public:
// Initializes ViEAutoTest in addition to the work done by ViEIntegrationTest.
static void SetUpTestCase();
// Releases anything allocated by SetupTestCase.
static void TearDownTestCase();
protected:
// Owned: created in SetUpTestCase with the two windows from the base
// fixture, deleted in TearDownTestCase.
static ViEAutoTest* tests_;
};
#endif // WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_AUTOMATED_VIE_LEGACY_FIXTURE_H_

View file

@ -1,33 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/automated/two_windows_fixture.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_window_creator.h"
// Creates a ViEWindowCreator, asks it for two windows, and caches the
// resulting native window handles for use by derived fixtures.
void TwoWindowsFixture::SetUpTestCase() {
window_creator_ = new ViEWindowCreator();
ViEAutoTestWindowManagerInterface* window_manager =
window_creator_->CreateTwoWindows();
window_1_ = window_manager->GetWindow1();
window_2_ = window_manager->GetWindow2();
}
void TwoWindowsFixture::TearDownTestCase() {
window_creator_->TerminateWindows();
delete window_creator_;
}
ViEWindowCreator* TwoWindowsFixture::window_creator_ = NULL;
void* TwoWindowsFixture::window_1_ = NULL;
void* TwoWindowsFixture::window_2_ = NULL;

View file

@ -1,35 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_AUTOMATED_TWO_WINDOWS_FIXTURE_H_
#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_AUTOMATED_TWO_WINDOWS_FIXTURE_H_
#include "testing/gtest/include/gtest/gtest.h"
class ViEWindowCreator;
class ViEAutoTest;
// Meant to be inherited by all standard test who require two windows.
class TwoWindowsFixture : public testing::Test {
public:
// Launches two windows in a platform-dependent manner and stores the handles
// in the window_1_ and window_2_ fields.
static void SetUpTestCase();
// Releases anything allocated by SetupTestCase.
static void TearDownTestCase();
protected:
// Opaque platform-dependent window handles obtained from window_creator_.
static void* window_1_;
static void* window_2_;
// Owned: creates the windows in SetUpTestCase and terminates them in
// TearDownTestCase.
static ViEWindowCreator* window_creator_;
};
#endif // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_AUTOMATED_TWO_WINDOWS_FIXTURE_H_

View file

@ -1,52 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/video_engine/test/auto_test/automated/legacy_fixture.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
namespace {
// Each test below forwards to the corresponding ViEAutoTest API suite
// (built once per test case by LegacyFixture); failures are reported via
// googletest macros inside those suites.
// TODO(phoglund): These tests are generally broken on mac.
// http://code.google.com/p/webrtc/issues/detail?id=1268
class DISABLED_ON_MAC(ViEApiIntegrationTest) : public LegacyFixture {
};
TEST_F(DISABLED_ON_MAC(ViEApiIntegrationTest), RunsBaseTestWithoutErrors) {
tests_->ViEBaseAPITest();
}
// TODO(phoglund): Crashes on the v4l2loopback camera.
TEST_F(DISABLED_ON_MAC(ViEApiIntegrationTest),
DISABLED_RunsCaptureTestWithoutErrors) {
tests_->ViECaptureAPITest();
}
TEST_F(DISABLED_ON_MAC(ViEApiIntegrationTest), RunsCodecTestWithoutErrors) {
tests_->ViECodecAPITest();
}
TEST_F(DISABLED_ON_MAC(ViEApiIntegrationTest),
RunsImageProcessTestWithoutErrors) {
tests_->ViEImageProcessAPITest();
}
TEST_F(DISABLED_ON_MAC(ViEApiIntegrationTest), RunsRenderTestWithoutErrors) {
tests_->ViERenderAPITest();
}
// See: https://code.google.com/p/webrtc/issues/detail?id=2415
TEST_F(DISABLED_ON_MAC(ViEApiIntegrationTest),
DISABLED_RunsRtpRtcpTestWithoutErrors) {
tests_->ViERtpRtcpAPITest();
}
} // namespace

View file

@ -1,55 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/video_engine/test/auto_test/automated/legacy_fixture.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
namespace {
// Each test below forwards to the corresponding ViEAutoTest "extended"
// suite; failures are reported via googletest macros inside those suites.
// TODO(phoglund): These tests are generally broken on mac.
// http://code.google.com/p/webrtc/issues/detail?id=1268
class DISABLED_ON_MAC(ViEExtendedIntegrationTest) : public LegacyFixture {
};
TEST_F(DISABLED_ON_MAC(ViEExtendedIntegrationTest), RunsBaseTestWithoutErrors) {
tests_->ViEBaseExtendedTest();
}
// TODO(phoglund): Crashes on the v4l2loopback camera.
TEST_F(DISABLED_ON_MAC(ViEExtendedIntegrationTest),
DISABLED_RunsCaptureTestWithoutErrors) {
tests_->ViECaptureExtendedTest();
}
// Flaky on Windows: http://code.google.com/p/webrtc/issues/detail?id=1925
// (in addition to being disabled on Mac due to webrtc:1268).
#if defined(_WIN32)
#define MAYBE_RunsCodecTestWithoutErrors DISABLED_RunsCodecTestWithoutErrors
#else
#define MAYBE_RunsCodecTestWithoutErrors RunsCodecTestWithoutErrors
#endif
TEST_F(DISABLED_ON_MAC(ViEExtendedIntegrationTest),
MAYBE_RunsCodecTestWithoutErrors) {
tests_->ViECodecExtendedTest();
}
TEST_F(DISABLED_ON_MAC(ViEExtendedIntegrationTest),
RunsImageProcessTestWithoutErrors) {
tests_->ViEImageProcessExtendedTest();
}
TEST_F(DISABLED_ON_MAC(ViEExtendedIntegrationTest),
RunsRenderTestWithoutErrors) {
tests_->ViERenderExtendedTest();
}
} // namespace

View file

@ -1,228 +0,0 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
#include "webrtc/system_wrappers/interface/sleep.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
namespace {
// Fake send transport that records every REMB message it sees in outgoing
// RTCP packets so tests can later query for them with FindREMBFor().
// RTP sends are unexpected and fail the test.
class RtcpCollectorTransport : public webrtc::Transport {
public:
RtcpCollectorTransport() : packets_() {}
virtual ~RtcpCollectorTransport() {}
// Media packets must never be sent through this transport.
int SendPacket(int /*channel*/,
const void* /*data*/,
size_t /*len*/) override {
EXPECT_TRUE(false);
return 0;
}
// Inspects an outgoing RTCP packet; if it is a receiver report (PT 201)
// with an appended REMB, the parsed REMB is stored in packets_.
int SendRTCPPacket(int channel, const void* data, size_t len) override {
const uint8_t* buf = static_cast<const uint8_t*>(data);
webrtc::RtpUtility::RtpHeaderParser parser(buf, len);
if (parser.RTCP()) {
Packet p;
p.channel = channel;
p.length = len;
if (parser.ParseRtcp(&p.header)) {
// Skip the 20-byte receiver-report portion; the REMB (if any)
// follows it. Other RTCP types are ignored.
if (p.header.payloadType == 201 && len >= 20) {
buf += 20;
len -= 20;
} else {
return 0;
}
if (TryParseREMB(buf, len, &p)) {
packets_.push_back(p);
}
}
}
return 0;
}
// True if any recorded REMB mentions |ssrc| with a bitrate estimate of at
// least |min_rate| (bps).
bool FindREMBFor(uint32_t ssrc, double min_rate) const {
for (std::vector<Packet>::const_iterator it = packets_.begin();
it != packets_.end(); ++it) {
if (it->remb_bitrate >= min_rate && it->remb_ssrc.end() !=
std::find(it->remb_ssrc.begin(), it->remb_ssrc.end(), ssrc)) {
return true;
}
}
return false;
}
private:
// One recorded REMB observation.
struct Packet {
Packet() : channel(-1), length(0), header(), remb_bitrate(0), remb_ssrc() {}
int channel;
size_t length;
webrtc::RTPHeader header;
double remb_bitrate;
std::vector<uint32_t> remb_ssrc;
};
// Parses a REMB block starting at |buf|: the "REMB" identifier, the SSRC
// count, a 6-bit exponent / 18-bit mantissa bitrate, then the SSRC list.
// Returns false (leaving |p| partially filled) on any length mismatch.
bool TryParseREMB(const uint8_t* buf, size_t length, Packet* p) {
if (length < 8) {
return false;
}
if (buf[0] != 'R' || buf[1] != 'E' || buf[2] != 'M' || buf[3] != 'B') {
return false;
}
size_t ssrcs = buf[4];
uint8_t exp = buf[5] >> 2;
uint32_t mantissa = ((buf[5] & 0x03) << 16) + (buf[6] << 8) + buf[7];
double bitrate = mantissa * static_cast<double>(1 << exp);
p->remb_bitrate = bitrate;
if (length < (8 + 4 * ssrcs)) {
return false;
}
buf += 8;
for (size_t i = 0; i < ssrcs; ++i) {
uint32_t ssrc = (buf[0] << 24) + (buf[1] << 16) + (buf[2] << 8) + buf[3];
p->remb_ssrc.push_back(ssrc);
buf += 4;
}
return true;
}
std::vector<Packet> packets_;
};
// Fixture for the bandwidth-estimation tests below: creates a ViE channel
// with receive-side REMB enabled and routes its RTCP through the
// REMB-collecting fake transport.
class ViENetworkTest : public testing::Test {
protected:
ViENetworkTest() : vie_("ViENetworkTest"), channel_(-1), transport() {}
virtual ~ViENetworkTest() {}
void SetUp() override {
EXPECT_EQ(0, vie_.base->CreateChannel(channel_));
// REMB: sender off, receiver on - this side only reports estimates.
EXPECT_EQ(0, vie_.rtp_rtcp->SetRembStatus(channel_, false, true));
EXPECT_EQ(0, vie_.network->RegisterSendTransport(channel_, transport));
}
void TearDown() override {
EXPECT_EQ(0, vie_.network->DeregisterSendTransport(channel_));
}
// Feeds kPacketCount synthetic packets carrying the absolute-send-time
// extension into the receive-side bandwidth estimator, spaced
// kIntervalMs apart in real time.
void ReceiveASTPacketsForBWE() {
for (int i = 0; i < kPacketCount; ++i) {
int64_t time = webrtc::TickTime::MillisecondTimestamp();
webrtc::RTPHeader header;
header.ssrc = kSsrc1;
header.timestamp = i * 45000;
header.extension.hasAbsoluteSendTime = true;
// 24-bit AST is in 1/64 s units (18.6 fixed point) - advance 1 s/packet.
header.extension.absoluteSendTime = i << (18 - 6);
EXPECT_EQ(0, vie_.network->ReceivedBWEPacket(channel_, time, kPacketSize,
header));
webrtc::SleepMs(kIntervalMs);
}
}
enum {
kSsrc1 = 667,
kSsrc2 = 668,
kPacketCount = 100,
kPacketSize = 1000,
kIntervalMs = 22
};
TbInterfaces vie_;
int channel_;
RtcpCollectorTransport transport;
};
// Without any send-time extension, no REMB should be produced.
TEST_F(ViENetworkTest, ReceiveBWEPacket_NoExtension) {
for (int i = 0; i < kPacketCount; ++i) {
int64_t time = webrtc::TickTime::MillisecondTimestamp();
webrtc::RTPHeader header;
header.ssrc = kSsrc1;
header.timestamp = i * 45000;
EXPECT_EQ(0, vie_.network->ReceivedBWEPacket(channel_, time, kPacketSize,
header));
webrtc::SleepMs(kIntervalMs);
}
EXPECT_FALSE(transport.FindREMBFor(kSsrc1, 0.0));
unsigned int bandwidth = 0;
EXPECT_EQ(0, vie_.rtp_rtcp->GetEstimatedReceiveBandwidth(channel_,
&bandwidth));
}
// Transmission-time-offset packets alone should not trigger a REMB either.
TEST_F(ViENetworkTest, ReceiveBWEPacket_TOF) {
EXPECT_EQ(0, vie_.rtp_rtcp->SetReceiveTimestampOffsetStatus(channel_, true,
1));
for (int i = 0; i < kPacketCount; ++i) {
int64_t time = webrtc::TickTime::MillisecondTimestamp();
webrtc::RTPHeader header;
header.ssrc = kSsrc1;
header.timestamp = i * 45000;
header.extension.hasTransmissionTimeOffset = true;
header.extension.transmissionTimeOffset = 17;
EXPECT_EQ(0, vie_.network->ReceivedBWEPacket(channel_, time, kPacketSize,
header));
webrtc::SleepMs(kIntervalMs);
}
EXPECT_FALSE(transport.FindREMBFor(kSsrc1, 0.0));
unsigned int bandwidth = 0;
EXPECT_EQ(0, vie_.rtp_rtcp->GetEstimatedReceiveBandwidth(channel_,
&bandwidth));
}
// With absolute-send-time enabled, a REMB for the SSRC must be emitted and
// a nonzero bandwidth estimate reported.
TEST_F(ViENetworkTest, ReceiveBWEPacket_AST) {
EXPECT_EQ(0, vie_.rtp_rtcp->SetReceiveAbsoluteSendTimeStatus(channel_, true,
1));
ReceiveASTPacketsForBWE();
EXPECT_TRUE(transport.FindREMBFor(kSsrc1, 100000.0));
unsigned int bandwidth = 0;
EXPECT_EQ(0, vie_.rtp_rtcp->GetEstimatedReceiveBandwidth(channel_,
&bandwidth));
EXPECT_GT(bandwidth, 0u);
}
// Two SSRCs sharing the estimator: both must appear in the REMB.
TEST_F(ViENetworkTest, ReceiveBWEPacket_ASTx2) {
EXPECT_EQ(0, vie_.rtp_rtcp->SetReceiveAbsoluteSendTimeStatus(channel_, true,
1));
for (int i = 0; i < kPacketCount; ++i) {
int64_t time = webrtc::TickTime::MillisecondTimestamp();
webrtc::RTPHeader header;
header.ssrc = kSsrc1;
header.timestamp = i * 45000;
header.extension.hasAbsoluteSendTime = true;
header.extension.absoluteSendTime = i << (18 - 6);
EXPECT_EQ(0, vie_.network->ReceivedBWEPacket(channel_, time, kPacketSize,
header));
header.ssrc = kSsrc2;
header.timestamp += 171717;
EXPECT_EQ(0, vie_.network->ReceivedBWEPacket(channel_, time, kPacketSize,
header));
webrtc::SleepMs(kIntervalMs);
}
EXPECT_TRUE(transport.FindREMBFor(kSsrc1, 200000.0));
EXPECT_TRUE(transport.FindREMBFor(kSsrc2, 200000.0));
unsigned int bandwidth = 0;
EXPECT_EQ(0, vie_.rtp_rtcp->GetEstimatedReceiveBandwidth(channel_,
&bandwidth));
EXPECT_GT(bandwidth, 0u);
}
// With AST reception explicitly disabled, the same packets produce no REMB.
TEST_F(ViENetworkTest, ReceiveBWEPacket_AST_DisabledReceive) {
EXPECT_EQ(0, vie_.rtp_rtcp->SetReceiveAbsoluteSendTimeStatus(channel_, false,
1));
ReceiveASTPacketsForBWE();
EXPECT_FALSE(transport.FindREMBFor(kSsrc1, 0.0));
unsigned int bandwidth = 0;
EXPECT_EQ(0, vie_.rtp_rtcp->GetEstimatedReceiveBandwidth(channel_,
&bandwidth));
}
} // namespace

View file

@ -1,59 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This file contains the "standard" suite of integration tests, implemented
// as a GUnit test. This file is a part of the effort to try to automate all
// tests in this section of the code. Currently, this code makes no attempt
// to verify any video output - it only checks for direct errors.
#include <stdio.h>
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/test/testsupport/metrics/video_metrics.h"
#include "webrtc/test/testsupport/metrics/video_metrics.h"
#include "webrtc/video_engine/test/auto_test/automated/legacy_fixture.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_window_creator.h"
#include "webrtc/video_engine/test/libvietest/include/vie_to_file_renderer.h"
namespace {
// Each test forwards to the corresponding ViEAutoTest "standard" suite
// (built once per test case by LegacyFixture); failures are reported via
// googletest macros inside those suites.
class ViEStandardIntegrationTest : public LegacyFixture {
};
TEST_F(ViEStandardIntegrationTest, RunsBaseTestWithoutErrors) {
tests_->ViEBaseStandardTest();
}
// Flaky: https://code.google.com/p/webrtc/issues/detail?id=1734
TEST_F(ViEStandardIntegrationTest, DISABLED_RunsCodecTestWithoutErrors) {
tests_->ViECodecStandardTest();
}
TEST_F(ViEStandardIntegrationTest, RunsCaptureTestWithoutErrors) {
tests_->ViECaptureStandardTest();
}
TEST_F(ViEStandardIntegrationTest, RunsImageProcessTestWithoutErrors) {
tests_->ViEImageProcessStandardTest();
}
TEST_F(ViEStandardIntegrationTest, RunsRenderTestWithoutErrors) {
tests_->ViERenderStandardTest();
}
// Flaky, see webrtc:1790.
TEST_F(ViEStandardIntegrationTest, DISABLED_RunsRtpRtcpTestWithoutErrors) {
tests_->ViERtpRtcpStandardTest();
}
} // namespace

View file

@ -1,155 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <sstream>
#include <string>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/metrics/video_metrics.h"
#include "webrtc/test/testsupport/perf_test.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h"
#include "webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.h"
#include "webrtc/video_engine/test/libvietest/include/tb_external_transport.h"
#include "webrtc/video_engine/test/libvietest/include/vie_to_file_renderer.h"
namespace {
// The input file must be QCIF since I420 gets scaled to that in the tests
// (it is so bandwidth-heavy we have no choice). Our comparison algorithms
// wouldn't like scaling, so this will work when we compare with the original.
const int kInputWidth = 176;
const int kInputHeight = 144;
// Fixture that renders local preview and remote output to YUV files and
// compares them with PSNR/SSIM. Renderers are created per attempt via
// InitializeFileRenderers() and destroyed via TearDownFileRenderers().
class ViEVideoVerificationTest : public testing::Test {
protected:
void SetUp() {
input_file_ = webrtc::test::ResourcePath("paris_qcif", "yuv");
local_file_renderer_ = NULL;
remote_file_renderer_ = NULL;
}
// Allocates and prepares both file renderers (owned by this fixture until
// TearDownFileRenderers deletes them).
void InitializeFileRenderers() {
local_file_renderer_ = new ViEToFileRenderer();
remote_file_renderer_ = new ViEToFileRenderer();
SetUpLocalFileRenderer(local_file_renderer_);
SetUpRemoteFileRenderer(remote_file_renderer_);
}
void SetUpLocalFileRenderer(ViEToFileRenderer* file_renderer) {
SetUpFileRenderer(file_renderer, "-local-preview.yuv");
}
void SetUpRemoteFileRenderer(ViEToFileRenderer* file_renderer) {
SetUpFileRenderer(file_renderer, "-remote.yuv");
}
// Must be called manually inside the tests.
void StopRenderers() {
local_file_renderer_->StopRendering();
remote_file_renderer_->StopRendering();
}
// Computes average PSNR and SSIM between the two files (assumed QCIF-sized
// I420) and writes them to the out-parameters; also logs both values.
void CompareFiles(const std::string& reference_file,
const std::string& test_file,
double* psnr_result, double *ssim_result) {
webrtc::test::QualityMetricsResult psnr;
int error = I420PSNRFromFiles(reference_file.c_str(), test_file.c_str(),
kInputWidth, kInputHeight, &psnr);
EXPECT_EQ(0, error) << "PSNR routine failed - output files missing?";
*psnr_result = psnr.average;
webrtc::test::QualityMetricsResult ssim;
error = I420SSIMFromFiles(reference_file.c_str(), test_file.c_str(),
kInputWidth, kInputHeight, &ssim);
EXPECT_EQ(0, error) << "SSIM routine failed - output files missing?";
*ssim_result = ssim.average;
ViETest::Log("Results: PSNR is %f (dB; 48 is max), "
"SSIM is %f (1 is perfect)",
psnr.average, ssim.average);
}
// Note: must call AFTER CompareFiles.
void TearDownFileRenderers() {
TearDownFileRenderer(local_file_renderer_);
TearDownFileRenderer(remote_file_renderer_);
}
std::string input_file_;
ViEToFileRenderer* local_file_renderer_;
ViEToFileRenderer* remote_file_renderer_;
ViEFileBasedComparisonTests tests_;
private:
void SetUpFileRenderer(ViEToFileRenderer* file_renderer,
const std::string& suffix) {
std::string output_path = ViETest::GetResultOutputPath();
std::string filename = "render_output" + suffix;
if (!file_renderer->PrepareForRendering(output_path, filename)) {
FAIL() << "Could not open output file " << filename <<
" for writing.";
}
}
// Deletes |file_renderer|; if the current test has already failed, the
// output file is first saved under a "failed-" prefix for analysis.
void TearDownFileRenderer(ViEToFileRenderer* file_renderer) {
assert(file_renderer);
bool test_failed = ::testing::UnitTest::GetInstance()->
current_test_info()->result()->Failed();
if (test_failed) {
// Leave the files for analysis if the test failed.
file_renderer->SaveOutputFile("failed-");
}
delete file_renderer;
}
};
TEST_F(ViEVideoVerificationTest, RunsBaseStandardTestWithoutErrors) {
// I420 is lossless, so the I420 test should obviously get perfect results -
// the local preview and remote output files should be bit-exact. This test
// runs on external transport to ensure we do not drop packets.
// However, it's hard to make 100% stringent requirements on the video engine
// since for instance the jitter buffer has non-deterministic elements. If it
// breaks five times in a row though, you probably introduced a bug.
const double kReasonablePsnr = webrtc::test::kMetricsPerfectPSNR - 2.0f;
const double kReasonableSsim = 0.99f;
const int kNumAttempts = 5;
for (int attempt = 0; attempt < kNumAttempts; ++attempt) {
InitializeFileRenderers();
ASSERT_TRUE(tests_.TestCallSetup(input_file_, kInputWidth, kInputHeight,
local_file_renderer_,
remote_file_renderer_));
// Grab the output paths before StopRenderers() finishes the files.
std::string remote_file = remote_file_renderer_->GetFullOutputPath();
std::string local_preview = local_file_renderer_->GetFullOutputPath();
StopRenderers();
double actual_psnr = 0;
double actual_ssim = 0;
CompareFiles(local_preview, remote_file, &actual_psnr, &actual_ssim);
TearDownFileRenderers();
if (actual_psnr > kReasonablePsnr && actual_ssim > kReasonableSsim) {
// Test successful.
return;
} else {
ViETest::Log("Retrying; attempt %d of %d.", attempt + 1, kNumAttempts);
}
}
FAIL() << "Failed to achieve near-perfect PSNR and SSIM results after " <<
kNumAttempts << " attempts.";
}
} // namespace

View file

@ -1,130 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest.h
//
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
#include "gflags/gflags.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/video_render/include/video_render_defines.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
#include "webrtc/voice_engine/include/voe_hardware.h"
#ifndef WEBRTC_ANDROID
#include <string>
#endif
class TbCaptureDevice;
class TbInterfaces;
class TbVideoChannel;
class ViEToFileRenderer;
DECLARE_bool(include_timing_dependent_tests);
// This class provides a bunch of methods, implemented across several .cc
// files, which runs tests on the video engine. All methods will report
// errors using standard googletest macros, except when marked otherwise.
class ViEAutoTest
{
public:
// window1/window2: opaque native window handles (as produced by
// ViEWindowCreator) that the tests render into.
ViEAutoTest(void* window1, void* window2);
~ViEAutoTest();
// These three are special and should not be run in a googletest harness.
// They keep track of their errors by themselves and return the number
// of errors.
int ViELoopbackCall();
int ViESimulcastCall();
int ViECustomCall();
int ViERecordCall();
// All functions except the three above are meant to run in a
// googletest harness.
void ViEStandardTest();
void ViEExtendedTest();
void ViEAPITest();
// vie_autotest_base.cc
void ViEBaseStandardTest();
void ViEBaseExtendedTest();
void ViEBaseAPITest();
// vie_autotest_capture.cc
void ViECaptureStandardTest();
void ViECaptureExtendedTest();
void ViECaptureAPITest();
void ViECaptureExternalCaptureTest();
// vie_autotest_codec.cc
void ViECodecStandardTest();
void ViECodecExtendedTest();
void ViECodecExternalCodecTest();
void ViECodecAPITest();
// vie_autotest_image_process.cc
void ViEImageProcessStandardTest();
void ViEImageProcessExtendedTest();
void ViEImageProcessAPITest();
// vie_autotest_network.cc
void ViENetworkStandardTest();
void ViENetworkExtendedTest();
void ViENetworkAPITest();
// vie_autotest_render.cc
void ViERenderStandardTest();
void ViERenderExtendedTest();
void ViERenderAPITest();
// vie_autotest_rtp_rtcp.cc
void ViERtpRtcpStandardTest();
void ViERtpRtcpAPITest();
private:
// Logging helpers for the codec configurations used by the tests.
void PrintAudioCodec(const webrtc::CodecInst audioCodec);
void PrintVideoCodec(const webrtc::VideoCodec videoCodec);
// Sets up rendering so the capture device output goes to window 1 and
// the video engine output goes to window 2.
void RenderCaptureDeviceAndOutputStream(TbInterfaces* video_engine,
TbVideoChannel* video_channel,
TbCaptureDevice* capture_device);
// Stops rendering into the two windows as was set up by a call to
// RenderCaptureDeviceAndOutputStream.
void StopRenderCaptureDeviceAndOutputStream(
TbInterfaces* video_engine,
TbVideoChannel* video_channel,
TbCaptureDevice* capture_device);
// Native window handles passed to the constructor (not owned).
void* _window1;
void* _window2;
// Render modules for the two windows.
webrtc::VideoRenderType _renderType;
webrtc::VideoRender* _vrm1;
webrtc::VideoRender* _vrm2;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_

View file

@ -1,27 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
#include <jni.h>
// JNI-facing entry point for running the auto tests on Android.
class ViEAutoTestAndroid {
public:
// Runs the test selected by the two spinner indices. window1/window2 are
// the Java SurfaceViews to render into; javaVM/env/context are the JNI
// environment handed over from the Java side. Returns a test-defined
// status code.
static int RunAutotest(int testSelection,
int subTestSelection,
void* window1,
void* window2,
JavaVM* javaVM,
void* env,
void* context);
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_

View file

@ -1,195 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest_defines.h
//
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_
#include <assert.h>
#include <stdarg.h>
#include <stdio.h>
#include <string>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/system_wrappers/interface/sleep.h"
#if defined(_WIN32)
#include <windows.h>
#elif defined (WEBRTC_ANDROID)
#include <android/log.h>
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
#include <time.h>
#endif
// Choose how to log
//#define VIE_LOG_TO_FILE
#define VIE_LOG_TO_STDOUT
// Choose one way to test error
#define VIE_ASSERT_ERROR
#define VIE_LOG_FILE_NAME "ViEAutotestLog.txt"
#undef RGB
#define RGB(r,g,b) r|g<<8|b<<16
enum { kAutoTestSleepTimeMs = 5000 };
enum { kAutoTestFullStackSleepTimeMs = 20000 };
// Width/height pair describing the dimensions of a test render window.
struct AutoTestSize {
  unsigned int width;
  unsigned int height;

  // Zero-sized by default.
  AutoTestSize() {
    width = 0;
    height = 0;
  }

  // Size with explicit dimensions.
  AutoTestSize(unsigned int iWidth, unsigned int iHeight) {
    width = iWidth;
    height = iHeight;
  }
};
// X/Y pair describing the top-left position of a test render window.
struct AutoTestOrigin {
  unsigned int x;
  unsigned int y;

  // Origin at (0, 0) by default.
  AutoTestOrigin() {
    x = 0;
    y = 0;
  }

  // Origin at an explicit position.
  AutoTestOrigin(unsigned int iX, unsigned int iY) {
    x = iX;
    y = iY;
  }
};
struct AutoTestRect {
AutoTestSize size;
AutoTestOrigin origin;
AutoTestRect() :
size(), origin() {
}
AutoTestRect(unsigned int iX, unsigned int iY, unsigned int iWidth, unsigned int iHeight) :
size(iX, iY), origin(iWidth, iHeight) {
}
void Copy(AutoTestRect iRect) {
origin.x = iRect.origin.x;
origin.y = iRect.origin.y;
size.width = iRect.size.width;
size.height = iRect.size.height;
}
};
// ============================================
// Logging and error-reporting helper for the ViE auto tests. Output goes to
// a file and/or stdout/logcat depending on the VIE_LOG_TO_* defines above.
// Init() must be called before any logging; Terminate() releases the
// resources again. All state is static and shared.
class ViETest {
 public:
  // Opens the log file (when file logging is compiled in) and allocates the
  // shared formatting buffer. Returns 0.
  static int Init() {
#ifdef VIE_LOG_TO_FILE
    log_file_ = fopen(VIE_LOG_FILE_NAME, "w+t");
#else
    log_file_ = NULL;
#endif
    log_str_ = new char[kMaxLogSize];
    memset(log_str_, 0, kMaxLogSize);
    return 0;
  }

  // Closes the log file and frees the formatting buffer. Returns 0.
  static int Terminate() {
    if (log_file_) {
      fclose(log_file_);
      log_file_ = NULL;
    }
    if (log_str_) {
      delete[] log_str_;
      log_str_ = NULL;
    }
    return 0;
  }

  // printf-style logging to the configured output(s).
  static void Log(const char* fmt, ...) {
    va_list va;
    va_start(va, fmt);
    memset(log_str_, 0, kMaxLogSize);
    // Bounded formatting: vsnprintf (not vsprintf) so a long message can
    // never overrun the fixed-size buffer.
    vsnprintf(log_str_, kMaxLogSize, fmt, va);
    va_end(va);
    WriteToSuitableOutput(log_str_);
  }

  // Writes to a suitable output, depending on platform and log mode.
  static void WriteToSuitableOutput(const char* message) {
#ifdef VIE_LOG_TO_FILE
    if (log_file_)
    {
      // Write |message|; the original wrote the shared log_str_ buffer
      // here, silently ignoring the parameter.
      fwrite(message, 1, strlen(message), log_file_);
      fwrite("\n", 1, 1, log_file_);
      fflush(log_file_);
    }
#endif
#ifdef VIE_LOG_TO_STDOUT
#if WEBRTC_ANDROID
    __android_log_write(ANDROID_LOG_DEBUG, "*WebRTCN*", message);
#else
    printf("%s\n", message);
#endif
#endif
  }

  // Deprecated(phoglund): Prefer to use googletest macros in all cases
  // except the custom call case.
  // Logs (and possibly asserts on) the formatted message when |expr| is
  // false; returns 1 in that case and 0 when |expr| holds.
  static int TestError(bool expr, const char* fmt, ...) {
    if (!expr) {
      va_list va;
      va_start(va, fmt);
      memset(log_str_, 0, kMaxLogSize);
      // Bounded formatting, see Log() above.
      vsnprintf(log_str_, kMaxLogSize, fmt, va);
#ifdef WEBRTC_ANDROID
      __android_log_write(ANDROID_LOG_ERROR, "*WebRTCN*", log_str_);
#endif
      WriteToSuitableOutput(log_str_);
      va_end(va);
      AssertError(log_str_);
      return 1;
    }
    return 0;
  }

  // Returns a suitable path to write trace and result files to.
  // You should always use this when you want to write output files.
  // The returned path is guaranteed to end with a path separator.
  // This function may be run at any time during the program's execution.
  // Implemented in vie_autotest.cc
  static std::string GetResultOutputPath();

 private:
  // Hard-asserts when VIE_ASSERT_ERROR is defined; otherwise a no-op.
  static void AssertError(const char* message) {
#ifdef VIE_ASSERT_ERROR
    assert(false);
#endif
  }

  static FILE* log_file_;
  enum {
    kMaxLogSize = 512
  };
  static char* log_str_;
};
#define AutoTestSleep webrtc::SleepMs
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_

View file

@ -1,46 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_
// Note(pbos): This MUST be included before the X11 headers
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
#include <X11/Xlib.h>
#include <X11/Xutil.h>
// Forward declaration
// X11 implementation of the auto-test window manager: owns the two Xlib
// render windows (and their displays) used by the tests.
class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface
{
 public:
  ViEAutoTestWindowManager();
  virtual ~ViEAutoTestWindowManager();
  // Handles to the two render windows created by CreateWindows().
  virtual void* GetWindow1();
  virtual void* GetWindow2();
  virtual int TerminateWindows();
  // Creates both windows with the given geometry and titles.
  virtual int CreateWindows(AutoTestRect window1Size,
                            AutoTestRect window2Size, void* window1Title,
                            void* window2Title);
  virtual bool SetTopmostWindow();

 private:
  // Creates/destroys a single X11 window; helpers for the methods above.
  int ViECreateWindow(Window *outWindow, Display **outDisplay, int xpos,
                      int ypos, int width, int height, char* title);
  int ViEDestroyWindow(Window *window, Display *display);
  Window _hwnd1;
  Window _hwnd2;
  Display* _hdsp1;
  Display* _hdsp2;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_

View file

@ -1,70 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/engine_configurations.h"
#if defined(COCOA_RENDERING)
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
@class CocoaRenderView;
#import <Cocoa/Cocoa.h>
// Cocoa UI helper: owns the two NSWindows and CocoaRenderViews used by the
// auto tests. Window creation is driven through a selector because it must
// run on the main thread (see createWindows: below).
@interface TestCocoaUi : NSObject {
  CocoaRenderView* cocoaRenderView1_;
  CocoaRenderView* cocoaRenderView2_;
  NSWindow* window1_;
  NSWindow* window2_;
  AutoTestRect window1Size_;
  AutoTestRect window2Size_;
  void* window1Title_;
  void* window2Title_;
}

// Must be called as a selector in the main thread.
- (void)createWindows:(NSObject*)ignored;

// Used to transfer parameters from background thread.
- (void)prepareToCreateWindowsWithSize:(AutoTestRect)window1Size
                               andSize:(AutoTestRect)window2Size
                             withTitle:(void*)window1Title
                              andTitle:(void*)window2Title;

// Accessors for the windows and render views created by createWindows:.
- (NSWindow*)window1;
- (NSWindow*)window2;
- (CocoaRenderView*)cocoaRenderView1;
- (CocoaRenderView*)cocoaRenderView2;
@end
// Cocoa implementation of the auto-test window manager; delegates window
// handling to a TestCocoaUi instance.
class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface {
 public:
  ViEAutoTestWindowManager();
  virtual ~ViEAutoTestWindowManager();
  // Handles to the two render windows created by CreateWindows().
  virtual void* GetWindow1();
  virtual void* GetWindow2();
  // Creates both windows with the given geometry and titles.
  virtual int CreateWindows(AutoTestRect window1Size,
                            AutoTestRect window2Size,
                            void* window1Title,
                            void* window2Title);
  virtual int TerminateWindows();
  virtual bool SetTopmostWindow();

 private:
  TestCocoaUi* cocoa_ui_;
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
#endif // COCOA_RENDERING

View file

@ -1,49 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_
#include <string>
#include <map>
// Test driver for the ViE auto tests: parses command-line arguments and
// dispatches to either automated (gtest-filtered) or interactive mode.
class ViEAutoTestMain {
 public:
  ViEAutoTestMain();

  // Runs the test according to the specified arguments.
  // Pass in --automated to run in automated mode; interactive
  // mode is default. All usual googletest flags also apply.
  int RunTests(int argc, char** argv);

 private:
  // Maps interactive menu numbers to test method names.
  std::map<int, std::string> index_to_test_method_map_;

  static const int kInvalidChoice = -1;

  // Starts interactive mode.
  int RunInteractiveMode();
  // Prompts the user for a specific test method in the provided test case.
  // Returns 0 on success, nonzero otherwise.
  int RunSpecificTestCaseIn(const std::string test_case_name);
  // Asks the user for a particular test case to run.
  int AskUserForTestCase();
  // Retrieves a number from the user in the interval
  // [min_allowed, max_allowed]. Returns kInvalidChoice on failure.
  int AskUserForNumber(int min_allowed, int max_allowed);
  // Runs all tests matching the provided filter. * are wildcards.
  // Returns the test runner result (0 == OK).
  int RunTestMatching(const std::string test_case,
                      const std::string test_method);
  // Runs a non-gtest test case. Choice must be [7,9]. Returns 0 on success.
  int RunSpecialTestCase(int choice);
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_

View file

@ -1,33 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vie_autotest_window_manager_interface.h
*/
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_
// Platform-independent interface for creating and destroying the two render
// windows used by the auto tests; implemented per platform (X11, Cocoa,
// Win32).
class ViEAutoTestWindowManagerInterface
{
 public:
  // Creates both windows with the given geometry and titles.
  virtual int CreateWindows(AutoTestRect window1Size,
                            AutoTestRect window2Size, void* window1Title,
                            void* window2Title) = 0;
  // Destroys the windows created by CreateWindows().
  virtual int TerminateWindows() = 0;
  // Platform-native handles for the two windows.
  virtual void* GetWindow1() = 0;
  virtual void* GetWindow2() = 0;
  // Attempts to bring the window(s) to the front; returns success.
  virtual bool SetTopmostWindow() = 0;
  virtual ~ViEAutoTestWindowManagerInterface() {}
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_

View file

@ -1,64 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_
#include "webrtc/engine_configurations.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
#include <windows.h>
#define TITLE_LENGTH 1024
// Forward declaration
namespace webrtc {
class CriticalSectionWrapper;
}
// Win32 implementation of the auto-test window manager: owns the two HWND
// render windows and a worker thread running EventProcess/EventLoop
// (presumably pumping window messages -- confirm in the .cc).
class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface
{
 public:
  ViEAutoTestWindowManager();
  virtual ~ViEAutoTestWindowManager();
  // Handles to the two render windows created by CreateWindows().
  virtual void* GetWindow1();
  virtual void* GetWindow2();
  // Creates both windows with the given geometry and titles.
  virtual int CreateWindows(AutoTestRect window1Size,
                            AutoTestRect window2Size, void* window1Title,
                            void* window2Title);
  virtual int TerminateWindows();
  virtual bool SetTopmostWindow();

 protected:
  // Thread entry point (static trampoline) and its per-iteration body.
  static bool EventProcess(void* obj);
  bool EventLoop();

 private:
  // Creates/destroys a single Win32 window; helpers for the methods above.
  int ViECreateWindow(HWND &hwndMain, int xPos, int yPos, int width,
                      int height, TCHAR* className);
  int ViEDestroyWindow(HWND& hwnd);

  void* _window1;
  void* _window2;
  bool _terminate;  // Signals the event thread to stop.
  rtc::scoped_ptr<webrtc::ThreadWrapper> _eventThread;
  webrtc::CriticalSectionWrapper& _crit;
  HWND _hwndMain;
  HWND _hwnd1;
  HWND _hwnd2;
  AutoTestRect _hwnd1Size;
  AutoTestRect _hwnd2Size;
  TCHAR _hwnd1Title[TITLE_LENGTH];
  TCHAR _hwnd2Title[TITLE_LENGTH];
};
#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_

View file

@ -1,61 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SRC_VIDEO_ENGINE_TEST_AUTO_TEST_INTERFACE_VIE_COMPARISON_TESTS_H_
#define SRC_VIDEO_ENGINE_TEST_AUTO_TEST_INTERFACE_VIE_COMPARISON_TESTS_H_
#include <string>
#include "webrtc/video_engine/test/auto_test/primitives/general_primitives.h"
class FrameDropDetector;
struct NetworkParameters;
class ViEToFileRenderer;
// This class contains comparison tests, which will exercise video engine
// functionality and then run comparison tests on the result using PSNR and
// SSIM algorithms. These tests are intended mostly as sanity checks so that
// we know we are outputting roughly the right thing and not random noise or
// black screens.
//
// We will set up a fake ExternalCapture device which will pose as a webcam
// and read the input from the provided raw YUV file. Output will be written
// as a local preview in the local file renderer; the remote side output gets
// written to the provided remote file renderer.
//
// The local preview is a straight, unaltered copy of the input. This can be
// useful for comparisons if the test method contains several stages where the
// input is restarted between stages.
class ViEFileBasedComparisonTests {
 public:
  // Test a typical simple call setup. Returns false if the input file
  // could not be opened; reports errors using googletest macros otherwise.
  // |i420_test_video_path| is the raw I420 input of |width| x |height|.
  bool TestCallSetup(
      const std::string& i420_test_video_path,
      int width,
      int height,
      ViEToFileRenderer* local_file_renderer,
      ViEToFileRenderer* remote_file_renderer);

  // Runs a full stack test using the VP8 codec. Tests the full stack and uses
  // RTP timestamps to sync frames between the endpoints. Network behavior is
  // shaped by |network|; observed frame events are reported to
  // |frame_drop_detector|.
  void TestFullStack(
      const std::string& i420_video_file,
      int width,
      int height,
      int bit_rate_kbps,
      ProtectionMethod protection_method,
      const NetworkParameters& network,
      ViEToFileRenderer* local_file_renderer,
      ViEToFileRenderer* remote_file_renderer,
      FrameDropDetector* frame_drop_detector);
};
#endif // SRC_VIDEO_ENGINE_TEST_AUTO_TEST_INTERFACE_VIE_COMPARISON_TESTS_H_

View file

@ -1,32 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_WINDOW_CREATOR_H_
#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_WINDOW_CREATOR_H_
class ViEAutoTestWindowManagerInterface;
// Owns a platform window manager and uses it to open/close the two render
// windows needed by the auto tests.
class ViEWindowCreator {
 public:
  ViEWindowCreator();
  virtual ~ViEWindowCreator();

  // The pointer returned here will still be owned by this object.
  // Only use it to retrieve the created windows.
  ViEAutoTestWindowManagerInterface* CreateTwoWindows();

  // Terminates windows opened by CreateTwoWindows, which must
  // have been called before this method.
  void TerminateWindows();

 private:
  ViEAutoTestWindowManagerInterface* window_manager_;  // Owned.
};
#endif // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_WINDOW_CREATOR_H_

View file

@ -1,25 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_WINDOW_MANAGER_FACTORY_H_
#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_WINDOW_MANAGER_FACTORY_H_
class ViEAutoTestWindowManagerInterface;
// Factory for the platform-specific window manager implementation.
class ViEWindowManagerFactory {
 public:
  // This method is implemented in different files depending on platform.
  // The caller is responsible for freeing the resulting object using
  // the delete operator.
  static ViEAutoTestWindowManagerInterface*
      CreateWindowManagerForCurrentPlatform();
};
#endif // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_WINDOW_MANAGER_FACTORY_H_

View file

@ -1,76 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/primitives/base_primitives.h"
#include "webrtc/modules/video_capture/include/video_capture_factory.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/libvietest/include/tb_external_transport.h"
// Registers every codec known by |codec_interface| as a receive codec on
// |video_channel| and installs I420 (resized and rate-limited for test
// purposes) as the send codec, then verifies that I420 really became the
// send codec.
static void ConfigureCodecsToI420(int video_channel,
                                  webrtc::VideoCodec video_codec,
                                  webrtc::ViECodec* codec_interface) {
  for (int index = 0; index < codec_interface->NumberOfCodecs(); ++index) {
    EXPECT_EQ(0, codec_interface->GetCodec(index, video_codec));
    if (video_codec.codecType == webrtc::kVideoCodecI420) {
      // Try to keep the test frame size small and bit rate generous when
      // I420 is the codec being sent.
      video_codec.width = 176;
      video_codec.height = 144;
      video_codec.maxBitrate = 32000;
      video_codec.startBitrate = 32000;
      EXPECT_EQ(0, codec_interface->SetSendCodec(video_channel, video_codec));
    }
    EXPECT_EQ(0, codec_interface->SetReceiveCodec(video_channel, video_codec));
  }

  // Double-check that the send codec ended up being I420.
  EXPECT_EQ(0, codec_interface->GetSendCodec(video_channel, video_codec));
  EXPECT_EQ(webrtc::kVideoCodecI420, video_codec.codecType);
}
// Sets up and runs a basic I420 send/receive call on |video_channel| using
// an external transport, lets it run for kAutoTestSleepTimeMs, then tears
// everything down again. Errors are reported through googletest macros.
// NOTE(review): |device_name| is not used anywhere in this function.
void TestI420CallSetup(webrtc::ViECodec* codec_interface,
                       webrtc::VideoEngine* video_engine,
                       webrtc::ViEBase* base_interface,
                       webrtc::ViENetwork* network_interface,
                       webrtc::ViERTP_RTCP* rtp_rtcp_interface,
                       int video_channel,
                       const char* device_name) {
  webrtc::VideoCodec video_codec;
  memset(&video_codec, 0, sizeof(webrtc::VideoCodec));

  // Disable transmission smoothing for this test call.
  EXPECT_EQ(0, rtp_rtcp_interface->SetTransmissionSmoothingStatus(video_channel,
                                                                  false));

  ConfigureCodecsToI420(video_channel, video_codec, codec_interface);

  // Route outgoing packets through the test's external transport.
  TbExternalTransport external_transport(
      *network_interface, video_channel, NULL);
  EXPECT_EQ(0, network_interface->RegisterSendTransport(
      video_channel, external_transport));
  EXPECT_EQ(0, base_interface->StartReceive(video_channel));
  EXPECT_EQ(0, base_interface->StartSend(video_channel));

  // Let the call run for a while.
  ViETest::Log("Call started");
  AutoTestSleep(kAutoTestSleepTimeMs);

  // Stop the call.
  ViETest::Log("Stopping call.");
  EXPECT_EQ(0, base_interface->StopSend(video_channel));

  // Make sure we receive all packets.
  AutoTestSleep(1000);

  EXPECT_EQ(0, base_interface->StopReceive(video_channel));
  EXPECT_EQ(0, network_interface->DeregisterSendTransport(video_channel));
}

View file

@ -1,34 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_SOURCE_BASE_PRIMITIVES_H_
#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_SOURCE_BASE_PRIMITIVES_H_
namespace webrtc {
class VideoEngine;
class ViEBase;
class ViECodec;
class ViENetwork;
class ViERTP_RTCP;
}
// Tests a I420-to-I420 call. This test exercises the most basic WebRTC
// functionality by training the codec interface to recognize the most common
// codecs, and the initiating a I420 call. A video channel with a capture device
// must be set up prior to this call.
void TestI420CallSetup(webrtc::ViECodec* codec_interface,
webrtc::VideoEngine* video_engine,
webrtc::ViEBase* base_interface,
webrtc::ViENetwork* network_interface,
webrtc::ViERTP_RTCP* rtp_rtcp_interface,
int video_channel,
const char* device_name);
#endif // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_SOURCE_BASE_PRIMITIVES_H_

View file

@ -1,74 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/primitives/choice_helpers.h"
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <algorithm>
#include <sstream>
namespace webrtc {
// Builds a chooser over |choices| that prompts with |title|. Input is
// validated to be an integer in [1, number of choices], and the rendered
// menu is attached to the prompt as additional info.
ChoiceBuilder::ChoiceBuilder(const std::string& title, const Choices& choices)
    : choices_(choices),
      input_helper_(TypedInput(title)) {
  input_helper_.WithInputValidator(
      new IntegerWithinRangeValidator(1, choices.size()));
  input_helper_.WithAdditionalInfo(MakeHumanReadableOptions());
}
// Prompts via the configured input helper (which blocks until the answer
// passes validation) and returns the answer as a 1-based choice number.
int ChoiceBuilder::Choose() {
  const std::string answer = input_helper_.AskForInput();
  return atoi(answer.c_str());
}
// Marks |default_choice| (which must be one of the configured choices) as
// the answer used when the user just presses enter. The default is stored
// as its 1-based position in the choice list.
ChoiceBuilder& ChoiceBuilder::WithDefault(const std::string& default_choice) {
  Choices::const_iterator iterator = std::find(
      choices_.begin(), choices_.end(), default_choice);
  assert(iterator != choices_.end() && "No such choice.");

  // Store the value as the choice number, e.g. its index + 1.
  int choice_index = (iterator - choices_.begin()) + 1;

  char number[16];
  // snprintf rather than sprintf: bound the write to the buffer size.
  snprintf(number, sizeof(number), "%d", choice_index);
  input_helper_.WithDefault(number);
  return *this;
}
// Redirects where the prompt reads input from (stdin by default); used by
// unit tests to feed scripted answers.
ChoiceBuilder& ChoiceBuilder::WithInputSource(FILE* input_source) {
  input_helper_.WithInputSource(input_source);
  return *this;
}
std::string ChoiceBuilder::MakeHumanReadableOptions() {
std::string result = "";
Choices::const_iterator iterator = choices_.begin();
for (int number = 1; iterator != choices_.end(); ++iterator, ++number) {
std::ostringstream os;
os << "\n " << number << ". " << (*iterator).c_str();
result += os.str();
}
return result;
}
// Splits |raw_choices| on newlines into a choice list (delegates to the
// project's Split helper).
Choices SplitChoices(const std::string& raw_choices) {
  return Split(raw_choices, "\n");
}
// Convenience factory: builds a ChoiceBuilder from newline-separated
// choices.
ChoiceBuilder FromChoices(
    const std::string& title, const std::string& raw_choices) {
  return ChoiceBuilder(title, SplitChoices(raw_choices));
}
} // namespace webrtc

View file

@ -1,71 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_CHOICE_HELPERS_H_
#define WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_CHOICE_HELPERS_H_
#include <string>
#include <vector>
#include "webrtc/video_engine/test/auto_test/primitives/input_helpers.h"
namespace webrtc {
typedef std::vector<std::string> Choices;
/**
* Used to ask the user to make a choice. This class will allow you to
* configure how to ask the question, and then ask it. For instance,
*
* int choice = FromChoices("Choice 1\n"
* "Choice 2\n").WithDefault("Choice 1").Choose();
*
* will print a menu presenting the two choices and ask for input. The user,
* can input 1, 2 or just hit enter since we specified a default in this case.
* The Choose call will block until the user gives valid input one way or the
* other. The choice variable is guaranteed to contain either 1 or 2 after
* this particular call.
*
* The class uses stdout and stdin by default, but stdin can be replaced using
* WithInputSource for unit tests.
*/
class ChoiceBuilder {
public:
explicit ChoiceBuilder(const std::string& title, const Choices& choices);
// Specifies the choice as the default. The choice must be one of the choices
// passed in the constructor. If this method is not called, the user has to
// choose an option explicitly.
ChoiceBuilder& WithDefault(const std::string& default_choice);
// Replaces the input source where we ask for input. Default is stdin.
ChoiceBuilder& WithInputSource(FILE* input_source);
// Prints the choice list and requests input from the input source. Returns
// the choice number (choices start at 1).
int Choose();
private:
std::string MakeHumanReadableOptions();
Choices choices_;
InputBuilder input_helper_;
};
// Convenience function that creates a choice builder given a string where
// choices are separated by \n.
ChoiceBuilder FromChoices(const std::string& title,
const std::string& raw_choices);
// Creates choices from a string where choices are separated by \n.
Choices SplitChoices(const std::string& raw_choices);
} // namespace webrtc
#endif // CHOICE_HELPERS_H_

View file

@ -1,95 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/video_engine/test/auto_test/primitives/choice_helpers.h"
#include "webrtc/video_engine/test/auto_test/primitives/fake_stdin.h"
namespace webrtc {
// Empty fixture; exists so the tests below share the ChoiceHelpersTest name.
class ChoiceHelpersTest : public testing::Test {
};
// An empty input string yields an empty choice list.
TEST_F(ChoiceHelpersTest, SplitReturnsEmptyChoicesForEmptyInput) {
  EXPECT_TRUE(SplitChoices("").empty());
}
// A single line without a trailing newline becomes exactly one choice.
TEST_F(ChoiceHelpersTest, SplitHandlesSingleChoice) {
  Choices choices = SplitChoices("Single Choice");
  EXPECT_EQ(1u, choices.size());
  EXPECT_EQ("Single Choice", choices[0]);
}
// A trailing newline does not produce a spurious empty choice.
TEST_F(ChoiceHelpersTest, SplitHandlesSingleChoiceWithEndingNewline) {
  Choices choices = SplitChoices("Single Choice\n");
  EXPECT_EQ(1u, choices.size());
  EXPECT_EQ("Single Choice", choices[0]);
}
// Multiple newline-separated lines each become a choice, in order.
TEST_F(ChoiceHelpersTest, SplitHandlesMultipleChoices) {
  Choices choices = SplitChoices(
      "Choice 1\n"
      "Choice 2\n"
      "Choice 3");
  EXPECT_EQ(3u, choices.size());
  EXPECT_EQ("Choice 1", choices[0]);
  EXPECT_EQ("Choice 2", choices[1]);
  EXPECT_EQ("Choice 3", choices[2]);
}
// Same as SplitHandlesMultipleChoices, but with a trailing newline.
TEST_F(ChoiceHelpersTest, SplitHandlesMultipleChoicesWithEndingNewline) {
  Choices choices = SplitChoices(
      "Choice 1\n"
      "Choice 2\n"
      "Choice 3\n");
  EXPECT_EQ(3u, choices.size());
  EXPECT_EQ("Choice 1", choices[0]);
  EXPECT_EQ("Choice 2", choices[1]);
  EXPECT_EQ("Choice 3", choices[2]);
}
// Scripted stdin "1\n2\n" selects choice 1 and then choice 2.
TEST_F(ChoiceHelpersTest, CanSelectUsingChoiceBuilder) {
  FILE* fake_stdin = FakeStdin("1\n2\n");
  EXPECT_EQ(1, FromChoices("Title",
                           "Choice 1\n"
                           "Choice 2").WithInputSource(fake_stdin).Choose());
  EXPECT_EQ(2, FromChoices("","Choice 1\n"
                           "Choice 2").WithInputSource(fake_stdin).Choose());
  fclose(fake_stdin);
}
// Out-of-range and non-numeric answers are re-prompted until a valid one.
TEST_F(ChoiceHelpersTest, RetriesIfGivenInvalidChoice) {
  FILE* fake_stdin = FakeStdin("3\n0\n99\n23409234809\na\nwhatever\n1\n");
  EXPECT_EQ(1, FromChoices("Title",
                           "Choice 1\n"
                           "Choice 2").WithInputSource(fake_stdin).Choose());
  fclose(fake_stdin);
}
// A bare enter is rejected when no default has been configured.
TEST_F(ChoiceHelpersTest, RetriesOnEnterIfNoDefaultSet) {
  FILE* fake_stdin = FakeStdin("\n2\n");
  EXPECT_EQ(2, FromChoices("Title",
                           "Choice 1\n"
                           "Choice 2").WithInputSource(fake_stdin).Choose());
  fclose(fake_stdin);
}
// A bare enter picks the configured default choice.
TEST_F(ChoiceHelpersTest, PicksDefaultOnEnterIfDefaultSet) {
  FILE* fake_stdin = FakeStdin("\n");
  EXPECT_EQ(2, FromChoices("Title",
                           "Choice 1\n"
                           "Choice 2").WithInputSource(fake_stdin)
                               .WithDefault("Choice 2").Choose());
  fclose(fake_stdin);
}
} // namespace webrtc

View file

@ -1,25 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/primitives/fake_stdin.h"
namespace webrtc {
// Writes |input| to a temporary file and rewinds it, yielding a FILE* that
// behaves like stdin pre-loaded with |input|. The caller owns the FILE*.
FILE* FakeStdin(const std::string& input) {
  FILE* fake_stdin = tmpfile();
  const size_t written =
      fwrite(input.c_str(), sizeof(char), input.size(), fake_stdin);
  EXPECT_EQ(input.size(), written);
  rewind(fake_stdin);
  return fake_stdin;
}
} // namespace webrtc

View file

@ -1,26 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef FAKE_STDIN_H_
#define FAKE_STDIN_H_
#include <stdio.h>
#include <string>
#include "testing/gtest/include/gtest/gtest.h"
namespace webrtc {
// Creates a fake stdin-like FILE* for unit test usage.
FILE* FakeStdin(const std::string& input);
} // namespace webrtc
#endif // FAKE_STDIN_H_

View file

@ -1,628 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <assert.h>
#include <math.h>
#include <sstream>
#include <string>
#include "webrtc/modules/video_capture/include/video_capture_factory.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/frame_reader.h"
#include "webrtc/test/testsupport/frame_writer.h"
#include "webrtc/test/testsupport/perf_test.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.h"
#include "webrtc/video_engine/test/auto_test/primitives/general_primitives.h"
#include "webrtc/video_engine/test/libvietest/include/tb_external_transport.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
#include "webrtc/video_engine/test/libvietest/include/vie_external_render_filter.h"
#include "webrtc/video_engine/test/libvietest/include/vie_to_file_renderer.h"
enum { kWaitTimeForFinalDecodeMs = 100 };
// Writes the frames to be encoded to file and tracks which frames are sent in
// external transport on the local side and reports them to the
// FrameDropDetector class.
class LocalRendererEffectFilter : public webrtc::ExternalRendererEffectFilter {
 public:
  LocalRendererEffectFilter(webrtc::ExternalRenderer* renderer,
                            FrameDropDetector* frame_drop_detector)
      : ExternalRendererEffectFilter(renderer),
        frame_drop_detector_(frame_drop_detector) {}
  // Reports the frame as kCreated to the drop detector, then forwards the
  // frame to the wrapped renderer filter.
  int Transform(size_t size,
                unsigned char* frame_buffer,
                int64_t ntp_time_ms,
                unsigned int timestamp,
                unsigned int width,
                unsigned int height) {
    frame_drop_detector_->ReportFrameState(
        FrameDropDetector::kCreated,
        timestamp,
        webrtc::TickTime::MicrosecondTimestamp());
    return webrtc::ExternalRendererEffectFilter::Transform(
        size, frame_buffer, ntp_time_ms, timestamp, width, height);
  }

 private:
  // Injected observer; assumed to be owned by the caller.
  FrameDropDetector* frame_drop_detector_;
};
// Tracks which frames are sent in external transport on the local side
// and reports them to the FrameDropDetector class.
class FrameSentCallback : public SendFrameCallback {
 public:
  explicit FrameSentCallback(FrameDropDetector* frame_drop_detector)
      : frame_drop_detector_(frame_drop_detector) {}
  virtual ~FrameSentCallback() {}
  // Reports the frame identified by |rtp_timestamp| as kSent.
  virtual void FrameSent(unsigned int rtp_timestamp) {
    frame_drop_detector_->ReportFrameState(
        FrameDropDetector::kSent,
        rtp_timestamp,
        webrtc::TickTime::MicrosecondTimestamp());
  }

 private:
  // Injected observer; assumed to be owned by the caller.
  FrameDropDetector* frame_drop_detector_;
};
// Tracks which frames are received in external transport on the remote side
// and reports them to the FrameDropDetector class.
class FrameReceivedCallback : public ReceiveFrameCallback {
public:
explicit FrameReceivedCallback(FrameDropDetector* frame_drop_detector)
: frame_drop_detector_(frame_drop_detector) {}
virtual ~FrameReceivedCallback() {}
virtual void FrameReceived(unsigned int rtp_timestamp) {
frame_drop_detector_->ReportFrameState(
FrameDropDetector::kReceived,
rtp_timestamp,
webrtc::TickTime::MicrosecondTimestamp());
}
private:
FrameDropDetector* frame_drop_detector_;
};
// Tracks when frames are decoded on the remote side (received from the
// jitter buffer) and reports them to the FrameDropDetector class.
class DecodedTimestampEffectFilter : public webrtc::ViEEffectFilter {
public:
explicit DecodedTimestampEffectFilter(FrameDropDetector* frame_drop_detector)
: frame_drop_detector_(frame_drop_detector) {}
virtual ~DecodedTimestampEffectFilter() {}
virtual int Transform(size_t size,
unsigned char* frame_buffer,
int64_t ntp_time_ms,
unsigned int timestamp,
unsigned int width,
unsigned int height) {
frame_drop_detector_->ReportFrameState(
FrameDropDetector::kDecoded,
timestamp,
webrtc::TickTime::MicrosecondTimestamp());
return 0;
}
private:
FrameDropDetector* frame_drop_detector_;
};
// Accumulates float samples and exposes their mean and population variance.
class Statistics {
 public:
  Statistics() : sum_(0.0f), sum_squared_(0.0f), count_(0) {}

  // Folds one sample into the running sums.
  void AddSample(float sample) {
    ++count_;
    sum_ += sample;
    sum_squared_ += sample * sample;
  }

  // Mean of all samples, or -1 if no samples have been added.
  float Mean() { return count_ == 0 ? -1.0f : sum_ / count_; }

  // Population variance (E[x^2] - E[x]^2), or -1 if no samples were added.
  float Variance() {
    if (count_ == 0)
      return -1.0f;
    const float mean = sum_ / count_;
    return sum_squared_ / count_ - mean * mean;
  }

  // Formats "<mean>, <standard deviation>", substituting -1 for values that
  // are undefined (no samples, or negative variance from rounding).
  std::string AsString() {
    std::stringstream formatted;
    formatted << (Mean() >= 0 ? Mean() : -1) << ", "
              << (Variance() >= 0 ? sqrt(Variance()) : -1);
    return formatted.str();
  }

 private:
  float sum_;
  float sum_squared_;
  int count_;
};
// Runs a full send/receive video call over a simulated network for
// kAutoTestFullStackSleepTimeMs, reporting frame life-cycle events
// (created/sent/received/decoded/rendered) to |frame_drop_detector|.
// |bit_rate_kbps| and |network| should be chosen so that frames actually get
// dropped.  The teardown order at the bottom is significant.
// NOTE(review): |capture_id| is not referenced in this body -- confirm it is
// intentionally unused.
void TestFullStack(const TbInterfaces& interfaces,
                   int capture_id,
                   int video_channel,
                   int width,
                   int height,
                   int bit_rate_kbps,
                   const NetworkParameters& network,
                   FrameDropDetector* frame_drop_detector,
                   ViEToFileRenderer* remote_file_renderer,
                   ViEToFileRenderer* local_file_renderer) {
  webrtc::VideoEngine *video_engine_interface = interfaces.video_engine;
  webrtc::ViEBase *base_interface = interfaces.base;
  webrtc::ViECapture *capture_interface = interfaces.capture;
  webrtc::ViERender *render_interface = interfaces.render;
  webrtc::ViECodec *codec_interface = interfaces.codec;
  webrtc::ViENetwork *network_interface = interfaces.network;
  // ***************************************************************
  // Engine ready. Begin testing class
  // ***************************************************************
  webrtc::VideoCodec video_codec;
  memset(&video_codec, 0, sizeof (webrtc::VideoCodec));
  // Set up all receive codecs. This basically setup the codec interface
  // to be able to recognize all receive codecs based on payload type.
  for (int idx = 0; idx < codec_interface->NumberOfCodecs(); idx++) {
    EXPECT_EQ(0, codec_interface->GetCodec(idx, video_codec));
    SetSuitableResolution(&video_codec, width, height);
    EXPECT_EQ(0, codec_interface->SetReceiveCodec(video_channel, video_codec));
  }
  // Configure External transport to simulate network interference:
  TbExternalTransport external_transport(*interfaces.network, video_channel,
                                         NULL);
  external_transport.SetNetworkParameters(network);
  // Hook send/receive callbacks into the transport so the detector learns
  // which frames made it onto (and off) the simulated wire.
  FrameSentCallback frame_sent_callback(frame_drop_detector);
  FrameReceivedCallback frame_received_callback(frame_drop_detector);
  external_transport.RegisterSendFrameCallback(&frame_sent_callback);
  external_transport.RegisterReceiveFrameCallback(&frame_received_callback);
  EXPECT_EQ(0, network_interface->RegisterSendTransport(video_channel,
                                                        external_transport));
  RenderToFile(interfaces.render, video_channel, remote_file_renderer);
  EXPECT_EQ(0, base_interface->StartReceive(video_channel));
  // Setup only the VP8 codec, which is what we'll use.
  webrtc::VideoCodec codec;
  EXPECT_TRUE(FindSpecificCodec(webrtc::kVideoCodecVP8, codec_interface,
                                &codec));
  codec.startBitrate = bit_rate_kbps;
  codec.maxBitrate = bit_rate_kbps;
  codec.width = width;
  codec.height = height;
  EXPECT_EQ(0, codec_interface->SetSendCodec(video_channel, codec));
  webrtc::ViEImageProcess *image_process =
      webrtc::ViEImageProcess::GetInterface(video_engine_interface);
  EXPECT_TRUE(image_process);
  // Setup the effect filters.
  // Local rendering at the send-side is done in an effect filter to avoid
  // synchronization issues with the remote renderer.
  LocalRendererEffectFilter local_renderer_filter(local_file_renderer,
                                                  frame_drop_detector);
  EXPECT_EQ(0, image_process->RegisterSendEffectFilter(video_channel,
                                                       local_renderer_filter));
  DecodedTimestampEffectFilter decode_filter(frame_drop_detector);
  EXPECT_EQ(0, image_process->RegisterRenderEffectFilter(video_channel,
                                                         decode_filter));
  // Send video.
  EXPECT_EQ(0, base_interface->StartSend(video_channel));
  AutoTestSleep(kAutoTestFullStackSleepTimeMs);
  ViETest::Log("Done!");
  // ***************************************************************
  // Testing finished. Tear down Video Engine
  // ***************************************************************
  EXPECT_EQ(0, capture_interface->DisconnectCaptureDevice(video_channel));
  // Approximate the 99th percentile one-way delay as mean + 3 std deviations.
  const int one_way_delay_99_percentile = network.mean_one_way_delay +
      3 * network.std_dev_one_way_delay;
  // Wait for the last packet to arrive before we tear down the receiver.
  AutoTestSleep(2 * one_way_delay_99_percentile);
  EXPECT_EQ(0, base_interface->StopSend(video_channel));
  while (!external_transport.EmptyQueue()) {
    AutoTestSleep(one_way_delay_99_percentile);
  }
  EXPECT_EQ(0, base_interface->StopReceive(video_channel));
  EXPECT_EQ(0, network_interface->DeregisterSendTransport(video_channel));
  // Wait for the last frame to be decoded and rendered. There is no guarantee
  // this wait time will be long enough. Ideally we would wait for at least one
  // "receive-side delay", which is what the video coding module calculates
  // based on network statistics etc. We don't have access to that value here.
  AutoTestSleep(kWaitTimeForFinalDecodeMs);
  // Must stop the frame drop detectors in the right order to avoid getting
  // frames which for instance are rendered but not decoded.
  EXPECT_EQ(0, render_interface->StopRender(video_channel));
  EXPECT_EQ(0, render_interface->RemoveRenderer(video_channel));
  EXPECT_EQ(0, image_process->DeregisterRenderEffectFilter(video_channel));
  EXPECT_EQ(0, image_process->DeregisterSendEffectFilter(video_channel));
  image_process->Release();
  EXPECT_EQ(0, base_interface->DeleteChannel(video_channel));
  // Collect transport statistics.
  int32_t num_rtp_packets = 0;
  int32_t num_dropped_packets = 0;
  int32_t num_rtcp_packets = 0;
  std::map<uint8_t, int> packet_counters;
  external_transport.GetStats(num_rtp_packets, num_dropped_packets,
                              num_rtcp_packets, &packet_counters);
  ViETest::Log("RTP packets : %5d", num_rtp_packets);
  ViETest::Log("Dropped packets: %5d", num_dropped_packets);
  ViETest::Log("RTCP packets : %5d", num_rtcp_packets);
}
// Pads |output_file| (via a temporary file + rename) so it can be compared
// frame-by-frame against the input: every frame that was dropped at the
// render step is replaced by a copy of the last successfully rendered frame.
//
//    output_file            The output file to fix in place.
//    frame_length_in_bytes  Byte length of each frame.
//    frames                 All Frame objects, sorted by frame number.
//
// An empty |frames| vector is a no-op, as promised by the header; the
// original code dereferenced frames.front() unconditionally, which is
// undefined behavior on an empty vector.
void FixOutputFileForComparison(const std::string& output_file,
                                int frame_length_in_bytes,
                                const std::vector<Frame*>& frames) {
  if (frames.empty())
    return;
  webrtc::test::FrameReaderImpl frame_reader(output_file,
                                             frame_length_in_bytes);
  const std::string temp_file = output_file + ".fixed";
  webrtc::test::FrameWriterImpl frame_writer(temp_file, frame_length_in_bytes);
  frame_reader.Init();
  frame_writer.Init();
  ASSERT_FALSE(frames.front()->dropped_at_render) << "It should not be "
      "possible to drop the first frame. Both because we don't have anything "
      "useful to fill that gap with and it is impossible to detect it without "
      "any previous timestamps to compare with.";
  // RAII buffer instead of raw new[]/delete[]: safe against early returns
  // from the gtest ASSERT/EXPECT macros above and below.
  std::vector<uint8_t> last_frame_data(frame_length_in_bytes);
  // Process the file and write frame duplicates for all dropped frames.
  for (std::vector<Frame*>::const_iterator it = frames.begin();
       it != frames.end(); ++it) {
    if ((*it)->dropped_at_render) {
      // Write the previous frame to the output file:
      EXPECT_TRUE(frame_writer.WriteFrame(last_frame_data.data()));
    } else {
      EXPECT_TRUE(frame_reader.ReadFrame(last_frame_data.data()));
      EXPECT_TRUE(frame_writer.WriteFrame(last_frame_data.data()));
    }
  }
  frame_reader.Close();
  frame_writer.Close();
  ASSERT_EQ(0, remove(output_file.c_str()));
  ASSERT_EQ(0, rename(temp_file.c_str(), output_file.c_str()));
}
// Records that a frame reached |state| at |report_time_us| (microseconds).
// For kCreated, a new Frame object is allocated and indexed; for the other
// states only the timestamp map and invocation counter are updated.
void FrameDropDetector::ReportFrameState(State state, unsigned int timestamp,
                                         int64_t report_time_us) {
  // Any new report invalidates previously calculated results.
  dirty_ = true;
  switch (state) {
    case kCreated: {
      const int frame_number =
          static_cast<int>(created_frames_vector_.size());
      Frame* frame = new Frame(frame_number, timestamp);
      frame->created_timestamp_in_us_ = report_time_us;
      created_frames_vector_.push_back(frame);
      created_frames_[timestamp] = frame;
      ++num_created_frames_;
      break;
    }
    case kSent:
      sent_frames_[timestamp] = report_time_us;
      if (timestamp_diff_ == 0) {
        // The first sent frame establishes the constant offset between the
        // timestamps of frames entering and leaving the encoder; it is later
        // used to map reports back to entries in created_frames_.
        timestamp_diff_ =
            timestamp - created_frames_vector_.front()->frame_timestamp_;
      }
      ++num_sent_frames_;
      break;
    case kReceived:
      received_frames_[timestamp] = report_time_us;
      ++num_received_frames_;
      break;
    case kDecoded:
      decoded_frames_[timestamp] = report_time_us;
      ++num_decoded_frames_;
      break;
    case kRendered:
      rendered_frames_[timestamp] = report_time_us;
      ++num_rendered_frames_;
      break;
  }
}
// Resolves every reported timestamp into the corresponding Frame object and
// computes the per-state drop counters.  Must run after the test, before any
// result accessor (they assert on dirty_).
void FrameDropDetector::CalculateResults() {
  // Fill in all fields of the Frame objects in the created_frames_ map.
  // Iterate over the maps from converted timestamps to the arrival timestamps.
  // NOTE(review): created_frames_[key] default-inserts a NULL Frame* if a
  // report exists for a timestamp never reported as kCreated -- presumably
  // impossible since all frames enter through kCreated, but a miss here would
  // crash on the dereference; confirm.
  std::map<unsigned int, int64_t>::const_iterator it;
  for (it = sent_frames_.begin(); it != sent_frames_.end(); ++it) {
    unsigned int created_timestamp = it->first - timestamp_diff_;
    created_frames_[created_timestamp]->sent_timestamp_in_us_ = it->second;
  }
  for (it = received_frames_.begin(); it != received_frames_.end(); ++it) {
    unsigned int created_timestamp = it->first - timestamp_diff_;
    created_frames_[created_timestamp]->received_timestamp_in_us_ = it->second;
  }
  for (it = decoded_frames_.begin(); it != decoded_frames_.end(); ++it) {
    unsigned int created_timestamp = it->first - timestamp_diff_;
    created_frames_[created_timestamp]->decoded_timestamp_in_us_ =it->second;
  }
  for (it = rendered_frames_.begin(); it != rendered_frames_.end(); ++it) {
    unsigned int created_timestamp = it->first - timestamp_diff_;
    created_frames_[created_timestamp]->rendered_timestamp_in_us_ = it->second;
  }
  // Find out where the frames were not present in the different states.
  dropped_frames_at_send_ = 0;
  dropped_frames_at_receive_ = 0;
  dropped_frames_at_decode_ = 0;
  dropped_frames_at_render_ = 0;
  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
       it != created_frames_vector_.end(); ++it) {
    // Map the creation timestamp forward into the on-the-wire domain before
    // probing the per-state maps.
    int encoded_timestamp = (*it)->frame_timestamp_ + timestamp_diff_;
    if (sent_frames_.find(encoded_timestamp) == sent_frames_.end()) {
      (*it)->dropped_at_send = true;
      dropped_frames_at_send_++;
    }
    if (received_frames_.find(encoded_timestamp) == received_frames_.end()) {
      (*it)->dropped_at_receive = true;
      dropped_frames_at_receive_++;
    }
    if (decoded_frames_.find(encoded_timestamp) == decoded_frames_.end()) {
      (*it)->dropped_at_decode = true;
      dropped_frames_at_decode_++;
    }
    if (rendered_frames_.find(encoded_timestamp) == rendered_frames_.end()) {
      (*it)->dropped_at_render = true;
      dropped_frames_at_render_++;
    }
  }
  dirty_ = false;
}
// Prints a per-frame table of inter-frame deltas, drop locations and
// latencies, plus perf-formatted summary output tagged with |test_label|.
// CalculateResults() must have been called first (enforced by the assert).
// NOTE(review): container sizes are printed with %ld while size() returns
// size_t -- presumably fine on the test platforms, but confirm or cast.
void FrameDropDetector::PrintReport(const std::string& test_label) {
  assert(!dirty_);
  ViETest::Log("Frame Drop Detector report:");
  ViETest::Log(" Created frames: %ld", created_frames_.size());
  ViETest::Log(" Sent frames: %ld", sent_frames_.size());
  ViETest::Log(" Received frames: %ld", received_frames_.size());
  ViETest::Log(" Decoded frames: %ld", decoded_frames_.size());
  ViETest::Log(" Rendered frames: %ld", rendered_frames_.size());
  // Display all frames and stats for them:
  long last_created = 0;
  long last_sent = 0;
  long last_received = 0;
  long last_decoded = 0;
  long last_rendered = 0;
  ViETest::Log("\nDeltas between sent frames and drop status:");
  ViETest::Log("Unit: Microseconds");
  ViETest::Log("Frame Created Sent Received Decoded Rendered "
               "Dropped at Dropped at Dropped at Dropped at");
  ViETest::Log(" nbr delta delta delta delta delta "
               " Send? Receive? Decode? Render?");
  Statistics rendering_stats;
  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
       it != created_frames_vector_.end(); ++it) {
    // Deltas are measured against the previous frame that reached the same
    // state; -1 marks a frame dropped in that state.
    int created_delta =
        static_cast<int>((*it)->created_timestamp_in_us_ - last_created);
    int sent_delta = (*it)->dropped_at_send ? -1 :
        static_cast<int>((*it)->sent_timestamp_in_us_ - last_sent);
    int received_delta = (*it)->dropped_at_receive ? -1 :
        static_cast<int>((*it)->received_timestamp_in_us_ - last_received);
    int decoded_delta = (*it)->dropped_at_decode ? -1 :
        static_cast<int>((*it)->decoded_timestamp_in_us_ - last_decoded);
    int rendered_delta = (*it)->dropped_at_render ? -1 :
        static_cast<int>((*it)->rendered_timestamp_in_us_ - last_rendered);
    // Set values to -1 for the first frame:
    if ((*it)->number_ == 0) {
      created_delta = -1;
      sent_delta = -1;
      received_delta = -1;
      decoded_delta = -1;
      rendered_delta = -1;
    }
    ViETest::Log("%5d %8d %8d %8d %8d %8d %10s %10s %10s %10s",
                 (*it)->number_,
                 created_delta,
                 sent_delta,
                 received_delta,
                 decoded_delta,
                 rendered_delta,
                 (*it)->dropped_at_send ? "DROPPED" : " ",
                 (*it)->dropped_at_receive ? "DROPPED" : " ",
                 (*it)->dropped_at_decode ? "DROPPED" : " ",
                 (*it)->dropped_at_render ? "DROPPED" : " ");
    // Only advance the per-state "last seen" markers for states the frame
    // actually reached.
    last_created = (*it)->created_timestamp_in_us_;
    if (!(*it)->dropped_at_send) {
      last_sent = (*it)->sent_timestamp_in_us_;
    }
    if (!(*it)->dropped_at_receive) {
      last_received = (*it)->received_timestamp_in_us_;
    }
    if (!(*it)->dropped_at_decode) {
      last_decoded = (*it)->decoded_timestamp_in_us_;
    }
    if (!(*it)->dropped_at_render) {
      last_rendered = (*it)->rendered_timestamp_in_us_;
      rendering_stats.AddSample(rendered_delta / 1000.0f);
    }
  }
  ViETest::Log("\nLatency between states (-1 means N/A because of drop):");
  ViETest::Log("Unit: Microseconds");
  ViETest::Log("Frame Created Sent Received Decoded Total "
               " Total");
  ViETest::Log(" nbr ->Sent ->Received ->Decoded ->Rendered latency "
               " latency");
  ViETest::Log(" (incl network)"
               "(excl network)");
  Statistics latency_incl_network_stats;
  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
       it != created_frames_vector_.end(); ++it) {
    int created_to_sent = (*it)->dropped_at_send ? -1 :
        static_cast<int>((*it)->sent_timestamp_in_us_ -
                         (*it)->created_timestamp_in_us_);
    int sent_to_received = (*it)->dropped_at_receive ? -1 :
        static_cast<int>((*it)->received_timestamp_in_us_ -
                         (*it)->sent_timestamp_in_us_);
    int received_to_decoded = (*it)->dropped_at_decode ? -1 :
        static_cast<int>((*it)->decoded_timestamp_in_us_ -
                         (*it)->received_timestamp_in_us_);
    int decoded_to_render = (*it)->dropped_at_render ? -1 :
        static_cast<int>((*it)->rendered_timestamp_in_us_ -
                         (*it)->decoded_timestamp_in_us_);
    int total_latency_incl_network = (*it)->dropped_at_render ? -1 :
        static_cast<int>((*it)->rendered_timestamp_in_us_ -
                         (*it)->created_timestamp_in_us_);
    int total_latency_excl_network = (*it)->dropped_at_render ? -1 :
        static_cast<int>((*it)->rendered_timestamp_in_us_ -
                         (*it)->created_timestamp_in_us_ - sent_to_received);
    if (total_latency_incl_network >= 0)
      latency_incl_network_stats.AddSample(total_latency_incl_network /
                                           1000.0f);
    ViETest::Log("%5d %9d %9d %9d %9d %12d %12d",
                 (*it)->number_,
                 created_to_sent,
                 sent_to_received,
                 received_to_decoded,
                 decoded_to_render,
                 total_latency_incl_network,
                 total_latency_excl_network);
  }
  // Plot all measurements in the same graph since they share the same value
  // range.
  webrtc::test::PrintResultMeanAndError(
      "total_delay_incl_network", "", test_label,
      latency_incl_network_stats.AsString(), "ms", false);
  webrtc::test::PrintResultMeanAndError(
      "time_between_rendered_frames", "", test_label,
      rendering_stats.AsString(), "ms", false);
  // Find and print the dropped frames.
  ViETest::Log("\nTotal # dropped frames at:");
  ViETest::Log(" Send : %d", dropped_frames_at_send_);
  ViETest::Log(" Receive: %d", dropped_frames_at_receive_);
  ViETest::Log(" Decode : %d", dropped_frames_at_decode_);
  ViETest::Log(" Render : %d", dropped_frames_at_render_);
}
// Dumps the raw timestamp fields of every frame, any decoded/rendered drop
// mismatches, and the ReportFrameState() invocation counters.  Intended for
// debugging missing timestamps; CalculateResults() must be called first.
void FrameDropDetector::PrintDebugDump() {
  assert(!dirty_);
  ViETest::Log("\nPrintDebugDump: Frame objects:");
  ViETest::Log("Frame FrTimeStamp Created Sent Received Decoded"
               " Rendered ");
  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
       it != created_frames_vector_.end(); ++it) {
    ViETest::Log("%5d %11u %11lld %11lld %11lld %11lld %11lld",
                 (*it)->number_,
                 (*it)->frame_timestamp_,
                 (*it)->created_timestamp_in_us_,
                 (*it)->sent_timestamp_in_us_,
                 (*it)->received_timestamp_in_us_,
                 (*it)->decoded_timestamp_in_us_,
                 (*it)->rendered_timestamp_in_us_);
  }
  // Collect frames that were decoded but not rendered (or vice versa).
  std::vector<int> mismatch_frame_num_list;
  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
       it != created_frames_vector_.end(); ++it) {
    if ((*it)->dropped_at_render != (*it)->dropped_at_decode) {
      mismatch_frame_num_list.push_back((*it)->number_);
    }
  }
  if (mismatch_frame_num_list.size() > 0) {
    ViETest::Log("\nDecoded/Rendered mismatches:");
    ViETest::Log("Frame FrTimeStamp Created Sent Received "
                 "Decoded Rendered ");
    for (std::vector<int>::const_iterator it = mismatch_frame_num_list.begin();
         it != mismatch_frame_num_list.end(); ++it) {
      // Frame numbers equal their index in created_frames_vector_ (assigned
      // from the vector size in ReportFrameState), so indexing by number is
      // safe.
      Frame* frame = created_frames_vector_[*it];
      ViETest::Log("%5d %11u %11lld %11lld %11lld %11lld %11lld",
                   frame->number_,
                   frame->frame_timestamp_,
                   frame->created_timestamp_in_us_,
                   frame->sent_timestamp_in_us_,
                   frame->received_timestamp_in_us_,
                   frame->decoded_timestamp_in_us_,
                   frame->rendered_timestamp_in_us_);
    }
  }
  ViETest::Log("\nReportFrameState method invocations:");
  ViETest::Log(" Created : %d", num_created_frames_);
  ViETest::Log(" Send : %d", num_sent_frames_);
  ViETest::Log(" Received: %d", num_received_frames_);
  ViETest::Log(" Decoded : %d", num_decoded_frames_);
  ViETest::Log(" Rendered: %d", num_rendered_frames_);
}
// Returns all frames in creation order.  CalculateResults() must have been
// called since the last ReportFrameState(), otherwise the Frame fields are
// stale (enforced by the assert).
const std::vector<Frame*>& FrameDropDetector::GetAllFrames() {
  assert(!dirty_);
  return created_frames_vector_;
}
// Returns the number of frames registered as dropped at |state|.
// CalculateResults() must have been called first (enforced by the assert).
// kCreated has no drop counter and yields 0.
int FrameDropDetector::GetNumberOfFramesDroppedAt(State state) {
  assert(!dirty_);
  if (state == kSent)
    return dropped_frames_at_send_;
  if (state == kReceived)
    return dropped_frames_at_receive_;
  if (state == kDecoded)
    return dropped_frames_at_decode_;
  if (state == kRendered)
    return dropped_frames_at_render_;
  return 0;
}
// Registers the render event for this frame, then delegates the actual file
// writing to the base class (with a NULL handle).
int FrameDropMonitoringRemoteFileRenderer::DeliverFrame(
    unsigned char* buffer, size_t buffer_size, uint32_t time_stamp,
    int64_t ntp_time_ms, int64_t render_time, void* /*handle*/) {
  ReportFrameStats(time_stamp, render_time);
  return ViEToFileRenderer::DeliverFrame(buffer, buffer_size, time_stamp,
                                         ntp_time_ms, render_time, NULL);
}
// Registers |time_stamp| as rendered.  |render_time| is the frame's ideal
// render time in milliseconds; if that moment has already passed (comparison
// rounds "now" to the nearest ms), the frame is rendered immediately, so
// report the current time instead of the scheduled one.
void FrameDropMonitoringRemoteFileRenderer::ReportFrameStats(
    uint32_t time_stamp,
    int64_t render_time) {
  const int64_t now_us = webrtc::TickTime::MicrosecondTimestamp();
  int64_t report_time_us = render_time * 1000;
  if (render_time < (now_us + 500) / 1000)
    report_time_us = now_us;
  frame_drop_detector_->ReportFrameState(FrameDropDetector::kRendered,
                                         time_stamp, report_time_us);
}
// Registers the render event, then forwards the frame to the base class for
// writing to file.
int FrameDropMonitoringRemoteFileRenderer::DeliverI420Frame(
    const webrtc::I420VideoFrame& webrtc_frame) {
  ReportFrameStats(webrtc_frame.timestamp(), webrtc_frame.render_time_ms());
  return ViEToFileRenderer::DeliverI420Frame(webrtc_frame);
}
// No extra bookkeeping on size changes; delegate straight to the base class.
int FrameDropMonitoringRemoteFileRenderer::FrameSizeChange(
    unsigned int width, unsigned int height, unsigned int number_of_streams) {
  return ViEToFileRenderer::FrameSizeChange(width, height, number_of_streams);
}

View file

@ -1,242 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_SOURCE_FRAMEDROP_PRIMITIVES_H_
#define WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_SOURCE_FRAMEDROP_PRIMITIVES_H_
#include <map>
#include <vector>
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_image_process.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/libvietest/include/vie_to_file_renderer.h"
class FrameDropDetector;
struct NetworkParameters;
class TbInterfaces;
// Initializes the video engine and its components, runs video playback for
// kAutoTestFullStackSleepTimeMs milliseconds, then shuts down everything.
// The bit rate and packet loss parameters should be configured so that
// frames are dropped, in order to test the frame drop detection that is
// performed by the FrameDropDetector class.
void TestFullStack(const TbInterfaces& interfaces,
                   int capture_id,
                   int video_channel,
                   int width,
                   int height,
                   int bit_rate_kbps,
                   const NetworkParameters& network,
                   FrameDropDetector* frame_drop_detector,
                   ViEToFileRenderer* remote_file_renderer,
                   ViEToFileRenderer* local_file_renderer);
// A frame in a video file.  A frame's life cycle passes five instrumented
// points, in time order: created, sent, received, decoded, rendered.  The
// corresponding measurement timestamps are recorded here; -1 means the frame
// never reached that state.
class Frame {
 public:
  Frame(int number, unsigned int timestamp)
      : number_(number), frame_timestamp_(timestamp) {}

  // Frame number, starting at 0.
  int number_;
  // Frame timestamp, that is used by Video Engine and RTP headers and set
  // when the frame is sent into the stack.
  unsigned int frame_timestamp_;

  // Timestamps (microseconds) for our measurements of when the frame is in
  // different states.
  int64_t created_timestamp_in_us_ = -1;
  int64_t sent_timestamp_in_us_ = -1;
  int64_t received_timestamp_in_us_ = -1;
  int64_t decoded_timestamp_in_us_ = -1;
  int64_t rendered_timestamp_in_us_ = -1;

  // Where the frame was dropped (more than one may be true).
  bool dropped_at_send = false;
  bool dropped_at_receive = false;
  bool dropped_at_decode = false;
  bool dropped_at_render = false;
};
// Fixes the output file by copying the last successful frame into the place
// where the dropped frame would be, for all dropped frames (if any).
// This method will not be able to fix data for the first frame if that is
// dropped, since there'll be no previous frame to copy. This case should
// never happen because of encoder frame dropping at least.
// Parameters:
//    output_file            The output file to modify (pad with frame copies
//                           for all dropped frames).
//    frame_length_in_bytes  Byte length of each frame.
//    frames                 A vector of all Frame objects. Must be sorted by
//                           frame number. If empty this method will do
//                           nothing.
void FixOutputFileForComparison(const std::string& output_file,
                                int frame_length_in_bytes,
                                const std::vector<Frame*>& frames);
// Handles statistics about dropped frames. Frames travel through the stack
// with different timestamps. The frames created and sent to the encoder have
// one timestamp on the sending side while the decoded/rendered frames have
// another timestamp on the receiving side. The difference between these
// timestamps is fixed, which we can use to identify the frames when they
// arrive, since the FrameDropDetector class gets data reported from both
// sides.  The five different points in the stack when this class examines
// the frame states are (in time order): created, sent, received, decoded,
// rendered.
//
// The flow can be visualized like this:
//
// Created Sent Received Decoded Rendered
// +-------+ | +-------+ | +---------+ | +------+ +-------+ | +--------+
// |Capture| | |Encoder| | | Ext. | | |Jitter| |Decoder| | | Ext. |
// | device|---->| |-->|transport|-->|buffer|->| |---->|renderer|
// +-------+ +-------+ +---------+ +------+ +-------+ +--------+
//
// This class has no intention of being thread-safe.
class FrameDropDetector {
 public:
  enum State {
    // A frame being created, i.e. sent to the encoder; the first step of
    // a frame's life cycle. This timestamp becomes the frame timestamp in the
    // Frame objects.
    kCreated,
    // A frame being sent in external transport (to the simulated network).
    // This timestamp differs from the one in the Created state by a constant
    // diff.
    kSent,
    // A frame being received in external transport (from the simulated
    // network). This timestamp differs from the one in the Created state by a
    // constant diff.
    kReceived,
    // A frame that has been decoded in the decoder. This timestamp differs
    // from the one in the Created state by a constant diff.
    kDecoded,
    // A frame that has been rendered; the last step of a frame's life cycle.
    // This timestamp differs from the one in the Created state by a constant
    // diff.
    kRendered
  };
  FrameDropDetector()
      : dirty_(true),
        dropped_frames_at_send_(0),
        dropped_frames_at_receive_(0),
        dropped_frames_at_decode_(0),
        dropped_frames_at_render_(0),
        num_created_frames_(0),
        num_sent_frames_(0),
        num_received_frames_(0),
        num_decoded_frames_(0),
        num_rendered_frames_(0),
        timestamp_diff_(0) {}
  // Reports a frame has reached a state in the frame life cycle.
  void ReportFrameState(State state, unsigned int timestamp,
                        int64_t report_time_us);
  // Uses all the gathered timestamp information to calculate which frames
  // have been dropped during the test and where they were dropped. Not until
  // this method has been executed, the Frame objects will have all fields
  // filled with the proper timestamp information.
  void CalculateResults();
  // Calculates the number of frames have been registered as dropped at the
  // specified state of the frame life cycle.
  // CalculateResults() must be called before calling this method.
  int GetNumberOfFramesDroppedAt(State state);
  // Gets a vector of all the created frames.
  // CalculateResults() must be called before calling this method to have all
  // fields of the Frame objects to represent the current state.
  const std::vector<Frame*>& GetAllFrames();
  // Prints a detailed report about all the different frame states and which
  // ones are detected as dropped, using ViETest::Log. Also prints
  // perf-formatted output and adds |test_label| as a modifier to the perf
  // output.
  // CalculateResults() must be called before calling this method.
  void PrintReport(const std::string& test_label);
  // Prints all the timestamp maps. Mainly used for debugging purposes to find
  // missing timestamps.
  void PrintDebugDump();

 private:
  // Will be false until CalculateResults() is called. Switches to true
  // as soon as new timestamps are reported using ReportFrameState().
  bool dirty_;
  // Map of frame creation timestamps to all Frame objects.
  std::map<unsigned int, Frame*> created_frames_;
  // Maps converted frame timestamps (differ from creation timestamp) to the
  // time they arrived in the different states of the frame's life cycle.
  std::map<unsigned int, int64_t> sent_frames_;
  std::map<unsigned int, int64_t> received_frames_;
  std::map<unsigned int, int64_t> decoded_frames_;
  std::map<unsigned int, int64_t> rendered_frames_;
  // A vector with the frames sorted in their created order.
  std::vector<Frame*> created_frames_vector_;
  // Per-state drop counters (filled in by CalculateResults()).
  int dropped_frames_at_send_;
  int dropped_frames_at_receive_;
  int dropped_frames_at_decode_;
  int dropped_frames_at_render_;
  // Counters of ReportFrameState() invocations per state.
  int num_created_frames_;
  int num_sent_frames_;
  int num_received_frames_;
  int num_decoded_frames_;
  int num_rendered_frames_;
  // The constant diff between the created and transmitted frames, since their
  // timestamps are converted.
  unsigned int timestamp_diff_;
};
// Tracks which frames are received on the remote side and reports back to the
// FrameDropDetector class when they are rendered.  All frames are still
// written to file by the ViEToFileRenderer base class.
class FrameDropMonitoringRemoteFileRenderer : public ViEToFileRenderer {
 public:
  explicit FrameDropMonitoringRemoteFileRenderer(
      FrameDropDetector* frame_drop_detector)
      : frame_drop_detector_(frame_drop_detector) {}
  virtual ~FrameDropMonitoringRemoteFileRenderer() {}
  // Implementation of ExternalRenderer:
  int FrameSizeChange(unsigned int width,
                      unsigned int height,
                      unsigned int number_of_streams) override;
  int DeliverFrame(unsigned char* buffer,
                   size_t buffer_size,
                   uint32_t time_stamp,
                   int64_t ntp_time_ms,
                   int64_t render_time,
                   void* handle) override;
  int DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame) override;

 private:
  // Registers the frame as rendered with the detector.
  void ReportFrameStats(uint32_t time_stamp, int64_t render_time);
  FrameDropDetector* frame_drop_detector_;
};
#endif // WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_SOURCE_FRAMEDROP_PRIMITIVES_H_

View file

@ -1,97 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.h"
#include <stdio.h>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/frame_reader.h"
#include "webrtc/test/testsupport/frame_writer.h"
namespace webrtc {
// Temporary output file and frame byte length shared by the tests below.
const std::string kOutputFilename = "temp_outputfile.tmp";
const int kFrameLength = 1000;
class FrameDropPrimitivesTest: public testing::Test {
protected:
FrameDropPrimitivesTest() {}
virtual ~FrameDropPrimitivesTest() {}
void SetUp() {
// Cleanup any previous output file.
remove(kOutputFilename.c_str());
}
void TearDown() {
// Cleanup the temporary file.
remove(kOutputFilename.c_str());
}
};
// Verifies that FixOutputFileForComparison() pads the output file with a copy
// of the preceding frame for every frame marked dropped_at_render.
TEST_F(FrameDropPrimitivesTest, FixOutputFileForComparison) {
  // Create test frame objects, where the second and fourth frame is marked
  // as dropped at rendering.
  std::vector<Frame*> frames;
  Frame first_frame(0, kFrameLength);
  Frame second_frame(0, kFrameLength);
  Frame third_frame(0, kFrameLength);
  Frame fourth_frame(0, kFrameLength);
  second_frame.dropped_at_render = true;
  fourth_frame.dropped_at_render = true;
  frames.push_back(&first_frame);
  frames.push_back(&second_frame);
  frames.push_back(&third_frame);
  frames.push_back(&fourth_frame);
  // Prepare data for the first and third frames:
  uint8_t first_frame_data[kFrameLength];
  memset(first_frame_data, 5, kFrameLength);  // Fill it with 5's to identify.
  uint8_t third_frame_data[kFrameLength];
  memset(third_frame_data, 7, kFrameLength);  // Fill it with 7's to identify.
  // Write the first and third frames to the temporary file. This means the
  // fix method should add two frames of data by filling the file with data
  // from the first and third frames after executing.
  webrtc::test::FrameWriterImpl frame_writer(kOutputFilename, kFrameLength);
  EXPECT_TRUE(frame_writer.Init());
  EXPECT_TRUE(frame_writer.WriteFrame(first_frame_data));
  EXPECT_TRUE(frame_writer.WriteFrame(third_frame_data));
  frame_writer.Close();
  EXPECT_EQ(2 * kFrameLength,
            static_cast<int>(webrtc::test::GetFileSize(kOutputFilename)));
  FixOutputFileForComparison(kOutputFilename, kFrameLength, frames);
  // Verify that the output file has correct size.
  EXPECT_EQ(4 * kFrameLength,
            static_cast<int>(webrtc::test::GetFileSize(kOutputFilename)));
  // Each dropped frame must have been replaced by a copy of its predecessor:
  // expected sequence is 5's, 5's, 7's, 7's.
  webrtc::test::FrameReaderImpl frame_reader(kOutputFilename, kFrameLength);
  frame_reader.Init();
  uint8_t read_buffer[kFrameLength];
  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
  EXPECT_EQ(0, memcmp(read_buffer, first_frame_data, kFrameLength));
  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
  EXPECT_EQ(0, memcmp(read_buffer, first_frame_data, kFrameLength));
  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
  EXPECT_EQ(0, memcmp(read_buffer, third_frame_data, kFrameLength));
  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
  EXPECT_EQ(0, memcmp(read_buffer, third_frame_data, kFrameLength));
  frame_reader.Close();
}
} // namespace webrtc

View file

@ -1,143 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/primitives/general_primitives.h"
#include "webrtc/modules/video_capture/include/video_capture_factory.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/libvietest/include/vie_to_file_renderer.h"
// Enumerates the capture devices on this machine and allocates the first one
// that ViECapture accepts. On success fills in |device_name|, |device_id| and
// an AddRef'd |device_video| module (the caller must Release() it). Fails the
// surrounding test via EXPECT if no usable camera is found.
void FindCaptureDeviceOnSystem(webrtc::ViECapture* capture,
                               char* device_name,
                               unsigned int device_name_length,
                               int* device_id,
                               webrtc::VideoCaptureModule** device_video) {
  bool capture_device_set = false;
  webrtc::VideoCaptureModule::DeviceInfo *dev_info =
      webrtc::VideoCaptureFactory::CreateDeviceInfo(0);

  const unsigned int kMaxUniqueIdLength = 256;
  char unique_id[kMaxUniqueIdLength];
  memset(unique_id, 0, kMaxUniqueIdLength);

  for (unsigned int i = 0; i < dev_info->NumberOfDevices(); i++) {
    EXPECT_EQ(0, dev_info->GetDeviceName(i, device_name, device_name_length,
                                         unique_id, kMaxUniqueIdLength));

    // 4571 is the module id used for capture modules in these tests.
    *device_video = webrtc::VideoCaptureFactory::Create(4571, unique_id);
    EXPECT_TRUE(*device_video != NULL);

    if (*device_video) {
      (*device_video)->AddRef();

      int error = capture->AllocateCaptureDevice(**device_video, *device_id);
      if (error == 0) {
        ViETest::Log("Using capture device: %s, captureId: %d.",
                     device_name, *device_id);
        capture_device_set = true;
        break;
      } else {
        // ViE rejected this device; drop our reference and try the next one.
        (*device_video)->Release();
        (*device_video) = NULL;
      }
    }
  }
  delete dev_info;
  EXPECT_TRUE(capture_device_set) << "Found no suitable camera on your system.";
}
// Attaches a renderer for |frame_provider_id| (a capture device or a video
// channel) to OS window |os_window| at |z_index|, covering the full window
// (left/top 0.0 to right/bottom 1.0), and starts rendering.
// Undo with StopRenderInWindow().
void RenderInWindow(webrtc::ViERender* video_render_interface,
                    int frame_provider_id,
                    void* os_window,
                    float z_index) {
  EXPECT_EQ(0,
            video_render_interface->AddRenderer(frame_provider_id, os_window,
                                                z_index, 0.0, 0.0, 1.0, 1.0));
  EXPECT_EQ(0, video_render_interface->StartRender(frame_provider_id));
}
// Reverses RenderInWindow(): stops rendering for |frame_provider_id| and
// removes its renderer.
void StopRenderInWindow(webrtc::ViERender* video_render_interface,
                        int frame_provider_id) {
  EXPECT_EQ(0, video_render_interface->StopRender(frame_provider_id));
  EXPECT_EQ(0, video_render_interface->RemoveRenderer(frame_provider_id));
}
// Like RenderInWindow(), but delivers |frame_provider_id|'s frames as I420
// to |to_file_renderer| instead of an OS window, and starts rendering.
void RenderToFile(webrtc::ViERender* renderer_interface,
                  int frame_provider_id,
                  ViEToFileRenderer *to_file_renderer) {
  EXPECT_EQ(0, renderer_interface->AddRenderer(
      frame_provider_id, webrtc::kVideoI420, to_file_renderer));
  EXPECT_EQ(0, renderer_interface->StartRender(frame_provider_id));
}
// Applies the standard RTCP configuration used by the auto tests to
// |video_channel|: compound RTCP (RFC 4585), PLI key-frame requests, TMMBR,
// plus the requested loss-protection scheme (plain NACK or hybrid NACK/FEC).
void ConfigureRtpRtcp(webrtc::ViERTP_RTCP* rtcp_interface,
                      ProtectionMethod protection_method,
                      int video_channel) {
  EXPECT_EQ(0, rtcp_interface->SetRTCPStatus(video_channel,
                                             webrtc::kRtcpCompound_RFC4585));
  EXPECT_EQ(0, rtcp_interface->SetKeyFrameRequestMethod(
      video_channel, webrtc::kViEKeyFrameRequestPliRtcp));
  EXPECT_EQ(0, rtcp_interface->SetTMMBRStatus(video_channel, true));
  switch (protection_method) {
    case kNack:
      EXPECT_EQ(0, rtcp_interface->SetNACKStatus(video_channel, true));
      break;
    case kHybridNackFec: {
      // Braced scope: case-level declarations with initializers should not
      // leak into the enclosing switch block.
      const int kRedPayloadType = 96;
      const int kUlpFecPayloadType = 97;
      EXPECT_EQ(0, rtcp_interface->SetHybridNACKFECStatus(video_channel,
                                                          true,
                                                          kRedPayloadType,
                                                          kUlpFecPayloadType));
      break;
    }
  }
}
// Scans the codecs exposed by |codec_interface| for one whose type matches
// |of_type|. On success copies it into |result| and returns true. On failure
// (enumeration error or no match) returns false, leaving |result| zeroed.
bool FindSpecificCodec(webrtc::VideoCodecType of_type,
                       webrtc::ViECodec* codec_interface,
                       webrtc::VideoCodec* result) {
  memset(result, 0, sizeof(webrtc::VideoCodec));

  for (int index = 0; index < codec_interface->NumberOfCodecs(); ++index) {
    webrtc::VideoCodec candidate;
    memset(&candidate, 0, sizeof(webrtc::VideoCodec));
    if (codec_interface->GetCodec(index, candidate) != 0)
      return false;
    if (candidate.codecType != of_type)
      continue;
    // Found the requested codec type.
    *result = candidate;
    return true;
  }
  // No codec of the requested type is registered.
  return false;
}
// Picks an encode resolution for |video_codec|: explicitly forced dimensions
// (both != kDoNotForceResolution) win; otherwise I420 is capped to 176x144
// since it is very bandwidth heavy, and every other codec gets 640x480.
void SetSuitableResolution(webrtc::VideoCodec* video_codec,
                           int forced_codec_width,
                           int forced_codec_height) {
  const bool resolution_forced =
      forced_codec_width != kDoNotForceResolution &&
      forced_codec_height != kDoNotForceResolution;
  if (resolution_forced) {
    video_codec->width = forced_codec_width;
    video_codec->height = forced_codec_height;
    return;
  }
  if (video_codec->codecType == webrtc::kVideoCodecI420) {
    // I420 is very bandwidth heavy, so limit it here.
    video_codec->width = 176;
    video_codec->height = 144;
    return;
  }
  // Otherwise go with 640x480.
  video_codec->width = 640;
  video_codec->height = 480;
}

View file

@ -1,88 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_GENERAL_PRIMITIVES_H_
#define WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_GENERAL_PRIMITIVES_H_
// Forward-declared here because it lives outside the webrtc namespace.
class ViEToFileRenderer;

#include "webrtc/common_types.h"

namespace webrtc {
class VideoCaptureModule;
class ViEBase;
class ViECapture;
class ViECodec;
class ViERender;
class ViERTP_RTCP;
struct VideoCodec;
}

// Loss-protection schemes understood by ConfigureRtpRtcp().
enum ProtectionMethod {
  kNack,
  kHybridNackFec,
};

// This constant can be used as input to various functions to not force the
// codec resolution.
const int kDoNotForceResolution = 0;

// Finds a suitable capture device (e.g. camera) on the current system
// and allocates it. Details about the found device are filled into the out
// parameters. If this operation fails, device_id is assigned a negative value
// and number_of_errors is incremented.
void FindCaptureDeviceOnSystem(webrtc::ViECapture* capture,
                               char* device_name,
                               const unsigned int kDeviceNameLength,
                               int* device_id,
                               webrtc::VideoCaptureModule** device_video);

// Sets up rendering in a window previously created using a Window Manager
// (See vie_window_manager_factory.h for more details on how to make one of
// those). The frame provider id is a source of video frames, for instance
// a capture device or a video channel.
// NOTE: A call to StopRenderInWindow needs to be done in order to clear
// up the configuration applied by this function.
void RenderInWindow(webrtc::ViERender* video_render_interface,
                    int frame_provider_id,
                    void* os_window,
                    float z_index);

// Stops rendering into a window as previously set up by calling RenderInWindow.
void StopRenderInWindow(webrtc::ViERender* video_render_interface,
                        int frame_provider_id);

// Similar in function to RenderInWindow, this function instead renders to
// a file using a to-file-renderer. The frame provider id is a source of
// video frames, for instance a capture device or a video channel.
void RenderToFile(webrtc::ViERender* renderer_interface,
                  int frame_provider_id,
                  ViEToFileRenderer* to_file_renderer);

// Configures RTP-RTCP (compound RTCP, PLI key-frame requests, TMMBR and the
// chosen ProtectionMethod) on the given channel.
void ConfigureRtpRtcp(webrtc::ViERTP_RTCP* rtcp_interface,
                      ProtectionMethod protection_method,
                      int video_channel);

// Finds a codec in the codec list. Returns true on success, false otherwise.
// The resulting codec is filled into result on success but is zeroed out
// on failure.
bool FindSpecificCodec(webrtc::VideoCodecType of_type,
                       webrtc::ViECodec* codec_interface,
                       webrtc::VideoCodec* result);

// Sets up the provided codec with a resolution that takes individual codec
// quirks into account (except if the forced* variables are
// != kDoNotForceResolution)
void SetSuitableResolution(webrtc::VideoCodec* video_codec,
                           int forced_codec_width,
                           int forced_codec_height);
#endif // WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_GENERAL_PRIMITIVES_H_

View file

@ -1,172 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/primitives/input_helpers.h"
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <algorithm>
#include "gflags/gflags.h"
namespace webrtc {
// --override lets automated runs answer interactive prompts up front; the
// flag description below documents the exact "title=value" syntax.
DEFINE_string(override, "",
    "Makes it possible to override choices or inputs. All choices and "
    "inputs will use their default values unless you override them in this "
    "flag's argument. There can be several comma-separated overrides specified:"
    " Overrides are specified as \"title=option text\" for choices and "
    "\"title=value\" for regular inputs. Note that the program will stop if "
    "you provide input not accepted by the input's validator through this flag."
    "\n\nExample: --override \"Enter destination IP=192.168.0.1, "
    "Select a codec=VP8\"");

// Default validator: any non-empty line is acceptable.
class AcceptAllNonEmptyValidator : public InputValidator {
 public:
  bool InputOk(const std::string& value) const {
    return value.length() > 0;
  }
};
// Reads from stdin by default. Takes ownership of |input_validator| but not
// of |override_registry|.
InputBuilder::InputBuilder(const std::string& title,
                           const InputValidator* input_validator,
                           const OverrideRegistry& override_registry)
    : input_source_(stdin), input_validator_(input_validator),
      override_registry_(override_registry), default_value_(""),
      title_(title) {
}
InputBuilder::~InputBuilder() {
  // The builder owns its validator (see constructor / WithInputValidator).
  delete input_validator_;
}
// Returns the answer for this input point. A registered --override value
// takes precedence; otherwise, when any --override flag is present, the
// default value (when set) is used without prompting; otherwise the user is
// asked interactively until the validator accepts the input.
std::string InputBuilder::AskForInput() const {
  if (override_registry_.HasOverrideFor(title_))
    return GetOverride();
  // NOTE(review): passing any --override makes every other input silently
  // take its default (when one exists) rather than prompting.
  if (!FLAGS_override.empty() && !default_value_.empty())
    return default_value_;
  // We don't know the answer already, so ask the user.
  return ActuallyAskUser();
}
// Prompts on stdout and reads one line from |input_source_|. Recurses until
// the validator accepts the input; returns the default value when the user
// just hits enter and a default is configured. Exits the process on EOF.
std::string InputBuilder::ActuallyAskUser() const {
  printf("\n%s%s\n", title_.c_str(), additional_info_.c_str());
  if (!default_value_.empty())
    printf("Hit enter for default (%s):\n", default_value_.c_str());

  printf("# ");
  char raw_input[128];
  if (!fgets(raw_input, 128, input_source_)) {
    // If we get here the user probably hit CTRL+D.
    exit(1);
  }

  std::string input = raw_input;
  // Strip the trailing newline, but only if one is actually present: a line
  // longer than the buffer, or EOF without a final newline, must not lose
  // its last real character (the old code stripped unconditionally).
  if (!input.empty() && input[input.size() - 1] == '\n')
    input.erase(input.size() - 1);

  if (input.empty() && !default_value_.empty())
    return default_value_;

  if (!input_validator_->InputOk(input)) {
    printf("Invalid input. Please try again.\n");
    return ActuallyAskUser();
  }
  return input;
}
// Replaces the stream input is read from (default stdin); not owned.
InputBuilder& InputBuilder::WithInputSource(FILE* input_source) {
  input_source_ = input_source;
  return *this;
}
// Swaps in a new validator, taking ownership and freeing the previous one.
InputBuilder& InputBuilder::WithInputValidator(
    const InputValidator* input_validator) {
  // If there's a default value, it must be accepted by the input validator.
  assert(default_value_.empty() || input_validator->InputOk(default_value_));
  delete input_validator_;
  input_validator_ = input_validator;
  return *this;
}
// Sets the value returned when the user gives empty input; it must satisfy
// the current validator.
InputBuilder& InputBuilder::WithDefault(const std::string& default_value) {
  assert(input_validator_->InputOk(default_value));
  default_value_ = default_value;
  return *this;
}
// Extra text printed directly after the title in the prompt.
InputBuilder& InputBuilder::WithAdditionalInfo(const std::string& info) {
  additional_info_ = info;
  return *this;
}
// Returns the --override value for this input point. Exits the process if
// the override does not pass the validator (bad command line).
const std::string& InputBuilder::GetOverride() const {
  const std::string& override = override_registry_.GetOverrideFor(title_);
  if (!input_validator_->InputOk(override)) {
    printf("Fatal: Input validator for \"%s\" does not accept override %s.\n",
           title_.c_str(), override.c_str());
    exit(1);
  }
  return override;
}
// Parses |overrides| on the form "Title A=Value A,Title B=Value B" into the
// internal title->value map. A malformed entry aborts the program.
OverrideRegistry::OverrideRegistry(const std::string& overrides) {
  std::vector<std::string> entries = Split(overrides, ",");
  for (std::vector<std::string>::const_iterator entry = entries.begin();
       entry != entries.end(); ++entry) {
    std::vector<std::string> title_and_value = Split(*entry, "=");
    if (title_and_value.size() != 2) {
      printf("Fatal: Override %s is malformed.", entry->c_str());
      exit(1);
    }
    overrides_[title_and_value[0]] = title_and_value[1];
  }
}
// True if an override value was supplied for the input point named |title|.
bool OverrideRegistry::HasOverrideFor(const std::string& title) const {
  return overrides_.count(title) > 0;
}
const std::string& OverrideRegistry::GetOverrideFor(
const std::string& title) const {
assert(HasOverrideFor(title));
return (*overrides_.find(title)).second;
}
// Convenience factory: an InputBuilder accepting any non-empty input.
// The override registry is parsed from --override once (function-local
// static) and shared by every builder created through this function.
InputBuilder TypedInput(const std::string& title) {
  static OverrideRegistry override_registry_(FLAGS_override);
  return InputBuilder(
      title, new AcceptAllNonEmptyValidator(), override_registry_);
}
// Splits |to_split| on every occurrence of |delimiter| and returns the parts
// in order. Interior empty parts are kept; a trailing empty part is dropped
// (e.g. "a,," -> {"a", ""}). An empty delimiter yields the whole input as a
// single part (when non-empty) instead of looping forever.
std::vector<std::string> Split(const std::string& to_split,
                               const std::string& delimiter) {
  std::vector<std::string> result;
  if (delimiter.empty()) {
    // find("") would match at every position and never advance.
    if (!to_split.empty())
      result.push_back(to_split);
    return result;
  }
  size_t current_pos = 0;
  size_t next_delimiter = 0;
  while ((next_delimiter = to_split.find(delimiter, current_pos)) !=
      std::string::npos) {
    result.push_back(
        to_split.substr(current_pos, next_delimiter - current_pos));
    // Skip the entire delimiter; advancing by just one character (as the old
    // code did) breaks multi-character delimiters.
    current_pos = next_delimiter + delimiter.length();
  }
  std::string last_part = to_split.substr(current_pos);
  if (!last_part.empty())
    result.push_back(last_part);
  return result;
}
} // namespace webrtc

View file

@ -1,116 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_
#define WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_
#include <stdlib.h>
#include <map>
#include <string>
#include <vector>
#include "gflags/gflags.h"
namespace webrtc {
class InputValidator;
class OverrideRegistry;
// This class handles general user input to the application.
class InputBuilder {
 public:
  // The input builder takes ownership of the validator (but not the
  // override registry).
  InputBuilder(const std::string& title,
               const InputValidator* input_validator,
               const OverrideRegistry& override_registry);
  ~InputBuilder();

  // Ask the user for input, reads input from the input source and returns
  // the answer. This method will keep asking the user until a correct answer
  // is returned and is thereby guaranteed to return a response that is
  // acceptable to the input validator.
  //
  // In some cases we will not actually ask the user for input, for instance
  // if the --choose-defaults or --override flags are specified. See the
  // definition of those flags in the .cc file for more information.
  std::string AskForInput() const;

  // Replaces the input source where we ask for input. Default is stdin.
  InputBuilder& WithInputSource(FILE* input_source);
  // Sets the input validator. The input builder takes ownership. If a default
  // value has been set, it must be acceptable to this validator.
  InputBuilder& WithInputValidator(const InputValidator* input_validator);
  // Sets a default value if the user doesn't want to give input. This value
  // must be acceptable to the input validator.
  InputBuilder& WithDefault(const std::string& default_value);
  // Prints additional info after the title.
  InputBuilder& WithAdditionalInfo(const std::string& title);

 private:
  const std::string& GetOverride() const;
  std::string ActuallyAskUser() const;

  FILE* input_source_;                         // Not owned; defaults to stdin.
  const InputValidator* input_validator_;      // Owned.
  const OverrideRegistry& override_registry_;  // Not owned.
  std::string default_value_;
  std::string title_;
  std::string additional_info_;
};
// Keeps track of overrides for any input points. Overrides are passed in the
// format Title 1=Value 1,Title 2=Value 2. Spaces are not trimmed anywhere.
class OverrideRegistry {
 public:
  OverrideRegistry(const std::string& overrides);
  // True if an override was supplied for the input point named |title|.
  bool HasOverrideFor(const std::string& title) const;
  // Precondition: HasOverrideFor(title).
  const std::string& GetOverrideFor(const std::string& title) const;
 private:
  typedef std::map<std::string, std::string> OverrideMap;
  OverrideMap overrides_;
};
// Interface for deciding whether a line of user input is acceptable.
class InputValidator {
 public:
  virtual ~InputValidator() {}

  // Returns true if |value| is an acceptable input.
  virtual bool InputOk(const std::string& value) const = 0;
};
// Ensures input is an integer between low and high (inclusive).
class IntegerWithinRangeValidator : public InputValidator {
 public:
  IntegerWithinRangeValidator(int low, int high)
      : low_(low), high_(high) {}

  bool InputOk(const std::string& input) const {
    const int parsed = atoi(input.c_str());
    // atoi() reports failure as 0, which collides with a legitimate "0";
    // treat 0 as a parse error unless the text actually starts with '0'.
    const bool parse_failed =
        parsed == 0 && !input.empty() && input[0] != '0';
    if (parse_failed)
      return false;
    return low_ <= parsed && parsed <= high_;
  }

 private:
  int low_;
  int high_;
};
// Splits |to_split| on |delimiter|; a trailing empty part is dropped.
std::vector<std::string> Split(const std::string& to_split,
                               const std::string& delimiter);

// Convenience method for creating an input builder.
InputBuilder TypedInput(const std::string& title);
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_

View file

@ -1,80 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/video_engine/test/auto_test/primitives/fake_stdin.h"
#include "webrtc/video_engine/test/auto_test/primitives/input_helpers.h"
namespace webrtc {
// Fixture shared by the input-helper tests; no common state is needed.
class InputHelpersTest: public testing::Test {
};
// The default validator skips empty lines and returns the first non-empty one.
TEST_F(InputHelpersTest, AcceptsAnyInputExceptEmptyByDefault) {
  FILE* fake_stdin = FakeStdin("\n\nWhatever\n");
  std::string result = TypedInput("Title")
      .WithInputSource(fake_stdin).AskForInput();
  EXPECT_EQ("Whatever", result);
  fclose(fake_stdin);
}
// An empty first line yields the configured default instead of re-prompting.
TEST_F(InputHelpersTest, ReturnsDefaultOnEmptyInputIfDefaultSet) {
  FILE* fake_stdin = FakeStdin("\n\nWhatever\n");
  std::string result = TypedInput("Title")
      .WithInputSource(fake_stdin)
      .WithDefault("MyDefault")
      .AskForInput();
  EXPECT_EQ("MyDefault", result);
  fclose(fake_stdin);
}
// Lines rejected by a custom validator are skipped until one passes.
TEST_F(InputHelpersTest, ObeysInputValidator) {
  class ValidatorWhichOnlyAcceptsFooBar : public InputValidator {
   public:
    bool InputOk(const std::string& input) const {
      return input == "FooBar";
    }
  };
  FILE* fake_stdin = FakeStdin("\nFoo\nBar\nFoo Bar\nFooBar\n");
  std::string result = TypedInput("Title")
      .WithInputSource(fake_stdin)
      .WithInputValidator(new ValidatorWhichOnlyAcceptsFooBar())
      .AskForInput();
  EXPECT_EQ("FooBar", result);
  fclose(fake_stdin);
}
// "Title=Value,Title2=Value2" parses into per-title lookups; absent titles
// report no override.
TEST_F(InputHelpersTest, OverrideRegistryParsesOverridesCorrectly) {
  // TODO(phoglund): Ignore spaces where appropriate
  OverrideRegistry override_registry("My Title=Value,My Choice=1");
  EXPECT_TRUE(override_registry.HasOverrideFor("My Title"));
  EXPECT_EQ("Value", override_registry.GetOverrideFor("My Title"));
  EXPECT_TRUE(override_registry.HasOverrideFor("My Choice"));
  EXPECT_EQ("1", override_registry.GetOverrideFor("My Choice"));
  EXPECT_FALSE(override_registry.HasOverrideFor("Not Overridden"));
}
// A registered override wins over both the default value and piped-in input.
TEST_F(InputHelpersTest, ObeysOverridesBeforeAnythingElse) {
  class CarelessValidator : public InputValidator {
   public:
    bool InputOk(const std::string& input) const {
      return true;
    }
  };
  FILE* fake_stdin = FakeStdin("\nFoo\nBar\nFoo Bar\nFooBar\n");
  OverrideRegistry override_registry("My Title=Value,My Choice=1");
  EXPECT_EQ("Value", InputBuilder("My Title",
      new CarelessValidator(), override_registry)
          .WithDefault("Whatever")
          .WithInputSource(fake_stdin).AskForInput());
  fclose(fake_stdin);
}
};

View file

@ -1,162 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest.cc
//
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include <stdio.h>
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_render/include/video_render.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/auto_test/primitives/general_primitives.h"
#include "webrtc/video_engine/test/libvietest/include/tb_capture_device.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
#include "webrtc/video_engine/test/libvietest/include/tb_video_channel.h"
DEFINE_bool(include_timing_dependent_tests, true,
            "If true, we will include tests / parts of tests that are known "
            "to break in slow execution environments (such as valgrind).");

// ViETest implementation
FILE* ViETest::log_file_ = NULL;
char* ViETest::log_str_ = NULL;

// Directory where the tests write their output files (traces etc.).
std::string ViETest::GetResultOutputPath() {
  return webrtc::test::OutputPath();
}
// ViEAutoTest implementation
// Creates one render module per output window; ids 4561/4562 distinguish
// the two modules.
ViEAutoTest::ViEAutoTest(void* window1, void* window2) :
    _window1(window1),
    _window2(window2),
    _renderType(webrtc::kRenderDefault),
    _vrm1(webrtc::VideoRender::CreateVideoRender(
        4561, window1, false, _renderType)),
    _vrm2(webrtc::VideoRender::CreateVideoRender(
        4562, window2, false, _renderType))
{
    assert(_vrm1);
    assert(_vrm2);
}
ViEAutoTest::~ViEAutoTest()
{
    // Tear down the two render modules created in the constructor.
    webrtc::VideoRender::DestroyVideoRender(_vrm1);
    _vrm1 = NULL;
    webrtc::VideoRender::DestroyVideoRender(_vrm2);
    _vrm2 = NULL;
}
// Runs the "standard" test of every sub-API in sequence.
void ViEAutoTest::ViEStandardTest()
{
    ViEBaseStandardTest();
    ViECaptureStandardTest();
    ViECodecStandardTest();
    ViEImageProcessStandardTest();
    ViERenderStandardTest();
    ViERtpRtcpStandardTest();
}
// Runs the "extended" test of each sub-API that has one.
void ViEAutoTest::ViEExtendedTest()
{
    ViEBaseExtendedTest();
    ViECaptureExtendedTest();
    ViECodecExtendedTest();
    ViEImageProcessExtendedTest();
    ViERenderExtendedTest();
}
// Runs the API-surface test of every sub-API in sequence.
void ViEAutoTest::ViEAPITest()
{
    ViEBaseAPITest();
    ViECaptureAPITest();
    ViECodecAPITest();
    ViEImageProcessAPITest();
    ViERenderAPITest();
    ViERtpRtcpAPITest();
}
// Logs a human-readable dump of |videoCodec| through ViETest::Log.
void ViEAutoTest::PrintVideoCodec(const webrtc::VideoCodec videoCodec)
{
    // Resolve the codec-type display name first, then emit each field on its
    // own log line (same lines and order as before).
    const char* codec_type_name = NULL;
    switch (videoCodec.codecType)
    {
        case webrtc::kVideoCodecVP8:
            codec_type_name = "VP8";
            break;
        case webrtc::kVideoCodecVP9:
            codec_type_name = "VP9";
            break;
        case webrtc::kVideoCodecI420:
            codec_type_name = "I420";
            break;
        case webrtc::kVideoCodecH264:
            codec_type_name = "H264";
            break;
        case webrtc::kVideoCodecRED:
            codec_type_name = "RED";
            break;
        case webrtc::kVideoCodecULPFEC:
            codec_type_name = "ULPFEC";
            break;
        case webrtc::kVideoCodecGeneric:
            codec_type_name = "GENERIC";
            break;
        case webrtc::kVideoCodecUnknown:
            codec_type_name = "UNKNOWN";
            break;
    }
    ViETest::Log("Video Codec Information:");
    ViETest::Log("\tcodecType: %s", codec_type_name);
    ViETest::Log("\theight: %u", videoCodec.height);
    ViETest::Log("\tmaxBitrate: %u", videoCodec.maxBitrate);
    ViETest::Log("\tmaxFramerate: %u", videoCodec.maxFramerate);
    ViETest::Log("\tminBitrate: %u", videoCodec.minBitrate);
    ViETest::Log("\tplName: %s", videoCodec.plName);
    ViETest::Log("\tplType: %u", videoCodec.plType);
    ViETest::Log("\tstartBitrate: %u", videoCodec.startBitrate);
    ViETest::Log("\twidth: %u", videoCodec.width);
    ViETest::Log("");
}
// Logs a human-readable dump of |audioCodec|. The field labels were missing
// from every line but the first ("\t: %u"), making the output unreadable;
// restore them so each value is identified.
void ViEAutoTest::PrintAudioCodec(const webrtc::CodecInst audioCodec)
{
    ViETest::Log("Audio Codec Information:");
    ViETest::Log("\tchannels: %u", audioCodec.channels);
    ViETest::Log("\tpacsize: %u", audioCodec.pacsize);
    ViETest::Log("\tplfreq: %u", audioCodec.plfreq);
    ViETest::Log("\tplname: %s", audioCodec.plname);
    ViETest::Log("\tpltype: %d", audioCodec.pltype);
    ViETest::Log("\trate: %u", audioCodec.rate);
    ViETest::Log("");
}
// Shows the local capture preview in window 1 and the received channel video
// in window 2. Undo with StopRenderCaptureDeviceAndOutputStream().
void ViEAutoTest::RenderCaptureDeviceAndOutputStream(
    TbInterfaces* video_engine,
    TbVideoChannel* video_channel,
    TbCaptureDevice* capture_device) {
  RenderInWindow(
      video_engine->render, capture_device->captureId, _window1, 0);
  RenderInWindow(
      video_engine->render, video_channel->videoChannel, _window2, 1);
}
// Stops the two renderers started by RenderCaptureDeviceAndOutputStream().
void ViEAutoTest::StopRenderCaptureDeviceAndOutputStream(
    TbInterfaces* video_engine,
    TbVideoChannel* video_channel,
    TbCaptureDevice* capture_device) {
  StopRenderInWindow(video_engine->render, capture_device->captureId);
  StopRenderInWindow(video_engine->render, video_channel->videoChannel);
}

View file

@ -1,176 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_android.h"
#include <android/log.h>
#include <stdio.h>
#include "webrtc/modules/video_capture/video_capture_internal.h"
#include "webrtc/modules/video_render/video_render_internal.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
// JNI-driven entry point for the Android auto test. |testSelection| picks the
// suite (0 = standard, 1 = API, 2 = extended, 3 = loopback call) and
// |subTestSelection| picks one test inside it (0 = run the whole suite).
int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
                                    void* window1, void* window2,
                                    JavaVM* javaVM, void* env, void* context) {
  ViEAutoTest vieAutoTest(window1, window2);
  ViETest::Log("RunAutoTest(%d, %d)", testSelection, subTestSelection);
  webrtc::SetCaptureAndroidVM(javaVM, static_cast<jobject>(context));
  webrtc::SetRenderAndroidVM(javaVM);
#ifndef WEBRTC_ANDROID_OPENSLES
  // voice engine calls into ADM directly
  webrtc::VoiceEngine::SetAndroidObjects(javaVM, context);
#endif

  if (subTestSelection == 0) {
    // Run the complete selected suite, then return. Returning here fixes a
    // bug where execution fell through into the sub-test dispatch below and
    // testSelection == 3 started the loopback call a second time.
    switch (testSelection) {
      case 0:
        vieAutoTest.ViEStandardTest();
        break;
      case 1:
        vieAutoTest.ViEAPITest();
        break;
      case 2:
        vieAutoTest.ViEExtendedTest();
        break;
      case 3:
        vieAutoTest.ViELoopbackCall();
        break;
      default:
        break;
    }
    return 0;
  }

  switch (testSelection) {
    case 0:  // Specific standard test
      switch (subTestSelection) {
        case 1:  // base
          vieAutoTest.ViEBaseStandardTest();
          break;
        case 2:  // capture
          vieAutoTest.ViECaptureStandardTest();
          break;
        case 3:  // codec
          vieAutoTest.ViECodecStandardTest();
          break;
        case 6:  // image process
          vieAutoTest.ViEImageProcessStandardTest();
          break;
#if 0  // vie_autotest_network.cc isn't actually pulled into the build at all!
        case 7:  // network
          vieAutoTest.ViENetworkStandardTest();
          break;
#endif
        case 8:  // Render
          vieAutoTest.ViERenderStandardTest();
          break;
        case 9:  // RTP/RTCP
          vieAutoTest.ViERtpRtcpStandardTest();
          break;
        default:
          break;
      }
      break;
    case 1:  // specific API
      switch (subTestSelection) {
        case 1:  // base
          vieAutoTest.ViEBaseAPITest();
          break;
        case 2:  // capture
          vieAutoTest.ViECaptureAPITest();
          break;
        case 3:  // codec
          vieAutoTest.ViECodecAPITest();
          break;
        case 6:  // image process
          vieAutoTest.ViEImageProcessAPITest();
          break;
#if 0  // vie_autotest_network.cc isn't actually pulled into the build at all!
        case 7:  // network
          vieAutoTest.ViENetworkAPITest();
          break;
#endif
        case 8:  // Render
          vieAutoTest.ViERenderAPITest();
          break;
        case 9:  // RTP/RTCP
          vieAutoTest.ViERtpRtcpAPITest();
          break;
        case 10:
          break;
        default:
          break;
      }
      break;
    case 2:  // specific extended
      switch (subTestSelection) {
        case 1:  // base
          vieAutoTest.ViEBaseExtendedTest();
          break;
        case 2:  // capture
          vieAutoTest.ViECaptureExtendedTest();
          break;
        case 3:  // codec
          vieAutoTest.ViECodecExtendedTest();
          break;
        case 6:  // image process
          vieAutoTest.ViEImageProcessExtendedTest();
          break;
        case 7:  // Render
          vieAutoTest.ViERenderExtendedTest();
          break;
        case 8:  // RTP/RTCP
          // Note that this test is removed. It hasn't been properly cleaned
          // up because this is hopefully going away soon.
          break;
        default:
          break;
      }
      break;
    case 3:
      vieAutoTest.ViELoopbackCall();
      break;
    default:
      break;
  }
  return 0;
}
// NOTE(review): presumably a stub to satisfy the linker — the real entry
// point on Android appears to be RunAutotest via JNI; confirm before use.
int main(int argc, char** argv) {
  // TODO(leozwang): Add real tests here
  return 0;
}

View file

@ -1,237 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_capture/include/video_capture_factory.h"
#include "webrtc/test/channel_transport/include/channel_transport.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/auto_test/primitives/base_primitives.h"
#include "webrtc/video_engine/test/auto_test/primitives/general_primitives.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
// End-to-end smoke test of ViEBase: creates a channel, connects a real
// capture device, renders local preview and remote video, runs an I420 call
// setup and tears everything down in reverse order.
void ViEAutoTest::ViEBaseStandardTest() {
  // ***************************************************************
  // Begin create/initialize WebRTC Video Engine for testing
  // ***************************************************************
  TbInterfaces interfaces("ViEBaseStandardTest");

  // ***************************************************************
  // Engine ready. Set up the test case:
  // ***************************************************************
  int video_channel = -1;
  EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));

  webrtc::VideoCaptureModule* video_capture_module = NULL;
  const unsigned int kMaxDeviceNameLength = 128;
  char device_name[kMaxDeviceNameLength];
  memset(device_name, 0, kMaxDeviceNameLength);
  int capture_id;

  webrtc::ViEBase* base_interface = interfaces.base;
  webrtc::ViERender* render_interface = interfaces.render;
  webrtc::ViECapture* capture_interface = interfaces.capture;

  FindCaptureDeviceOnSystem(capture_interface,
                            device_name,
                            kMaxDeviceNameLength,
                            &capture_id,
                            &video_capture_module);
  EXPECT_TRUE(video_capture_module);
  if (!video_capture_module)
    return;  // No camera available; nothing more to test.

  EXPECT_EQ(0, capture_interface->ConnectCaptureDevice(capture_id,
                                                       video_channel));
  EXPECT_EQ(0, capture_interface->StartCapture(capture_id));

  ConfigureRtpRtcp(interfaces.rtp_rtcp, kNack, video_channel);

  EXPECT_EQ(0, render_interface->RegisterVideoRenderModule(*_vrm1));
  EXPECT_EQ(0, render_interface->RegisterVideoRenderModule(*_vrm2));

  // Local preview in window 1, decoded remote stream in window 2.
  RenderInWindow(render_interface, capture_id, _window1, 0);
  RenderInWindow(render_interface, video_channel, _window2, 1);

  // ***************************************************************
  // Run the actual test:
  // ***************************************************************
  ViETest::Log("You should shortly see a local preview from camera %s"
               " in window 1 and the remote video in window 2.", device_name);
  ::TestI420CallSetup(interfaces.codec, interfaces.video_engine,
                      base_interface, interfaces.network, interfaces.rtp_rtcp,
                      video_channel, device_name);

  // ***************************************************************
  // Testing finished. Tear down Video Engine
  // ***************************************************************
  EXPECT_EQ(0, capture_interface->DisconnectCaptureDevice(video_channel));
  EXPECT_EQ(0, capture_interface->StopCapture(capture_id));
  EXPECT_EQ(0, base_interface->StopReceive(video_channel));
  EXPECT_EQ(0, render_interface->StopRender(video_channel));
  EXPECT_EQ(0, render_interface->RemoveRenderer(video_channel));
  EXPECT_EQ(0, render_interface->RemoveRenderer(capture_id));
  EXPECT_EQ(0, render_interface->DeRegisterVideoRenderModule(*_vrm1));
  EXPECT_EQ(0, render_interface->DeRegisterVideoRenderModule(*_vrm2));
  EXPECT_EQ(0, capture_interface->ReleaseCaptureDevice(capture_id));

  video_capture_module->Release();
  video_capture_module = NULL;

  EXPECT_EQ(0, base_interface->DeleteChannel(video_channel));
}
void ViEAutoTest::ViEBaseExtendedTest() {
  // "Extended" coverage for ViEBase is the API test followed by the
  // standard test.
  ViEBaseAPITest();
  ViEBaseStandardTest();
}
// API-level test of the ViEBase interface: interface acquisition and
// reference counting, channel creation (including receive-only and
// send-coupled channels), send start/stop, and VoiceEngine attach/detach.
// Uses loopback UDP transports on 127.0.0.1, so no camera or network is
// required. The exact call order is significant: several expectations
// verify failure *before* a prerequisite step and success after it.
void ViEAutoTest::ViEBaseAPITest() {
  // ***************************************************************
  // Begin create/initialize WebRTC Video Engine for testing
  // ***************************************************************
  // Get the ViEBase API
  webrtc::ViEBase* vie_base = webrtc::ViEBase::GetInterface(NULL);
  EXPECT_EQ(NULL, vie_base) << "Should return null for a bad ViE pointer";

  webrtc::VideoEngine* video_engine = webrtc::VideoEngine::Create();
  EXPECT_TRUE(NULL != video_engine);
  std::string trace_file_path =
      ViETest::GetResultOutputPath() + "ViEBaseAPI_trace.txt";
  EXPECT_EQ(0, video_engine->SetTraceFile(trace_file_path.c_str()));

  vie_base = webrtc::ViEBase::GetInterface(video_engine);
  EXPECT_TRUE(NULL != vie_base);
  webrtc::ViENetwork* vie_network =
      webrtc::ViENetwork::GetInterface(video_engine);
  EXPECT_TRUE(vie_network != NULL);
  webrtc::ViERTP_RTCP* vie_rtp =
      webrtc::ViERTP_RTCP::GetInterface(video_engine);
  EXPECT_TRUE(vie_rtp != NULL);

  // ***************************************************************
  // Engine ready. Begin testing class
  // ***************************************************************
  char version[1024] = "";
  EXPECT_EQ(0, vie_base->GetVersion(version));
  EXPECT_EQ(0, vie_base->LastError());

  // Independent channels must be assigned distinct ids; deleting one and
  // coupling to a nonexistent channel must fail.
  int video_channel = -1;
  EXPECT_EQ(0, vie_base->Init());
  EXPECT_EQ(0, vie_base->CreateChannel(video_channel));
  int video_channel2 = -1;
  int video_channel3 = -1;
  EXPECT_EQ(0, vie_base->CreateChannel(video_channel2));
  EXPECT_NE(video_channel, video_channel2) <<
      "Should allocate new number for independent channel";
  EXPECT_EQ(0, vie_base->DeleteChannel(video_channel2));
  EXPECT_EQ(-1, vie_base->CreateChannel(video_channel2, video_channel + 1))
      << "Should fail since neither channel exists (the second must)";

  // Create a receive only channel and a send channel. Verify we can't send on
  // the receive only channel.
  EXPECT_EQ(0, vie_base->CreateReceiveChannel(video_channel2,
                                              video_channel));
  EXPECT_EQ(0, vie_base->CreateChannel(video_channel3, video_channel));
  const char* ip_address = "127.0.0.1\0";
  const int send_port = 1234;
  // Re-setting the local SSRC repeatedly must be accepted.
  EXPECT_EQ(0, vie_rtp->SetLocalSSRC(video_channel, 1));
  EXPECT_EQ(0, vie_rtp->SetLocalSSRC(video_channel, 2));
  EXPECT_EQ(0, vie_rtp->SetLocalSSRC(video_channel, 3));
  webrtc::test::VideoChannelTransport* video_channel_transport_1 =
      new webrtc::test::VideoChannelTransport(vie_network, video_channel);
  ASSERT_EQ(0, video_channel_transport_1->SetSendDestination(ip_address,
                                                             send_port));
  webrtc::test::VideoChannelTransport* video_channel_transport_2 =
      new webrtc::test::VideoChannelTransport(vie_network, video_channel2);
  webrtc::test::VideoChannelTransport* video_channel_transport_3 =
      new webrtc::test::VideoChannelTransport(vie_network, video_channel3);
  ASSERT_EQ(0, video_channel_transport_3->SetSendDestination(ip_address,
                                                             send_port + 4));
  EXPECT_EQ(0, vie_base->StartSend(video_channel));
  EXPECT_EQ(-1, vie_base->StartSend(video_channel2));  // Receive-only channel.
  EXPECT_EQ(0, vie_base->StartSend(video_channel3));
  EXPECT_EQ(0, vie_base->StopSend(video_channel));
  EXPECT_EQ(0, vie_base->StopSend(video_channel3));

  // Test Voice Engine integration with Video Engine.
  webrtc::VoiceEngine* voice_engine = NULL;
  webrtc::VoEBase* voe_base = NULL;
  int audio_channel = -1;
  voice_engine = webrtc::VoiceEngine::Create();
  EXPECT_TRUE(NULL != voice_engine);
  voe_base = webrtc::VoEBase::GetInterface(voice_engine);
  EXPECT_TRUE(NULL != voe_base);
  EXPECT_EQ(0, voe_base->Init());
  audio_channel = voe_base->CreateChannel();
  EXPECT_NE(-1, audio_channel);

  // Connect before setting VoE.
  EXPECT_NE(0, vie_base->ConnectAudioChannel(video_channel, audio_channel))
      << "Should fail since Voice Engine is not set yet.";
  // Then do it right.
  EXPECT_EQ(0, vie_base->SetVoiceEngine(voice_engine));
  EXPECT_EQ(0, vie_base->ConnectAudioChannel(video_channel, audio_channel));

  // ***************************************************************
  // Testing finished. Tear down Video Engine
  // ***************************************************************
  EXPECT_NE(0, vie_base->DisconnectAudioChannel(video_channel + 5)) <<
      "Should fail: disconnecting bogus channel";
  EXPECT_EQ(0, vie_base->DisconnectAudioChannel(video_channel));
  // Clean up voice engine
  EXPECT_EQ(0, vie_rtp->Release());
  EXPECT_EQ(0, vie_network->Release());
  EXPECT_EQ(0, vie_base->SetVoiceEngine(NULL));
  // VoiceEngine reference counting is per object, not per interface, so
  // Release should return != 0.
  EXPECT_NE(0, voe_base->Release());
  EXPECT_TRUE(webrtc::VoiceEngine::Delete(voice_engine));

  // Interface refcounting: while a second ViEBase handle exists the engine
  // must refuse to be deleted.
  webrtc::ViEBase* vie_base2 = webrtc::ViEBase::GetInterface(video_engine);
  EXPECT_TRUE(NULL != vie_base2);
  EXPECT_EQ(1, vie_base->Release()) <<
      "There should be one interface left.";
  EXPECT_FALSE(webrtc::VideoEngine::Delete(video_engine)) <<
      "Should fail since there are interfaces left.";
  delete video_channel_transport_1;
  delete video_channel_transport_2;
  delete video_channel_transport_3;
  EXPECT_EQ(0, vie_base->Release());
  EXPECT_TRUE(webrtc::VideoEngine::Delete(video_engine));
}

View file

@ -1,542 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "gflags/gflags.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_capture/include/video_capture_factory.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
#include "webrtc/video_engine/test/libvietest/include/tb_video_channel.h"
#include "webrtc/voice_engine/include/voe_base.h"
DEFINE_bool(capture_test_ensure_resolution_alignment_in_capture_device, true,
"If true, we will give resolutions slightly below a reasonable "
"value to test the camera's ability to choose a good resolution. "
"If false, we will provide reasonable resolutions instead.");
class CaptureObserver : public webrtc::ViECaptureObserver {
public:
CaptureObserver()
: brightness_(webrtc::Normal),
alarm_(webrtc::AlarmCleared),
frame_rate_(0) {}
virtual void BrightnessAlarm(const int capture_id,
const webrtc::Brightness brightness) {
brightness_ = brightness;
switch (brightness) {
case webrtc::Normal:
ViETest::Log(" BrightnessAlarm Normal");
break;
case webrtc::Bright:
ViETest::Log(" BrightnessAlarm Bright");
break;
case webrtc::Dark:
ViETest::Log(" BrightnessAlarm Dark");
break;
}
}
virtual void CapturedFrameRate(const int capture_id,
const unsigned char frame_rate) {
ViETest::Log(" CapturedFrameRate %u", frame_rate);
frame_rate_ = frame_rate;
}
virtual void NoPictureAlarm(const int capture_id,
const webrtc::CaptureAlarm alarm) {
alarm_ = alarm;
if (alarm == webrtc::AlarmRaised) {
ViETest::Log("NoPictureAlarm CARaised.");
} else {
ViETest::Log("NoPictureAlarm CACleared.");
}
}
webrtc::Brightness brightness_;
webrtc::CaptureAlarm alarm_;
unsigned char frame_rate_;
};
class CaptureEffectFilter : public webrtc::ViEEffectFilter {
public:
CaptureEffectFilter(unsigned int expected_width, unsigned int expected_height)
: number_of_captured_frames_(0),
expected_width_(expected_width),
expected_height_(expected_height) {
}
// Implements video_engineEffectFilter.
virtual int Transform(size_t size,
unsigned char* frame_buffer,
int64_t ntp_time_ms,
unsigned int timestamp,
unsigned int width,
unsigned int height) {
EXPECT_TRUE(frame_buffer != NULL);
EXPECT_EQ(expected_width_, width);
EXPECT_EQ(expected_height_, height);
++number_of_captured_frames_;
return 0;
}
int number_of_captured_frames_;
protected:
unsigned int expected_width_;
unsigned int expected_height_;
};
// Enumerates all physical capture devices, sanity-checks every capability
// they report, then (on non-Mac platforms) allocates and starts each device
// and verifies that frames of approximately the requested size arrive.
// Requires at least one real webcam.
void ViEAutoTest::ViECaptureStandardTest() {
  /// **************************************************************
  //  Begin create/initialize WebRTC Video Engine for testing
  /// **************************************************************

  /// **************************************************************
  //  Engine ready. Begin testing class
  /// **************************************************************
  TbInterfaces video_engine("video_engineCaptureStandardTest");

  webrtc::VideoCaptureModule::DeviceInfo* dev_info =
      webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
  ASSERT_TRUE(dev_info != NULL);
  int number_of_capture_devices = dev_info->NumberOfDevices();
  ViETest::Log("Number of capture devices %d",
               number_of_capture_devices);
  ASSERT_GT(number_of_capture_devices, 0)
      << "This test requires a capture device (i.e. a webcam)";

#if !defined(WEBRTC_MAC)
  int capture_device_id[10] = {0};
  webrtc::VideoCaptureModule* vcpms[10] = {0};
#endif

  // Check capabilities
  for (int device_index = 0; device_index < number_of_capture_devices;
       ++device_index) {
    char device_name[128];
    char device_unique_name[512];
    EXPECT_EQ(0, dev_info->GetDeviceName(device_index,
                                         device_name,
                                         sizeof(device_name),
                                         device_unique_name,
                                         sizeof(device_unique_name)));
    ViETest::Log("Found capture device %s\nUnique name %s",
                 device_name, device_unique_name);

#if !defined(WEBRTC_MAC)  // these functions will return -1
    // Every advertised capability must have sane geometry, rate and delay.
    int number_of_capabilities =
        dev_info->NumberOfCapabilities(device_unique_name);
    EXPECT_GT(number_of_capabilities, 0);
    for (int cap_index = 0; cap_index < number_of_capabilities; ++cap_index) {
      webrtc::VideoCaptureCapability capability;
      EXPECT_EQ(0, dev_info->GetCapability(device_unique_name, cap_index,
                                           capability));
      ViETest::Log("Capture capability %d (of %u)", cap_index + 1,
                   number_of_capabilities);
      ViETest::Log("width %d, height %d, frame rate %d",
                   capability.width, capability.height, capability.maxFPS);
      ViETest::Log("expected delay %d, color type %d, encoding %d",
                   capability.expectedCaptureDelay, capability.rawType,
                   capability.codecType);
      EXPECT_GT(capability.width, 0);
      EXPECT_GT(capability.height, 0);
      EXPECT_GT(capability.maxFPS, -1);  // >= 0
      EXPECT_GT(capability.expectedCaptureDelay, 0);
    }
#endif
  }
  // Capture Capability Functions are not supported on WEBRTC_MAC.
#if !defined(WEBRTC_MAC)
  // Check allocation. Try to allocate them all after each other.
  for (int device_index = 0; device_index < number_of_capture_devices;
       ++device_index) {
    char device_name[128];
    char device_unique_name[512];
    EXPECT_EQ(0, dev_info->GetDeviceName(device_index,
                                         device_name,
                                         sizeof(device_name),
                                         device_unique_name,
                                         sizeof(device_unique_name)));
    webrtc::VideoCaptureModule* vcpm =
        webrtc::VideoCaptureFactory::Create(device_index, device_unique_name);
    EXPECT_TRUE(vcpm != NULL);
    if (!vcpm)
      continue;
    vcpm->AddRef();
    vcpms[device_index] = vcpm;

    EXPECT_EQ(0, video_engine.capture->AllocateCaptureDevice(
        *vcpm, capture_device_id[device_index]));

    webrtc::VideoCaptureCapability capability;
    EXPECT_EQ(0, dev_info->GetCapability(device_unique_name, 0, capability));

    // Test that the camera select the closest capability to the selected
    // width and height.
    CaptureEffectFilter filter(capability.width, capability.height);
    EXPECT_EQ(0, video_engine.image_process->RegisterCaptureEffectFilter(
        capture_device_id[device_index], filter));

    ViETest::Log("Testing Device %s capability width %d height %d",
                 device_unique_name, capability.width, capability.height);

    if (FLAGS_capture_test_ensure_resolution_alignment_in_capture_device) {
      // This tests that the capture device properly aligns to a
      // multiple of 16 (or at least 8).
      capability.height = capability.height - 2;
      capability.width = capability.width - 2;
    }

    webrtc::CaptureCapability vie_capability;
    vie_capability.width = capability.width;
    vie_capability.height = capability.height;
    vie_capability.codecType = capability.codecType;
    vie_capability.maxFPS = capability.maxFPS;
    vie_capability.rawType = capability.rawType;

    EXPECT_EQ(0, video_engine.capture->StartCapture(
        capture_device_id[device_index], vie_capability));

    // Wait up to ten seconds for at least ten frames to be delivered.
    webrtc::TickTime start_time = webrtc::TickTime::Now();
    while (filter.number_of_captured_frames_ < 10 &&
           (webrtc::TickTime::Now() - start_time).Milliseconds() < 10000) {
      AutoTestSleep(100);
    }
    EXPECT_GT(filter.number_of_captured_frames_, 9)
        << "Should capture at least some frames";

    EXPECT_EQ(0, video_engine.image_process->DeregisterCaptureEffectFilter(
        capture_device_id[device_index]));

#ifdef WEBRTC_ANDROID  // Can only allocate one camera at the time on Android.
    EXPECT_EQ(0, video_engine.capture->StopCapture(
        capture_device_id[device_index]));
    EXPECT_EQ(0, video_engine.capture->ReleaseCaptureDevice(
        capture_device_id[device_index]));
#endif
  }

  /// **************************************************************
  //  Testing finished. Tear down Video Engine
  /// **************************************************************
  // Stop all started capture devices.
  for (int device_index = 0; device_index < number_of_capture_devices;
       ++device_index) {
#if !defined(WEBRTC_ANDROID)
    // Don't stop on Android since we can only allocate one camera.
    EXPECT_EQ(0, video_engine.capture->StopCapture(
        capture_device_id[device_index]));
    EXPECT_EQ(0, video_engine.capture->ReleaseCaptureDevice(
        capture_device_id[device_index]));
#endif  // !WEBRTC_ANDROID
    if (vcpms[device_index])
      vcpms[device_index]->Release();
  }
#endif  // !WEBRTC_MAC
  // |dev_info| is allocated unconditionally above, so it must be freed on
  // every platform. (It was previously deleted inside the !WEBRTC_MAC block
  // and therefore leaked on Mac builds.)
  delete dev_info;
}
// Extended capture suite: currently only the external-capture test.
void ViEAutoTest::ViECaptureExtendedTest() {
  ViECaptureExternalCaptureTest();
}
// Exercises ViECapture API error handling on a real device: double
// start/stop, bogus device ids, channel connect/disconnect, release
// semantics, and video rotation. Call order is significant — many
// expectations check the error produced by the immediately preceding call
// via LastError(). Requires at least one physical webcam.
void ViEAutoTest::ViECaptureAPITest() {
  /// **************************************************************
  //  Begin create/initialize WebRTC Video Engine for testing
  /// **************************************************************

  /// **************************************************************
  //  Engine ready. Begin testing class
  /// **************************************************************
  TbInterfaces video_engine("video_engineCaptureAPITest");

  video_engine.capture->NumberOfCaptureDevices();

  char device_name[128];
  char device_unique_name[512];
  int capture_id = 0;

  webrtc::VideoCaptureModule::DeviceInfo* dev_info =
      webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
  ASSERT_TRUE(dev_info != NULL);
  ASSERT_GT(dev_info->NumberOfDevices(), 0u)
      << "This test requires a capture device (i.e. a webcam)";

  // Get the first capture device
  EXPECT_EQ(0, dev_info->GetDeviceName(0, device_name,
                                       sizeof(device_name),
                                       device_unique_name,
                                       sizeof(device_unique_name)));

  webrtc::VideoCaptureModule* vcpm =
      webrtc::VideoCaptureFactory::Create(0, device_unique_name);
  // NOTE(review): AddRef is invoked before the NULL expectation below, so a
  // failed Create would crash here instead of failing the test — confirm
  // whether Create can return NULL in practice.
  vcpm->AddRef();
  EXPECT_TRUE(vcpm != NULL);

  // Allocate capture device.
  EXPECT_EQ(0, video_engine.capture->AllocateCaptureDevice(*vcpm, capture_id));

  // Start the capture device.
  EXPECT_EQ(0, video_engine.capture->StartCapture(capture_id));

  // Start again. Should fail.
  EXPECT_NE(0, video_engine.capture->StartCapture(capture_id));
  EXPECT_EQ(kViECaptureDeviceAlreadyStarted, video_engine.LastError());

  // Start invalid capture device.
  EXPECT_NE(0, video_engine.capture->StartCapture(capture_id + 1));
  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());

  // Stop invalid capture device.
  EXPECT_NE(0, video_engine.capture->StopCapture(capture_id + 1));
  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());

  // Stop the capture device.
  EXPECT_EQ(0, video_engine.capture->StopCapture(capture_id));

  // Stop the capture device again.
  EXPECT_NE(0, video_engine.capture->StopCapture(capture_id));
  EXPECT_EQ(kViECaptureDeviceNotStarted, video_engine.LastError());

  // Connect to invalid channel.
  EXPECT_NE(0, video_engine.capture->ConnectCaptureDevice(capture_id, 0));
  EXPECT_EQ(kViECaptureDeviceInvalidChannelId,
            video_engine.LastError());

  TbVideoChannel channel(video_engine);

  // Connect invalid capture_id.
  EXPECT_NE(0, video_engine.capture->ConnectCaptureDevice(capture_id + 1,
                                                          channel.videoChannel));
  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());

  // Connect the capture device to the channel.
  EXPECT_EQ(0, video_engine.capture->ConnectCaptureDevice(capture_id,
                                                          channel.videoChannel));

  // Connect the channel again.
  EXPECT_NE(0, video_engine.capture->ConnectCaptureDevice(capture_id,
                                                          channel.videoChannel));
  EXPECT_EQ(kViECaptureDeviceAlreadyConnected,
            video_engine.LastError());

  // Start the capture device.
  EXPECT_EQ(0, video_engine.capture->StartCapture(capture_id));

  // Release invalid capture device.
  EXPECT_NE(0, video_engine.capture->ReleaseCaptureDevice(capture_id + 1));
  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());

  // Release the capture device.
  EXPECT_EQ(0, video_engine.capture->ReleaseCaptureDevice(capture_id));

  // Release the capture device again.
  EXPECT_NE(0, video_engine.capture->ReleaseCaptureDevice(capture_id));
  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());

  // Test GetOrientation.
  webrtc::VideoRotation orientation;
  char dummy_name[5];
  EXPECT_NE(0, dev_info->GetOrientation(dummy_name, orientation));

  // Test SetRotation. Must fail while the device is released.
  EXPECT_NE(0, video_engine.capture->SetVideoRotation(
      capture_id, webrtc::kVideoRotation_90));
  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());

  // Allocate capture device.
  EXPECT_EQ(0, video_engine.capture->AllocateCaptureDevice(*vcpm, capture_id));

  // All four rotations must be accepted on an allocated device.
  EXPECT_EQ(0, video_engine.capture->SetVideoRotation(
      capture_id, webrtc::kVideoRotation_0));
  EXPECT_EQ(0, video_engine.capture->SetVideoRotation(
      capture_id, webrtc::kVideoRotation_90));
  EXPECT_EQ(0, video_engine.capture->SetVideoRotation(
      capture_id, webrtc::kVideoRotation_180));
  EXPECT_EQ(0, video_engine.capture->SetVideoRotation(
      capture_id, webrtc::kVideoRotation_270));

  // Release the capture device
  EXPECT_EQ(0, video_engine.capture->ReleaseCaptureDevice(capture_id));

  /// **************************************************************
  //  Testing finished. Tear down Video Engine
  /// **************************************************************
  delete dev_info;
  vcpm->Release();
}
// Feeds synthetic I420 frames through an external capture device and
// verifies frame delivery, the frame-rate callback, the brightness alarm
// (bright and dark images) and the no-picture alarm (raised when frames
// stop, cleared when they resume).
void ViEAutoTest::ViECaptureExternalCaptureTest() {
  /// **************************************************************
  //  Begin create/initialize WebRTC Video Engine for testing
  /// **************************************************************
  TbInterfaces video_engine("video_engineCaptureExternalCaptureTest");
  TbVideoChannel channel(video_engine);
  channel.StartReceive();
  channel.StartSend();

  webrtc::VideoCaptureExternal* external_capture = NULL;
  int capture_id = 0;

  // Allocate the external capture device.
  webrtc::VideoCaptureModule* vcpm =
      webrtc::VideoCaptureFactory::Create(0, external_capture);
  EXPECT_TRUE(vcpm != NULL);
  EXPECT_TRUE(external_capture != NULL);
  vcpm->AddRef();

  EXPECT_EQ(0, video_engine.capture->AllocateCaptureDevice(*vcpm, capture_id));

  // Connect the capture device to the channel.
  EXPECT_EQ(0, video_engine.capture->ConnectCaptureDevice(capture_id,
                                                          channel.videoChannel));

  // Render the local capture.
  EXPECT_EQ(0, video_engine.render->AddRenderer(capture_id, _window1, 1, 0.0,
                                                0.0, 1.0, 1.0));

  // Render the remote capture.
  EXPECT_EQ(0, video_engine.render->AddRenderer(channel.videoChannel, _window2,
                                                1, 0.0, 0.0, 1.0, 1.0));

  EXPECT_EQ(0, video_engine.render->StartRender(capture_id));
  EXPECT_EQ(0, video_engine.render->StartRender(channel.videoChannel));

  // Register observer.
  CaptureObserver observer;
  EXPECT_EQ(0, video_engine.capture->RegisterObserver(capture_id, observer));

  // Enable brightness alarm.
  EXPECT_EQ(0, video_engine.capture->EnableBrightnessAlarm(capture_id, true));

  CaptureEffectFilter effect_filter(176, 144);
  EXPECT_EQ(0, video_engine.image_process->RegisterCaptureEffectFilter(
      capture_id, effect_filter));

  // Call started.
  ViETest::Log("You should see local preview from external capture\n"
               "in window 1 and the remote video in window 2.\n");

  /// **************************************************************
  //  Engine ready. Begin testing class
  /// **************************************************************
  const size_t video_frame_length = (176 * 144 * 3) / 2;
  unsigned char* video_frame = new unsigned char[video_frame_length];
  // Fill the entire I420 buffer (luma and both chroma planes) with mid-gray.
  // Previously only the first 176*144 luma bytes were initialized, leaving
  // the chroma planes as uninitialized memory.
  memset(video_frame, 128, video_frame_length);

  int frame_count = 0;
  webrtc::VideoCaptureCapability capability;
  capability.width = 176;
  capability.height = 144;
  capability.rawType = webrtc::kVideoI420;

  ViETest::Log("Testing external capturing and frame rate callbacks.");
  // TODO(mflodman) Change when using a real file!
  // while (fread(video_frame, video_frame_length, 1, foreman) == 1)
  while (frame_count < 120) {
    external_capture->IncomingFrame(
        video_frame, video_frame_length, capability,
        webrtc::TickTime::MillisecondTimestamp());
    AutoTestSleep(33);
    // With a neutral gray image no alarms may fire during normal delivery.
    if (effect_filter.number_of_captured_frames_ > 2) {
      EXPECT_EQ(webrtc::Normal, observer.brightness_) <<
          "Brightness or picture alarm should not have been called yet.";
      EXPECT_EQ(webrtc::AlarmCleared, observer.alarm_) <<
          "Brightness or picture alarm should not have been called yet.";
    }
    frame_count++;
  }

  // Test brightness alarm.
  // Brighten the luma plane, saturating at 255.
  for (int i = 0; i < 176 * 144; ++i) {
    if (video_frame[i] <= 155)
      video_frame[i] = video_frame[i] + 100;
    else
      video_frame[i] = 255;
  }
  ViETest::Log("Testing Brighness alarm");
  for (int frame = 0; frame < 30; ++frame) {
    external_capture->IncomingFrame(
        video_frame, video_frame_length, capability,
        webrtc::TickTime::MillisecondTimestamp());
    AutoTestSleep(33);
  }
  EXPECT_EQ(webrtc::Bright, observer.brightness_) <<
      "Should be bright at this point since we are using a bright image.";

  // Test Dark image
  for (int i = 0; i < 176 * 144; ++i) {
    video_frame[i] = video_frame[i] > 200 ? video_frame[i] - 200 : 0;
  }
  for (int frame = 0; frame < 30; ++frame) {
    external_capture->IncomingFrame(
        video_frame, video_frame_length, capability,
        webrtc::TickTime::MillisecondTimestamp());
    AutoTestSleep(33);
  }
  EXPECT_EQ(webrtc::Dark, observer.brightness_) <<
      "Should be dark at this point since we are using a dark image.";
  EXPECT_GT(effect_filter.number_of_captured_frames_, 150) <<
      "Frames should have been played.";

  EXPECT_GE(observer.frame_rate_, 29) <<
      "Frame rate callback should be approximately correct.";
  EXPECT_LE(observer.frame_rate_, 30) <<
      "Frame rate callback should be approximately correct.";

  // Test no picture alarm: stop feeding frames and wait for the alarm.
  ViETest::Log("Testing NoPictureAlarm.");
  AutoTestSleep(1050);
  EXPECT_EQ(webrtc::AlarmRaised, observer.alarm_) <<
      "No picture alarm should be raised.";
  // Resume feeding frames; the alarm must clear again.
  for (int frame = 0; frame < 10; ++frame) {
    external_capture->IncomingFrame(
        video_frame, video_frame_length, capability,
        webrtc::TickTime::MillisecondTimestamp());
    AutoTestSleep(33);
  }
  EXPECT_EQ(webrtc::AlarmCleared, observer.alarm_) <<
      "Alarm should be cleared since we just got some data.";

  // The buffer was allocated with new[], so it must be freed with delete[]
  // (plain delete on an array new is undefined behavior).
  delete[] video_frame;

  // Release the capture device
  EXPECT_EQ(0, video_engine.capture->ReleaseCaptureDevice(capture_id));
  // Release the capture device again
  EXPECT_NE(0, video_engine.capture->ReleaseCaptureDevice(capture_id));
  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());
  vcpm->Release();

  /// **************************************************************
  //  Testing finished. Tear down Video Engine
  /// **************************************************************
}

View file

@ -1,140 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/engine_configurations.h"
#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
#import "webrtc/test/testsupport/mac/run_threaded_main_mac.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_mac_cocoa.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_main.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_main.h"
@implementation TestCocoaUi

// TODO(phoglund): This file probably leaks memory like crazy. Find someone
// who understands objective-c memory management and fix it.

// Stores the geometry and titles; the windows themselves are created later
// on the main thread by createWindows:.
- (void)prepareToCreateWindowsWithSize:(AutoTestRect)window1Size
                               andSize:(AutoTestRect)window2Size
                             withTitle:(void*)window1_title
                              andTitle:(void*)window2_title {
  window1Size_ = window1Size;
  window2Size_ = window2Size;
  window1Title_ = window1_title;
  window2Title_ = window2_title;
}

// Creates both NSWindows with an embedded CocoaRenderView each and brings
// them to the front. Must run on the main thread (invoked via
// performSelectorOnMainThread: from CreateWindows()).
- (void)createWindows:(NSObject*)ignored {
  NSRect window1Frame = NSMakeRect(
      window1Size_.origin.x, window1Size_.origin.y,
      window1Size_.size.width, window1Size_.size.height);

  window1_ = [[NSWindow alloc]
                 initWithContentRect:window1Frame
                           styleMask:NSTitledWindowMask
                             backing:NSBackingStoreBuffered
                               defer:NO];
  [window1_ orderOut:nil];
  NSRect render_view1_frame = NSMakeRect(
      0, 0, window1Size_.size.width, window1Size_.size.height);
  cocoaRenderView1_ =
      [[CocoaRenderView alloc] initWithFrame:render_view1_frame];
  [[window1_ contentView] addSubview:(NSView*)cocoaRenderView1_];
  [window1_ setTitle:[NSString stringWithFormat:@"%s", window1Title_]];
  [window1_ makeKeyAndOrderFront:NSApp];

  NSRect window2_frame = NSMakeRect(
      window2Size_.origin.x, window2Size_.origin.y,
      window2Size_.size.width, window2Size_.size.height);

  window2_ = [[NSWindow alloc]
                 initWithContentRect:window2_frame
                           styleMask:NSTitledWindowMask
                             backing:NSBackingStoreBuffered
                               defer:NO];
  [window2_ orderOut:nil];
  // NOTE(review): render view 2 is sized from window1Size_, not window2Size_.
  // Looks like a copy-paste slip — confirm before relying on it.
  NSRect render_view2_frame = NSMakeRect(
      0, 0, window1Size_.size.width, window1Size_.size.height);
  cocoaRenderView2_ =
      [[CocoaRenderView alloc] initWithFrame:render_view2_frame];
  [[window2_ contentView] addSubview:(NSView*)cocoaRenderView2_];
  [window2_ setTitle:[NSString stringWithFormat:@"%s", window2Title_]];
  [window2_ makeKeyAndOrderFront:NSApp];
}

// Accessors for the created windows and their render views.
- (NSWindow*)window1 {
  return window1_;
}

- (NSWindow*)window2 {
  return window2_;
}

- (CocoaRenderView*)cocoaRenderView1 {
  return cocoaRenderView1_;
}

- (CocoaRenderView*)cocoaRenderView2 {
  return cocoaRenderView2_;
}

@end
// Creates the Cocoa UI helper; the actual windows are created later in
// CreateWindows().
ViEAutoTestWindowManager::ViEAutoTestWindowManager() {
  cocoa_ui_ = [[TestCocoaUi alloc] init];
}
// Releases the Cocoa UI helper.
ViEAutoTestWindowManager::~ViEAutoTestWindowManager() {
  [cocoa_ui_ release];
}
// Stores the requested geometry/titles, then creates both windows on the
// main thread (AppKit UI work must happen there) and blocks until done.
// Always returns 0.
int ViEAutoTestWindowManager::CreateWindows(AutoTestRect window1Size,
                                            AutoTestRect window2Size,
                                            void* window1_title,
                                            void* window2_title) {
  [cocoa_ui_ prepareToCreateWindowsWithSize:window1Size
                                    andSize:window2Size
                                  withTitle:window1_title
                                   andTitle:window2_title];

  [cocoa_ui_ performSelectorOnMainThread:@selector(createWindows:)
                              withObject:nil
                           waitUntilDone:YES];
  return 0;
}
// Closes both test windows. Always returns 0.
int ViEAutoTestWindowManager::TerminateWindows() {
  [[cocoa_ui_ window1] close];
  [[cocoa_ui_ window2] close];
  return 0;
}
// Returns the first window's render view as an opaque handle.
void* ViEAutoTestWindowManager::GetWindow1() {
  return [cocoa_ui_ cocoaRenderView1];
}
// Returns the second window's render view as an opaque handle.
void* ViEAutoTestWindowManager::GetWindow2() {
  return [cocoa_ui_ cocoaRenderView2];
}
// No-op on Mac; reports success.
bool ViEAutoTestWindowManager::SetTopmostWindow() {
  return true;
}
// This acts as our "main" for mac. The actual (reusable) main is defined in
// testsupport/mac/run_threaded_main_mac.mm.
int ImplementThisToRunYourTest(int argc, char** argv) {
  ViEAutoTestMain auto_test;
  return auto_test.RunTests(argc, argv);
}

View file

@ -1,830 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_coding/codecs/i420/main/interface/i420.h"
#include "webrtc/test/channel_transport/include/channel_transport.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_external_codec.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/libvietest/include/tb_I420_codec.h"
#include "webrtc/video_engine/test/libvietest/include/tb_capture_device.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
#include "webrtc/video_engine/test/libvietest/include/tb_video_channel.h"
#include "webrtc/voice_engine/include/voe_base.h"
// Observer implementing both the encoder- and decoder-side callbacks.
// Counts how often each callback fires and records values from them so
// tests can verify that a channel is actually encoding and decoding.
class TestCodecObserver : public webrtc::ViEEncoderObserver,
                          public webrtc::ViEDecoderObserver {
 public:
  // Callback counters.
  int incoming_codec_called_;
  int incoming_rate_called_;
  int decoder_timing_called_;
  int outgoing_rate_called_;

  // Values from the most recent IncomingCodecChanged callback.
  unsigned char last_payload_type_;
  uint16_t last_width_;
  uint16_t last_height_;

  // NOTE: despite the "last_" prefix these four are accumulated (summed)
  // across all rate callbacks, not overwritten.
  unsigned int last_outgoing_framerate_;
  unsigned int last_outgoing_bitrate_;
  unsigned int last_incoming_framerate_;
  unsigned int last_incoming_bitrate_;
  unsigned int suspend_change_called_;

  webrtc::VideoCodec incoming_codec_;  // Last codec reported for receive.

  TestCodecObserver()
      : incoming_codec_called_(0),
        incoming_rate_called_(0),
        decoder_timing_called_(0),
        outgoing_rate_called_(0),
        last_payload_type_(0),
        last_width_(0),
        last_height_(0),
        last_outgoing_framerate_(0),
        last_outgoing_bitrate_(0),
        last_incoming_framerate_(0),
        last_incoming_bitrate_(0),
        suspend_change_called_(0) {
    memset(&incoming_codec_, 0, sizeof(incoming_codec_));
  }

  // webrtc::ViEDecoderObserver implementation.
  void IncomingCodecChanged(const int video_channel,
                            const webrtc::VideoCodec& video_codec) override {
    incoming_codec_called_++;
    last_payload_type_ = video_codec.plType;
    last_width_ = video_codec.width;
    last_height_ = video_codec.height;
    memcpy(&incoming_codec_, &video_codec, sizeof(video_codec));
  }

  void IncomingRate(const int video_channel,
                    const unsigned int framerate,
                    const unsigned int bitrate) override {
    incoming_rate_called_++;
    last_incoming_framerate_ += framerate;
    last_incoming_bitrate_ += bitrate;
  }

  void DecoderTiming(int decode_ms,
                     int max_decode_ms,
                     int current_delay_ms,
                     int target_delay_ms,
                     int jitter_buffer_ms,
                     int min_playout_delay_ms,
                     int render_delay_ms) override {
    ++decoder_timing_called_;
    // TODO(fischman): anything useful to be done with the data here?
  }

  // webrtc::ViEEncoderObserver implementation.
  void OutgoingRate(const int video_channel,
                    const unsigned int framerate,
                    const unsigned int bitrate) override {
    outgoing_rate_called_++;
    last_outgoing_framerate_ += framerate;
    last_outgoing_bitrate_ += bitrate;
  }

  void SuspendChange(int video_channel, bool is_suspended) override {
    suspend_change_called_++;
  }

  void RequestNewKeyFrame(const int video_channel) override {
  }
};
class RenderFilter : public webrtc::ViEEffectFilter {
public:
int num_frames_;
unsigned int last_render_width_;
unsigned int last_render_height_;
RenderFilter()
: num_frames_(0),
last_render_width_(0),
last_render_height_(0) {
}
virtual ~RenderFilter() {
}
virtual int Transform(size_t size,
unsigned char* frame_buffer,
int64_t ntp_time_ms,
unsigned int timestamp,
unsigned int width,
unsigned int height) {
num_frames_++;
last_render_width_ = width;
last_render_height_ = height;
return 0;
}
};
// End-to-end loopback test of the ViE codec API: captures local video, loops
// it over localhost RTP, cycles through every available codec, and verifies
// that encoder/decoder observer callbacks fire and delay estimates are sane.
void ViEAutoTest::ViECodecStandardTest() {
  TbInterfaces interfaces("ViECodecStandardTest");
  TbCaptureDevice capture_device = TbCaptureDevice(interfaces);
  int capture_id = capture_device.captureId;
  webrtc::VideoEngine* video_engine = interfaces.video_engine;
  webrtc::ViEBase* base = interfaces.base;
  webrtc::ViECapture* capture = interfaces.capture;
  webrtc::ViERender* render = interfaces.render;
  webrtc::ViECodec* codec = interfaces.codec;
  webrtc::ViERTP_RTCP* rtp_rtcp = interfaces.rtp_rtcp;
  webrtc::ViENetwork* network = interfaces.network;
  // Create a channel, connect capture, and enable RTCP feedback (PLI key
  // frame requests + TMMBR).
  int video_channel = -1;
  EXPECT_EQ(0, base->CreateChannel(video_channel));
  EXPECT_EQ(0, capture->ConnectCaptureDevice(capture_id, video_channel));
  EXPECT_EQ(0, rtp_rtcp->SetRTCPStatus(
      video_channel, webrtc::kRtcpCompound_RFC4585));
  EXPECT_EQ(0, rtp_rtcp->SetKeyFrameRequestMethod(
      video_channel, webrtc::kViEKeyFrameRequestPliRtcp));
  EXPECT_EQ(0, rtp_rtcp->SetTMMBRStatus(video_channel, true));
  // Window 1 shows the local capture, window 2 the decoded remote stream.
  EXPECT_EQ(0, render->AddRenderer(capture_id, _window1, 0, 0.0, 0.0, 1.0,
                                   1.0));
  EXPECT_EQ(0, render->AddRenderer(video_channel, _window2, 1, 0.0, 0.0, 1.0,
                                   1.0));
  EXPECT_EQ(0, render->StartRender(capture_id));
  EXPECT_EQ(0, render->StartRender(video_channel));
  // Register every codec as a receive codec. I420 gets a small resolution
  // since it is uncompressed and would otherwise overwhelm the loopback.
  webrtc::VideoCodec video_codec;
  memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
  for (int idx = 0; idx < codec->NumberOfCodecs(); idx++) {
    EXPECT_EQ(0, codec->GetCodec(idx, video_codec));
    if (video_codec.codecType != webrtc::kVideoCodecI420) {
      video_codec.width = 640;
      video_codec.height = 480;
    }
    if (video_codec.codecType == webrtc::kVideoCodecI420) {
      video_codec.width = 176;
      video_codec.height = 144;
    }
    EXPECT_EQ(0, codec->SetReceiveCodec(video_channel, video_codec));
  }
  // Start sending with VP8.
  for (int idx = 0; idx < codec->NumberOfCodecs(); idx++) {
    EXPECT_EQ(0, codec->GetCodec(idx, video_codec));
    if (video_codec.codecType == webrtc::kVideoCodecVP8) {
      EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
      break;
    }
  }
  // Loop RTP over localhost so the channel receives its own stream.
  const char* ip_address = "127.0.0.1";
  const uint16_t rtp_port = 6000;
  rtc::scoped_ptr<webrtc::test::VideoChannelTransport> video_channel_transport(
      new webrtc::test::VideoChannelTransport(network, video_channel));
  ASSERT_EQ(0, video_channel_transport->SetSendDestination(ip_address,
                                                           rtp_port));
  ASSERT_EQ(0, video_channel_transport->SetLocalReceiver(rtp_port));
  EXPECT_EQ(0, base->StartReceive(video_channel));
  EXPECT_EQ(0, base->StartSend(video_channel));
  // Make sure all codecs runs
  {
    webrtc::ViEImageProcess* image_process =
        webrtc::ViEImageProcess::GetInterface(video_engine);
    TestCodecObserver codec_observer;
    EXPECT_EQ(0, codec->RegisterDecoderObserver(video_channel, codec_observer));
    ViETest::Log("Loop through all codecs for %d seconds",
                 kAutoTestSleepTimeMs / 1000);
    // NOTE(review): the "- 2" presumably skips the last two codec entries
    // (e.g. RED/ULPFEC) that cannot be used as send codecs — confirm.
    for (int i = 0; i < codec->NumberOfCodecs() - 2; i++) {
      EXPECT_EQ(0, codec->GetCodec(i, video_codec));
      if (video_codec.codecType == webrtc::kVideoCodecI420) {
        // Lower resolution to sockets keep up.
        video_codec.width = 176;
        video_codec.height = 144;
        video_codec.maxFramerate = 15;
      }
      EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
      ViETest::Log("\t %d. %s", i, video_codec.plName);
      // Count rendered frames to prove the codec actually decoded something.
      RenderFilter frame_counter;
      EXPECT_EQ(0, image_process->RegisterRenderEffectFilter(video_channel,
                                                             frame_counter));
      AutoTestSleep(kAutoTestSleepTimeMs);
      // Verify we've received and decoded correct payload.
      EXPECT_EQ(video_codec.codecType,
                codec_observer.incoming_codec_.codecType);
      // This requirement is quite relaxed, but it's hard to say what's an
      // acceptable number of received frames when we take into account the
      // wide variety of devices (and that we run under valgrind).
      EXPECT_GT(frame_counter.num_frames_, 0);
      EXPECT_EQ(0, image_process->DeregisterRenderEffectFilter(
          video_channel));
    }
    image_process->Release();
    EXPECT_EQ(0, codec->DeregisterDecoderObserver(video_channel));
    ViETest::Log("Done!");
  }
  // Test Callbacks
  TestCodecObserver codec_observer;
  EXPECT_EQ(0, codec->RegisterEncoderObserver(video_channel, codec_observer));
  EXPECT_EQ(0, codec->RegisterDecoderObserver(video_channel, codec_observer));
  ViETest::Log("\nTesting codec callbacks...");
  for (int idx = 0; idx < codec->NumberOfCodecs(); idx++) {
    EXPECT_EQ(0, codec->GetCodec(idx, video_codec));
    if (video_codec.codecType == webrtc::kVideoCodecVP8) {
      EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
      break;
    }
  }
  AutoTestSleep(kAutoTestSleepTimeMs);
  // Verify the delay estimates are larger than 0.
  int avg_send_delay = 0;
  int max_send_delay = 0;
  EXPECT_TRUE(codec->GetSendSideDelay(video_channel, &avg_send_delay,
                                      &max_send_delay));
  EXPECT_GT(avg_send_delay, 0);
  EXPECT_GE(max_send_delay, avg_send_delay);
  int receive_delay_ms = 0;
  EXPECT_EQ(0, codec->GetReceiveSideDelay(video_channel, &receive_delay_ms));
  EXPECT_GT(receive_delay_ms, 0);
  // Tear down and check that each observer callback fired at least once.
  EXPECT_EQ(0, base->StopSend(video_channel));
  EXPECT_EQ(0, codec->DeregisterEncoderObserver(video_channel));
  EXPECT_EQ(0, codec->DeregisterDecoderObserver(video_channel));
  EXPECT_GT(codec_observer.incoming_codec_called_, 0);
  EXPECT_GT(codec_observer.incoming_rate_called_, 0);
  EXPECT_GT(codec_observer.decoder_timing_called_, 0);
  EXPECT_GT(codec_observer.outgoing_rate_called_, 0);
  EXPECT_EQ(0, base->StopReceive(video_channel));
  EXPECT_EQ(0, render->StopRender(video_channel));
  EXPECT_EQ(0, render->RemoveRenderer(capture_id));
  EXPECT_EQ(0, render->RemoveRenderer(video_channel));
  EXPECT_EQ(0, capture->DisconnectCaptureDevice(video_channel));
  EXPECT_EQ(0, base->DeleteChannel(video_channel));
}
// Extended codec tests: runs the external-codec test, exercises observer
// registration around a loopback channel, then verifies two simultaneous
// send channels can carry different resolutions from the same capture device.
void ViEAutoTest::ViECodecExtendedTest() {
  {
    ViETest::Log(" ");
    ViETest::Log("========================================");
    ViETest::Log(" ViECodec Extended Test\n");
    ViECodecExternalCodecTest();
    TbInterfaces interfaces("ViECodecExtendedTest");
    webrtc::ViEBase* base = interfaces.base;
    webrtc::ViECapture* capture = interfaces.capture;
    webrtc::ViERender* render = interfaces.render;
    webrtc::ViECodec* codec = interfaces.codec;
    webrtc::ViERTP_RTCP* rtp_rtcp = interfaces.rtp_rtcp;
    webrtc::ViENetwork* network = interfaces.network;
    TbCaptureDevice capture_device = TbCaptureDevice(interfaces);
    int capture_id = capture_device.captureId;
    // Channel setup with RTCP feedback, mirroring ViECodecStandardTest.
    int video_channel = -1;
    EXPECT_EQ(0, base->CreateChannel(video_channel));
    EXPECT_EQ(0, capture->ConnectCaptureDevice(capture_id, video_channel));
    EXPECT_EQ(0, rtp_rtcp->SetRTCPStatus(
        video_channel, webrtc::kRtcpCompound_RFC4585));
    EXPECT_EQ(0, rtp_rtcp->SetKeyFrameRequestMethod(
        video_channel, webrtc::kViEKeyFrameRequestPliRtcp));
    EXPECT_EQ(0, rtp_rtcp->SetTMMBRStatus(video_channel, true));
    EXPECT_EQ(0, render->AddRenderer(capture_id, _window1, 0, 0.0, 0.0, 1.0,
                                     1.0));
    EXPECT_EQ(0, render->AddRenderer(video_channel, _window2, 1, 0.0, 0.0, 1.0,
                                     1.0));
    EXPECT_EQ(0, render->StartRender(capture_id));
    EXPECT_EQ(0, render->StartRender(video_channel));
    // Register all codecs for receive; non-I420 codecs get VGA resolution.
    webrtc::VideoCodec video_codec;
    memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
    for (int idx = 0; idx < codec->NumberOfCodecs(); idx++) {
      EXPECT_EQ(0, codec->GetCodec(idx, video_codec));
      if (video_codec.codecType != webrtc::kVideoCodecI420) {
        video_codec.width = 640;
        video_codec.height = 480;
      }
      EXPECT_EQ(0, codec->SetReceiveCodec(video_channel, video_codec));
    }
    // Localhost RTP loopback transport.
    const char* ip_address = "127.0.0.1";
    const uint16_t rtp_port = 6000;
    rtc::scoped_ptr<webrtc::test::VideoChannelTransport>
        video_channel_transport(
            new webrtc::test::VideoChannelTransport(network, video_channel));
    ASSERT_EQ(0, video_channel_transport->SetSendDestination(ip_address,
                                                             rtp_port));
    ASSERT_EQ(0, video_channel_transport->SetLocalReceiver(rtp_port));
    EXPECT_EQ(0, base->StartSend(video_channel));
    EXPECT_EQ(0, base->StartReceive(video_channel));
    // Codec specific tests
    memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
    EXPECT_EQ(0, base->StopSend(video_channel));
    // Register observers while the channel is stopped, then tear down.
    TestCodecObserver codec_observer;
    EXPECT_EQ(0, codec->RegisterEncoderObserver(video_channel, codec_observer));
    EXPECT_EQ(0, codec->RegisterDecoderObserver(video_channel, codec_observer));
    EXPECT_EQ(0, base->StopReceive(video_channel));
    EXPECT_EQ(0, render->StopRender(video_channel));
    EXPECT_EQ(0, render->RemoveRenderer(capture_id));
    EXPECT_EQ(0, render->RemoveRenderer(video_channel));
    EXPECT_EQ(0, capture->DisconnectCaptureDevice(video_channel));
    EXPECT_EQ(0, base->DeleteChannel(video_channel));
  }
  // Multiple send channels.
  {
    // Create two channels, where the second channel is created from the
    // first channel. Send different resolutions on the channels and verify
    // the received streams.
    TbInterfaces video_engine("ViECodecExtendedTest2");
    TbCaptureDevice tb_capture(video_engine);
    webrtc::ViENetwork* network = video_engine.network;
    // Create channel 1.
    int video_channel_1 = -1;
    EXPECT_EQ(0, video_engine.base->CreateChannel(video_channel_1));
    // Create channel 2 based on the first channel.
    int video_channel_2 = -1;
    EXPECT_EQ(0, video_engine.base->CreateChannel(
        video_channel_2, video_channel_1));
    EXPECT_NE(video_channel_1, video_channel_2)
        << "Channel 2 should be unique.";
    // Separate loopback transports, one port pair per channel.
    const char* ip_address = "127.0.0.1";
    uint16_t rtp_port_1 = 12000;
    uint16_t rtp_port_2 = 13000;
    rtc::scoped_ptr<webrtc::test::VideoChannelTransport>
        video_channel_transport_1(
            new webrtc::test::VideoChannelTransport(network, video_channel_1));
    ASSERT_EQ(0, video_channel_transport_1->SetSendDestination(ip_address,
                                                               rtp_port_1));
    ASSERT_EQ(0, video_channel_transport_1->SetLocalReceiver(rtp_port_1));
    rtc::scoped_ptr<webrtc::test::VideoChannelTransport>
        video_channel_transport_2(
            new webrtc::test::VideoChannelTransport(network, video_channel_2));
    ASSERT_EQ(0, video_channel_transport_2->SetSendDestination(ip_address,
                                                               rtp_port_2));
    ASSERT_EQ(0, video_channel_transport_2->SetLocalReceiver(rtp_port_2));
    // Distinct SSRCs so the two loopback streams can be told apart.
    EXPECT_EQ(0, video_engine.rtp_rtcp->SetLocalSSRC(video_channel_1, 1));
    EXPECT_EQ(0, video_engine.rtp_rtcp->SetLocalSSRC(video_channel_2, 2));
    tb_capture.ConnectTo(video_channel_1);
    tb_capture.ConnectTo(video_channel_2);
    EXPECT_EQ(0, video_engine.rtp_rtcp->SetKeyFrameRequestMethod(
        video_channel_1, webrtc::kViEKeyFrameRequestPliRtcp));
    EXPECT_EQ(0, video_engine.rtp_rtcp->SetKeyFrameRequestMethod(
        video_channel_2, webrtc::kViEKeyFrameRequestPliRtcp));
    EXPECT_EQ(0, video_engine.render->AddRenderer(video_channel_1, _window1, 0,
                                                  0.0, 0.0, 1.0, 1.0));
    EXPECT_EQ(0, video_engine.render->StartRender(video_channel_1));
    EXPECT_EQ(0, video_engine.render->AddRenderer(video_channel_2, _window2, 0,
                                                  0.0, 0.0, 1.0, 1.0));
    EXPECT_EQ(0, video_engine.render->StartRender(video_channel_2));
    // Set Send codec.
    // Channel 1 sends QVGA-ish, channel 2 sends double that resolution.
    uint16_t codec_width = 320;
    uint16_t codec_height = 240;
    bool codec_set = false;
    webrtc::VideoCodec video_codec;
    webrtc::VideoCodec send_codec1;
    webrtc::VideoCodec send_codec2;
    for (int idx = 0; idx < video_engine.codec->NumberOfCodecs(); idx++) {
      EXPECT_EQ(0, video_engine.codec->GetCodec(idx, video_codec));
      EXPECT_EQ(0, video_engine.codec->SetReceiveCodec(video_channel_1,
                                                       video_codec));
      if (video_codec.codecType == webrtc::kVideoCodecVP8) {
        memcpy(&send_codec1, &video_codec, sizeof(video_codec));
        send_codec1.width = codec_width;
        send_codec1.height = codec_height;
        EXPECT_EQ(0, video_engine.codec->SetSendCodec(
            video_channel_1, send_codec1));
        memcpy(&send_codec2, &video_codec, sizeof(video_codec));
        send_codec2.width = 2 * codec_width;
        send_codec2.height = 2 * codec_height;
        EXPECT_EQ(0, video_engine.codec->SetSendCodec(
            video_channel_2, send_codec2));
        codec_set = true;
        break;
      }
    }
    EXPECT_TRUE(codec_set);
    // We need to verify using render effect filter since we won't trigger
    // a decode reset in loopback (due to using the same SSRC).
    RenderFilter filter1;
    RenderFilter filter2;
    EXPECT_EQ(0, video_engine.image_process->RegisterRenderEffectFilter(
        video_channel_1, filter1));
    EXPECT_EQ(0, video_engine.image_process->RegisterRenderEffectFilter(
        video_channel_2, filter2));
    EXPECT_EQ(0, video_engine.base->StartReceive(video_channel_1));
    EXPECT_EQ(0, video_engine.base->StartSend(video_channel_1));
    EXPECT_EQ(0, video_engine.base->StartReceive(video_channel_2));
    EXPECT_EQ(0, video_engine.base->StartSend(video_channel_2));
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, video_engine.base->StopReceive(video_channel_1));
    EXPECT_EQ(0, video_engine.base->StopSend(video_channel_1));
    EXPECT_EQ(0, video_engine.base->StopReceive(video_channel_2));
    EXPECT_EQ(0, video_engine.base->StopSend(video_channel_2));
    EXPECT_EQ(0, video_engine.image_process->DeregisterRenderEffectFilter(
        video_channel_1));
    EXPECT_EQ(0, video_engine.image_process->DeregisterRenderEffectFilter(
        video_channel_2));
    // Each channel's rendered resolution must match what it sent.
    EXPECT_EQ(send_codec1.width, filter1.last_render_width_);
    EXPECT_EQ(send_codec1.height, filter1.last_render_height_);
    EXPECT_EQ(send_codec2.width, filter2.last_render_width_);
    EXPECT_EQ(send_codec2.height, filter2.last_render_height_);
    EXPECT_EQ(0, video_engine.base->DeleteChannel(video_channel_1));
    EXPECT_EQ(0, video_engine.base->DeleteChannel(video_channel_2));
  }
}
// API-level codec test (no media flows): exercises SetSendCodec/GetSendCodec
// round-trips for VP8, I420 and a registered external "generic" codec, and
// checks the start-bitrate clamping behavior.
void ViEAutoTest::ViECodecAPITest() {
  webrtc::VideoEngine* video_engine = NULL;
  video_engine = webrtc::VideoEngine::Create();
  EXPECT_TRUE(video_engine != NULL);
  webrtc::ViEBase* base = webrtc::ViEBase::GetInterface(video_engine);
  EXPECT_EQ(0, base->Init());
  int video_channel = -1;
  EXPECT_EQ(0, base->CreateChannel(video_channel));
  webrtc::ViECodec* codec = webrtc::ViECodec::GetInterface(video_engine);
  EXPECT_TRUE(codec != NULL);
  // Find VP8 and set it as send codec with a few VP8-specific knobs.
  webrtc::VideoCodec video_codec;
  memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
  const int number_of_codecs = codec->NumberOfCodecs();
  for (int i = 0; i < number_of_codecs; i++) {
    EXPECT_EQ(0, codec->GetCodec(i, video_codec));
    if (video_codec.codecType == webrtc::kVideoCodecVP8) {
      video_codec.codecSpecific.VP8.automaticResizeOn = true;
      video_codec.codecSpecific.VP8.frameDroppingOn = true;
      video_codec.codecSpecific.VP8.keyFrameInterval = 300;
      EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
      break;
    }
  }
  const unsigned int kMinBitrate = 123;
  video_codec.minBitrate = kMinBitrate;
  video_codec.startBitrate = 50;
  EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
  EXPECT_EQ(0, codec->GetSendCodec(video_channel, video_codec));
  // We don't allow allocated start bitrate to be decreased via SetSendCodec,
  // and the default bitrate available in the allocator is 300.
  EXPECT_EQ(300u, video_codec.startBitrate);
  // Verify the VP8 settings survive a GetSendCodec round-trip.
  memset(&video_codec, 0, sizeof(video_codec));
  EXPECT_EQ(0, codec->GetSendCodec(video_channel, video_codec));
  EXPECT_EQ(webrtc::kVideoCodecVP8, video_codec.codecType);
  EXPECT_TRUE(video_codec.codecSpecific.VP8.automaticResizeOn);
  EXPECT_TRUE(video_codec.codecSpecific.VP8.frameDroppingOn);
  EXPECT_EQ(300, video_codec.codecSpecific.VP8.keyFrameInterval);
  // Switch the send codec to I420 and verify the round-trip again.
  for (int i = 0; i < number_of_codecs; i++) {
    EXPECT_EQ(0, codec->GetCodec(i, video_codec));
    if (video_codec.codecType == webrtc::kVideoCodecI420) {
      EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
      break;
    }
  }
  memset(&video_codec, 0, sizeof(video_codec));
  EXPECT_EQ(0, codec->GetSendCodec(video_channel, video_codec));
  EXPECT_EQ(webrtc::kVideoCodecI420, video_codec.codecType);
  // Register a generic codec
  memset(&video_codec, 0, sizeof(video_codec));
  video_codec.codecType = webrtc::kVideoCodecGeneric;
  strcpy(video_codec.plName, "generic-codec");
  uint8_t payload_type = 127;
  video_codec.plType = payload_type;
  video_codec.minBitrate = 100;
  video_codec.startBitrate = 500;
  video_codec.maxBitrate = 10000;
  video_codec.width = 1920;
  video_codec.height = 1080;
  video_codec.maxFramerate = 30;
  video_codec.qpMax = 50;
  webrtc::ViEExternalCodec* external_codec =
      webrtc::ViEExternalCodec::GetInterface(video_engine);
  EXPECT_TRUE(external_codec != NULL);
  // Any encoder will do.
  webrtc::I420Encoder encoder;
  EXPECT_EQ(0, external_codec->RegisterExternalSendCodec(video_channel,
                                                         payload_type, &encoder,
                                                         false));
  EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
  memset(&video_codec, 0, sizeof(video_codec));
  EXPECT_EQ(0, codec->GetSendCodec(video_channel, video_codec));
  EXPECT_EQ(webrtc::kVideoCodecGeneric, video_codec.codecType);
  // Release all interfaces before deleting the engine.
  EXPECT_EQ(0, base->DeleteChannel(video_channel));
  EXPECT_EQ(0, external_codec->Release());
  EXPECT_EQ(0, codec->Release());
  EXPECT_EQ(0, base->Release());
  EXPECT_TRUE(webrtc::VideoEngine::Delete(video_engine));
}
// Tests registration/deregistration of external I420 encoder and decoder
// implementations on a loopback channel, including error handling for wrong
// channel ids and payload types, and verifies via call counters that the
// external codecs were actually exercised.
//
// Fix: the TestError check after GetSendCodec previously re-tested
// `vie_external_codec != NULL` (a copy-paste of the preceding check), so a
// failing GetSendCodec was never reported. It now checks `error == 0`.
void ViEAutoTest::ViECodecExternalCodecTest() {
  ViETest::Log(" ");
  ViETest::Log("========================================");
  ViETest::Log(" ViEExternalCodec Test\n");

  /// **************************************************************
  //  Begin create/initialize WebRTC Video Engine for testing
  /// **************************************************************

  /// **************************************************************
  //  Engine ready. Begin testing class
  /// **************************************************************
#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
  int number_of_errors = 0;
  {
    int error = 0;
    // Loopback channel using the built-in I420 codec at CIF resolution.
    TbInterfaces ViE("ViEExternalCodec");
    TbCaptureDevice capture_device(ViE);
    TbVideoChannel channel(ViE, webrtc::kVideoCodecI420, 352, 288, 30,
                           (352 * 288 * 3 * 8 * 30) / (2 * 1000));
    capture_device.ConnectTo(channel.videoChannel);

    error = ViE.render->AddRenderer(channel.videoChannel, _window1, 0, 0.0, 0.0,
                                    1.0, 1.0);
    number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
    error = ViE.render->StartRender(channel.videoChannel);
    number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
    channel.StartReceive();
    channel.StartSend();

    ViETest::Log("Using internal I420 codec");
    AutoTestSleep(kAutoTestSleepTimeMs / 2);

    webrtc::ViEExternalCodec* vie_external_codec =
        webrtc::ViEExternalCodec::GetInterface(ViE.video_engine);
    number_of_errors += ViETest::TestError(vie_external_codec != NULL,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
    webrtc::VideoCodec codec;
    error = ViE.codec->GetSendCodec(channel.videoChannel, codec);
    // Check the GetSendCodec result (was a copy-paste of the NULL check).
    number_of_errors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);

    // Use external encoder instead.
    {
      TbI420Encoder ext_encoder;

      // Test to register on wrong channel.
      error = vie_external_codec->RegisterExternalSendCodec(
          channel.videoChannel + 5, codec.plType, &ext_encoder, false);
      number_of_errors += ViETest::TestError(error == -1,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(
          ViE.LastError() == kViECodecInvalidArgument,
          "ERROR: %s at line %d", __FUNCTION__, __LINE__);

      error = vie_external_codec->RegisterExternalSendCodec(
          channel.videoChannel, codec.plType, &ext_encoder, false);
      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      // Use new external encoder
      error = ViE.codec->SetSendCodec(channel.videoChannel, codec);
      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      TbI420Decoder ext_decoder;
      error = vie_external_codec->RegisterExternalReceiveCodec(
          channel.videoChannel, codec.plType, &ext_decoder);
      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      error = ViE.codec->SetReceiveCodec(channel.videoChannel, codec);
      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      ViETest::Log("Using external I420 codec");
      AutoTestSleep(kAutoTestSleepTimeMs);

      // Test to deregister on wrong channel
      error = vie_external_codec->DeRegisterExternalSendCodec(
          channel.videoChannel + 5, codec.plType);
      number_of_errors += ViETest::TestError(error == -1,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(
          ViE.LastError() == kViECodecInvalidArgument, "ERROR: %s at line %d",
          __FUNCTION__, __LINE__);

      // Test to deregister wrong payload type.
      error = vie_external_codec->DeRegisterExternalSendCodec(
          channel.videoChannel, codec.plType - 1);
      number_of_errors += ViETest::TestError(error == -1,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      // Deregister external send codec
      error = vie_external_codec->DeRegisterExternalSendCodec(
          channel.videoChannel, codec.plType);
      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      error = vie_external_codec->DeRegisterExternalReceiveCodec(
          channel.videoChannel, codec.plType);
      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      // Verify that the encoder and decoder has been used
      TbI420Encoder::FunctionCalls encode_calls =
          ext_encoder.GetFunctionCalls();
      number_of_errors += ViETest::TestError(encode_calls.InitEncode == 1,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(encode_calls.Release == 1,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(encode_calls.Encode > 30,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(
          encode_calls.RegisterEncodeCompleteCallback == 1,
          "ERROR: %s at line %d", __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(
          encode_calls.SetChannelParameters > 1, "ERROR: %s at line %d",
          __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(encode_calls.SetRates > 1,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      TbI420Decoder::FunctionCalls decode_calls =
          ext_decoder.GetFunctionCalls();
      number_of_errors += ViETest::TestError(decode_calls.InitDecode == 1,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(decode_calls.Release == 1,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(decode_calls.Decode > 30,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(
          decode_calls.RegisterDecodeCompleteCallback == 1,
          "ERROR: %s at line %d", __FUNCTION__, __LINE__);

      // Re-register the same external codecs under a different payload type
      // and make sure counters double after a second run.
      ViETest::Log("Changing payload type Using external I420 codec");

      codec.plType = codec.plType - 1;
      error = vie_external_codec->RegisterExternalReceiveCodec(
          channel.videoChannel, codec.plType, &ext_decoder);
      number_of_errors += ViETest::TestError(error == 0,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      error = ViE.codec->SetReceiveCodec(channel.videoChannel,
                                         codec);
      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      error = vie_external_codec->RegisterExternalSendCodec(
          channel.videoChannel, codec.plType, &ext_encoder, false);
      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      // Use new external encoder
      error = ViE.codec->SetSendCodec(channel.videoChannel,
                                      codec);
      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      AutoTestSleep(kAutoTestSleepTimeMs / 2);

      /// **************************************************************
      //  Testing finished. Tear down Video Engine
      /// **************************************************************

      error = vie_external_codec->DeRegisterExternalSendCodec(
          channel.videoChannel, codec.plType);
      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      error = vie_external_codec->DeRegisterExternalReceiveCodec(
          channel.videoChannel, codec.plType);
      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      // Verify that the encoder and decoder has been used
      encode_calls = ext_encoder.GetFunctionCalls();
      number_of_errors += ViETest::TestError(encode_calls.InitEncode == 2,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(encode_calls.Release == 2,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(encode_calls.Encode > 30,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(
          encode_calls.RegisterEncodeCompleteCallback == 2,
          "ERROR: %s at line %d", __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(
          encode_calls.SetChannelParameters > 1, "ERROR: %s at line %d",
          __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(encode_calls.SetRates > 1,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);

      decode_calls = ext_decoder.GetFunctionCalls();
      number_of_errors += ViETest::TestError(decode_calls.InitDecode == 2,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(decode_calls.Release == 2,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(decode_calls.Decode > 30,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
      number_of_errors += ViETest::TestError(
          decode_calls.RegisterDecodeCompleteCallback == 2,
          "ERROR: %s at line %d", __FUNCTION__, __LINE__);

      int remaining_interfaces = vie_external_codec->Release();
      number_of_errors += ViETest::TestError(remaining_interfaces == 0,
                                             "ERROR: %s at line %d",
                                             __FUNCTION__, __LINE__);
    }  // tbI420Encoder and ext_decoder goes out of scope.

    ViETest::Log("Using internal I420 codec");
    AutoTestSleep(kAutoTestSleepTimeMs / 2);
  }
  if (number_of_errors > 0) {
    // Test failed
    ViETest::Log(" ");
    ViETest::Log(" ERROR ViEExternalCodec Test FAILED!");
    ViETest::Log(" Number of errors: %d", number_of_errors);
    ViETest::Log("========================================");
    ViETest::Log(" ");
    return;
  }

  ViETest::Log(" ");
  ViETest::Log(" ViEExternalCodec Test PASSED!");
  ViETest::Log("========================================");
  ViETest::Log(" ");
  return;
#else
  ViETest::Log(" ViEExternalCodec not enabled\n");
  return;
#endif
}

View file

@ -1,232 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest_image_process.cc
//
// Settings
#include "webrtc/engine_configurations.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/libvietest/include/tb_capture_device.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
#include "webrtc/video_engine/test/libvietest/include/tb_video_channel.h"
// Effect filter that removes all color information: it overwrites the chroma
// planes of the I420 buffer with the neutral value 0x7f, producing a
// black-and-white picture that is easy to spot in the render windows.
class MyEffectFilter : public webrtc::ViEEffectFilter {
 public:
  MyEffectFilter() {}
  ~MyEffectFilter() {}

  virtual int Transform(size_t size,
                        unsigned char* frame_buffer,
                        int64_t ntp_time_ms,
                        unsigned int timestamp,
                        unsigned int width,
                        unsigned int height) {
    // In an I420 buffer the first 2/3 is luma and the final 1/3 is chroma;
    // neutralizing the chroma yields grayscale.
    unsigned char* chroma_planes = frame_buffer + (2 * size) / 3;
    memset(chroma_planes, 0x7f, size / 3);
    return 0;
  }
};
// Visually exercises capture, render and send effect filters on a loopback
// channel: the black-and-white filter is moved between the capture device,
// the incoming stream, and the send path, affecting one or both windows.
void ViEAutoTest::ViEImageProcessStandardTest()
{
    //***************************************************************
    //  Begin create/initialize WebRTC Video Engine for testing
    //***************************************************************
    int rtpPort = 6000;
    // Create VIE
    TbInterfaces ViE("ViEImageProcessStandardTest");
    // Create a video channel
    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
    // Create a capture device
    TbCaptureDevice tbCapture(ViE);
    tbCapture.ConnectTo(tbChannel.videoChannel);
    tbChannel.StartReceive(rtpPort);
    tbChannel.StartSend(rtpPort);

    MyEffectFilter effectFilter;
    RenderCaptureDeviceAndOutputStream(&ViE, &tbChannel, &tbCapture);

    ViETest::Log("Capture device is renderered in Window 1");
    ViETest::Log("Remote stream is renderered in Window 2");
    AutoTestSleep(kAutoTestSleepTimeMs);

    //***************************************************************
    //  Engine ready. Begin testing class
    //***************************************************************

    // A capture filter runs before encoding, so it affects both the local
    // preview (window 1) and the decoded loopback stream (window 2).
    EXPECT_EQ(0, ViE.image_process->RegisterCaptureEffectFilter(
        tbCapture.captureId, effectFilter));

    ViETest::Log("Black and white filter registered for capture device, "
                 "affects both windows");
    AutoTestSleep(kAutoTestSleepTimeMs);

    EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
        tbCapture.captureId));

    // A render filter only touches the decoded incoming stream (window 2).
    EXPECT_EQ(0, ViE.image_process->RegisterRenderEffectFilter(
        tbChannel.videoChannel, effectFilter));

    ViETest::Log("Remove capture effect filter, adding filter for incoming "
                 "stream");
    ViETest::Log("Only Window 2 should be black and white");
    AutoTestSleep(kAutoTestSleepTimeMs);

    StopRenderCaptureDeviceAndOutputStream(&ViE, &tbChannel, &tbCapture);
    tbCapture.Disconnect(tbChannel.videoChannel);

    // Second channel on a different port to verify filters follow the
    // capture device rather than the channel.
    int rtpPort2 = rtpPort + 100;
    // Create a video channel
    TbVideoChannel tbChannel2(ViE, webrtc::kVideoCodecVP8);
    tbCapture.ConnectTo(tbChannel2.videoChannel);
    tbChannel2.StartReceive(rtpPort2);
    tbChannel2.StartSend(rtpPort2);

    EXPECT_EQ(0, ViE.render->AddRenderer(
        tbChannel2.videoChannel, _window1, 1, 0.0, 0.0, 1.0, 1.0));
    EXPECT_EQ(0, ViE.render->StartRender(tbChannel2.videoChannel));
    EXPECT_EQ(0, ViE.image_process->DeregisterRenderEffectFilter(
        tbChannel.videoChannel));

    ViETest::Log("Local renderer removed, added new channel and rendering in "
                 "Window1.");

    EXPECT_EQ(0, ViE.image_process->RegisterCaptureEffectFilter(
        tbCapture.captureId, effectFilter));

    ViETest::Log("Black and white filter registered for capture device, "
                 "affects both windows");
    AutoTestSleep(kAutoTestSleepTimeMs);

    EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
        tbCapture.captureId));

    // A send filter runs before encoding on one channel only, so just the
    // loopback stream of that channel (window 2) is affected.
    EXPECT_EQ(0, ViE.image_process->RegisterSendEffectFilter(
        tbChannel.videoChannel, effectFilter));

    ViETest::Log("Capture filter removed.");
    ViETest::Log("Black and white filter registered for one channel, Window2 "
                 "should be black and white");
    AutoTestSleep(kAutoTestSleepTimeMs);

    EXPECT_EQ(0, ViE.image_process->DeregisterSendEffectFilter(
        tbChannel.videoChannel));

    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbChannel2.videoChannel));
    tbCapture.Disconnect(tbChannel2.videoChannel);

    //***************************************************************
    //  Testing finished. Tear down Video Engine
    //***************************************************************
}
// Placeholder: no extended image-process tests are implemented.
void ViEAutoTest::ViEImageProcessExtendedTest()
{
}
// API-level test of the image-process interface: verifies success/failure
// return codes for registering/deregistering capture, render and send effect
// filters (including double-registration and bad ids), plus the deflickering
// and color-enhancement toggles.
void ViEAutoTest::ViEImageProcessAPITest()
{
    TbInterfaces ViE("ViEImageProcessAPITest");
    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
    TbCaptureDevice tbCapture(ViE);
    tbCapture.ConnectTo(tbChannel.videoChannel);

    MyEffectFilter effectFilter;

    //
    // Capture effect filter
    //
    // Add effect filter
    EXPECT_EQ(0, ViE.image_process->RegisterCaptureEffectFilter(
        tbCapture.captureId, effectFilter));
    // Add again -> error
    EXPECT_NE(0, ViE.image_process->RegisterCaptureEffectFilter(
        tbCapture.captureId, effectFilter));
    EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
        tbCapture.captureId));
    // NOTE(review): a second deregister apparently succeeds here (EXPECT_EQ),
    // unlike render/send filters below where it also returns 0 — the API
    // treats deregistering an absent filter as a no-op.
    EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
        tbCapture.captureId));
    // Non-existing capture device
    EXPECT_NE(0, ViE.image_process->RegisterCaptureEffectFilter(
        tbChannel.videoChannel, effectFilter));

    //
    // Render effect filter
    //
    EXPECT_EQ(0, ViE.image_process->RegisterRenderEffectFilter(
        tbChannel.videoChannel, effectFilter));
    EXPECT_NE(0, ViE.image_process->RegisterRenderEffectFilter(
        tbChannel.videoChannel, effectFilter));
    EXPECT_EQ(0, ViE.image_process->DeregisterRenderEffectFilter(
        tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.image_process->DeregisterRenderEffectFilter(
        tbChannel.videoChannel));
    // Non-existing channel id
    EXPECT_NE(0, ViE.image_process->RegisterRenderEffectFilter(
        tbCapture.captureId, effectFilter));

    //
    // Send effect filter
    //
    EXPECT_EQ(0, ViE.image_process->RegisterSendEffectFilter(
        tbChannel.videoChannel, effectFilter));
    EXPECT_NE(0, ViE.image_process->RegisterSendEffectFilter(
        tbChannel.videoChannel, effectFilter));
    EXPECT_EQ(0, ViE.image_process->DeregisterSendEffectFilter(
        tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.image_process->DeregisterSendEffectFilter(
        tbChannel.videoChannel));
    // Registering a send filter on a capture id must fail.
    EXPECT_NE(0, ViE.image_process->RegisterSendEffectFilter(
        tbCapture.captureId, effectFilter));

    //
    // Deflickering
    //
    // Enabling twice or disabling twice is an error; channels don't support it.
    EXPECT_EQ(0, ViE.image_process->EnableDeflickering(
        tbCapture.captureId, true));
    EXPECT_NE(0, ViE.image_process->EnableDeflickering(
        tbCapture.captureId, true));
    EXPECT_EQ(0, ViE.image_process->EnableDeflickering(
        tbCapture.captureId, false));
    EXPECT_NE(0, ViE.image_process->EnableDeflickering(
        tbCapture.captureId, false));
    EXPECT_NE(0, ViE.image_process->EnableDeflickering(
        tbChannel.videoChannel, true));

    //
    // Color enhancement
    //
    // Toggling is idempotent on channels; capture devices don't support it.
    EXPECT_EQ(0, ViE.image_process->EnableColorEnhancement(
        tbChannel.videoChannel, false));
    EXPECT_EQ(0, ViE.image_process->EnableColorEnhancement(
        tbChannel.videoChannel, true));
    EXPECT_EQ(0, ViE.image_process->EnableColorEnhancement(
        tbChannel.videoChannel, false));
    EXPECT_NE(0, ViE.image_process->EnableColorEnhancement(
        tbCapture.captureId, true));

    tbCapture.Disconnect(tbChannel.videoChannel);
}

View file

@ -1,142 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest_linux.cc
//
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_linux.h"
#include <string>
#include "webrtc/engine_configurations.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_main.h"
// Starts with no open X display connections; CreateWindows() must be called
// before the window getters return anything meaningful.
// NOTE(review): _hwnd1/_hwnd2 are not initialized here — presumably they are
// only read after CreateWindows() has set them; confirm no caller reads the
// handles earlier.
ViEAutoTestWindowManager::ViEAutoTestWindowManager()
    : _hdsp1(NULL),
      _hdsp2(NULL) {
}
// Tears down any windows still open so X resources are not leaked.
ViEAutoTestWindowManager::~ViEAutoTestWindowManager() {
  TerminateWindows();
}
// Returns the first X11 Window handle as an opaque pointer, as required by the
// platform-neutral window-manager interface. Valid only after CreateWindows().
void* ViEAutoTestWindowManager::GetWindow1() {
  return reinterpret_cast<void*>(_hwnd1);
}
// Returns the second X11 Window handle as an opaque pointer. Valid only after
// CreateWindows().
void* ViEAutoTestWindowManager::GetWindow2() {
  return reinterpret_cast<void*>(_hwnd2);
}
int ViEAutoTestWindowManager::TerminateWindows() {
if (_hdsp1) {
ViEDestroyWindow(&_hwnd1, _hdsp1);
_hdsp1 = NULL;
}
if (_hdsp2) {
ViEDestroyWindow(&_hwnd2, _hdsp2);
_hdsp2 = NULL;
}
return 0;
}
// Opens the two X11 render windows at the requested positions and sizes.
// The titles arrive as opaque pointers from the platform-neutral interface
// and are really C strings. Always returns 0.
int ViEAutoTestWindowManager::CreateWindows(AutoTestRect window1Size,
                                            AutoTestRect window2Size,
                                            void* window1Title,
                                            void* window2Title) {
  char* title1 = reinterpret_cast<char*>(window1Title);
  char* title2 = reinterpret_cast<char*>(window2Title);
  ViECreateWindow(&_hwnd1, &_hdsp1,
                  window1Size.origin.x, window1Size.origin.y,
                  window1Size.size.width, window1Size.size.height, title1);
  ViECreateWindow(&_hwnd2, &_hdsp2,
                  window2Size.origin.x, window2Size.origin.y,
                  window2Size.size.width, window2Size.size.height, title2);
  return 0;
}
// Creates and maps a 24-bit TrueColor X11 window of the given geometry and
// title, blocking until the server reports it mapped. On success *out_window
// and *out_display receive the new window and its (freshly opened) display
// connection; the caller owns both and releases them via ViEDestroyWindow().
// Any X environment failure terminates the process — this is test-only code.
int ViEAutoTestWindowManager::ViECreateWindow(Window *out_window,
                                              Display **out_display, int x_pos,
                                              int y_pos, int width, int height,
                                              char* title) {
  Display* display = XOpenDisplay(NULL);
  if (display == NULL) {
    // There's no point to continue if this happens: nothing will work anyway.
    printf("Failed to connect to X server: X environment likely broken\n");
    exit(-1);
  }

  int screen = DefaultScreen(display);

  // Try to establish a 24-bit TrueColor display
  // (our environment must allow this).
  XVisualInfo visual_info;
  if (XMatchVisualInfo(display, screen, 24, TrueColor, &visual_info) == 0) {
    printf("Failed to establish 24-bit TrueColor in X environment.\n");
    exit(-1);
  }

  // Create suitable window attributes.
  XSetWindowAttributes window_attributes;
  window_attributes.colormap = XCreateColormap(
      display, DefaultRootWindow(display), visual_info.visual, AllocNone);
  window_attributes.event_mask = StructureNotifyMask | ExposureMask;
  window_attributes.background_pixel = 0;
  window_attributes.border_pixel = 0;

  unsigned long attribute_mask = CWBackPixel | CWBorderPixel | CWColormap |
      CWEventMask;

  Window _window = XCreateWindow(display, DefaultRootWindow(display), x_pos,
                                 y_pos, width, height, 0, visual_info.depth,
                                 InputOutput, visual_info.visual,
                                 attribute_mask, &window_attributes);

  // Set window name.
  XStoreName(display, _window, title);
  XSetIconName(display, _window, title);

  // Make x report events for mask.
  XSelectInput(display, _window, StructureNotifyMask);

  // Map the window to the display.
  XMapWindow(display, _window);

  // Wait for map event so the window is actually on screen before rendering
  // starts.
  XEvent event;
  do {
    XNextEvent(display, &event);
  } while (event.type != MapNotify || event.xmap.event != _window);

  *out_window = _window;
  *out_display = display;

  return 0;
}
// Unmaps and destroys |*window|, then closes the display connection it was
// created on. XSync flushes the pending requests so the destroy has been
// processed before the connection goes away. Always returns 0.
int ViEAutoTestWindowManager::ViEDestroyWindow(Window *window,
                                               Display *display) {
  XUnmapWindow(display, *window);
  XDestroyWindow(display, *window);
  XSync(display, false);
  XCloseDisplay(display);
  return 0;
}
// Raising the window to the top is not implemented on Linux; this reports
// failure unconditionally (the original wrote `return 0;`, i.e. false).
bool ViEAutoTestWindowManager::SetTopmostWindow() {
  return false;
}
// Entry point for the Linux auto-test binary: delegates to the shared runner,
// which parses flags and dispatches to automated or interactive mode.
int main(int argc, char** argv) {
  ViEAutoTestMain auto_test;
  return auto_test.RunTests(argc, argv);
}

View file

@ -1,763 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest_loopback.cc
//
// This code is also used as sample code for ViE 3.0
//
// ===================================================================
//
// BEGIN: VideoEngine 3.0 Sample Code
//
#include <iostream>
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/test/channel_transport/include/channel_transport.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_external_codec.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/libvietest/include/tb_external_transport.h"
#include "webrtc/voice_engine/include/voe_base.h"
// Fixed sender-side identifiers for the loopback call. The SSRCs are set
// explicitly because sender and receiver run in one process: letting the
// engine pick random SSRCs would collide in loopback and reset the receiver.
const uint32_t kSsrc = 0x01234567;
const uint32_t kRtxSsrc = 0x01234568;
const int kRtxPayloadType = 98;
const int kPayloadType = 100;

// RED/ULPFEC payload types used when FEC protection is selected.
#define VCM_RED_PAYLOAD_TYPE 96
#define VCM_ULPFEC_PAYLOAD_TYPE 97
int VideoEngineSampleCode(void* window1, void* window2)
{
//********************************************************
// Begin create/initialize Video Engine for testing
//********************************************************
int error = 0;
//
// Create a VideoEngine instance
//
webrtc::VideoEngine* ptrViE = NULL;
ptrViE = webrtc::VideoEngine::Create();
if (ptrViE == NULL)
{
printf("ERROR in VideoEngine::Create\n");
return -1;
}
error = ptrViE->SetTraceFilter(webrtc::kTraceAll);
if (error == -1)
{
printf("ERROR in VideoEngine::SetTraceFilter\n");
return -1;
}
std::string trace_file =
ViETest::GetResultOutputPath() + "ViELoopbackCall_trace.txt";
error = ptrViE->SetTraceFile(trace_file.c_str());
if (error == -1)
{
printf("ERROR in VideoEngine::SetTraceFile\n");
return -1;
}
//
// Init VideoEngine and create a channel
//
webrtc::ViEBase* ptrViEBase = webrtc::ViEBase::GetInterface(ptrViE);
if (ptrViEBase == NULL)
{
printf("ERROR in ViEBase::GetInterface\n");
return -1;
}
error = ptrViEBase->Init();
if (error == -1)
{
printf("ERROR in ViEBase::Init\n");
return -1;
}
webrtc::ViERTP_RTCP* ptrViERtpRtcp =
webrtc::ViERTP_RTCP::GetInterface(ptrViE);
if (ptrViERtpRtcp == NULL)
{
printf("ERROR in ViERTP_RTCP::GetInterface\n");
return -1;
}
int videoChannel = -1;
error = ptrViEBase->CreateChannel(videoChannel);
if (error == -1)
{
printf("ERROR in ViEBase::CreateChannel\n");
return -1;
}
//
// List available capture devices, allocate and connect.
//
webrtc::ViECapture* ptrViECapture =
webrtc::ViECapture::GetInterface(ptrViE);
if (ptrViEBase == NULL)
{
printf("ERROR in ViECapture::GetInterface\n");
return -1;
}
const unsigned int KMaxDeviceNameLength = 128;
const unsigned int KMaxUniqueIdLength = 256;
char deviceName[KMaxDeviceNameLength];
memset(deviceName, 0, KMaxDeviceNameLength);
char uniqueId[KMaxUniqueIdLength];
memset(uniqueId, 0, KMaxUniqueIdLength);
printf("Available capture devices:\n");
int captureIdx = 0;
for (captureIdx = 0;
captureIdx < ptrViECapture->NumberOfCaptureDevices();
captureIdx++)
{
memset(deviceName, 0, KMaxDeviceNameLength);
memset(uniqueId, 0, KMaxUniqueIdLength);
error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
KMaxDeviceNameLength, uniqueId,
KMaxUniqueIdLength);
if (error == -1)
{
printf("ERROR in ViECapture::GetCaptureDevice\n");
return -1;
}
printf("\t %d. %s\n", captureIdx + 1, deviceName);
}
printf("\nChoose capture device: ");
#ifdef WEBRTC_ANDROID
captureIdx = 0;
printf("0\n");
#else
if (scanf("%d", &captureIdx) != 1)
{
printf("Error in scanf()\n");
return -1;
}
getc(stdin);
captureIdx = captureIdx - 1; // Compensate for idx start at 1.
#endif
error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
KMaxDeviceNameLength, uniqueId,
KMaxUniqueIdLength);
if (error == -1)
{
printf("ERROR in ViECapture::GetCaptureDevice\n");
return -1;
}
int captureId = 0;
error = ptrViECapture->AllocateCaptureDevice(uniqueId, KMaxUniqueIdLength,
captureId);
if (error == -1)
{
printf("ERROR in ViECapture::AllocateCaptureDevice\n");
return -1;
}
error = ptrViECapture->ConnectCaptureDevice(captureId, videoChannel);
if (error == -1)
{
printf("ERROR in ViECapture::ConnectCaptureDevice\n");
return -1;
}
error = ptrViECapture->StartCapture(captureId);
if (error == -1)
{
printf("ERROR in ViECapture::StartCapture\n");
return -1;
}
//
// RTP/RTCP settings
//
error = ptrViERtpRtcp->SetRTCPStatus(videoChannel,
webrtc::kRtcpCompound_RFC4585);
if (error == -1)
{
printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
return -1;
}
error = ptrViERtpRtcp->SetKeyFrameRequestMethod(
videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);
if (error == -1)
{
printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
return -1;
}
error = ptrViERtpRtcp->SetRembStatus(videoChannel, true, true);
if (error == -1)
{
printf("ERROR in ViERTP_RTCP::SetTMMBRStatus\n");
return -1;
}
// Setting SSRC manually (arbitrary value), as otherwise we will get a clash
// (loopback), and a new SSRC will be set, which will reset the receiver.
error = ptrViERtpRtcp->SetLocalSSRC(videoChannel, kSsrc);
if (error == -1) {
printf("ERROR in ViERTP_RTCP::SetLocalSSRC\n");
return -1;
}
error = ptrViERtpRtcp->SetLocalSSRC(videoChannel, kRtxSsrc,
webrtc::kViEStreamTypeRtx, 0);
if (error == -1) {
printf("ERROR in ViERTP_RTCP::SetLocalSSRC\n");
return -1;
}
error = ptrViERtpRtcp->SetRemoteSSRCType(videoChannel,
webrtc::kViEStreamTypeRtx,
kRtxSsrc);
if (error == -1) {
printf("ERROR in ViERTP_RTCP::SetRtxReceivePayloadType\n");
return -1;
}
error = ptrViERtpRtcp->SetRtxSendPayloadType(videoChannel, kRtxPayloadType,
kPayloadType);
if (error == -1) {
printf("ERROR in ViERTP_RTCP::SetRtxSendPayloadType\n");
return -1;
}
error = ptrViERtpRtcp->SetRtxReceivePayloadType(
videoChannel, kRtxPayloadType, kPayloadType);
if (error == -1) {
printf("ERROR in ViERTP_RTCP::SetRtxReceivePayloadType\n");
return -1;
}
//
// Set up rendering
//
webrtc::ViERender* ptrViERender = webrtc::ViERender::GetInterface(ptrViE);
if (ptrViERender == NULL) {
printf("ERROR in ViERender::GetInterface\n");
return -1;
}
error
= ptrViERender->AddRenderer(captureId, window1, 0, 0.0, 0.0, 1.0, 1.0);
if (error == -1)
{
printf("ERROR in ViERender::AddRenderer\n");
return -1;
}
error = ptrViERender->StartRender(captureId);
if (error == -1)
{
printf("ERROR in ViERender::StartRender\n");
return -1;
}
error = ptrViERender->AddRenderer(videoChannel, window2, 1, 0.0, 0.0, 1.0,
1.0);
if (error == -1)
{
printf("ERROR in ViERender::AddRenderer\n");
return -1;
}
error = ptrViERender->StartRender(videoChannel);
if (error == -1)
{
printf("ERROR in ViERender::StartRender\n");
return -1;
}
//
// Setup codecs
//
webrtc::ViECodec* ptrViECodec = webrtc::ViECodec::GetInterface(ptrViE);
if (ptrViECodec == NULL)
{
printf("ERROR in ViECodec::GetInterface\n");
return -1;
}
// Check available codecs and prepare receive codecs
printf("\nAvailable codecs:\n");
webrtc::VideoCodec videoCodec;
memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
int codecIdx = 0;
for (codecIdx = 0; codecIdx < ptrViECodec->NumberOfCodecs(); codecIdx++)
{
error = ptrViECodec->GetCodec(codecIdx, videoCodec);
if (error == -1)
{
printf("ERROR in ViECodec::GetCodec\n");
return -1;
}
// try to keep the test frame size small when I420
if (videoCodec.codecType == webrtc::kVideoCodecI420)
{
videoCodec.width = 176;
videoCodec.height = 144;
}
error = ptrViECodec->SetReceiveCodec(videoChannel, videoCodec);
if (error == -1)
{
printf("ERROR in ViECodec::SetReceiveCodec\n");
return -1;
}
if (videoCodec.codecType != webrtc::kVideoCodecRED
&& videoCodec.codecType != webrtc::kVideoCodecULPFEC)
{
printf("\t %d. %s\n", codecIdx + 1, videoCodec.plName);
}
}
printf("%d. VP8 over Generic.\n", ptrViECodec->NumberOfCodecs() + 1);
printf("Choose codec: ");
#ifdef WEBRTC_ANDROID
codecIdx = 0;
printf("0\n");
#else
if (scanf("%d", &codecIdx) != 1)
{
printf("Error in scanf()\n");
return -1;
}
getc(stdin);
codecIdx = codecIdx - 1; // Compensate for idx start at 1.
#endif
// VP8 over generic transport gets this special one.
if (codecIdx == ptrViECodec->NumberOfCodecs()) {
for (codecIdx = 0; codecIdx < ptrViECodec->NumberOfCodecs(); ++codecIdx) {
error = ptrViECodec->GetCodec(codecIdx, videoCodec);
assert(error != -1);
if (videoCodec.codecType == webrtc::kVideoCodecVP8)
break;
}
assert(videoCodec.codecType == webrtc::kVideoCodecVP8);
videoCodec.codecType = webrtc::kVideoCodecGeneric;
// Any plName should work with generic
strcpy(videoCodec.plName, "VP8-GENERIC");
uint8_t pl_type = 127;
videoCodec.plType = pl_type;
webrtc::ViEExternalCodec* external_codec = webrtc::ViEExternalCodec
::GetInterface(ptrViE);
assert(external_codec != NULL);
error = external_codec->RegisterExternalSendCodec(videoChannel, pl_type,
webrtc::VP8Encoder::Create(), false);
assert(error != -1);
error = external_codec->RegisterExternalReceiveCodec(videoChannel,
pl_type, webrtc::VP8Decoder::Create(), false);
assert(error != -1);
} else {
error = ptrViECodec->GetCodec(codecIdx, videoCodec);
if (error == -1) {
printf("ERROR in ViECodec::GetCodec\n");
return -1;
}
}
// Set spatial resolution option
std::string str;
std::cout << std::endl;
std::cout << "Enter frame size option (default is CIF):" << std::endl;
std::cout << "1. QCIF (176X144) " << std::endl;
std::cout << "2. CIF (352X288) " << std::endl;
std::cout << "3. VGA (640X480) " << std::endl;
std::cout << "4. 4CIF (704X576) " << std::endl;
std::cout << "5. WHD (1280X720) " << std::endl;
std::cout << "6. FHD (1920X1080) " << std::endl;
std::getline(std::cin, str);
int resolnOption = atoi(str.c_str());
switch (resolnOption)
{
case 1:
videoCodec.width = 176;
videoCodec.height = 144;
break;
case 2:
videoCodec.width = 352;
videoCodec.height = 288;
break;
case 3:
videoCodec.width = 640;
videoCodec.height = 480;
break;
case 4:
videoCodec.width = 704;
videoCodec.height = 576;
break;
case 5:
videoCodec.width = 1280;
videoCodec.height = 720;
break;
case 6:
videoCodec.width = 1920;
videoCodec.height = 1080;
break;
}
// Set number of temporal layers.
std::cout << std::endl;
std::cout << "Choose number of temporal layers for VP8 (1 to 4). ";
std::cout << "Press enter for default (=1) for other codecs: \n";
std::getline(std::cin, str);
int numTemporalLayers = atoi(str.c_str());
if (numTemporalLayers != 0 &&
videoCodec.codecType == webrtc::kVideoCodecVP8) {
videoCodec.codecSpecific.VP8.numberOfTemporalLayers = numTemporalLayers;
} else if (videoCodec.codecType == webrtc::kVideoCodecVP9) {
// Temporal layers for vp9 not yet supported in webrtc.
numTemporalLayers = 1;
videoCodec.codecSpecific.VP9.numberOfTemporalLayers = 1;
}
// Set start bit rate
std::cout << std::endl;
std::cout << "Choose start rate (in kbps). Press enter for default: ";
std::getline(std::cin, str);
int startRate = atoi(str.c_str());
if(startRate != 0)
{
videoCodec.startBitrate=startRate;
}
error = ptrViECodec->SetSendCodec(videoChannel, videoCodec);
assert(error != -1);
error = ptrViECodec->SetReceiveCodec(videoChannel, videoCodec);
assert(error != -1);
//
// Choose Protection Mode
//
std::cout << std::endl;
std::cout << "Enter Protection Method:" << std::endl;
std::cout << "0. None" << std::endl;
std::cout << "1. FEC" << std::endl;
std::cout << "2. NACK" << std::endl;
std::cout << "3. NACK+FEC" << std::endl;
std::getline(std::cin, str);
int protectionMethod = atoi(str.c_str());
error = 0;
bool temporalToggling = true;
switch (protectionMethod)
{
case 0: // None: default is no protection
break;
case 1: // FEC only
error = ptrViERtpRtcp->SetFECStatus(videoChannel,
true,
VCM_RED_PAYLOAD_TYPE,
VCM_ULPFEC_PAYLOAD_TYPE);
temporalToggling = false;
break;
case 2: // Nack only
error = ptrViERtpRtcp->SetNACKStatus(videoChannel, true);
break;
case 3: // Hybrid NAck and FEC
error = ptrViERtpRtcp->SetHybridNACKFECStatus(
videoChannel,
true,
VCM_RED_PAYLOAD_TYPE,
VCM_ULPFEC_PAYLOAD_TYPE);
temporalToggling = false;
break;
}
if (error < 0)
{
printf("ERROR in ViERTP_RTCP::SetProtectionStatus\n");
}
// Set up buffering delay.
std::cout << std::endl;
std::cout << "Set buffering delay (mS). Press enter for default(0mS): ";
std::getline(std::cin, str);
int buffering_delay = atoi(str.c_str());
if (buffering_delay != 0) {
error = ptrViERtpRtcp->SetSenderBufferingMode(videoChannel,
buffering_delay);
if (error < 0)
printf("ERROR in ViERTP_RTCP::SetSenderBufferingMode\n");
error = ptrViERtpRtcp->SetReceiverBufferingMode(videoChannel,
buffering_delay);
if (error < 0)
printf("ERROR in ViERTP_RTCP::SetReceiverBufferingMode\n");
}
//
// Address settings
//
webrtc::ViENetwork* ptrViENetwork =
webrtc::ViENetwork::GetInterface(ptrViE);
if (ptrViENetwork == NULL)
{
printf("ERROR in ViENetwork::GetInterface\n");
return -1;
}
// Setup transport.
TbExternalTransport* extTransport = NULL;
webrtc::test::VideoChannelTransport* video_channel_transport = NULL;
int testMode = 0;
std::cout << std::endl;
std::cout << "Enter 1 for testing packet loss and delay with "
"external transport: ";
std::string test_str;
std::getline(std::cin, test_str);
testMode = atoi(test_str.c_str());
if (testMode == 1)
{
// Avoid changing SSRC due to collision.
error = ptrViERtpRtcp->SetLocalSSRC(videoChannel, 1);
extTransport = new TbExternalTransport(*ptrViENetwork, videoChannel,
NULL);
error = ptrViENetwork->RegisterSendTransport(videoChannel,
*extTransport);
if (error == -1)
{
printf("ERROR in ViECodec::RegisterSendTransport \n");
return -1;
}
// Setting uniform loss. Actual values will be set by user.
NetworkParameters network;
network.loss_model = kUniformLoss;
// Set up packet loss value
std::cout << "Enter Packet Loss Percentage" << std::endl;
std::string rate_str;
std::getline(std::cin, rate_str);
network.packet_loss_rate = atoi(rate_str.c_str());
if (network.packet_loss_rate > 0) {
temporalToggling = false;
}
// Set network delay value
std::cout << "Enter network delay value [mS]" << std::endl;
std::string delay_str;
std::getline(std::cin, delay_str);
network.mean_one_way_delay = atoi(delay_str.c_str());
extTransport->SetNetworkParameters(network);
if (numTemporalLayers > 1 && temporalToggling) {
extTransport->SetTemporalToggle(numTemporalLayers);
} else {
// Disabled
extTransport->SetTemporalToggle(0);
}
}
else
{
video_channel_transport = new webrtc::test::VideoChannelTransport(
ptrViENetwork, videoChannel);
const char* ipAddress = "127.0.0.1";
const unsigned short rtpPort = 6000;
std::cout << std::endl;
std::cout << "Using rtp port: " << rtpPort << std::endl;
std::cout << std::endl;
error = video_channel_transport->SetLocalReceiver(rtpPort);
if (error == -1)
{
printf("ERROR in SetLocalReceiver\n");
return -1;
}
error = video_channel_transport->SetSendDestination(ipAddress, rtpPort);
if (error == -1)
{
printf("ERROR in SetSendDestination\n");
return -1;
}
}
error = ptrViEBase->StartReceive(videoChannel);
if (error == -1)
{
printf("ERROR in ViENetwork::StartReceive\n");
return -1;
}
error = ptrViEBase->StartSend(videoChannel);
if (error == -1)
{
printf("ERROR in ViENetwork::StartSend\n");
return -1;
}
//********************************************************
// Engine started
//********************************************************
// Call started
printf("\nLoopback call started\n\n");
printf("Press enter to stop...");
while ((getc(stdin)) != '\n')
;
//********************************************************
// Testing finished. Tear down Video Engine
//********************************************************
error = ptrViEBase->StopReceive(videoChannel);
if (error == -1)
{
printf("ERROR in ViEBase::StopReceive\n");
return -1;
}
error = ptrViEBase->StopSend(videoChannel);
if (error == -1)
{
printf("ERROR in ViEBase::StopSend\n");
return -1;
}
error = ptrViERender->StopRender(captureId);
if (error == -1)
{
printf("ERROR in ViERender::StopRender\n");
return -1;
}
error = ptrViERender->RemoveRenderer(captureId);
if (error == -1)
{
printf("ERROR in ViERender::RemoveRenderer\n");
return -1;
}
error = ptrViERender->StopRender(videoChannel);
if (error == -1)
{
printf("ERROR in ViERender::StopRender\n");
return -1;
}
error = ptrViERender->RemoveRenderer(videoChannel);
if (error == -1)
{
printf("ERROR in ViERender::RemoveRenderer\n");
return -1;
}
error = ptrViECapture->StopCapture(captureId);
if (error == -1)
{
printf("ERROR in ViECapture::StopCapture\n");
return -1;
}
error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
if (error == -1)
{
printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
return -1;
}
error = ptrViECapture->ReleaseCaptureDevice(captureId);
if (error == -1)
{
printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
return -1;
}
error = ptrViEBase->DeleteChannel(videoChannel);
if (error == -1)
{
printf("ERROR in ViEBase::DeleteChannel\n");
return -1;
}
delete video_channel_transport;
delete extTransport;
int remainingInterfaces = 0;
remainingInterfaces = ptrViECodec->Release();
remainingInterfaces += ptrViECapture->Release();
remainingInterfaces += ptrViERtpRtcp->Release();
remainingInterfaces += ptrViERender->Release();
remainingInterfaces += ptrViENetwork->Release();
remainingInterfaces += ptrViEBase->Release();
if (remainingInterfaces > 0)
{
printf("ERROR: Could not release all interfaces\n");
return -1;
}
bool deleted = webrtc::VideoEngine::Delete(ptrViE);
if (deleted == false)
{
printf("ERROR in VideoEngine::Delete\n");
return -1;
}
return 0;
//
// END: VideoEngine 3.0 Sample Code
//
// ===================================================================
}
// Wrapper around VideoEngineSampleCode() that adds the auto-test log banner.
// Returns 0 when the loopback call completed cleanly, 1 on failure.
int ViEAutoTest::ViELoopbackCall()
{
    ViETest::Log(" ");
    ViETest::Log("========================================");
    ViETest::Log(" ViE Autotest Loopback Call\n");

    const bool failed = VideoEngineSampleCode(_window1, _window2) != 0;

    ViETest::Log(" ");
    ViETest::Log(failed ? " ViE Autotest Loopback Call Failed"
                        : " ViE Autotest Loopback Call Done");
    ViETest::Log("========================================");
    ViETest::Log(" ");
    return failed ? 1 : 0;
}

View file

@ -1,212 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_main.h"
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/test/field_trial.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_window_creator.h"
// Command-line flags (gflags) controlling how the auto-test binary runs.
DEFINE_bool(automated, false, "Run Video engine tests in noninteractive mode.");
DEFINE_bool(auto_custom_call, false, "Run custom call directly.");
DEFINE_string(force_fieldtrials, "",
    "Field trials control experimental feature code which can be forced. "
    "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
    " will assign the group Enable to field trial WebRTC-FooFeature.");

// gtest suite names used to build --gtest_filter expressions.
static const std::string kStandardTest = "ViEStandardIntegrationTest";
static const std::string kExtendedTest = "ViEExtendedIntegrationTest";
static const std::string kApiTest = "ViEApiIntegrationTest";
// Builds the menu-index -> gtest-method-name table used when the user picks a
// single test from the interactive menu. Index 4 is kept as "[unused]" so the
// numbering of the remaining entries stays stable.
ViEAutoTestMain::ViEAutoTestMain() {
  index_to_test_method_map_[1] = "RunsBaseTestWithoutErrors";
  index_to_test_method_map_[2] = "RunsCaptureTestWithoutErrors";
  index_to_test_method_map_[3] = "RunsCodecTestWithoutErrors";
  index_to_test_method_map_[4] = "[unused]";
  index_to_test_method_map_[5] = "RunsImageProcessTestWithoutErrors";
  index_to_test_method_map_[6] = "RunsNetworkTestWithoutErrors";
  index_to_test_method_map_[7] = "RunsRenderTestWithoutErrors";
  index_to_test_method_map_[8] = "RunsRtpRtcpTestWithoutErrors";
}
// Initializes logging, gtest, gflags and field trials, then runs the tests in
// one of three modes: fully automated (all gtest tests), a direct custom call
// (--auto_custom_call), or the interactive menu. Returns the mode's result.
int ViEAutoTestMain::RunTests(int argc, char** argv) {
  // Initialize logging.
  ViETest::Init();
  // Initialize WebRTC testing framework so paths to resources can be resolved.
  webrtc::test::SetExecutablePath(argv[0]);
  // Initialize the testing framework.
  testing::InitGoogleTest(&argc, argv);
  // AllowCommandLineParsing allows us to ignore flags passed on to us by
  // Chromium build bots without having to explicitly disable them.
  google::AllowCommandLineReparsing();
  // Parse remaining flags:
  google::ParseCommandLineFlags(&argc, &argv, true);
  // Initialize field trial
  webrtc::test::InitFieldTrialsFromString(FLAGS_force_fieldtrials);

  int result;
  if (FLAGS_automated) {
    // Run in automated mode.
#if defined(WEBRTC_LINUX)
    // All window-related tests are disabled on Linux for now.
    // See https://code.google.com/p/chromium/issues/detail?id=318760
    // NOTE(review): this early return skips ViETest::Terminate() below —
    // presumably harmless since nothing ran, but confirm.
    return 0;
#endif
    result = RUN_ALL_TESTS();
  } else if (FLAGS_auto_custom_call) {
    // Run automated custom call.
    result = RunSpecialTestCase(8);
  } else {
    // Run in interactive mode.
    result = RunInteractiveMode();
  }

  ViETest::Terminate();
  return result;
}
// Prints the per-suite test menu and keeps prompting until the user enters a
// valid number. Returns the chosen index (0 means "go back").
int ViEAutoTestMain::AskUserForTestCase() {
  int selection = kInvalidChoice;
  while (selection == kInvalidChoice) {
    ViETest::Log("\nSpecific tests:");
    ViETest::Log("\t 0. Go back to previous menu.");

    // Print all test method choices. Assumes that map sorts on its key.
    int highest_index = 0;
    std::map<int, std::string>::const_iterator entry;
    for (entry = index_to_test_method_map_.begin();
         entry != index_to_test_method_map_.end();
         ++entry) {
      ViETest::Log("\t %d. %s", entry->first, entry->second.c_str());
      highest_index = entry->first;
    }

    ViETest::Log("Choose specific test:");
    selection = AskUserForNumber(0, highest_index);
  }
  return selection;
}
// Reads one integer from stdin and validates it against the inclusive range
// [min_allowed, max_allowed]. Returns the number, or kInvalidChoice if the
// input was not a number or was out of range.
int ViEAutoTestMain::AskUserForNumber(int min_allowed, int max_allowed) {
  int value = 0;
  const bool got_number = scanf("%d", &value) > 0;
  if (!got_number) {
    ViETest::Log("\nPlease enter a number instead, then hit enter.");
    getc(stdin);
    return kInvalidChoice;
  }
  getc(stdin);  // Consume enter key.

  const bool in_range = value >= min_allowed && value <= max_allowed;
  if (!in_range) {
    ViETest::Log("%d-%d are valid choices. Please try again.", min_allowed,
                 max_allowed);
    return kInvalidChoice;
  }
  return value;
}
// Points gtest's filter at a single "<test_case>.<test_method>" pattern and
// runs the matching test(s), returning RUN_ALL_TESTS()'s result.
int ViEAutoTestMain::RunTestMatching(const std::string test_case,
                                     const std::string test_method) {
  testing::FLAGS_gtest_filter = test_case + "." + test_method;
  return RUN_ALL_TESTS();
}
// Asks the user which test in |test_case_name| to run and runs it via gtest.
// A choice of 0 means "go back", in which case nothing runs and 0 is returned.
int ViEAutoTestMain::RunSpecificTestCaseIn(const std::string test_case_name)
{
  const int specific_choice = AskUserForTestCase();
  if (specific_choice == 0) {
    return 0;
  }
  return RunTestMatching(test_case_name,
                         index_to_test_method_map_[specific_choice]);
}
// Runs one of the non-gtest test cases (choices 7-10: loopback, custom call,
// simulcast, record). Creates two render windows for the duration of the test
// and returns the test's error count.
int ViEAutoTestMain::RunSpecialTestCase(int choice) {
  // 7-10 don't run in GTest and need to initialize by themselves.
  assert(choice >= 7 && choice <= 10);

  // Create the windows
  ViEWindowCreator windowCreator;
  ViEAutoTestWindowManagerInterface* windowManager =
      windowCreator.CreateTwoWindows();

  // Create the test cases
  ViEAutoTest vieAutoTest(windowManager->GetWindow1(),
                          windowManager->GetWindow2());

  int errors = 0;
  switch (choice) {
    case 7: errors = vieAutoTest.ViELoopbackCall(); break;
    case 8: errors = vieAutoTest.ViECustomCall(); break;
    case 9: errors = vieAutoTest.ViESimulcastCall(); break;
    case 10: errors = vieAutoTest.ViERecordCall(); break;
  }

  windowCreator.TerminateWindows();
  return errors;
}
// Presents the top-level interactive menu in a loop until the user chooses 0
// (quit). Returns 1 if the last selected test reported errors, otherwise 0.
// NOTE(review): |errors| only reflects the most recent selection — earlier
// failures in the same session are overwritten; presumably intentional.
int ViEAutoTestMain::RunInteractiveMode() {
  ViETest::Log(" ============================== ");
  ViETest::Log(" WebRTC ViE 3.x Autotest ");
  ViETest::Log(" ============================== \n");

  int choice = 0;
  int errors = 0;
  do {
    ViETest::Log("Test types: ");
    ViETest::Log("\t 0. Quit");
    ViETest::Log("\t 1. All standard tests (delivery test)");
    ViETest::Log("\t 2. All API tests");
    ViETest::Log("\t 3. All extended test");
    ViETest::Log("\t 4. Specific standard test");
    ViETest::Log("\t 5. Specific API test");
    ViETest::Log("\t 6. Specific extended test");
    ViETest::Log("\t 7. Simple loopback call");
    ViETest::Log("\t 8. Custom configure a call");
    ViETest::Log("\t 9. Simulcast in loopback");
    ViETest::Log("\t 10. Record");
    ViETest::Log("Select type of test:");
    choice = AskUserForNumber(0, 10);
    if (choice == kInvalidChoice) {
      continue;
    }
    switch (choice) {
      case 0: break;
      case 1: errors = RunTestMatching(kStandardTest, "*"); break;
      case 2: errors = RunTestMatching(kApiTest, "*"); break;
      case 3: errors = RunTestMatching(kExtendedTest, "*"); break;
      case 4: errors = RunSpecificTestCaseIn(kStandardTest); break;
      case 5: errors = RunSpecificTestCaseIn(kApiTest); break;
      case 6: errors = RunSpecificTestCaseIn(kExtendedTest); break;
      default: errors = RunSpecialTestCase(choice); break;
    }
  } while (choice != 0);

  if (errors) {
    ViETest::Log("Test done with errors, see ViEAutotestLog.txt for test "
                 "result.\n");
    return 1;
  } else {
    ViETest::Log("Test done without errors, see ViEAutotestLog.txt for "
                 "test result.\n");
    return 0;
  }
}

View file

@ -1,535 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest_network.cc
//
#include "webrtc/engine_configurations.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/libvietest/include/tb_capture_device.h"
#include "webrtc/video_engine/test/libvietest/include/tb_external_transport.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
#include "webrtc/video_engine/test/libvietest/include/tb_video_channel.h"
#if defined(_WIN32)
#include <qos.h>
#endif
// Interactive standard test for the ViENetwork API: runs a loopback call
// first over an external transport, then over the built-in UDP socket
// transport, and finally exercises source filtering (port and IP) while the
// operator visually confirms whether video reaches the render windows.
void ViEAutoTest::ViENetworkStandardTest()
{
    TbInterfaces ViE("ViENetworkStandardTest"); // Create VIE
    TbCaptureDevice tbCapture(ViE);
    // Inner scope so the channel is torn down before the capture device.
    {
        // Create a video channel
        TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
        tbCapture.ConnectTo(tbChannel.videoChannel);
        RenderCaptureDeviceAndOutputStream(&ViE, &tbChannel, &tbCapture);

        // ***************************************************************
        // Engine ready. Begin testing class
        // ***************************************************************

        //
        // Transport
        //
        TbExternalTransport testTransport(*ViE.network, tbChannel.videoChannel,
                                          NULL);
        EXPECT_EQ(0, ViE.network->RegisterSendTransport(
            tbChannel.videoChannel, testTransport));
        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));

        ViETest::Log("Call started using external transport, video should "
            "see video in both windows\n");
        AutoTestSleep(kAutoTestSleepTimeMs);

        // Transport can only be swapped while the channel is stopped.
        EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
        EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
        EXPECT_EQ(0, ViE.network->DeregisterSendTransport(
            tbChannel.videoChannel));

        char myIpAddress[64];
        memset(myIpAddress, 0, 64);
        unsigned short rtpPort = 1234;
        memcpy(myIpAddress, "127.0.0.1", sizeof("127.0.0.1"));
        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
            tbChannel.videoChannel, rtpPort, rtpPort + 1, myIpAddress));
        EXPECT_EQ(0, ViE.network->SetSendDestination(
            tbChannel.videoChannel, myIpAddress, rtpPort,
            rtpPort + 1, rtpPort));
        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));

        ViETest::Log("Changed to WebRTC SocketTransport, you should still see "
                     "video in both windows\n");
        AutoTestSleep(kAutoTestSleepTimeMs);

        // A filter for the wrong ports must drop all incoming media.
        EXPECT_EQ(0, ViE.network->SetSourceFilter(
            tbChannel.videoChannel, rtpPort + 10, rtpPort + 11, myIpAddress));
        ViETest::Log("Added UDP port filter for incorrect ports, you should "
                     "not see video in Window2");
        AutoTestSleep(2000);

        // A filter for the wrong source IP must also drop all incoming media.
        EXPECT_EQ(0, ViE.network->SetSourceFilter(
            tbChannel.videoChannel, rtpPort, rtpPort + 1, "123.1.1.0"));
        ViETest::Log("Added IP filter for incorrect IP address, you should not "
                     "see video in Window2");
        AutoTestSleep(2000);

        // Matching filter: media flows again.
        EXPECT_EQ(0, ViE.network->SetSourceFilter(
            tbChannel.videoChannel, rtpPort, rtpPort + 1, myIpAddress));
        ViETest::Log("Added IP filter for this computer, you should see video "
                     "in Window2 again\n");
        AutoTestSleep(kAutoTestSleepTimeMs);

        tbCapture.Disconnect(tbChannel.videoChannel);
    }
}
// Extended network test focused on ToS/DSCP marking of outgoing packets.
// Cycles through DSCP values with and without the setsockopt path; requires
// administrator rights on Windows, and the operator is expected to verify
// the markings externally (e.g. with Wireshark).
void ViEAutoTest::ViENetworkExtendedTest()
{
//***************************************************************
// Begin create/initialize WebRTC Video Engine for testing
//***************************************************************
TbInterfaces ViE("ViENetworkExtendedTest"); // Create VIE
TbCaptureDevice tbCapture(ViE);
EXPECT_EQ(0, ViE.render->AddRenderer(
tbCapture.captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
{
//
// ToS
//
// Create a video channel
TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
tbCapture.ConnectTo(tbChannel.videoChannel);
const char* remoteIp = "192.168.200.1";
int DSCP = 0;
bool useSetSockOpt = false;
// Lower the frame rate so the capture stream is easy to inspect.
webrtc::VideoCodec videoCodec;
EXPECT_EQ(0, ViE.codec->GetSendCodec(
tbChannel.videoChannel, videoCodec));
videoCodec.maxFramerate = 5;
EXPECT_EQ(0, ViE.codec->SetSendCodec(
tbChannel.videoChannel, videoCodec));
//***************************************************************
// Engine ready. Begin testing class
//***************************************************************
char myIpAddress[64];
memset(myIpAddress, 0, 64);
unsigned short rtpPort = 9000;
EXPECT_EQ(0, ViE.network->GetLocalIP(myIpAddress, false));
EXPECT_EQ(0, ViE.network->SetLocalReceiver(
tbChannel.videoChannel, rtpPort, rtpPort + 1, myIpAddress));
EXPECT_EQ(0, ViE.network->SetSendDestination(
tbChannel.videoChannel, remoteIp, rtpPort, rtpPort + 1, rtpPort));
// ToS
int tos_result = ViE.network->SetSendToS(tbChannel.videoChannel, 2);
EXPECT_EQ(0, tos_result);
if (tos_result != 0)
{
// Setting ToS typically fails without elevated privileges; tell the
// operator how to re-run and wait for acknowledgement.
ViETest::Log("ViESetSendToS error!.");
ViETest::Log("You must be admin to run these tests.");
ViETest::Log("On Win7 and late Vista, you need to right click the "
"exe and choose");
ViETest::Log("\"Run as administrator\"\n");
getc(stdin);
}
EXPECT_EQ(0, ViE.network->GetSendToS(
tbChannel.videoChannel, DSCP, useSetSockOpt)); // No ToS set
EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
ViETest::Log("Use Wireshark to capture the outgoing video stream and "
"verify ToS settings\n");
ViETest::Log(" DSCP set to 0x%x\n", DSCP);
AutoTestSleep(1000);
// Max DSCP value without setsockopt.
EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 63));
EXPECT_EQ(0, ViE.network->GetSendToS(
tbChannel.videoChannel, DSCP, useSetSockOpt)); // No ToS set
ViETest::Log(" DSCP set to 0x%x\n", DSCP);
AutoTestSleep(1000);
// Reset, then repeat with the setsockopt code path (last arg true).
EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 0));
EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 2, true));
EXPECT_EQ(0, ViE.network->GetSendToS(
tbChannel.videoChannel, DSCP, useSetSockOpt)); // No ToS set
ViETest::Log(" DSCP set to 0x%x\n", DSCP);
AutoTestSleep(1000);
EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 63, true));
EXPECT_EQ(0, ViE.network->GetSendToS(
tbChannel.videoChannel, DSCP, useSetSockOpt)); // No ToS set
ViETest::Log(" DSCP set to 0x%x\n", DSCP);
AutoTestSleep(1000);
tbCapture.Disconnect(tbChannel.videoChannel);
}
//***************************************************************
// Testing finished. Tear down Video Engine
//***************************************************************
}
// API-contract test for ViENetwork. Verifies return codes (not media flow)
// for transport registration, incoming-packet injection, local receiver and
// send-destination configuration, source filtering, ToS/DSCP and MTU —
// including the error paths (double registration, reconfiguration while
// sending, too-short packets, out-of-range DSCP values).
void ViEAutoTest::ViENetworkAPITest()
{
//***************************************************************
// Begin create/initialize WebRTC Video Engine for testing
//***************************************************************
TbInterfaces ViE("ViENetworkAPITest"); // Create VIE
{
// Create a video channel
TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecI420);
//***************************************************************
// Engine ready. Begin testing class
//***************************************************************
//
// External transport
//
TbExternalTransport testTransport(*ViE.network, tbChannel.videoChannel,
NULL);
EXPECT_EQ(0, ViE.network->RegisterSendTransport(
tbChannel.videoChannel, testTransport));
// Registering twice on the same channel must fail.
EXPECT_NE(0, ViE.network->RegisterSendTransport(
tbChannel.videoChannel, testTransport));
// Create an empty RTP packet.
unsigned char packet[3000];
memset(packet, 0, sizeof(packet));
packet[0] = 0x80; // V=2, P=0, X=0, CC=0
packet[1] = 0x7C; // M=0, PT = 124 (I420)
// Create an empty RTCP app packet.
unsigned char rtcpacket[3000];
memset(rtcpacket,0, sizeof(rtcpacket));
rtcpacket[0] = 0x80; // V=2, P=0, X=0, CC=0
rtcpacket[1] = 0xCC; // M=0, PT = 204 (RTCP app)
rtcpacket[2] = 0x0;
rtcpacket[3] = 0x03; // RTCP length field (32-bit words minus one).
// Injecting packets must fail before the channel is receiving.
EXPECT_NE(0, ViE.network->ReceivedRTPPacket(
tbChannel.videoChannel, packet, 1500));
EXPECT_NE(0, ViE.network->ReceivedRTCPPacket(
tbChannel.videoChannel, rtcpacket, 1500));
EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
EXPECT_EQ(0, ViE.network->ReceivedRTPPacket(
tbChannel.videoChannel, packet, 1500));
EXPECT_EQ(0, ViE.network->ReceivedRTCPPacket(
tbChannel.videoChannel, rtcpacket, 1500));
// 11 bytes is shorter than a minimal RTP header and must be rejected.
EXPECT_NE(0, ViE.network->ReceivedRTPPacket(
tbChannel.videoChannel, packet, 11));
EXPECT_NE(0, ViE.network->ReceivedRTPPacket(
tbChannel.videoChannel, packet, 11));
EXPECT_EQ(0, ViE.network->ReceivedRTPPacket(
tbChannel.videoChannel, packet, 3000));
EXPECT_EQ(0, ViE.network->ReceivedRTPPacket(
tbChannel.videoChannel, packet, 3000));
EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
EXPECT_NE(0, ViE.network->DeregisterSendTransport(
tbChannel.videoChannel)); // Sending
EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
EXPECT_EQ(0, ViE.network->DeregisterSendTransport(
tbChannel.videoChannel));
EXPECT_NE(0, ViE.network->DeregisterSendTransport(
tbChannel.videoChannel)); // Already deregistered
//
// Local receiver
//
// Reconfiguring is allowed while not receiving...
EXPECT_EQ(0, ViE.network->SetLocalReceiver(
tbChannel.videoChannel, 1234, 1235, "127.0.0.1"));
EXPECT_EQ(0, ViE.network->SetLocalReceiver(
tbChannel.videoChannel, 1234, 1235, "127.0.0.1"));
EXPECT_EQ(0, ViE.network->SetLocalReceiver(
tbChannel.videoChannel, 1236, 1237, "127.0.0.1"));
unsigned short rtpPort = 0;
unsigned short rtcpPort = 0;
char ipAddress[64];
memset(ipAddress, 0, 64);
EXPECT_EQ(0, ViE.network->GetLocalReceiver(
tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
// ...but not while receiving.
EXPECT_NE(0, ViE.network->SetLocalReceiver(
tbChannel.videoChannel, 1234, 1235, "127.0.0.1"));
EXPECT_EQ(0, ViE.network->GetLocalReceiver(
tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
//
// Send destination
//
EXPECT_EQ(0, ViE.network->SetSendDestination(
tbChannel.videoChannel, "127.0.0.1", 1234, 1235, 1234, 1235));
EXPECT_EQ(0, ViE.network->SetSendDestination(
tbChannel.videoChannel, "127.0.0.1", 1236, 1237, 1234, 1235));
unsigned short sourceRtpPort = 0;
unsigned short sourceRtcpPort = 0;
EXPECT_EQ(0, ViE.network->GetSendDestination(
tbChannel.videoChannel, ipAddress, rtpPort, rtcpPort,
sourceRtpPort, sourceRtcpPort));
EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
// Not allowed while sending
EXPECT_NE(0, ViE.network->SetSendDestination(
tbChannel.videoChannel, "127.0.0.1", 1234, 1235, 1234, 1235));
EXPECT_EQ(kViENetworkAlreadySending, ViE.base->LastError());
EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
EXPECT_EQ(0, ViE.network->SetSendDestination(
tbChannel.videoChannel, "127.0.0.1", 1234, 1235, 1234, 1235));
EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
EXPECT_EQ(0, ViE.network->GetSendDestination(
tbChannel.videoChannel, ipAddress, rtpPort, rtcpPort,
sourceRtpPort, sourceRtcpPort));
EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
//
// Address information
//
// GetSourceInfo: Tested in functional test
EXPECT_EQ(0, ViE.network->GetLocalIP(ipAddress, false));
// TODO(unknown): IPv6
//
// Filter
//
// No filter configured yet, so the getter must fail.
EXPECT_NE(0, ViE.network->GetSourceFilter(
tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
EXPECT_EQ(0, ViE.network->SetSourceFilter(
tbChannel.videoChannel, 1234, 1235, "10.10.10.10"));
EXPECT_EQ(0, ViE.network->SetSourceFilter(
tbChannel.videoChannel, 1236, 1237, "127.0.0.1"));
EXPECT_EQ(0, ViE.network->GetSourceFilter(
tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
// Clearing the filter (0/0/NULL) makes the getter fail again.
EXPECT_EQ(0, ViE.network->SetSourceFilter(
tbChannel.videoChannel, 0, 0, NULL));
EXPECT_NE(0, ViE.network->GetSourceFilter(
tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
}
{
// ToS/DSCP getter and setter contract on a fresh channel.
TbVideoChannel tbChannel(ViE); // Create a video channel
EXPECT_EQ(0, ViE.network->SetLocalReceiver(
tbChannel.videoChannel, 1234));
int DSCP = 0;
bool useSetSockOpt = false;
// SetSockOpt should work without a locally bind socket
EXPECT_EQ(0, ViE.network->GetSendToS(
tbChannel.videoChannel, DSCP, useSetSockOpt)); // No ToS set
EXPECT_EQ(0, DSCP);
// Invalid input
EXPECT_NE(0, ViE.network->SetSendToS(tbChannel.videoChannel, -1, true));
// Invalid input
EXPECT_NE(0, ViE.network->SetSendToS(tbChannel.videoChannel, 64, true));
// Valid
EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 20, true));
EXPECT_EQ(0, ViE.network->GetSendToS(
tbChannel.videoChannel, DSCP, useSetSockOpt));
EXPECT_EQ(20, DSCP);
EXPECT_TRUE(useSetSockOpt);
// Disable
EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 0, true));
EXPECT_EQ(0, ViE.network->GetSendToS(
tbChannel.videoChannel, DSCP, useSetSockOpt));
EXPECT_EQ(0, DSCP);
char myIpAddress[64];
memset(myIpAddress, 0, 64);
// Get local ip to be able to set ToS without setSockOpt
EXPECT_EQ(0, ViE.network->GetLocalIP(myIpAddress, false));
EXPECT_EQ(0, ViE.network->SetLocalReceiver(
tbChannel.videoChannel, 1234, 1235, myIpAddress));
// Invalid input
EXPECT_NE(0, ViE.network->SetSendToS(
tbChannel.videoChannel, -1, false));
EXPECT_NE(0, ViE.network->SetSendToS(
tbChannel.videoChannel, 64, false)); // Invalid input
EXPECT_EQ(0, ViE.network->GetSendToS(
tbChannel.videoChannel, DSCP, useSetSockOpt)); // No ToS set
EXPECT_EQ(0, DSCP);
int tos_result = ViE.network->SetSendToS(
tbChannel.videoChannel, 20, false); // Valid
EXPECT_EQ(0, tos_result);
if (tos_result != 0)
{
// Likely a privilege problem; prompt the operator and wait.
ViETest::Log("ViESetSendToS error!.");
ViETest::Log("You must be admin to run these tests.");
ViETest::Log("On Win7 and late Vista, you need to right click the "
"exe and choose");
ViETest::Log("\"Run as administrator\"\n");
getc(stdin);
}
EXPECT_EQ(0, ViE.network->GetSendToS(
tbChannel.videoChannel, DSCP, useSetSockOpt));
EXPECT_EQ(20, DSCP);
#ifdef _WIN32
EXPECT_FALSE(useSetSockOpt);
#else // useSetSockOpt is true on Linux and Mac
EXPECT_TRUE(useSetSockOpt);
#endif
EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 0, false));
EXPECT_EQ(0, ViE.network->GetSendToS(
tbChannel.videoChannel, DSCP, useSetSockOpt));
EXPECT_EQ(0, DSCP);
}
{
// From qos.h. (*) -> supported by ViE
//
// #define SERVICETYPE_NOTRAFFIC 0x00000000
// #define SERVICETYPE_BESTEFFORT 0x00000001 (*)
// #define SERVICETYPE_CONTROLLEDLOAD 0x00000002 (*)
// #define SERVICETYPE_GUARANTEED 0x00000003 (*)
// #define SERVICETYPE_NETWORK_UNAVAILABLE 0x00000004
// #define SERVICETYPE_GENERAL_INFORMATION 0x00000005
// #define SERVICETYPE_NOCHANGE 0x00000006
// #define SERVICETYPE_NONCONFORMING 0x00000009
// #define SERVICETYPE_NETWORK_CONTROL 0x0000000A
// #define SERVICETYPE_QUALITATIVE 0x0000000D (*)
//
// #define SERVICE_BESTEFFORT 0x80010000
// #define SERVICE_CONTROLLEDLOAD 0x80020000
// #define SERVICE_GUARANTEED 0x80040000
// #define SERVICE_QUALITATIVE 0x80200000
TbVideoChannel tbChannel(ViE); // Create a video channel
#if defined(_WIN32)
// These tests are disabled since they currently fail on Windows.
// Exact reason is unkown.
// See https://code.google.com/p/webrtc/issues/detail?id=1266.
// TODO(mflodman): remove these APIs?
//// No socket
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_BESTEFFORT));
//EXPECT_EQ(0, ViE.network->SetLocalReceiver(
// tbChannel.videoChannel, 1234));
//// Sender not initialized
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_BESTEFFORT));
//EXPECT_EQ(0, ViE.network->SetSendDestination(
// tbChannel.videoChannel, "127.0.0.1", 12345));
//// Try to set all non-supported service types
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_NOTRAFFIC));
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_NETWORK_UNAVAILABLE));
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_GENERAL_INFORMATION));
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_NOCHANGE));
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_NONCONFORMING));
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_NOTRAFFIC));
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_NETWORK_CONTROL));
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICE_BESTEFFORT));
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICE_CONTROLLEDLOAD));
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICE_GUARANTEED));
//EXPECT_NE(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICE_QUALITATIVE));
//// Loop through valid service settings
//bool enabled = false;
//int serviceType = 0;
//int overrideDSCP = 0;
//EXPECT_EQ(0, ViE.network->GetSendGQoS(
// tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
//EXPECT_FALSE(enabled);
//EXPECT_EQ(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_BESTEFFORT));
//EXPECT_EQ(0, ViE.network->GetSendGQoS(
// tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
//EXPECT_TRUE(enabled);
//EXPECT_EQ(SERVICETYPE_BESTEFFORT, serviceType);
//EXPECT_FALSE(overrideDSCP);
//EXPECT_EQ(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_CONTROLLEDLOAD));
//EXPECT_EQ(0, ViE.network->GetSendGQoS(
// tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
//EXPECT_TRUE(enabled);
//EXPECT_EQ(SERVICETYPE_CONTROLLEDLOAD, serviceType);
//EXPECT_FALSE(overrideDSCP);
//EXPECT_EQ(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_GUARANTEED));
//EXPECT_EQ(0, ViE.network->GetSendGQoS(
// tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
//EXPECT_TRUE(enabled);
//EXPECT_EQ(SERVICETYPE_GUARANTEED, serviceType);
//EXPECT_FALSE(overrideDSCP);
//EXPECT_EQ(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, true, SERVICETYPE_QUALITATIVE));
//EXPECT_EQ(0, ViE.network->GetSendGQoS(
// tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
//EXPECT_TRUE(enabled);
//EXPECT_EQ(SERVICETYPE_QUALITATIVE, serviceType);
//EXPECT_FALSE(overrideDSCP);
//EXPECT_EQ(0, ViE.network->SetSendGQoS(
// tbChannel.videoChannel, false, SERVICETYPE_QUALITATIVE));
//EXPECT_EQ(0, ViE.network->GetSendGQoS(
// tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
//EXPECT_FALSE(enabled);
#endif
}
{
//
// MTU and packet burst
//
// Create a video channel
TbVideoChannel tbChannel(ViE);
// Invalid input
EXPECT_NE(0, ViE.network->SetMTU(tbChannel.videoChannel, 1600));
// Valid input
EXPECT_EQ(0, ViE.network->SetMTU(tbChannel.videoChannel, 800));
}
//***************************************************************
// Testing finished. Tear down Video Engine
//***************************************************************
}

View file

@ -1,597 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest_record.cc
//
// This code is also used as sample code for ViE 3.0
//
#include <fstream>
#include <stdio.h>
#include "webrtc/common_types.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/test/channel_transport/include/channel_transport.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/libvietest/include/tb_external_transport.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_network.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#define VCM_RED_PAYLOAD_TYPE 96
#define VCM_ULPFEC_PAYLOAD_TYPE 97
#define DEFAULT_AUDIO_PORT 11113
#define DEFAULT_AUDIO_CODEC "ISAC"
#define DEFAULT_VIDEO_CODEC_WIDTH 640
#define DEFAULT_VIDEO_CODEC_HEIGHT 480
#define DEFAULT_VIDEO_CODEC_START_RATE 1000
#define DEFAULT_RECORDING_FOLDER "RECORDING"
#define DEFAULT_RECORDING_AUDIO "/audio_debug.aec"
#define DEFAULT_RECORDING_VIDEO "/video_debug.yuv"
#define DEFAULT_RECORDING_AUDIO_RTP "/audio_rtpdump.rtp"
#define DEFAULT_RECORDING_VIDEO_RTP "/video_rtpdump.rtp"
bool GetAudioDevices(webrtc::VoEBase* voe_base,
webrtc::VoEHardware* voe_hardware,
char* recording_device_name,
int& recording_device_index,
char* playbackDeviceName,
int& playback_device_index);
bool GetAudioCodecRecord(webrtc::VoECodec* voe_codec,
webrtc::CodecInst& audio_codec);
// Interactive recording sample: sets up an audio (VoE) and video (ViE)
// loopback call on 127.0.0.1, lets the operator pick a capture device, then
// records AEC debug audio, raw encoder-input video and RTP dumps of both
// streams into a timestamped "recording<ms>" folder until 's' + enter is
// pressed. Optionally writes operator-entered time labels to labeling.txt.
//
// window1/window2: platform render-window handles for local preview and
// remote (decoded) video. Returns 0 on success, -1 on any API failure.
int VideoEngineSampleRecordCode(void* window1, void* window2) {
int error = 0;
// Audio settings.
int audio_tx_port = DEFAULT_AUDIO_PORT;
int audio_rx_port = DEFAULT_AUDIO_PORT;
webrtc::CodecInst audio_codec;
int audio_channel = -1;
int audio_capture_device_index = -1;
int audio_playback_device_index = -1;
const unsigned int KMaxDeviceNameLength = 128;
const unsigned int KMaxUniqueIdLength = 256;
char deviceName[KMaxDeviceNameLength];
char audio_capture_device_name[KMaxUniqueIdLength] = "";
char audio_playbackDeviceName[KMaxUniqueIdLength] = "";
// Network settings.
const char* ipAddress = "127.0.0.1";
const int rtpPort = 6000;
//
// Create a VideoEngine instance
//
webrtc::VideoEngine* ptrViE = NULL;
ptrViE = webrtc::VideoEngine::Create();
if (ptrViE == NULL) {
printf("ERROR in VideoEngine::Create\n");
return -1;
}
error = ptrViE->SetTraceFilter(webrtc::kTraceAll);
if (error == -1) {
printf("ERROR in VideoEngine::SetTraceLevel\n");
return -1;
}
std::string trace_file =
ViETest::GetResultOutputPath() + "ViERecordCall_trace.txt";
error = ptrViE->SetTraceFile(trace_file.c_str());
if (error == -1) {
printf("ERROR in VideoEngine::SetTraceFile\n");
return -1;
}
//
// Create a VoE instance
//
webrtc::VoiceEngine* voe = webrtc::VoiceEngine::Create();
//
// Init VideoEngine and create a channel
//
webrtc::ViEBase* ptrViEBase = webrtc::ViEBase::GetInterface(ptrViE);
if (ptrViEBase == NULL) {
printf("ERROR in ViEBase::GetInterface\n");
return -1;
}
error = ptrViEBase->Init();
if (error == -1) {
printf("ERROR in ViEBase::Init\n");
return -1;
}
webrtc::VoEBase* voe_base = webrtc::VoEBase::GetInterface(voe);
if (voe_base == NULL) {
printf("ERROR in VoEBase::GetInterface\n");
return -1;
}
error = voe_base->Init();
if (error == -1) {
printf("ERROR in VoEBase::Init\n");
return -1;
}
int videoChannel = -1;
error = ptrViEBase->CreateChannel(videoChannel);
if (error == -1) {
printf("ERROR in ViEBase::CreateChannel\n");
return -1;
}
webrtc::VoEHardware* voe_hardware =
webrtc::VoEHardware::GetInterface(voe);
webrtc::VoECodec* voe_codec = webrtc::VoECodec::GetInterface(voe);
webrtc::VoEAudioProcessing* voe_apm =
webrtc::VoEAudioProcessing::GetInterface(voe);
webrtc::VoENetwork* voe_network =
webrtc::VoENetwork::GetInterface(voe);
// Get the audio device for the call.
memset(audio_capture_device_name, 0, KMaxUniqueIdLength);
memset(audio_playbackDeviceName, 0, KMaxUniqueIdLength);
GetAudioDevices(voe_base, voe_hardware, audio_capture_device_name,
audio_capture_device_index, audio_playbackDeviceName,
audio_playback_device_index);
// Get the audio codec for the call.
memset(static_cast<void*>(&audio_codec), 0, sizeof(audio_codec));
GetAudioCodecRecord(voe_codec, audio_codec);
// Set up the audio channel: loopback transport, chosen devices/codec,
// and AGC + high noise suppression on the processing module.
audio_channel = voe_base->CreateChannel();
rtc::scoped_ptr<webrtc::test::VoiceChannelTransport> voice_channel_transport(
new webrtc::test::VoiceChannelTransport(voe_network, audio_channel));
voice_channel_transport->SetSendDestination(ipAddress, audio_tx_port);
voice_channel_transport->SetLocalReceiver(audio_rx_port);
voe_hardware->SetRecordingDevice(audio_capture_device_index);
voe_hardware->SetPlayoutDevice(audio_playback_device_index);
voe_codec->SetSendCodec(audio_channel, audio_codec);
voe_apm->SetAgcStatus(true, webrtc::kAgcDefault);
voe_apm->SetNsStatus(true, webrtc::kNsHighSuppression);
//
// List available capture devices, allocate and connect.
//
webrtc::ViECapture* ptrViECapture =
webrtc::ViECapture::GetInterface(ptrViE);
if (ptrViECapture == NULL) {
printf("ERROR in ViECapture::GetInterface\n");
return -1;
}
webrtc::VoERTP_RTCP* ptrVoERtpRtcp =
webrtc::VoERTP_RTCP::GetInterface(voe);
if (ptrVoERtpRtcp == NULL) {
printf("ERROR in VoERTP_RTCP::GetInterface\n");
return -1;
}
memset(deviceName, 0, KMaxDeviceNameLength);
char uniqueId[KMaxUniqueIdLength];
memset(uniqueId, 0, KMaxUniqueIdLength);
printf("Available capture devices:\n");
int captureIdx = 0;
for (captureIdx = 0;
captureIdx < ptrViECapture->NumberOfCaptureDevices();
captureIdx++) {
memset(deviceName, 0, KMaxDeviceNameLength);
memset(uniqueId, 0, KMaxUniqueIdLength);
error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
KMaxDeviceNameLength, uniqueId,
KMaxUniqueIdLength);
if (error == -1) {
printf("ERROR in ViECapture::GetCaptureDevice\n");
return -1;
}
printf("\t %d. %s\n", captureIdx + 1, deviceName);
}
printf("\nChoose capture device: ");
#ifdef WEBRTC_ANDROID
captureIdx = 0;
printf("0\n");
#else
if (scanf("%d", &captureIdx) != 1) {
printf("Error in scanf()\n");
return -1;
}
getc(stdin);
captureIdx = captureIdx - 1; // Compensate for idx start at 1.
#endif
error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
KMaxDeviceNameLength, uniqueId,
KMaxUniqueIdLength);
if (error == -1) {
printf("ERROR in ViECapture::GetCaptureDevice\n");
return -1;
}
int captureId = 0;
error = ptrViECapture->AllocateCaptureDevice(uniqueId, KMaxUniqueIdLength,
captureId);
if (error == -1) {
printf("ERROR in ViECapture::AllocateCaptureDevice\n");
return -1;
}
error = ptrViECapture->ConnectCaptureDevice(captureId, videoChannel);
if (error == -1) {
printf("ERROR in ViECapture::ConnectCaptureDevice\n");
return -1;
}
error = ptrViECapture->StartCapture(captureId);
if (error == -1) {
printf("ERROR in ViECapture::StartCapture\n");
return -1;
}
//
// RTP/RTCP settings
//
webrtc::ViERTP_RTCP* ptrViERtpRtcp =
webrtc::ViERTP_RTCP::GetInterface(ptrViE);
if (ptrViERtpRtcp == NULL) {
printf("ERROR in ViERTP_RTCP::GetInterface\n");
return -1;
}
error = ptrViERtpRtcp->SetRTCPStatus(videoChannel,
webrtc::kRtcpCompound_RFC4585);
if (error == -1) {
printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
return -1;
}
error = ptrViERtpRtcp->SetKeyFrameRequestMethod(
videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);
if (error == -1) {
printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
return -1;
}
error = ptrViERtpRtcp->SetRembStatus(videoChannel, true, true);
if (error == -1) {
printf("ERROR in ViERTP_RTCP::SetTMMBRStatus\n");
return -1;
}
//
// Set up rendering
//
webrtc::ViERender* ptrViERender = webrtc::ViERender::GetInterface(ptrViE);
if (ptrViERender == NULL) {
printf("ERROR in ViERender::GetInterface\n");
return -1;
}
// Local preview in window1, decoded remote stream in window2.
error = ptrViERender->AddRenderer(captureId, window1, 0, 0.0, 0.0, 1.0, 1.0);
if (error == -1) {
printf("ERROR in ViERender::AddRenderer\n");
return -1;
}
error = ptrViERender->StartRender(captureId);
if (error == -1) {
printf("ERROR in ViERender::StartRender\n");
return -1;
}
error = ptrViERender->AddRenderer(videoChannel, window2, 1, 0.0, 0.0, 1.0,
1.0);
if (error == -1) {
printf("ERROR in ViERender::AddRenderer\n");
return -1;
}
error = ptrViERender->StartRender(videoChannel);
if (error == -1) {
printf("ERROR in ViERender::StartRender\n");
return -1;
}
//
// Setup codecs
//
webrtc::ViECodec* ptrViECodec = webrtc::ViECodec::GetInterface(ptrViE);
if (ptrViECodec == NULL) {
printf("ERROR in ViECodec::GetInterface\n");
return -1;
}
webrtc::VideoCodec videoCodec;
memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
int codecIdx = 0;
#ifdef WEBRTC_ANDROID
codecIdx = 0;
printf("0\n");
#else
codecIdx = 0; // Compensate for idx start at 1.
#endif
error = ptrViECodec->GetCodec(codecIdx, videoCodec);
if (error == -1) {
printf("ERROR in ViECodec::GetCodec\n");
return -1;
}
// Set spatial resolution option
videoCodec.width = DEFAULT_VIDEO_CODEC_WIDTH;
videoCodec.height = DEFAULT_VIDEO_CODEC_HEIGHT;
// Set start bit rate
videoCodec.startBitrate = DEFAULT_VIDEO_CODEC_START_RATE;
error = ptrViECodec->SetSendCodec(videoChannel, videoCodec);
if (error == -1) {
printf("ERROR in ViECodec::SetSendCodec\n");
return -1;
}
//
// Address settings
//
webrtc::ViENetwork* ptrViENetwork =
webrtc::ViENetwork::GetInterface(ptrViE);
if (ptrViENetwork == NULL) {
printf("ERROR in ViENetwork::GetInterface\n");
return -1;
}
webrtc::test::VideoChannelTransport* video_channel_transport =
new webrtc::test::VideoChannelTransport(ptrViENetwork, videoChannel);
error = video_channel_transport->SetSendDestination(ipAddress, rtpPort);
if (error == -1) {
printf("ERROR in SetSendDestination\n");
return -1;
}
error = video_channel_transport->SetLocalReceiver(rtpPort);
if (error == -1) {
printf("ERROR in SetLocalReceiver\n");
return -1;
}
// Ask whether the operator wants to stamp time labels during the call.
std::string str;
int enable_labeling = 0;
std::cout << std::endl;
std::cout << "Do you want to label this recording?" << std::endl;
std::cout << "0. No (default)." << std::endl;
std::cout << "1. This call will be labeled on the fly." << std::endl;
std::getline(std::cin, str);
enable_labeling = atoi(str.c_str());
// Output folder named after the current wall-clock in milliseconds.
uint32_t folder_time = static_cast<uint32_t>
(webrtc::TickTime::MillisecondTimestamp());
std::stringstream folder_time_str;
folder_time_str << folder_time;
const std::string folder_name = "recording" + folder_time_str.str();
printf("recording name = %s\n", folder_name.c_str());
// TODO(mikhal): use file_utils.
#ifdef WIN32
_mkdir(folder_name.c_str());
#else
mkdir(folder_name.c_str(), 0777);
#endif
const std::string audio_filename = folder_name + DEFAULT_RECORDING_AUDIO;
const std::string video_filename = folder_name + DEFAULT_RECORDING_VIDEO;
const std::string audio_rtp_filename = folder_name +
DEFAULT_RECORDING_AUDIO_RTP;
const std::string video_rtp_filename = folder_name +
DEFAULT_RECORDING_VIDEO_RTP;
std::fstream timing;
if (enable_labeling == 1) {
std::cout << "Press enter to stamp current time."<< std::endl;
std::string timing_file = folder_name + "/labeling.txt";
timing.open(timing_file.c_str(), std::fstream::out | std::fstream::app);
}
printf("\nPress enter to start recording\n");
std::getline(std::cin, str);
printf("\nRecording started\n\n");
error = ptrViEBase->StartReceive(videoChannel);
if (error == -1) {
printf("ERROR in ViENetwork::StartReceive\n");
return -1;
}
error = ptrViEBase->StartSend(videoChannel);
if (error == -1) {
printf("ERROR in ViENetwork::StartSend\n");
return -1;
}
error = voe_base->StartSend(audio_channel);
if (error == -1) {
printf("ERROR in VoENetwork::StartSend\n");
return -1;
}
// Engine started
// Start all four recordings: AEC audio dump, encoder-input video dump,
// and outgoing RTP dumps for video and audio.
voe_apm->StartDebugRecording(audio_filename.c_str());
ptrViECodec->StartDebugRecording(videoChannel, video_filename.c_str());
ptrViERtpRtcp->StartRTPDump(videoChannel,
video_rtp_filename.c_str(), webrtc::kRtpOutgoing);
ptrVoERtpRtcp->StartRTPDump(audio_channel,
audio_rtp_filename.c_str(), webrtc::kRtpOutgoing);
printf("Press s + enter to stop...");
int64_t clock_time;
if (enable_labeling == 1) {
clock_time = webrtc::TickTime::MillisecondTimestamp();
timing << clock_time << std::endl;
}
// Block until 's'; every bare enter stamps a label when labeling is on.
char c = getc(stdin);
fflush(stdin);
while (c != 's') {
if (c == '\n' && enable_labeling == 1) {
clock_time = webrtc::TickTime::MillisecondTimestamp();
timing << clock_time << std::endl;
}
c = getc(stdin);
}
if (enable_labeling == 1) {
clock_time = webrtc::TickTime::MillisecondTimestamp();
timing << clock_time << std::endl;
}
ptrViERtpRtcp->StopRTPDump(videoChannel, webrtc::kRtpOutgoing);
ptrVoERtpRtcp->StopRTPDump(audio_channel, webrtc::kRtpOutgoing);
voe_apm->StopDebugRecording();
ptrViECodec->StopDebugRecording(videoChannel);
if (enable_labeling == 1)
timing.close();
// Recording finished. Tear down Video Engine.
error = ptrViEBase->StopReceive(videoChannel);
if (error == -1) {
printf("ERROR in ViEBase::StopReceive\n");
return -1;
}
error = ptrViEBase->StopSend(videoChannel);
if (error == -1) {
printf("ERROR in ViEBase::StopSend\n");
return -1;
}
error = voe_base->StopSend(audio_channel);
error = ptrViERender->StopRender(captureId);
if (error == -1) {
printf("ERROR in ViERender::StopRender\n");
return -1;
}
error = ptrViERender->RemoveRenderer(captureId);
if (error == -1) {
printf("ERROR in ViERender::RemoveRenderer\n");
return -1;
}
error = ptrViERender->StopRender(videoChannel);
if (error == -1) {
printf("ERROR in ViERender::StopRender\n");
return -1;
}
error = ptrViERender->RemoveRenderer(videoChannel);
if (error == -1) {
printf("ERROR in ViERender::RemoveRenderer\n");
return -1;
}
error = ptrViECapture->StopCapture(captureId);
if (error == -1) {
printf("ERROR in ViECapture::StopCapture\n");
return -1;
}
error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
if (error == -1) {
printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
return -1;
}
error = ptrViECapture->ReleaseCaptureDevice(captureId);
if (error == -1) {
printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
return -1;
}
error = ptrViEBase->DeleteChannel(videoChannel);
if (error == -1) {
printf("ERROR in ViEBase::DeleteChannel\n");
return -1;
}
delete video_channel_transport;
// All sub-API interfaces must be released before the engine is deleted.
int remainingInterfaces = 0;
remainingInterfaces = ptrViECodec->Release();
remainingInterfaces += ptrViECapture->Release();
remainingInterfaces += ptrViERtpRtcp->Release();
remainingInterfaces += ptrViERender->Release();
remainingInterfaces += ptrViENetwork->Release();
remainingInterfaces += ptrViEBase->Release();
if (remainingInterfaces > 0) {
printf("ERROR: Could not release all interfaces\n");
return -1;
}
bool deleted = webrtc::VideoEngine::Delete(ptrViE);
if (deleted == false) {
printf("ERROR in VideoEngine::Delete\n");
return -1;
}
return 0;
}
// TODO(mikhal): Place above functionality under this class.
// Runs the sample record call and logs a framed pass/fail banner.
// Returns 0 on success and 1 on failure.
int ViEAutoTest::ViERecordCall() {
  ViETest::Log(" ");
  ViETest::Log("========================================");
  ViETest::Log(" ViE Record Call\n");

  const bool record_call_succeeded =
      VideoEngineSampleRecordCode(_window1, _window2) == 0;

  ViETest::Log(" ");
  if (record_call_succeeded) {
    ViETest::Log(" ViE Autotest Record Call Done");
  } else {
    ViETest::Log(" ViE Autotest Record Call Failed");
  }
  ViETest::Log("========================================");
  ViETest::Log(" ");
  return record_call_succeeded ? 0 : 1;
}
// Fills |audio_codec| with the preferred default audio codec
// (DEFAULT_AUDIO_CODEC) if |voe_codec| supports it, otherwise with the codec
// at index 0. Always returns true; codec-query failures are reported through
// ViETest::TestError.
//
// NOTE: the original implementation wrapped this logic in a `while (1)` loop
// whose body unconditionally returned on the first iteration, followed by an
// unreachable `assert(false); return false;`. That dead code has been removed;
// behavior is unchanged.
bool GetAudioCodecRecord(webrtc::VoECodec* voe_codec,
                         webrtc::CodecInst& audio_codec) {
  int error = 0;
  int number_of_errors = 0;  // Accumulated for parity with other helpers.
  memset(&audio_codec, 0, sizeof(webrtc::CodecInst));
  int default_codec_idx = 0;
  for (int codec_idx = 0; codec_idx < voe_codec->NumOfCodecs(); codec_idx++) {
    error = voe_codec->GetCodec(codec_idx, audio_codec);
    number_of_errors += ViETest::TestError(error == 0,
                                           "ERROR: %s at line %d",
                                           __FUNCTION__, __LINE__);
    // Remember the index of the preferred default codec, if present.
    if (strcmp(audio_codec.plname, DEFAULT_AUDIO_CODEC) == 0) {
      default_codec_idx = codec_idx;
    }
  }
  // Re-fetch the chosen codec (the loop left |audio_codec| holding the last
  // enumerated entry, not necessarily the default).
  error = voe_codec->GetCodec(default_codec_idx, audio_codec);
  number_of_errors += ViETest::TestError(error == 0,
                                         "ERROR: %s at line %d",
                                         __FUNCTION__, __LINE__);
  return true;
}

View file

@ -1,312 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest_render.cc
//
#include "webrtc/base/format_macros.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/modules/video_render/include/video_render.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/video_engine/test/libvietest/include/tb_capture_device.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
#include "webrtc/video_engine/test/libvietest/include/tb_video_channel.h"
#if defined(WIN32)
#include <ddraw.h>
#include <tchar.h>
#include <windows.h>
#elif defined(WEBRTC_LINUX)
//From windgi.h
#undef RGB
#define RGB(r,g,b) ((unsigned long)(((unsigned char)(r)|((unsigned short)((unsigned char)(g))<<8))|(((unsigned long)(unsigned char)(b))<<16)))
//From ddraw.h
/* typedef struct _DDCOLORKEY
{
DWORD dwColorSpaceLowValue; // low boundary of color space that is to
DWORD dwColorSpaceHighValue; // high boundary of color space that is
} DDCOLORKEY;*/
#elif defined(WEBRTC_MAC)
#endif
class ViEAutoTestExternalRenderer: public webrtc::ExternalRenderer
{
public:
ViEAutoTestExternalRenderer() :
_width(0),
_height(0)
{
}
virtual int FrameSizeChange(unsigned int width, unsigned int height,
unsigned int numberOfStreams)
{
_width = width;
_height = height;
return 0;
}
virtual int DeliverFrame(unsigned char* buffer,
size_t bufferSize,
uint32_t time_stamp,
int64_t ntp_time_ms,
int64_t render_time,
void* /*handle*/) {
if (bufferSize != CalcBufferSize(webrtc::kI420, _width, _height)) {
ViETest::Log("Incorrect render buffer received, of length = %" PRIuS
"\n", bufferSize);
return 0;
}
return 0;
}
virtual int DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame) {
EXPECT_EQ(webrtc_frame.width(), _width);
EXPECT_EQ(webrtc_frame.height(), _height);
return 0;
}
virtual bool IsTextureSupported() { return false; }
public:
virtual ~ViEAutoTestExternalRenderer()
{
}
private:
int _width, _height;
};
// Standard render test: renders the local capture device in window 1 and the
// looped-back remote stream in window 2, then (except on Android) exercises
// picture-in-picture and full-screen rendering by recreating the render
// module in full-screen mode and back.
void ViEAutoTest::ViERenderStandardTest()
{
    //***************************************************************
    // Begin create/initialize WebRTC Video Engine for testing
    //***************************************************************
    int rtpPort = 6000;
    TbInterfaces ViE("ViERenderStandardTest");
    // Create a video channel
    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
    TbCaptureDevice tbCapture(ViE); // Create a capture device
    tbCapture.ConnectTo(tbChannel.videoChannel);
    tbChannel.StartReceive(rtpPort);
    tbChannel.StartSend(rtpPort);
    // Local preview in window 1, remote stream in window 2.
    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
    EXPECT_EQ(0, ViE.render->AddRenderer(
        tbCapture.captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm2));
    EXPECT_EQ(0, ViE.render->AddRenderer(
        tbChannel.videoChannel, _window2, 1, 0.0, 0.0, 1.0, 1.0));
    EXPECT_EQ(0, ViE.render->StartRender(tbChannel.videoChannel));
    ViETest::Log("\nCapture device is renderered in Window 1");
    ViETest::Log("Remote stream is renderered in Window 2");
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.render->StopRender(tbCapture.captureId));
    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
    // PIP and full screen rendering is not supported on Android
#ifndef WEBRTC_ANDROID
    // Re-add the capture renderer as a picture-in-picture overlay in the
    // bottom-right quarter of window 2.
    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
    EXPECT_EQ(0, ViE.render->AddRenderer(
        tbCapture.captureId, _window2, 0, 0.75, 0.75, 1.0, 1.0));
    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
    ViETest::Log("\nCapture device is now rendered in Window 2, PiP.");
    ViETest::Log("Switching to full screen rendering in %d seconds.\n",
                 kAutoTestSleepTimeMs / 1000);
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm2));
    // Destroy render module and create new in full screen mode
    webrtc::VideoRender::DestroyVideoRender(_vrm1);
    _vrm1 = NULL;
    _vrm1 = webrtc::VideoRender::CreateVideoRender(
        4563, _window1, true, _renderType);
    EXPECT_TRUE(_vrm1 != NULL);
    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
    // Full-screen remote stream with the capture device as a PiP overlay.
    EXPECT_EQ(0, ViE.render->AddRenderer(
        tbCapture.captureId, _window1, 0, 0.75f, 0.75f, 1.0f, 1.0f));
    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
    EXPECT_EQ(0, ViE.render->AddRenderer(
        tbChannel.videoChannel, _window1, 1, 0.0, 0.0, 1.0, 1.0));
    EXPECT_EQ(0, ViE.render->StartRender(tbChannel.videoChannel));
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
    // Destroy full screen render module and create new in normal mode
    webrtc::VideoRender::DestroyVideoRender(_vrm1);
    _vrm1 = NULL;
    _vrm1 = webrtc::VideoRender::CreateVideoRender(
        4561, _window1, false, _renderType);
    EXPECT_TRUE(_vrm1 != NULL);
#endif
    //***************************************************************
    // Engine ready. Begin testing class
    //***************************************************************
    //***************************************************************
    // Testing finished. Tear down Video Engine
    //***************************************************************
    tbCapture.Disconnect(tbChannel.videoChannel);
}
// Extended render test: after setting up local/remote rendering it exercises
// runtime render reconfiguration (Windows only), full-screen rendering, and
// the external-renderer callback path via ViEAutoTestExternalRenderer.
void ViEAutoTest::ViERenderExtendedTest()
{
    int rtpPort = 6000;
    TbInterfaces ViE("ViERenderExtendedTest");
    // Create a video channel
    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
    TbCaptureDevice tbCapture(ViE); // Create a capture device
    tbCapture.ConnectTo(tbChannel.videoChannel);
    tbChannel.StartReceive(rtpPort);
    tbChannel.StartSend(rtpPort);
    // Local preview in window 1, remote stream in window 2.
    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
    EXPECT_EQ(0, ViE.render->AddRenderer(
        tbCapture.captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm2));
    EXPECT_EQ(0, ViE.render->AddRenderer(
        tbChannel.videoChannel, _window2, 1, 0.0, 0.0, 1.0, 1.0));
    EXPECT_EQ(0, ViE.render->StartRender(tbChannel.videoChannel));
    ViETest::Log("\nCapture device is renderered in Window 1");
    ViETest::Log("Remote stream is renderered in Window 2");
    AutoTestSleep(kAutoTestSleepTimeMs);
#ifdef _WIN32
    // Move the render viewport around within window 2 (Windows only).
    ViETest::Log("\nConfiguring Window2");
    ViETest::Log("you will see video only in first quadrant");
    EXPECT_EQ(0, ViE.render->ConfigureRender(
        tbChannel.videoChannel, 0, 0.0f, 0.0f, 0.5f, 0.5f));
    AutoTestSleep(kAutoTestSleepTimeMs);
    ViETest::Log("you will see video only in fourth quadrant");
    EXPECT_EQ(0, ViE.render->ConfigureRender(
        tbChannel.videoChannel, 0, 0.5f, 0.5f, 1.0f, 1.0f));
    AutoTestSleep(kAutoTestSleepTimeMs);
    ViETest::Log("normal video on Window2");
    EXPECT_EQ(0, ViE.render->ConfigureRender(
        tbChannel.videoChannel, 0, 0.0f, 0.0f, 1.0f, 1.0f));
    AutoTestSleep(kAutoTestSleepTimeMs);
#endif
    ViETest::Log("\nEnabling Full Screen render in 5 sec");
    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm2));
    // Destroy render module and create new in full screen mode
    webrtc::VideoRender::DestroyVideoRender(_vrm1);
    _vrm1 = NULL;
    _vrm1 = webrtc::VideoRender::CreateVideoRender(
        4563, _window1, true, _renderType);
    EXPECT_TRUE(_vrm1 != NULL);
    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
    EXPECT_EQ(0, ViE.render->AddRenderer(
        tbCapture.captureId, _window1, 0, 0.0f, 0.0f, 1.0f, 1.0f));
    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
    AutoTestSleep(kAutoTestSleepTimeMs);
    ViETest::Log("\nStop renderer");
    EXPECT_EQ(0, ViE.render->StopRender(tbCapture.captureId));
    ViETest::Log("\nRemove renderer");
    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
    // Destroy full screen render module and create new for external rendering
    // (no window handle; frames are delivered through the callback interface).
    webrtc::VideoRender::DestroyVideoRender(_vrm1);
    _vrm1 = NULL;
    _vrm1 = webrtc::VideoRender::CreateVideoRender(4564, NULL, false,
                                                   _renderType);
    EXPECT_TRUE(_vrm1 != NULL);
    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
    ViETest::Log("\nExternal Render Test");
    ViEAutoTestExternalRenderer externalRenderObj;
    EXPECT_EQ(0, ViE.render->AddRenderer(
        tbCapture.captureId, webrtc::kVideoI420, &externalRenderObj));
    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.render->StopRender(tbCapture.captureId));
    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
    // Destroy render module for external rendering and create new in normal
    // mode
    webrtc::VideoRender::DestroyVideoRender(_vrm1);
    _vrm1 = NULL;
    _vrm1 = webrtc::VideoRender::CreateVideoRender(
        4561, _window1, false, _renderType);
    EXPECT_TRUE(_vrm1 != NULL);
    tbCapture.Disconnect(tbChannel.videoChannel);
}
// API-contract test for ViERender: verifies SetExpectedRenderDelay argument
// validation (fails while rendering is active, rejects out-of-range delays,
// accepts delays in the valid (10, 500) ms range).
void ViEAutoTest::ViERenderAPITest() {
  TbInterfaces ViE("ViERenderAPITest");
  TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
  TbCaptureDevice tbCapture(ViE);
  tbCapture.ConnectTo(tbChannel.videoChannel);
  tbChannel.StartReceive();
  tbChannel.StartSend();
  EXPECT_EQ(0, ViE.render->AddRenderer(
      tbCapture.captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
  EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
  EXPECT_EQ(0, ViE.render->AddRenderer(
      tbChannel.videoChannel, _window2, 1, 0.0, 0.0, 1.0, 1.0));
  EXPECT_EQ(0, ViE.render->StartRender(tbChannel.videoChannel));
  // Test setting HW render delay.
  // Already started: setting the delay must fail while rendering is active.
  EXPECT_EQ(-1, ViE.render->SetExpectedRenderDelay(tbChannel.videoChannel, 50));
  EXPECT_EQ(0, ViE.render->StopRender(tbChannel.videoChannel));
  // Invalid values (outside the accepted (10, 500) ms range).
  EXPECT_EQ(-1, ViE.render->SetExpectedRenderDelay(tbChannel.videoChannel, 9));
  EXPECT_EQ(-1, ViE.render->SetExpectedRenderDelay(tbChannel.videoChannel,
                                                   501));
  // Valid values (boundary-adjacent: 11 and 499 ms).
  EXPECT_EQ(0, ViE.render->SetExpectedRenderDelay(tbChannel.videoChannel, 11));
  EXPECT_EQ(0, ViE.render->SetExpectedRenderDelay(tbChannel.videoChannel, 499));
  EXPECT_EQ(0, ViE.render->RemoveRenderer(tbChannel.videoChannel));
  EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
  tbCapture.Disconnect(tbChannel.videoChannel);
}

View file

@ -1,919 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <iostream>
#include "webrtc/engine_configurations.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/libvietest/include/tb_capture_device.h"
#include "webrtc/video_engine/test/libvietest/include/tb_external_transport.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
#include "webrtc/video_engine/test/libvietest/include/tb_video_channel.h"
// No-op implementation of webrtc::ViERTPObserver. Instantiate to register an
// RTP observer whose callbacks intentionally do nothing.
class ViERtpObserver: public webrtc::ViERTPObserver
{
public:
    ViERtpObserver()
    {
    }
    virtual ~ViERtpObserver()
    {
    }
    // Invoked when the SSRC of the incoming stream changes. Intentionally
    // empty.
    virtual void IncomingSSRCChanged(const int videoChannel,
                                     const unsigned int SSRC)
    {
    }
    // Invoked when a CSRC is added to (|added| true) or removed from the
    // incoming stream. Intentionally empty.
    virtual void IncomingCSRCChanged(const int videoChannel,
                                     const unsigned int CSRC, const bool added)
    {
    }
};
// Integration test for the ViERTP_RTCP interface over a simulated transport
// (TbExternalTransport). Covers, in order: start sequence numbers, RTCP
// CName, pacing, NACK over RTX, RTCP/RTP statistics, bandwidth estimation,
// reserved transmit bitrate, FEC/NACK bitrate accounting, SSRC get/set, RTP
// dump to file, and sender behavior across simulated network outages in both
// real-time and buffering mode. Many checks are timing dependent and are
// gated on FLAGS_include_timing_dependent_tests.
void ViEAutoTest::ViERtpRtcpStandardTest()
{
    // ***************************************************************
    // Begin create/initialize WebRTC Video Engine for testing
    // ***************************************************************
    // Create VIE
    TbInterfaces ViE("ViERtpRtcpStandardTest");
    // Create a video channel
    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
    // Create a capture device
    TbCaptureDevice tbCapture(ViE);
    tbCapture.ConnectTo(tbChannel.videoChannel);
    ViETest::Log("\n");
    // Replace the default transport with a controllable loopback transport so
    // the test can inject loss and inspect sent packets.
    TbExternalTransport myTransport(*(ViE.network), tbChannel.videoChannel,
                                    NULL);
    ViE.network->DeregisterSendTransport(tbChannel.videoChannel);
    EXPECT_EQ(0, ViE.network->RegisterSendTransport(
        tbChannel.videoChannel, myTransport));
    // ***************************************************************
    // Engine ready. Begin testing class
    // ***************************************************************
    // Verify the configured start sequence number shows up on the wire.
    unsigned short startSequenceNumber = 12345;
    ViETest::Log("Set start sequence number: %u", startSequenceNumber);
    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
        tbChannel.videoChannel, startSequenceNumber));
    const unsigned int kVideoSsrc = 123456;
    // Set an SSRC to avoid issues with collisions.
    EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(tbChannel.videoChannel, kVideoSsrc,
                                            webrtc::kViEStreamTypeNormal, 0));
    myTransport.EnableSequenceNumberCheck();
    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    AutoTestSleep(2000);
    unsigned short receivedSequenceNumber =
        myTransport.GetFirstSequenceNumber();
    ViETest::Log("First received sequence number: %u\n",
                 receivedSequenceNumber);
    EXPECT_EQ(startSequenceNumber, receivedSequenceNumber);
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    //
    // RTCP CName
    //
    ViETest::Log("Testing CName\n");
    const char* sendCName = "ViEAutoTestCName\0";
    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPCName(tbChannel.videoChannel, sendCName));
    char returnCName[webrtc::ViERTP_RTCP::KMaxRTCPCNameLength];
    memset(returnCName, 0, webrtc::ViERTP_RTCP::KMaxRTCPCNameLength);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPCName(
        tbChannel.videoChannel, returnCName));
    EXPECT_STRCASEEQ(sendCName, returnCName);
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    AutoTestSleep(1000);
    if (FLAGS_include_timing_dependent_tests) {
      // The CName should have propagated to the remote side via RTCP by now.
      char remoteCName[webrtc::ViERTP_RTCP::KMaxRTCPCNameLength];
      memset(remoteCName, 0, webrtc::ViERTP_RTCP::KMaxRTCPCNameLength);
      EXPECT_EQ(0, ViE.rtp_rtcp->GetRemoteRTCPCName(
          tbChannel.videoChannel, remoteCName));
      EXPECT_STRCASEEQ(sendCName, remoteCName);
    }
    //
    // Pacing
    //
    webrtc::RtcpStatistics received;
    int64_t recRttMs = 0;
    unsigned int sentTotalBitrate = 0;
    unsigned int sentVideoBitrate = 0;
    unsigned int sentFecBitrate = 0;
    unsigned int sentNackBitrate = 0;
    ViETest::Log("Testing Pacing\n");
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
    myTransport.ClearStats();
    EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, true));
    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    // Lossless network: expect no drops, no NACK traffic, no cumulative loss.
    NetworkParameters network;
    network.packet_loss_rate = 0;
    network.loss_model = kUniformLoss;
    myTransport.SetNetworkParameters(network);
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetReceiveChannelRtcpStatistics(
        tbChannel.videoChannel, received, recRttMs));
    EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
        tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
        sentFecBitrate, sentNackBitrate));
    int num_rtp_packets = 0;
    int num_dropped_packets = 0;
    int num_rtcp_packets = 0;
    std::map<uint8_t, int> packet_counters;
    myTransport.GetStats(num_rtp_packets, num_dropped_packets, num_rtcp_packets,
                         &packet_counters);
    EXPECT_GT(num_rtp_packets, 0);
    EXPECT_EQ(num_dropped_packets, 0);
    EXPECT_GT(num_rtcp_packets, 0);
    EXPECT_GT(sentTotalBitrate, 0u);
    EXPECT_EQ(sentNackBitrate, 0u);
    EXPECT_EQ(received.cumulative_lost, 0u);
    //
    // RTX
    //
    ViETest::Log("Testing NACK over RTX\n");
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
    myTransport.ClearStats();
    const uint8_t kRtxPayloadType = 96;
    const uint8_t kPayloadType = 100;
    // Temporarily disable pacing.
    EXPECT_EQ(0, ViE.rtp_rtcp->SetTransmissionSmoothingStatus(
        tbChannel.videoChannel, false));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, true));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetRtxSendPayloadType(
        tbChannel.videoChannel, kRtxPayloadType, kPayloadType));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetRtxReceivePayloadType(
        tbChannel.videoChannel, kRtxPayloadType, kPayloadType));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(tbChannel.videoChannel, 1234,
                                            webrtc::kViEStreamTypeRtx, 0));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetRemoteSSRCType(tbChannel.videoChannel,
                                                 webrtc::kViEStreamTypeRtx,
                                                 1234));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
        tbChannel.videoChannel, startSequenceNumber));
    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    // Make sure the first key frame gets through.
    AutoTestSleep(100);
    const int kPacketLossRate = 20;
    network.packet_loss_rate = kPacketLossRate;
    network.loss_model = kUniformLoss;
    myTransport.SetNetworkParameters(network);
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetReceiveChannelRtcpStatistics(
        tbChannel.videoChannel, received, recRttMs));
    EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
        tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
        sentFecBitrate, sentNackBitrate));
    packet_counters.clear();
    myTransport.GetStats(num_rtp_packets, num_dropped_packets, num_rtcp_packets,
                         &packet_counters);
    EXPECT_GT(num_rtp_packets, 0);
    EXPECT_GT(num_dropped_packets, 0);
    EXPECT_GT(num_rtcp_packets, 0);
    // Retransmissions must have been sent on the RTX payload type.
    EXPECT_GT(packet_counters[kRtxPayloadType], 0);
    // Make sure we have lost packets and that they were retransmitted.
    // TODO(holmer): Disabled due to being flaky. Could be a bug in our stats.
    // EXPECT_GT(recCumulativeLost, 0u);
    EXPECT_GT(sentTotalBitrate, 0u);
    EXPECT_GT(sentNackBitrate, 0u);
    //
    // Statistics
    //
    // Stop and restart to clear stats
    ViETest::Log("Testing statistics\n");
    EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, false));
    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    myTransport.ClearStats();
    network.packet_loss_rate = kPacketLossRate;
    network.loss_model = kUniformLoss;
    myTransport.SetNetworkParameters(network);
    // Start send to verify sending stats
    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
        tbChannel.videoChannel, startSequenceNumber));
    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    webrtc::RtcpStatistics sent;
    int64_t sentRttMs = 0;
    // Fraction lost is a transient value that can get reset after a new rtcp
    // report block. Make regular polls to make sure it is propagated.
    // TODO(sprang): Replace with callbacks, when those are fully implemented.
    int time_to_sleep = kAutoTestSleepTimeMs;
    bool got_send_channel_frac_lost = false;
    bool got_receive_channel_frac_lost = false;
    while (time_to_sleep > 0) {
      AutoTestSleep(500);
      time_to_sleep -= 500;
      EXPECT_EQ(0,
                ViE.rtp_rtcp->GetSendChannelRtcpStatistics(
                    tbChannel.videoChannel, sent, sentRttMs));
      got_send_channel_frac_lost |= sent.fraction_lost > 0;
      EXPECT_EQ(0,
                ViE.rtp_rtcp->GetReceiveChannelRtcpStatistics(
                    tbChannel.videoChannel, received, recRttMs));
      got_receive_channel_frac_lost |= received.fraction_lost > 0;
    }
    EXPECT_TRUE(got_send_channel_frac_lost);
    EXPECT_TRUE(got_receive_channel_frac_lost);
    // With NACK and FEC both off, only plain video bitrate should be sent.
    EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
        tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
        sentFecBitrate, sentNackBitrate));
    EXPECT_GT(sentTotalBitrate, 0u);
    EXPECT_EQ(sentFecBitrate, 0u);
    EXPECT_EQ(sentNackBitrate, 0u);
    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
    AutoTestSleep(2000);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetSendChannelRtcpStatistics(
        tbChannel.videoChannel, sent, sentRttMs));
    EXPECT_GT(sent.cumulative_lost, 0u);
    EXPECT_GT(sent.extended_max_sequence_number, startSequenceNumber);
    EXPECT_GT(sent.jitter, 0u);
    EXPECT_GT(sentRttMs, 0);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetReceiveChannelRtcpStatistics(
        tbChannel.videoChannel, received, recRttMs));
    EXPECT_GT(received.cumulative_lost, 0u);
    EXPECT_GT(received.extended_max_sequence_number, startSequenceNumber);
    EXPECT_GT(received.jitter, 0u);
    EXPECT_GT(recRttMs, 0);
    unsigned int estimated_bandwidth = 0;
    EXPECT_EQ(0, ViE.rtp_rtcp->GetEstimatedSendBandwidth(
        tbChannel.videoChannel,
        &estimated_bandwidth));
    EXPECT_GT(estimated_bandwidth, 0u);
    if (FLAGS_include_timing_dependent_tests) {
      EXPECT_EQ(0, ViE.rtp_rtcp->GetEstimatedReceiveBandwidth(
          tbChannel.videoChannel,
          &estimated_bandwidth));
      EXPECT_GT(estimated_bandwidth, 0u);
      // A passive receive-only channel that has seen no traffic should report
      // zero estimated receive bandwidth.
      int passive_channel = -1;
      EXPECT_EQ(ViE.base->CreateReceiveChannel(passive_channel,
                                               tbChannel.videoChannel), 0);
      EXPECT_EQ(ViE.base->StartReceive(passive_channel), 0);
      EXPECT_EQ(
          ViE.rtp_rtcp->GetEstimatedReceiveBandwidth(passive_channel,
                                                     &estimated_bandwidth),
          0);
      EXPECT_EQ(estimated_bandwidth, 0u);
    }
    // Check that rec stats extended max is greater than what we've sent.
    EXPECT_GE(received.extended_max_sequence_number,
              sent.extended_max_sequence_number);
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
    //
    // Test bandwidth statistics with reserved bitrate
    //
    myTransport.ClearStats();
    network.packet_loss_rate = 0;
    network.loss_model = kUniformLoss;
    myTransport.SetNetworkParameters(network);
    // Reserving more than the available bitrate should drive the send
    // estimate to zero.
    ViE.rtp_rtcp->SetReservedTransmitBitrate(tbChannel.videoChannel, 2000000);
    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    AutoTestSleep(kAutoTestSleepTimeMs);
    estimated_bandwidth = 0;
    EXPECT_EQ(0, ViE.rtp_rtcp->GetEstimatedSendBandwidth(tbChannel.videoChannel,
                                                         &estimated_bandwidth));
    if (FLAGS_include_timing_dependent_tests) {
      EXPECT_EQ(0u, estimated_bandwidth);
    }
    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    //
    // Test bandwidth statistics with NACK and FEC separately
    //
    myTransport.ClearStats();
    network.packet_loss_rate = kPacketLossRate;
    myTransport.SetNetworkParameters(network);
    // FEC enabled, NACK disabled: FEC bitrate > 0, NACK bitrate == 0.
    EXPECT_EQ(0, ViE.rtp_rtcp->SetFECStatus(
        tbChannel.videoChannel, true, 96, 97));
    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
        tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
        sentFecBitrate, sentNackBitrate));
    if (FLAGS_include_timing_dependent_tests) {
      EXPECT_GT(sentTotalBitrate, 0u);
      EXPECT_GT(sentFecBitrate, 0u);
      EXPECT_EQ(sentNackBitrate, 0u);
    }
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    // NACK enabled, FEC disabled: FEC bitrate must drop back to zero.
    EXPECT_EQ(0, ViE.rtp_rtcp->SetFECStatus(
        tbChannel.videoChannel, false, 96, 97));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, true));
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    AutoTestSleep(4 * kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
        tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
        sentFecBitrate, sentNackBitrate));
    if (FLAGS_include_timing_dependent_tests) {
      EXPECT_GT(sentTotalBitrate, 0u);
      EXPECT_EQ(sentFecBitrate, 0u);
      // TODO(holmer): Test disabled due to being too flaky on buildbots. Tests
      // for new API provide partial coverage.
      // EXPECT_GT(sentNackBitrate, 0u);
    }
    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, false));
    // Test to set SSRC
    network.packet_loss_rate = 0;
    myTransport.SetNetworkParameters(network);
    myTransport.ClearStats();
    unsigned int setSSRC = 0x01234567;
    ViETest::Log("Set SSRC %u", setSSRC);
    EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(tbChannel.videoChannel, setSSRC));
    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    myTransport.EnableSSRCCheck();
    AutoTestSleep(2000);
    unsigned int receivedSSRC = myTransport.ReceivedSSRC();
    ViETest::Log("Received SSRC %u\n", receivedSSRC);
    if (FLAGS_include_timing_dependent_tests) {
      // The configured SSRC should match what went over the wire and what both
      // local and remote sides report.
      EXPECT_EQ(setSSRC, receivedSSRC);
      unsigned int localSSRC = 0;
      EXPECT_EQ(0, ViE.rtp_rtcp->GetLocalSSRC(
          tbChannel.videoChannel, localSSRC));
      EXPECT_EQ(setSSRC, localSSRC);
      unsigned int remoteSSRC = 0;
      EXPECT_EQ(0, ViE.rtp_rtcp->GetRemoteSSRC(
          tbChannel.videoChannel, remoteSSRC));
      EXPECT_EQ(setSSRC, remoteSSRC);
    }
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    ViETest::Log("Testing RTP dump...\n");
    std::string inDumpName =
        ViETest::GetResultOutputPath() + "IncomingRTPDump.rtp";
    std::string outDumpName =
        ViETest::GetResultOutputPath() + "OutgoingRTPDump.rtp";
    EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
        tbChannel.videoChannel, inDumpName.c_str(), webrtc::kRtpIncoming));
    EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
        tbChannel.videoChannel, outDumpName.c_str(), webrtc::kRtpOutgoing));
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    AutoTestSleep(1000);
    EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
        tbChannel.videoChannel, webrtc::kRtpIncoming));
    EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
        tbChannel.videoChannel, webrtc::kRtpOutgoing));
    // Make sure data was actually saved to the file and we stored the same
    // amount of data in both files
    FILE* inDump = fopen(inDumpName.c_str(), "r");
    fseek(inDump, 0L, SEEK_END);
    long inEndPos = ftell(inDump);
    fclose(inDump);
    FILE* outDump = fopen(outDumpName.c_str(), "r");
    fseek(outDump, 0L, SEEK_END);
    // long outEndPos = ftell(outDump);
    fclose(outDump);
    EXPECT_GT(inEndPos, 0);
    // TODO(phoglund): This is flaky for some reason. Are the sleeps too
    // short above?
    // EXPECT_LT(inEndPos, outEndPos + 100);
    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
    ViETest::Log("Testing Network Down...\n");
    EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, true));
    // Reenable pacing.
    EXPECT_EQ(0, ViE.rtp_rtcp->SetTransmissionSmoothingStatus(
        tbChannel.videoChannel, true));
    webrtc::StreamDataCounters sent_before;
    webrtc::StreamDataCounters received_before;
    webrtc::StreamDataCounters sent_after;
    webrtc::StreamDataCounters received_after;
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRtpStatistics(tbChannel.videoChannel,
                                                sent_before,
                                                received_before));
    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    // Real-time mode.
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRtpStatistics(tbChannel.videoChannel,
                                                sent_after, received_after));
    if (FLAGS_include_timing_dependent_tests) {
      EXPECT_GT(received_after.transmitted.payload_bytes,
                received_before.transmitted.payload_bytes);
    }
    // Simulate lost reception and verify that nothing is sent during that time.
    ViE.network->SetNetworkTransmissionState(tbChannel.videoChannel, false);
    // Allow the encoder to finish the current frame before we expect that no
    // additional packets will be sent.
    AutoTestSleep(kAutoTestSleepTimeMs);
    received_before.transmitted.payload_bytes =
        received_after.transmitted.payload_bytes;
    ViETest::Log("Network Down...\n");
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRtpStatistics(tbChannel.videoChannel,
                                                sent_before,
                                                received_before));
    if (FLAGS_include_timing_dependent_tests) {
      EXPECT_EQ(received_before.transmitted.payload_bytes,
                received_after.transmitted.payload_bytes);
    }
    // Network reception back. Video should now be sent.
    ViE.network->SetNetworkTransmissionState(tbChannel.videoChannel, true);
    ViETest::Log("Network Up...\n");
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRtpStatistics(tbChannel.videoChannel,
                                                sent_before,
                                                received_before));
    if (FLAGS_include_timing_dependent_tests) {
      EXPECT_GT(received_before.transmitted.payload_bytes,
                received_after.transmitted.payload_bytes);
    }
    received_after.transmitted.payload_bytes =
        received_before.transmitted.payload_bytes;
    // Buffering mode.
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
    ViE.rtp_rtcp->SetSenderBufferingMode(tbChannel.videoChannel,
                                         kAutoTestSleepTimeMs / 2);
    // Add extra delay to the receiver to make sure it doesn't flush due to
    // too old packets being received (as the down-time introduced is longer
    // than what we buffer at the sender).
    ViE.rtp_rtcp->SetReceiverBufferingMode(tbChannel.videoChannel,
                                           3 * kAutoTestSleepTimeMs / 2);
    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
    AutoTestSleep(kAutoTestSleepTimeMs);
    // Simulate lost reception and verify that nothing is sent during that time.
    ViETest::Log("Network Down...\n");
    ViE.network->SetNetworkTransmissionState(tbChannel.videoChannel, false);
    // Allow the encoder to finish the current frame before we expect that no
    // additional packets will be sent.
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRtpStatistics(tbChannel.videoChannel,
                                                sent_before,
                                                received_before));
    if (FLAGS_include_timing_dependent_tests) {
      EXPECT_GT(received_before.transmitted.payload_bytes,
                received_after.transmitted.payload_bytes);
    }
    received_after.transmitted.payload_bytes =
        received_before.transmitted.payload_bytes;
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRtpStatistics(tbChannel.videoChannel,
                                                sent_before,
                                                received_before));
    if (FLAGS_include_timing_dependent_tests) {
      EXPECT_EQ(received_after.transmitted.payload_bytes,
                received_before.transmitted.payload_bytes);
    }
    // Network reception back. Video should now be sent.
    ViETest::Log("Network Up...\n");
    ViE.network->SetNetworkTransmissionState(tbChannel.videoChannel, true);
    AutoTestSleep(kAutoTestSleepTimeMs);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRtpStatistics(tbChannel.videoChannel,
                                                sent_before,
                                                received_before));
    if (FLAGS_include_timing_dependent_tests) {
      EXPECT_GT(received_before.transmitted.payload_bytes,
                received_after.transmitted.payload_bytes);
    }
    // TODO(holmer): Verify that the decoded framerate doesn't decrease on an
    // outage when in buffering mode. This isn't currently possible because we
    // don't have an API to get decoded framerate.
    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
    // Deregister external transport
    EXPECT_EQ(0, ViE.network->DeregisterSendTransport(tbChannel.videoChannel));
    //***************************************************************
    // Testing finished. Tear down Video Engine
    //***************************************************************
}
// Exercises the ViERTP_RTCP public API on a single VP8 channel: RTCP mode
// get/set, RTCP CName, local SSRC, start sequence number, application-defined
// RTCP packets, RTP dump start/stop, RTP observers, key frame request
// methods, NACK, RTP header extensions (timestamp offset and absolute send
// time), transmission smoothing, and sender/receiver buffering modes.
// Assertions check both the success paths and that invalid arguments or
// invalid call ordering are rejected.
void ViEAutoTest::ViERtpRtcpAPITest()
{
    //***************************************************************
    // Begin create/initialize WebRTC Video Engine for testing
    //***************************************************************

    // Create VIE
    TbInterfaces ViE("ViERtpRtcpAPITest");

    // Create a video channel
    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
    // Create a capture device
    TbCaptureDevice tbCapture(ViE);
    tbCapture.ConnectTo(tbChannel.videoChannel);

    //***************************************************************
    // Engine ready. Begin testing class
    //***************************************************************

    //
    // Check different RTCP modes
    //
    webrtc::ViERTCPMode rtcpMode = webrtc::kRtcpNone;
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
        tbChannel.videoChannel, rtcpMode));
    // Compound RTCP is expected to be the default mode.
    EXPECT_EQ(webrtc::kRtcpCompound_RFC4585, rtcpMode);
    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
        tbChannel.videoChannel, webrtc::kRtcpCompound_RFC4585));
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
        tbChannel.videoChannel, rtcpMode));
    EXPECT_EQ(webrtc::kRtcpCompound_RFC4585, rtcpMode);
    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
        tbChannel.videoChannel, webrtc::kRtcpNonCompound_RFC5506));
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
        tbChannel.videoChannel, rtcpMode));
    EXPECT_EQ(webrtc::kRtcpNonCompound_RFC5506, rtcpMode);
    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
        tbChannel.videoChannel, webrtc::kRtcpNone));
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
        tbChannel.videoChannel, rtcpMode));
    EXPECT_EQ(webrtc::kRtcpNone, rtcpMode);
    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
        tbChannel.videoChannel, webrtc::kRtcpCompound_RFC4585));

    //
    // CName is tested in SimpleTest
    // Start sequence number is tested in SimpleTest
    //
    const char* testCName = "ViEAutotestCName";
    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPCName(
        tbChannel.videoChannel, testCName));

    char returnCName[256];
    memset(returnCName, 0, 256);
    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPCName(
        tbChannel.videoChannel, returnCName));
    EXPECT_STRCASEEQ(testCName, returnCName);

    //
    // SSRC
    //
    EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(
        tbChannel.videoChannel, 0x01234567));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(
        tbChannel.videoChannel, 0x76543210));

    unsigned int ssrc = 0;
    EXPECT_EQ(0, ViE.rtp_rtcp->GetLocalSSRC(tbChannel.videoChannel, ssrc));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
        tbChannel.videoChannel, 1000));
    tbChannel.StartSend();
    // Changing the start sequence number while sending must fail.
    EXPECT_NE(0, ViE.rtp_rtcp->SetStartSequenceNumber(
        tbChannel.videoChannel, 12345));
    tbChannel.StopSend();

    //
    // Start sequence number
    //
    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
        tbChannel.videoChannel, 12345));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
        tbChannel.videoChannel, 1000));
    tbChannel.StartSend();
    EXPECT_NE(0, ViE.rtp_rtcp->SetStartSequenceNumber(
        tbChannel.videoChannel, 12345));
    tbChannel.StopSend();

    //
    // Application specific RTCP
    //
    {
        unsigned char subType = 3;
        unsigned int name = static_cast<unsigned int> (0x41424344); // 'ABCD';
        const char* data = "ViEAutoTest Data of length 32 --";
        const unsigned short numBytes = 32;

        tbChannel.StartSend();
        EXPECT_EQ(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
            tbChannel.videoChannel, subType, name, data, numBytes));
        EXPECT_NE(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
            tbChannel.videoChannel, subType, name, NULL, numBytes)) <<
            "Should fail on NULL input.";
        EXPECT_NE(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
            tbChannel.videoChannel, subType, name, data, numBytes - 1)) <<
            "Should fail on incorrect length.";
        EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
            tbChannel.videoChannel, rtcpMode));
        EXPECT_EQ(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
            tbChannel.videoChannel, subType, name, data, numBytes));
        EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
            tbChannel.videoChannel, webrtc::kRtcpCompound_RFC4585));
        tbChannel.StopSend();
        // Sending app-defined RTCP requires an active send stream.
        EXPECT_NE(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
            tbChannel.videoChannel, subType, name, data, numBytes));
    }
    //
    // Statistics
    //
    // Tested in SimpleTest(), we'll get errors if we haven't received a RTCP
    // packet.

    //
    // RTP Dump
    //
    {
        std::string output_file = webrtc::test::OutputPath() +
            "DumpFileName.rtp";
        const char* dumpName = output_file.c_str();

        EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
            tbChannel.videoChannel, dumpName, webrtc::kRtpIncoming));
        EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
            tbChannel.videoChannel, webrtc::kRtpIncoming));
        // Stopping an already stopped dump must fail.
        EXPECT_NE(0, ViE.rtp_rtcp->StopRTPDump(
            tbChannel.videoChannel, webrtc::kRtpIncoming));
        EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
            tbChannel.videoChannel, dumpName, webrtc::kRtpOutgoing));
        EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
            tbChannel.videoChannel, webrtc::kRtpOutgoing));
        EXPECT_NE(0, ViE.rtp_rtcp->StopRTPDump(
            tbChannel.videoChannel, webrtc::kRtpOutgoing));
        // 3 is outside the valid RTPDirections range.
        EXPECT_NE(0, ViE.rtp_rtcp->StartRTPDump(
            tbChannel.videoChannel, dumpName, (webrtc::RTPDirections) 3));
    }
    //
    // RTP/RTCP Observers
    //
    {
        ViERtpObserver rtpObserver;
        EXPECT_EQ(0, ViE.rtp_rtcp->RegisterRTPObserver(
            tbChannel.videoChannel, rtpObserver));
        // Only one observer may be registered at a time; re-registering and
        // double-deregistering must both fail.
        EXPECT_NE(0, ViE.rtp_rtcp->RegisterRTPObserver(
            tbChannel.videoChannel, rtpObserver));
        EXPECT_EQ(0, ViE.rtp_rtcp->DeregisterRTPObserver(
            tbChannel.videoChannel));
        EXPECT_NE(0, ViE.rtp_rtcp->DeregisterRTPObserver(
            tbChannel.videoChannel));
    }
    //
    // PLI
    //
    {
        // Setting the same key frame request method twice is allowed.
        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestNone));
        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestNone));
    }
    //
    // NACK
    //
    {
        EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, true));
    }

    // Timestamp offset extension.
    // Valid range is 1 to 14 inclusive.
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
        tbChannel.videoChannel, true, 0));
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
        tbChannel.videoChannel, true, 15));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
        tbChannel.videoChannel, false, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
        tbChannel.videoChannel, false, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
        tbChannel.videoChannel, false, 3));

    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
        tbChannel.videoChannel, true, 0));
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
        tbChannel.videoChannel, true, 15));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
        tbChannel.videoChannel, false, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
        tbChannel.videoChannel, false, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
        tbChannel.videoChannel, false, 3));

    // Absolute send time extension.
    // Valid range is 1 to 14 inclusive.
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetSendAbsoluteSendTimeStatus(
        tbChannel.videoChannel, true, 0));
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetSendAbsoluteSendTimeStatus(
        tbChannel.videoChannel, true, 15));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendAbsoluteSendTimeStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendAbsoluteSendTimeStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendAbsoluteSendTimeStatus(
        tbChannel.videoChannel, false, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendAbsoluteSendTimeStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendAbsoluteSendTimeStatus(
        tbChannel.videoChannel, false, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendAbsoluteSendTimeStatus(
        tbChannel.videoChannel, false, 3));

    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiveAbsoluteSendTimeStatus(
        tbChannel.videoChannel, true, 0));
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiveAbsoluteSendTimeStatus(
        tbChannel.videoChannel, true, 15));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveAbsoluteSendTimeStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveAbsoluteSendTimeStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveAbsoluteSendTimeStatus(
        tbChannel.videoChannel, false, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveAbsoluteSendTimeStatus(
        tbChannel.videoChannel, true, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveAbsoluteSendTimeStatus(
        tbChannel.videoChannel, false, 3));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveAbsoluteSendTimeStatus(
        tbChannel.videoChannel, false, 3));

    // Transmission smoothing.
    const int invalid_channel_id = 17;
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetTransmissionSmoothingStatus(
        invalid_channel_id, true));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetTransmissionSmoothingStatus(
        tbChannel.videoChannel, true));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetTransmissionSmoothingStatus(
        tbChannel.videoChannel, true));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetTransmissionSmoothingStatus(
        tbChannel.videoChannel, false));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetTransmissionSmoothingStatus(
        tbChannel.videoChannel, false));

    // Buffering mode - sender side.
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetSenderBufferingMode(
        invalid_channel_id, 0));
    int invalid_delay = -1;
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetSenderBufferingMode(
        tbChannel.videoChannel, invalid_delay));
    invalid_delay = 15000;
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetSenderBufferingMode(
        tbChannel.videoChannel, invalid_delay));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSenderBufferingMode(
        tbChannel.videoChannel, 5000));

    // Buffering mode - receiver side.
    // Run without VoE to verify that it does not crash, but returns an error.
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiverBufferingMode(
        tbChannel.videoChannel, 0));
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiverBufferingMode(
        tbChannel.videoChannel, 2000));

    // Set VoE (required to set up stream-sync).
    webrtc::VoiceEngine* voice_engine = webrtc::VoiceEngine::Create();
    EXPECT_TRUE(NULL != voice_engine);
    webrtc::VoEBase* voe_base = webrtc::VoEBase::GetInterface(voice_engine);
    EXPECT_TRUE(NULL != voe_base);
    EXPECT_EQ(0, voe_base->Init());
    int audio_channel = voe_base->CreateChannel();
    EXPECT_NE(-1, audio_channel);
    EXPECT_EQ(0, ViE.base->SetVoiceEngine(voice_engine));
    EXPECT_EQ(0, ViE.base->ConnectAudioChannel(tbChannel.videoChannel,
                                               audio_channel));
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiverBufferingMode(
        invalid_channel_id, 0));
    // NOTE(review): |invalid_delay| is still 15000 here, so the negative-delay
    // case is never exercised on the receiver side; an |invalid_delay = -1;|
    // reset (mirroring the sender-side sequence above) was presumably
    // intended — verify before relying on this coverage.
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiverBufferingMode(
        tbChannel.videoChannel, invalid_delay));
    invalid_delay = 15000;
    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiverBufferingMode(
        tbChannel.videoChannel, invalid_delay));
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiverBufferingMode(
        tbChannel.videoChannel, 5000));

    // Real-time mode - sender side.
    EXPECT_EQ(0, ViE.rtp_rtcp->SetSenderBufferingMode(
        tbChannel.videoChannel, 0));
    // Real-time mode - receiver side.
    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiverBufferingMode(
        tbChannel.videoChannel, 0));

    EXPECT_EQ(0, ViE.base->DisconnectAudioChannel(tbChannel.videoChannel));
    EXPECT_EQ(0, ViE.base->SetVoiceEngine(NULL));
    EXPECT_EQ(0, voe_base->DeleteChannel(audio_channel));
    voe_base->Release();
    EXPECT_TRUE(webrtc::VoiceEngine::Delete(voice_engine));

    //***************************************************************
    // Testing finished. Tear down Video Engine
    //***************************************************************
}

View file

@ -1,642 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <iostream> // NOLINT
#include "webrtc/common_types.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/libvietest/include/tb_external_transport.h"
#include "webrtc/voice_engine/include/voe_base.h"
// Selects how the simulated relay (TbExternalTransport) forwards the
// simulcast streams: either a single SSRC-filtered stream or all of them.
enum RelayMode {
  kRelayOneStream = 1,
  kRelayAllStreams = 2
};

// RTP payload types used for RED / ULPFEC protection packets.
#define VCM_RED_PAYLOAD_TYPE 96
#define VCM_ULPFEC_PAYLOAD_TYPE 97

// Number of simulcast streams (and matching receive channels) in the test.
const int kNumStreams = 3;
// Configures |video_codec| as a single (non-simulcast) 1200x800 stream.
void InitialSingleStreamSettings(webrtc::VideoCodec* video_codec) {
  webrtc::VideoCodec& codec = *video_codec;
  codec.numberOfSimulcastStreams = 0;
  codec.width = 1200;
  codec.height = 800;
}
// Configures |video_codec| as a kNumStreams-layer simulcast ladder on top of
// a 1280x720 stream: 320x180 at up to 100 kbps, 640x360 at 200-500 kbps and
// 1280x720 at 900-1200 kbps. Temporal layers are disabled and every layer
// inherits the codec-wide qpMax.
void SetSimulcastSettings(webrtc::VideoCodec* video_codec) {
  video_codec->width = 1280;
  video_codec->height = 720;

  // Simulcast settings, lowest resolution first.
  video_codec->numberOfSimulcastStreams = kNumStreams;
  const int kLayerWidths[kNumStreams] = {320, 640, 1280};
  const int kLayerHeights[kNumStreams] = {180, 360, 720};
  const int kLayerMaxBitrates[kNumStreams] = {100, 500, 1200};
  const int kLayerTargetBitrates[kNumStreams] = {100, 500, 1200};
  const int kLayerMinBitrates[kNumStreams] = {0, 200, 900};
  for (int layer = 0; layer < kNumStreams; ++layer) {
    video_codec->simulcastStream[layer].width = kLayerWidths[layer];
    video_codec->simulcastStream[layer].height = kLayerHeights[layer];
    video_codec->simulcastStream[layer].numberOfTemporalLayers = 0;
    video_codec->simulcastStream[layer].maxBitrate = kLayerMaxBitrates[layer];
    video_codec->simulcastStream[layer].targetBitrate =
        kLayerTargetBitrates[layer];
    video_codec->simulcastStream[layer].minBitrate = kLayerMinBitrates[layer];
    video_codec->simulcastStream[layer].qpMax = video_codec->qpMax;
  }
}
// Reconfigures |video_codec| at runtime to send a single 1200x800 stream
// while keeping kNumStreams simulcast layers declared: every layer's bitrates
// are zeroed so only the codec-level stream carries data.
void RuntimeSingleStreamSettings(webrtc::VideoCodec* video_codec) {
  // Start from the full simulcast configuration, then override it.
  SetSimulcastSettings(video_codec);
  video_codec->width = 1200;
  video_codec->height = 800;
  video_codec->numberOfSimulcastStreams = kNumStreams;
  for (int layer = 0; layer < kNumStreams; ++layer) {
    video_codec->simulcastStream[layer].maxBitrate = 0;
    video_codec->simulcastStream[layer].targetBitrate = 0;
    video_codec->simulcastStream[layer].minBitrate = 0;
  }
}
// Interactive simulcast loopback test. Captures local video, encodes it as a
// three-layer VP8 simulcast stream and loops it through a simulated relay
// (TbExternalTransport) back into local receive channels, rendering capture
// in |window1| and the received stream in |window2|. Prompts on stdin for the
// relay mode, capture device and start bitrate, then lets the user toggle the
// SSRC filter and switch between simulcast and single-stream at runtime.
// Returns 0 on success and -1 on the first failing API call.
//
// Fixes vs. the original: the ViECapture::GetInterface result is now checked
// (the old code re-checked |vie_base|), and the SetRembStatus return values
// are assigned to |error| so their error checks are no longer dead; several
// error messages now name the interface actually called.
int VideoEngineSimulcastTest(void* window1, void* window2) {
  // *******************************************************
  // Begin create/initialize Video Engine for testing
  // *******************************************************
  int error = 0;
  int receive_channels[kNumStreams];

  // Create a VideoEngine instance.
  webrtc::VideoEngine* video_engine = NULL;
  video_engine = webrtc::VideoEngine::Create();
  if (video_engine == NULL) {
    printf("ERROR in VideoEngine::Create\n");
    return -1;
  }
  error = video_engine->SetTraceFilter(webrtc::kTraceAll);
  if (error == -1) {
    printf("ERROR in VideoEngine::SetTraceLevel\n");
    return -1;
  }
  std::string trace_file =
      ViETest::GetResultOutputPath() + "ViESimulcast_trace.txt";
  error = video_engine->SetTraceFile(trace_file.c_str());
  if (error == -1) {
    printf("ERROR in VideoEngine::SetTraceFile\n");
    return -1;
  }
  // Init VideoEngine and create a channel.
  webrtc::ViEBase* vie_base = webrtc::ViEBase::GetInterface(video_engine);
  if (vie_base == NULL) {
    printf("ERROR in ViEBase::GetInterface\n");
    return -1;
  }
  error = vie_base->Init();
  if (error == -1) {
    printf("ERROR in ViEBase::Init\n");
    return -1;
  }
  RelayMode relay_mode = kRelayOneStream;
  printf("Select relay mode:\n");
  printf("\t1. Relay one stream\n");
  printf("\t2. Relay all streams\n");
  if (scanf("%d", reinterpret_cast<int*>(&relay_mode)) != 1) {
    printf("Error in scanf()\n");
    return -1;
  }
  getchar();
  webrtc::ViERTP_RTCP* vie_rtp_rtcp =
      webrtc::ViERTP_RTCP::GetInterface(video_engine);
  if (vie_rtp_rtcp == NULL) {
    printf("ERROR in ViERTP_RTCP::GetInterface\n");
    return -1;
  }
  int video_channel = -1;
  error = vie_base->CreateChannel(video_channel);
  if (error == -1) {
    printf("ERROR in ViEBase::CreateChannel\n");
    return -1;
  }
  // One receive channel per simulcast stream, all sharing the send channel's
  // RTP state.
  for (int i = 0; i < kNumStreams; ++i) {
    receive_channels[i] = -1;
    error = vie_base->CreateReceiveChannel(receive_channels[i], video_channel);
    if (error == -1) {
      printf("ERROR in ViEBase::CreateChannel\n");
      return -1;
    }
  }
  // List available capture devices, allocate and connect.
  webrtc::ViECapture* vie_capture =
      webrtc::ViECapture::GetInterface(video_engine);
  // Check the interface just requested (the original checked |vie_base|).
  if (vie_capture == NULL) {
    printf("ERROR in ViECapture::GetInterface\n");
    return -1;
  }
  const unsigned int KMaxDeviceNameLength = 128;
  const unsigned int KMaxUniqueIdLength = 256;
  char device_name[KMaxDeviceNameLength];
  memset(device_name, 0, KMaxDeviceNameLength);
  char unique_id[KMaxUniqueIdLength];
  memset(unique_id, 0, KMaxUniqueIdLength);
  printf("Available capture devices:\n");
  int capture_idx = 0;
  for (capture_idx = 0; capture_idx < vie_capture->NumberOfCaptureDevices();
       capture_idx++) {
    memset(device_name, 0, KMaxDeviceNameLength);
    memset(unique_id, 0, KMaxUniqueIdLength);
    error = vie_capture->GetCaptureDevice(capture_idx, device_name,
                                          KMaxDeviceNameLength, unique_id,
                                          KMaxUniqueIdLength);
    if (error == -1) {
      printf("ERROR in ViECapture::GetCaptureDevice\n");
      return -1;
    }
    printf("\t %d. %s\n", capture_idx + 1, device_name);
  }
  printf("\nChoose capture device: ");
#ifdef WEBRTC_ANDROID
  capture_idx = 0;
  printf("0\n");
#else
  if (scanf("%d", &capture_idx) != 1) {
    printf("Error in scanf()\n");
    return -1;
  }
  getchar();
  // Compensate for idx start at 1.
  capture_idx = capture_idx - 1;
#endif
  error = vie_capture->GetCaptureDevice(capture_idx, device_name,
                                        KMaxDeviceNameLength, unique_id,
                                        KMaxUniqueIdLength);
  if (error == -1) {
    printf("ERROR in ViECapture::GetCaptureDevice\n");
    return -1;
  }
  int capture_id = 0;
  error = vie_capture->AllocateCaptureDevice(unique_id, KMaxUniqueIdLength,
                                             capture_id);
  if (error == -1) {
    printf("ERROR in ViECapture::AllocateCaptureDevice\n");
    return -1;
  }
  error = vie_capture->ConnectCaptureDevice(capture_id, video_channel);
  if (error == -1) {
    printf("ERROR in ViECapture::ConnectCaptureDevice\n");
    return -1;
  }
  error = vie_capture->StartCapture(capture_id);
  if (error == -1) {
    printf("ERROR in ViECapture::StartCapture\n");
    return -1;
  }
  // RTP/RTCP settings: compound RTCP, REMB (send side on the send channel,
  // receive side on the receive channels) and PLI key frame requests.
  error = vie_rtp_rtcp->SetRTCPStatus(video_channel,
                                      webrtc::kRtcpCompound_RFC4585);
  if (error == -1) {
    printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
    return -1;
  }
  // Assign the return value so the check below is meaningful (the original
  // tested a stale |error|).
  error = vie_rtp_rtcp->SetRembStatus(video_channel, true, false);
  if (error == -1) {
    printf("ERROR in ViERTP_RTCP::SetRembStatus\n");
    return -1;
  }
  error = vie_rtp_rtcp->SetKeyFrameRequestMethod(
      video_channel, webrtc::kViEKeyFrameRequestPliRtcp);
  if (error == -1) {
    printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
    return -1;
  }
  for (int i = 0; i < kNumStreams; ++i) {
    error = vie_rtp_rtcp->SetRTCPStatus(receive_channels[i],
                                        webrtc::kRtcpCompound_RFC4585);
    if (error == -1) {
      printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
      return -1;
    }
    error = vie_rtp_rtcp->SetRembStatus(receive_channels[i], false, true);
    if (error == -1) {
      printf("ERROR in ViERTP_RTCP::SetRembStatus\n");
      return -1;
    }
    error = vie_rtp_rtcp->SetKeyFrameRequestMethod(
        receive_channels[i], webrtc::kViEKeyFrameRequestPliRtcp);
    if (error == -1) {
      printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
      return -1;
    }
  }
  // Set up rendering: local capture in |window1|, received video in |window2|.
  webrtc::ViERender* vie_render = webrtc::ViERender::GetInterface(video_engine);
  if (vie_render == NULL) {
    printf("ERROR in ViERender::GetInterface\n");
    return -1;
  }
  error = vie_render->AddRenderer(capture_id, window1, 0, 0.0, 0.0, 1.0, 1.0);
  if (error == -1) {
    printf("ERROR in ViERender::AddRenderer\n");
    return -1;
  }
  error = vie_render->StartRender(capture_id);
  if (error == -1) {
    printf("ERROR in ViERender::StartRender\n");
    return -1;
  }
  // Only rendering the thumbnail.
  int channel_to_render = video_channel;
  if (relay_mode == kRelayAllStreams) {
    channel_to_render = receive_channels[0];
  }
  error = vie_render->AddRenderer(channel_to_render, window2, 1, 0.0, 0.0, 1.0,
                                  1.0);
  if (error == -1) {
    printf("ERROR in ViERender::AddRenderer\n");
    return -1;
  }
  error = vie_render->StartRender(channel_to_render);
  if (error == -1) {
    printf("ERROR in ViERender::StartRender\n");
    return -1;
  }
  // Setup codecs.
  webrtc::ViECodec* vie_codec = webrtc::ViECodec::GetInterface(video_engine);
  if (vie_codec == NULL) {
    printf("ERROR in ViECodec::GetInterface\n");
    return -1;
  }
  // Check available codecs and prepare receive codecs. Only VP8 is used;
  // the loop stops at the first VP8 entry.
  printf("\nAvailable codecs:\n");
  webrtc::VideoCodec video_codec;
  memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
  int codec_idx = 0;
  for (codec_idx = 0; codec_idx < vie_codec->NumberOfCodecs(); codec_idx++) {
    error = vie_codec->GetCodec(codec_idx, video_codec);
    if (error == -1) {
      printf("ERROR in ViECodec::GetCodec\n");
      return -1;
    }
    // Try to keep the test frame size small when I420.
    if (video_codec.codecType != webrtc::kVideoCodecVP8) {
      continue;
    }
    for (int i = 0; i < kNumStreams; ++i) {
      error = vie_codec->SetReceiveCodec(receive_channels[i], video_codec);
      if (error == -1) {
        printf("ERROR in ViECodec::SetReceiveCodec\n");
        return -1;
      }
    }
    if (video_codec.codecType != webrtc::kVideoCodecRED &&
        video_codec.codecType != webrtc::kVideoCodecULPFEC) {
      printf("\t %d. %s\n", codec_idx + 1, video_codec.plName);
    }
    break;
  }
  error = vie_codec->GetCodec(codec_idx, video_codec);
  if (error == -1) {
    printf("ERROR in ViECodec::GetCodec\n");
    return -1;
  }
  bool simulcast_mode = true;
  int num_streams = 1;
  // Set spatial resolution option.
  if (simulcast_mode) {
    SetSimulcastSettings(&video_codec);
    num_streams = video_codec.numberOfSimulcastStreams;
  } else {
    InitialSingleStreamSettings(&video_codec);
    num_streams = 1;
  }
  // Set start bit rate.
  std::string str;
  std::cout << std::endl;
  std::cout << "Choose start rate (in kbps). Press enter for default: ";
  std::getline(std::cin, str);
  int start_rate = atoi(str.c_str());
  if (start_rate != 0) {
    video_codec.startBitrate = start_rate;
  }
  error = vie_codec->SetSendCodec(video_channel, video_codec);
  if (error == -1) {
    printf("ERROR in ViECodec::SetSendCodec\n");
    return -1;
  }
  // Address settings.
  webrtc::ViENetwork* vie_network =
      webrtc::ViENetwork::GetInterface(video_engine);
  if (vie_network == NULL) {
    printf("ERROR in ViENetwork::GetInterface\n");
    return -1;
  }
  // Map each simulcast SSRC (1-based) to its receive channel so the external
  // transport can demultiplex when relaying all streams.
  TbExternalTransport::SsrcChannelMap ssrc_channel_map;
  for (int idx = 0; idx < num_streams; idx++) {
    error = vie_rtp_rtcp->SetLocalSSRC(video_channel, idx + 1,  // SSRC
                                       webrtc::kViEStreamTypeNormal, idx);
    ssrc_channel_map[idx + 1] = receive_channels[idx];
    if (error == -1) {
      printf("ERROR in ViERTP_RTCP::SetLocalSSRC(idx:%d)\n",
             idx);
      return -1;
    }
  }
  TbExternalTransport::SsrcChannelMap* channel_map = &ssrc_channel_map;
  if (relay_mode == kRelayOneStream) {
    channel_map = NULL;
  }
  // Setting External transport.
  TbExternalTransport ext_transport(*vie_network, video_channel, channel_map);
  error = vie_network->RegisterSendTransport(video_channel, ext_transport);
  if (error == -1) {
    printf("ERROR in ViENetwork::RegisterSendTransport\n");
    return -1;
  }
  for (int i = 0; i < kNumStreams; ++i) {
    error = vie_network->RegisterSendTransport(receive_channels[i],
                                               ext_transport);
    if (error == -1) {
      printf("ERROR in ViENetwork::RegisterSendTransport\n");
      return -1;
    }
  }
  // Set network one-way delay value.
  // 10 ms one-way delay.
  NetworkParameters network;
  network.loss_model = kUniformLoss;
  network.mean_one_way_delay = 10;
  ext_transport.SetNetworkParameters(network);
  if (relay_mode == kRelayOneStream) {
    ext_transport.SetSSRCFilter(num_streams);
  }
  error = vie_base->StartSend(video_channel);
  if (error == -1) {
    printf("ERROR in ViEBase::StartSend\n");
    return -1;
  }
  error = vie_base->StartReceive(video_channel);
  if (error == -1) {
    printf("ERROR in ViEBase::StartReceive\n");
    return -1;
  }
  for (int i = 0; i < kNumStreams; ++i) {
    error = vie_base->StartReceive(receive_channels[i]);
    if (error == -1) {
      printf("ERROR in ViEBase::StartReceive\n");
      return -1;
    }
  }
  // Create a receive channel to verify that it doesn't mess up toggling
  // between single stream and simulcast.
  int video_channel2 = -1;
  error = vie_base->CreateReceiveChannel(video_channel2, video_channel);
  if (error == -1) {
    printf("ERROR in ViEBase::CreateReceiveChannel\n");
    return -1;
  }
  // *******************************************************
  // Engine started
  // *******************************************************
  printf("\nSimulcast call started\n\n");
  do {
    printf("Enter new SSRC filter 1,2 or 3\n");
    printf("... or 0 to switch between simulcast and a single stream\n");
    printf("Press enter to stop...");
    str.clear();
    std::getline(std::cin, str);
    if (!str.empty()) {
      int ssrc = atoi(str.c_str());
      if (ssrc == 0) {
        // Toggle between simulcast and a single stream with different
        // resolution.
        if (simulcast_mode) {
          RuntimeSingleStreamSettings(&video_codec);
          num_streams = 1;
          printf("Disabling simulcast\n");
        } else {
          SetSimulcastSettings(&video_codec);
          num_streams = video_codec.numberOfSimulcastStreams;
          printf("Enabling simulcast\n");
        }
        simulcast_mode = !simulcast_mode;
        if (vie_codec->SetSendCodec(video_channel, video_codec) != 0) {
          printf("ERROR switching between simulcast and single stream\n");
          return -1;
        }
        for (int idx = 0; idx < num_streams; idx++) {
          error = vie_rtp_rtcp->SetLocalSSRC(video_channel, idx + 1,  // SSRC
                                             webrtc::kViEStreamTypeNormal, idx);
          if (error == -1) {
            printf("ERROR in ViERTP_RTCP::SetLocalSSRC(idx:%d)\n", idx);
            return -1;
          }
        }
        if (relay_mode == kRelayOneStream) {
          ext_transport.SetSSRCFilter(num_streams);
        }
      } else if (ssrc > 0 && ssrc < 4) {
        if (relay_mode == kRelayOneStream) {
          ext_transport.SetSSRCFilter(ssrc);
        }
      } else {
        printf("Invalid SSRC\n");
      }
    } else {
      break;
    }
  } while (true);
  // *******************************************************
  // Testing finished. Tear down Video Engine
  // *******************************************************
  error = vie_base->DeleteChannel(video_channel2);
  if (error == -1) {
    printf("ERROR in ViEBase::DeleteChannel\n");
    return -1;
  }
  for (int i = 0; i < kNumStreams; ++i) {
    error = vie_base->StopReceive(receive_channels[i]);
    if (error == -1) {
      printf("ERROR in ViEBase::StopReceive\n");
      return -1;
    }
  }
  error = vie_base->StopReceive(video_channel);
  if (error == -1) {
    printf("ERROR in ViEBase::StopReceive\n");
    return -1;
  }
  error = vie_base->StopSend(video_channel);
  if (error == -1) {
    printf("ERROR in ViEBase::StopSend\n");
    return -1;
  }
  error = vie_render->StopRender(capture_id);
  if (error == -1) {
    printf("ERROR in ViERender::StopRender\n");
    return -1;
  }
  error = vie_render->RemoveRenderer(capture_id);
  if (error == -1) {
    printf("ERROR in ViERender::RemoveRenderer\n");
    return -1;
  }
  error = vie_render->StopRender(channel_to_render);
  if (error == -1) {
    printf("ERROR in ViERender::StopRender\n");
    return -1;
  }
  error = vie_render->RemoveRenderer(channel_to_render);
  if (error == -1) {
    printf("ERROR in ViERender::RemoveRenderer\n");
    return -1;
  }
  error = vie_capture->StopCapture(capture_id);
  if (error == -1) {
    printf("ERROR in ViECapture::StopCapture\n");
    return -1;
  }
  error = vie_capture->DisconnectCaptureDevice(video_channel);
  if (error == -1) {
    printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
    return -1;
  }
  error = vie_capture->ReleaseCaptureDevice(capture_id);
  if (error == -1) {
    printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
    return -1;
  }
  for (int i = 0; i < kNumStreams; ++i) {
    error = vie_base->DeleteChannel(receive_channels[i]);
    if (error == -1) {
      printf("ERROR in ViEBase::DeleteChannel\n");
      return -1;
    }
  }
  error = vie_base->DeleteChannel(video_channel);
  if (error == -1) {
    printf("ERROR in ViEBase::DeleteChannel\n");
    return -1;
  }
  // Release all interfaces; a non-zero total means something still holds a
  // reference.
  int remaining_interfaces = 0;
  remaining_interfaces = vie_codec->Release();
  remaining_interfaces += vie_capture->Release();
  remaining_interfaces += vie_rtp_rtcp->Release();
  remaining_interfaces += vie_render->Release();
  remaining_interfaces += vie_network->Release();
  remaining_interfaces += vie_base->Release();
  if (remaining_interfaces > 0) {
    printf("ERROR: Could not release all interfaces\n");
    return -1;
  }
  bool deleted = webrtc::VideoEngine::Delete(video_engine);
  if (deleted == false) {
    printf("ERROR in VideoEngine::Delete\n");
    return -1;
  }
  return 0;
}
// Runs the interactive simulcast call test and logs a banner with the
// outcome. Returns 0 on success, 1 on failure.
int ViEAutoTest::ViESimulcastCall() {
  ViETest::Log(" ");
  ViETest::Log("========================================");
  ViETest::Log(" ViE Autotest Simulcast Call\n");
  const bool succeeded = VideoEngineSimulcastTest(_window1, _window2) == 0;
  ViETest::Log(" ");
  if (succeeded) {
    ViETest::Log(" ViE Autotest Simulcast Call Done");
  } else {
    ViETest::Log(" ViE Autotest Simulcast Call Failed");
  }
  ViETest::Log("========================================");
  ViETest::Log(" ");
  return succeeded ? 0 : 1;
}

View file

@ -1,204 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest_windows.cc
//
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_windows.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_main.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include <windows.h>
#ifdef _DEBUG
//#include "vld.h"
#endif
// Disable Visual studio warnings
// 'this' : used in base member initializer list
#pragma warning(disable: 4355)
// Window procedure for the auto-test render windows: posts a quit message
// when a window is destroyed and defers every message (including WM_COMMAND,
// which is deliberately ignored) to the default handler.
LRESULT CALLBACK ViEAutoTestWinProc(HWND hWnd, UINT uMsg, WPARAM wParam,
                                    LPARAM lParam) {
  if (uMsg == WM_DESTROY) {
    PostQuitMessage(WM_QUIT);
  }
  return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
// Initializes all members; the actual Win32 windows are created later by the
// event thread (see CreateWindows()/EventLoop()).
ViEAutoTestWindowManager::ViEAutoTestWindowManager()
    : _window1(NULL),
      _window2(NULL),
      _terminate(false),
      // Thread that will own the windows and pump their message queue.
      _eventThread(webrtc::ThreadWrapper::CreateThread(
          EventProcess, this, "ViEAutotestEventThread")),
      _crit(*webrtc::CriticalSectionWrapper::CreateCriticalSection()),
      _hwnd1(NULL),
      _hwnd2(NULL),
      _hwnd1Size(),
      _hwnd2Size(),
      _hwnd1Title(),
      _hwnd2Title() {
}
// Destroys any windows still alive and frees the critical section allocated
// in the constructor (the two windows are independent, so destruction order
// does not matter).
ViEAutoTestWindowManager::~ViEAutoTestWindowManager() {
  if (_hwnd2 != NULL)
    ViEDestroyWindow(_hwnd2);
  if (_hwnd1 != NULL)
    ViEDestroyWindow(_hwnd1);
  delete &_crit;
}
// Returns the first render window handle (NULL until the event thread has
// created it in EventLoop()).
void* ViEAutoTestWindowManager::GetWindow1() {
  return _window1;
}
// Returns the second render window handle (NULL until the event thread has
// created it in EventLoop()).
void* ViEAutoTestWindowManager::GetWindow2() {
  return _window2;
}
// Records the requested window geometry and titles, starts the event thread
// (which creates the actual Win32 windows), and blocks until the first
// window handle has been published. Always returns 0.
int ViEAutoTestWindowManager::CreateWindows(AutoTestRect window1Size,
                                            AutoTestRect window2Size,
                                            void* window1Title,
                                            void* window2Title) {
  _hwnd1Size.Copy(window1Size);
  _hwnd2Size.Copy(window2Size);
  memcpy(_hwnd1Title, window1Title, TITLE_LENGTH);
  memcpy(_hwnd2Title, window2Title, TITLE_LENGTH);

  _eventThread->Start();
  // Poll until EventLoop() has set _window1, taking the lock only while
  // checking. Note: the loop deliberately breaks while still holding the
  // lock; it is released by the Leave() after the loop.
  do {
    _crit.Enter();
    if (_window1 != NULL) {
      break;
    }
    _crit.Leave();
    AutoTestSleep(10);
  } while (true);
  _crit.Leave();
  return 0;
}
// Signals the event loop to exit, waits for the thread to stop and then
// destroys the thread object under the lock. Always returns 0.
int ViEAutoTestWindowManager::TerminateWindows() {
  _terminate = true;
  _eventThread->Stop();
  _crit.Enter();
  _eventThread.reset();
  _crit.Leave();
  return 0;
}
bool ViEAutoTestWindowManager::EventProcess(void* obj) {
return static_cast<ViEAutoTestWindowManager*> (obj)->EventLoop();
}
// Runs on the event thread: creates both windows (Win32 windows must be
// serviced by the thread that created them), publishes their handles, then
// pumps messages until TerminateWindows() sets _terminate. The lock is
// released around the sleep so other threads can observe _window1/_window2.
// Returns false so the thread wrapper does not call this function again.
bool ViEAutoTestWindowManager::EventLoop() {
  _crit.Enter();
  ViECreateWindow(_hwnd1, _hwnd1Size.origin.x, _hwnd1Size.origin.y,
                  _hwnd1Size.size.width, _hwnd1Size.size.height, _hwnd1Title);
  ViECreateWindow(_hwnd2, _hwnd2Size.origin.x, _hwnd2Size.origin.y,
                  _hwnd2Size.size.width, _hwnd2Size.size.height, _hwnd2Title);
  // Publishing the handles unblocks the poll loop in CreateWindows().
  _window1 = (void*) _hwnd1;
  _window2 = (void*) _hwnd2;
  MSG msg;
  while (!_terminate) {
    if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
      TranslateMessage(&msg);
      DispatchMessage(&msg);
    }
    // Yield the lock while sleeping so other threads are not starved.
    _crit.Leave();
    AutoTestSleep(10);
    _crit.Enter();
  }
  ViEDestroyWindow(_hwnd1);
  ViEDestroyWindow(_hwnd2);
  _crit.Leave();
  return false;
}
// Registers a window class named after |className| and creates and shows an
// overlapped window at the given position/size, storing the handle in
// |hwndMain|. Returns 0 on success, -1 if CreateWindowEx fails. The return
// value of RegisterClassEx is ignored (re-registering an existing class
// fails harmlessly here).
int ViEAutoTestWindowManager::ViECreateWindow(HWND &hwndMain, int xPos,
                                              int yPos, int width, int height,
                                              TCHAR* className) {
  HINSTANCE hinst = GetModuleHandle(0);
  // Fill in every field of the class descriptor explicitly (no memset).
  WNDCLASSEX wcx;
  wcx.hInstance = hinst;
  wcx.lpszClassName = className;
  wcx.lpfnWndProc = (WNDPROC) ViEAutoTestWinProc;
  wcx.style = CS_DBLCLKS;
  wcx.hIcon = LoadIcon(NULL, IDI_APPLICATION);
  wcx.hIconSm = LoadIcon(NULL, IDI_APPLICATION);
  wcx.hCursor = LoadCursor(NULL, IDC_ARROW);
  wcx.lpszMenuName = NULL;
  wcx.cbSize = sizeof(WNDCLASSEX);
  wcx.cbClsExtra = 0;
  wcx.cbWndExtra = 0;
  wcx.hbrBackground = GetSysColorBrush(COLOR_3DFACE);
  RegisterClassEx(&wcx);
  // Create the main window.
  hwndMain = CreateWindowEx(0,          // no extended styles
                            className,  // class name
                            className,  // window name
                            WS_OVERLAPPED | WS_THICKFRAME,  // overlapped window
                            xPos,          // horizontal position
                            yPos,          // vertical position
                            width,         // width
                            height,        // height
                            (HWND) NULL,   // no parent or owner window
                            (HMENU) NULL,  // class menu used
                            hinst,         // instance handle
                            NULL);         // no window creation data
  if (!hwndMain)
    return -1;
  // Show the window using the flag specified by the program
  // that started the application, and send the application
  // a WM_PAINT message.
  ShowWindow(hwndMain, SW_SHOWDEFAULT);
  UpdateWindow(hwndMain);
  ::SetWindowPos(hwndMain, HWND_TOP, xPos, yPos, width, height,
                 SWP_FRAMECHANGED);
  return 0;
}
// Destroys the given window. The Win32 return value is deliberately ignored;
// this helper always reports success.
int ViEAutoTestWindowManager::ViEDestroyWindow(HWND& hwnd) {
  ::DestroyWindow(hwnd);
  return 0;
}
// Meant to put the terminal window on top; not implemented on Windows, so
// this simply reports success.
bool ViEAutoTestWindowManager::SetTopmostWindow() {
  return true;
}
int main(int argc, char* argv[]) {
ViEAutoTestMain auto_test;
return auto_test.RunTests(argc, argv);
}

View file

@ -1,125 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_defines.h"
#include "webrtc/video_engine/test/auto_test/primitives/base_primitives.h"
#include "webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.h"
#include "webrtc/video_engine/test/auto_test/primitives/general_primitives.h"
#include "webrtc/video_engine/test/libvietest/include/tb_external_transport.h"
#include "webrtc/video_engine/test/libvietest/include/tb_interfaces.h"
#include "webrtc/video_engine/test/libvietest/include/vie_external_render_filter.h"
#include "webrtc/video_engine/test/libvietest/include/vie_fake_camera.h"
#include "webrtc/video_engine/test/libvietest/include/vie_to_file_renderer.h"
// Runs a basic I420 call-setup test with a fake (file-fed) camera as the
// video source. The remote side renders to |remote_file_renderer| via the
// render interface; the local side is captured through a send-side effect
// filter so only frames that are actually sent end up in
// |local_file_renderer|. Returns false if the input video cannot be opened,
// true otherwise (individual check failures are reported via gtest macros).
bool ViEFileBasedComparisonTests::TestCallSetup(
    const std::string& i420_video_file,
    int width,
    int height,
    ViEToFileRenderer* local_file_renderer,
    ViEToFileRenderer* remote_file_renderer) {
  TbInterfaces interfaces("TestCallSetup");
  int video_channel = -1;
  EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));
  ViEFakeCamera fake_camera(interfaces.capture);
  if (!fake_camera.StartCameraInNewThread(i420_video_file,
                                          width,
                                          height)) {
    // No point in continuing if we have no proper video source
    ADD_FAILURE() << "Could not open input video " << i420_video_file <<
        ": aborting test...";
    return false;
  }
  int capture_id = fake_camera.capture_id();
  // Apparently, we need to connect external capture devices, but we should
  // not start them since the external device is not a proper device.
  EXPECT_EQ(0, interfaces.capture->ConnectCaptureDevice(
      capture_id, video_channel));
  ConfigureRtpRtcp(interfaces.rtp_rtcp, kNack, video_channel);
  webrtc::ViERender* render_interface = interfaces.render;
  webrtc::ViEImageProcess* image_process = interfaces.image_process;
  RenderToFile(render_interface, video_channel, remote_file_renderer);
  // We make a special hookup of the local renderer to use an effect filter
  // instead of using the render interface for the capture device. This way
  // we will only render frames that actually get sent.
  webrtc::ExternalRendererEffectFilter renderer_filter(local_file_renderer);
  EXPECT_EQ(0, image_process->RegisterSendEffectFilter(video_channel,
                                                       renderer_filter));
  // Run the test itself:
  const char* device_name = "Fake Capture Device";
  ::TestI420CallSetup(interfaces.codec, interfaces.video_engine,
                      interfaces.base, interfaces.network, interfaces.rtp_rtcp,
                      video_channel, device_name);
  // Tear down rendering before stopping the camera (see note below).
  EXPECT_EQ(0, render_interface->StopRender(video_channel));
  EXPECT_EQ(0, render_interface->RemoveRenderer(video_channel));
  interfaces.capture->DisconnectCaptureDevice(video_channel);
  // Stop sending data, clean up the camera thread and release the capture
  // device. Note that this all happens after StopEverything, so this
  // tests that the system doesn't mind that the external capture device sends
  // data after rendering has been stopped.
  fake_camera.StopCamera();
  EXPECT_EQ(0, image_process->DeregisterSendEffectFilter(video_channel));
  EXPECT_EQ(0, interfaces.base->DeleteChannel(video_channel));
  return true;
}
// Runs a full-stack loopback test: captures from |i420_video_file| through a
// fake camera, sends over a simulated network with the given bit rate and
// protection method, and records results via the renderers and
// |frame_drop_detector|. Failures are reported via gtest macros; returns
// early if the input video cannot be opened.
void ViEFileBasedComparisonTests::TestFullStack(
    const std::string& i420_video_file,
    int width,
    int height,
    int bit_rate_kbps,
    ProtectionMethod protection_method,
    const NetworkParameters& network,
    ViEToFileRenderer* local_file_renderer,
    ViEToFileRenderer* remote_file_renderer,
    FrameDropDetector* frame_drop_detector) {
  TbInterfaces interfaces("TestFullStack");
  // Setup camera capturing from file.
  ViEFakeCamera fake_camera(interfaces.capture);
  if (!fake_camera.StartCameraInNewThread(i420_video_file, width, height)) {
    // No point in continuing if we have no proper video source
    ADD_FAILURE() << "Could not open input video " << i420_video_file <<
        ": aborting test...";
    return;
  }
  int video_channel = -1;
  int capture_id = fake_camera.capture_id();
  EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));
  // Must set SSRC to avoid SSRC collision detection since we're sending and
  // receiving from the same machine (that would cause frames being discarded
  // and decoder reset).
  EXPECT_EQ(0, interfaces.rtp_rtcp->SetLocalSSRC(video_channel, 12345));
  EXPECT_EQ(0, interfaces.capture->ConnectCaptureDevice(
      capture_id, video_channel));
  ConfigureRtpRtcp(interfaces.rtp_rtcp, protection_method, video_channel);
  // The global helper runs the actual send/receive loop.
  ::TestFullStack(interfaces, capture_id, video_channel, width, height,
                  bit_rate_kbps, network, frame_drop_detector,
                  remote_file_renderer, local_file_renderer);
  EXPECT_TRUE(fake_camera.StopCamera());
}

View file

@ -1,55 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/interface/vie_window_creator.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_main.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_window_manager_factory.h"
#include "webrtc/voice_engine/include/voe_codec.h"
#if defined(WIN32)
#include <tchar.h>
#endif
// Creates the platform-specific window manager. On Android no window manager
// is created, so the member is explicitly initialized to NULL; previously it
// was left uninitialized on that platform, making the destructor's delete of
// an indeterminate pointer undefined behavior.
ViEWindowCreator::ViEWindowCreator() : window_manager_(NULL) {
#ifndef WEBRTC_ANDROID
  window_manager_ =
      ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform();
#endif
}
// Releases the window manager created in the constructor.
// NOTE(review): on WEBRTC_ANDROID the constructor never assigns
// window_manager_ — verify the member is initialized before this delete.
ViEWindowCreator::~ViEWindowCreator() {
  delete window_manager_;
}
// Creates the two standard auto-test windows (352x288, side by side) via the
// platform window manager, raises them, and returns the manager so callers
// can query the window handles. Ownership of the manager stays with this
// class. Titles use TCHAR on Windows and plain char elsewhere.
ViEAutoTestWindowManagerInterface*
ViEWindowCreator::CreateTwoWindows() {
#if defined(WIN32)
  TCHAR window1Title[1024] = _T("ViE Autotest Window 1");
  TCHAR window2Title[1024] = _T("ViE Autotest Window 2");
#else
  char window1Title[1024] = "ViE Autotest Window 1";
  char window2Title[1024] = "ViE Autotest Window 2";
#endif
  AutoTestRect window1Size(352, 288, 600, 100);
  AutoTestRect window2Size(352, 288, 1000, 100);
  window_manager_->CreateWindows(window1Size, window2Size, window1Title,
                                 window2Title);
  window_manager_->SetTopmostWindow();
  return window_manager_;
}
// Tears down the windows created by CreateTwoWindows(). window_manager_ is
// dereferenced unconditionally, so a window manager must exist.
void ViEWindowCreator::TerminateWindows() {
  window_manager_->TerminateWindows();
}

View file

@ -1,18 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/interface/vie_window_manager_factory.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_linux.h"
// Linux factory: returns a new window manager for this platform.
// Caller takes ownership of the returned object.
ViEAutoTestWindowManagerInterface*
ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform() {
  return new ViEAutoTestWindowManager();
}

View file

@ -1,21 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/interface/vie_window_manager_factory.h"
#include "webrtc/engine_configurations.h"
#if defined(COCOA_RENDERING)
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_mac_cocoa.h"
#endif
// Mac factory: returns a new window manager for this platform.
// Caller takes ownership of the returned object.
ViEAutoTestWindowManagerInterface*
ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform() {
  return new ViEAutoTestWindowManager();
}

View file

@ -1,17 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/auto_test/interface/vie_window_manager_factory.h"
#include "webrtc/video_engine/test/auto_test/interface/vie_autotest_windows.h"
// Windows factory: returns a new window manager for this platform.
// Caller takes ownership of the returned object.
ViEAutoTestWindowManagerInterface*
ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform() {
  return new ViEAutoTestWindowManager();
}

View file

@ -1,149 +0,0 @@
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
  'targets': [
    {
      # Test application exercising the VideoEngine (ViE) APIs, both
      # interactively and through fully automated integration tests.
      'target_name': 'vie_auto_test',
      'type': 'executable',
      'dependencies': [
        '<(webrtc_root)/common.gyp:webrtc_common',
        '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
        '<(webrtc_root)/system_wrappers/system_wrappers.gyp:metrics_default',
        '<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
        '<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl',
        '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
        '<(DEPTH)/testing/gtest.gyp:gtest',
        '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
        '<(webrtc_root)/test/metrics.gyp:metrics',
        '<(webrtc_root)/test/test.gyp:channel_transport',
        '<(webrtc_root)/test/test.gyp:test_support',
        '<(webrtc_root)/test/test.gyp:field_trial',
        'video_engine_core',
        'libvietest',
      ],
      'sources': [
        'interface/vie_autotest.h',
        'interface/vie_autotest_defines.h',
        'interface/vie_autotest_linux.h',
        'interface/vie_autotest_mac_cocoa.h',
        'interface/vie_autotest_main.h',
        'interface/vie_autotest_window_manager_interface.h',
        'interface/vie_autotest_windows.h',
        'interface/vie_file_based_comparison_tests.h',
        'interface/vie_window_manager_factory.h',
        'interface/vie_window_creator.h',

        # New, fully automated tests
        'automated/legacy_fixture.cc',
        'automated/two_windows_fixture.cc',
        'automated/vie_api_integration_test.cc',
        'automated/vie_extended_integration_test.cc',
        'automated/vie_network_test.cc',
        'automated/vie_standard_integration_test.cc',
        'automated/vie_video_verification_test.cc',

        # Test primitives
        'primitives/base_primitives.cc',
        'primitives/base_primitives.h',
        'primitives/choice_helpers.cc',
        'primitives/choice_helpers.h',
        'primitives/choice_helpers_unittest.cc',
        'primitives/fake_stdin.h',
        'primitives/fake_stdin.cc',
        'primitives/framedrop_primitives.h',
        'primitives/framedrop_primitives.cc',
        'primitives/framedrop_primitives_unittest.cc',
        'primitives/general_primitives.cc',
        'primitives/general_primitives.h',
        'primitives/input_helpers.cc',
        'primitives/input_helpers.h',
        'primitives/input_helpers_unittest.cc',

        # Platform independent
        'source/vie_autotest.cc',
        'source/vie_autotest_base.cc',
        'source/vie_autotest_capture.cc',
        'source/vie_autotest_codec.cc',
        'source/vie_autotest_image_process.cc',
        'source/vie_autotest_loopback.cc',
        'source/vie_autotest_main.cc',
        'source/vie_autotest_render.cc',
        'source/vie_autotest_record.cc',
        'source/vie_autotest_rtp_rtcp.cc',
        'source/vie_autotest_custom_call.cc',
        'source/vie_autotest_simulcast.cc',
        'source/vie_file_based_comparison_tests.cc',
        'source/vie_window_creator.cc',

        # Platform dependent
        # Android
        'source/vie_autotest_android.cc',
        # Linux
        'source/vie_autotest_linux.cc',
        'source/vie_window_manager_factory_linux.cc',
        # Mac
        'source/vie_autotest_cocoa_mac.mm',
        'source/vie_window_manager_factory_mac.mm',
        # Windows
        'source/vie_autotest_win.cc',
        'source/vie_window_manager_factory_win.cc',
      ],
      'conditions': [
        ['OS=="android"', {
          'libraries': [
            '-lGLESv2',
            '-llog',
          ],
        }],
        ['OS=="linux"', {
          # TODO(andrew): These should be provided directly by the projects
          # which require them instead.
          'libraries': [
            '-lXext',
            '-lX11',
          ],
        }],
        ['OS=="mac"', {
          'dependencies': [
            # Use a special main for mac so we can access the webcam.
            '<(webrtc_root)/test/test.gyp:test_support_main_threaded_mac',
          ],
          'xcode_settings': {
            'OTHER_LDFLAGS': [
              '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL -framework CoreVideo -framework CoreAudio -framework AudioToolbox',
            ],
          },
        }],
      ], # conditions
      # Disable warnings to enable Win64 build, issue 1323.
      'msvs_disabled_warnings': [
        4267, # size_t to int truncation.
      ],
    },
  ],
  'conditions': [
    ['test_isolation_mode != "noop"', {
      # Wrapper target that generates the isolate used to run vie_auto_test
      # on the swarming bots.
      'targets': [
        {
          'target_name': 'vie_auto_test_run',
          'type': 'none',
          'dependencies': [
            'vie_auto_test',
          ],
          'includes': [
            '../../../build/isolate.gypi',
          ],
          'sources': [
            'vie_auto_test.isolate',
          ],
        },
      ],
    }],
  ],
}

View file

@ -1,24 +0,0 @@
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
  'conditions': [
    # The test binary only runs on desktop platforms.
    ['OS=="linux" or OS=="mac" or OS=="win"', {
      'variables': {
        # Command line used by the isolate runner.
        'command': [
          '<(DEPTH)/testing/test_env.py',
          '<(PRODUCT_DIR)/vie_auto_test<(EXECUTABLE_SUFFIX)',
        ],
        # Files that must be present on the bot for the test to run.
        'files': [
          '<(DEPTH)/DEPS',
          '<(DEPTH)/testing/test_env.py',
          '<(PRODUCT_DIR)/vie_auto_test<(EXECUTABLE_SUFFIX)',
        ],
      },
    }],
  ],
}

View file

@ -1,5 +0,0 @@
# These are for the common case of adding or renaming files. If you're doing
# structural changes, please get a review from a reviewer in this file.
per-file *.gyp=*
per-file *.gypi=*

View file

@ -1,85 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/video_engine/test/libvietest/include/vie_fake_camera.h"
#include <assert.h>
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/test/libvietest/include/vie_file_capture_device.h"
// This callback runs the camera thread:
bool StreamVideoFileRepeatedlyIntoCaptureDevice(void* data) {
ViEFileCaptureDevice* file_capture_device =
reinterpret_cast<ViEFileCaptureDevice*>(data);
// We want to interrupt the camera feeding thread every now and then in order
// to follow the contract for the system_wrappers thread library. 1.5 seconds
// seems about right here.
uint64_t time_slice_ms = 1500;
uint32_t max_fps = 30;
file_capture_device->ReadFileFor(time_slice_ms, max_fps);
return true;
}
// Stores the capture interface used to allocate/release the external capture
// device. No resources are acquired until StartCameraInNewThread().
ViEFakeCamera::ViEFakeCamera(webrtc::ViECapture* capture_interface)
    : capture_interface_(capture_interface),
      capture_id_(-1),
      file_capture_device_(NULL) {
}
// Intentionally empty: the destructor does not stop a running camera, so
// callers must pair StartCameraInNewThread() with StopCamera(), which frees
// the thread and the capture device.
ViEFakeCamera::~ViEFakeCamera() {
}
bool ViEFakeCamera::StartCameraInNewThread(
const std::string& i420_test_video_path, int width, int height) {
assert(file_capture_device_ == NULL && camera_thread_ == NULL);
webrtc::ViEExternalCapture* externalCapture;
int result = capture_interface_->
AllocateExternalCaptureDevice(capture_id_, externalCapture);
if (result != 0) {
return false;
}
file_capture_device_ = new ViEFileCaptureDevice(externalCapture);
if (!file_capture_device_->OpenI420File(i420_test_video_path,
width,
height)) {
return false;
}
// Set up a thread which runs the fake camera. The capturer object is
// thread-safe.
camera_thread_ = webrtc::ThreadWrapper::CreateThread(
StreamVideoFileRepeatedlyIntoCaptureDevice, file_capture_device_,
"StreamVideoFileRepeatedlyIntoCaptureDevice");
camera_thread_->Start();
return true;
}
// Stops the camera thread, closes the input file and releases the external
// capture device allocated in StartCameraInNewThread(). Returns true if the
// capture device was released successfully. May only be called while a
// camera is running (see the precondition assert).
bool ViEFakeCamera::StopCamera() {
  assert(file_capture_device_ != NULL && camera_thread_ != NULL);
  camera_thread_->Stop();
  file_capture_device_->CloseFile();
  int result = capture_interface_->ReleaseCaptureDevice(capture_id_);
  // reset() already nulls the scoped pointer; the original's extra
  // "camera_thread_ = NULL;" after reset() was redundant and is removed.
  camera_thread_.reset();
  delete file_capture_device_;
  file_capture_device_ = NULL;
  return result == 0;
}

Some files were not shown because too many files have changed in this diff Show more