Revert "stagefright: remove Miracast sender code"

This reverts commit d0a98fa05f.

Change-Id: I0554b92c290c1ebbd1a40fc2edb43573a97d4f6a
Signed-off-by: DennySPb <dennyspb@gmail.com>
gugelfrei
Vladimir Oltean 5 years ago committed by Luca Stefani
parent 9904747886
commit 562c08b4ef

@ -0,0 +1 @@
../../media/libmedia/include/media/IHDCP.h

@ -271,6 +271,7 @@ cc_library {
srcs: [
":mediaextractorservice_aidl",
"IDataSource.cpp",
"IHDCP.cpp",
"BufferingSettings.cpp",
"mediaplayer.cpp",
"IMediaHTTPConnection.cpp",

@ -0,0 +1,359 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "IHDCP"
#include <utils/Log.h>
#include <binder/Parcel.h>
#include <media/IHDCP.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/foundation/ADebug.h>
namespace android {
enum {
OBSERVER_NOTIFY = IBinder::FIRST_CALL_TRANSACTION,
HDCP_SET_OBSERVER,
HDCP_INIT_ASYNC,
HDCP_SHUTDOWN_ASYNC,
HDCP_GET_CAPS,
HDCP_ENCRYPT,
HDCP_ENCRYPT_NATIVE,
HDCP_DECRYPT,
};
// Client-side proxy for IHDCPObserver: forwards notify() callbacks across
// binder to the remote observer implementation.
struct BpHDCPObserver : public BpInterface<IHDCPObserver> {
explicit BpHDCPObserver(const sp<IBinder> &impl)
: BpInterface<IHDCPObserver>(impl) {
}
// Sends (msg, ext1, ext2) plus an optional payload parcel, appended verbatim
// after the three ints. The receiving side re-splits it in
// BnHDCPObserver::onTransact via dataAvail()/dataPosition().
virtual void notify(
int msg, int ext1, int ext2, const Parcel *obj) {
Parcel data, reply;
data.writeInterfaceToken(IHDCPObserver::getInterfaceDescriptor());
data.writeInt32(msg);
data.writeInt32(ext1);
data.writeInt32(ext2);
if (obj && obj->dataSize() > 0) {
data.appendFrom(const_cast<Parcel *>(obj), 0, obj->dataSize());
}
// FLAG_ONEWAY: fire-and-forget, does not block on the remote side.
remote()->transact(OBSERVER_NOTIFY, data, &reply, IBinder::FLAG_ONEWAY);
}
};
IMPLEMENT_META_INTERFACE(HDCPObserver, "android.hardware.IHDCPObserver");
// Client-side proxy for IHDCP. Each method marshals its arguments, performs a
// synchronous binder transaction, and unmarshals the reply. The write order
// here is the wire contract and must mirror BnHDCP::onTransact exactly.
struct BpHDCP : public BpInterface<IHDCP> {
explicit BpHDCP(const sp<IBinder> &impl)
: BpInterface<IHDCP>(impl) {
}
virtual status_t setObserver(const sp<IHDCPObserver> &observer) {
Parcel data, reply;
data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
data.writeStrongBinder(IInterface::asBinder(observer));
remote()->transact(HDCP_SET_OBSERVER, data, &reply);
return reply.readInt32();
}
virtual status_t initAsync(const char *host, unsigned port) {
Parcel data, reply;
data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
data.writeCString(host);
data.writeInt32(port);
remote()->transact(HDCP_INIT_ASYNC, data, &reply);
return reply.readInt32();
}
virtual status_t shutdownAsync() {
Parcel data, reply;
data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
remote()->transact(HDCP_SHUTDOWN_ASYNC, data, &reply);
return reply.readInt32();
}
virtual uint32_t getCaps() {
Parcel data, reply;
data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
remote()->transact(HDCP_GET_CAPS, data, &reply);
// Caps bitmask travels as an int32 on the wire.
return reply.readInt32();
}
virtual status_t encrypt(
const void *inData, size_t size, uint32_t streamCTR,
uint64_t *outInputCTR, void *outData) {
Parcel data, reply;
data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
data.writeInt32(size);
data.write(inData, size);
data.writeInt32(streamCTR);
remote()->transact(HDCP_ENCRYPT, data, &reply);
status_t err = reply.readInt32();
if (err != OK) {
// On failure the counter is zeroed so callers never see garbage.
*outInputCTR = 0;
return err;
}
// Success reply: 64-bit input counter followed by "size" encrypted bytes.
*outInputCTR = reply.readInt64();
reply.read(outData, size);
return err;
}
virtual status_t encryptNative(
const sp<GraphicBuffer> &graphicBuffer,
size_t offset, size_t size, uint32_t streamCTR,
uint64_t *outInputCTR, void *outData) {
Parcel data, reply;
data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
// Flattens the GraphicBuffer (metadata + handle) into the parcel.
data.write(*graphicBuffer);
data.writeInt32(offset);
data.writeInt32(size);
data.writeInt32(streamCTR);
remote()->transact(HDCP_ENCRYPT_NATIVE, data, &reply);
status_t err = reply.readInt32();
if (err != OK) {
*outInputCTR = 0;
return err;
}
*outInputCTR = reply.readInt64();
reply.read(outData, size);
return err;
}
virtual status_t decrypt(
const void *inData, size_t size,
uint32_t streamCTR, uint64_t inputCTR,
void *outData) {
Parcel data, reply;
data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
data.writeInt32(size);
data.write(inData, size);
data.writeInt32(streamCTR);
// Unlike encrypt, both counters are supplied by the caller here.
data.writeInt64(inputCTR);
remote()->transact(HDCP_DECRYPT, data, &reply);
status_t err = reply.readInt32();
if (err != OK) {
return err;
}
reply.read(outData, size);
return err;
}
};
IMPLEMENT_META_INTERFACE(HDCP, "android.hardware.IHDCP");
// Server-side dispatcher for IHDCPObserver binder transactions.
status_t BnHDCPObserver::onTransact(
uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
switch (code) {
case OBSERVER_NOTIFY:
{
CHECK_INTERFACE(IHDCPObserver, data, reply);
int msg = data.readInt32();
int ext1 = data.readInt32();
int ext2 = data.readInt32();
// Any bytes remaining after the three ints are the optional payload
// appended by BpHDCPObserver::notify; repackage them into a fresh
// Parcel for the local observer.
Parcel obj;
if (data.dataAvail() > 0) {
obj.appendFrom(
const_cast<Parcel *>(&data),
data.dataPosition(),
data.dataAvail());
}
notify(msg, ext1, ext2, &obj);
return OK;
}
default:
return BBinder::onTransact(code, data, reply, flags);
}
}
// Server-side dispatcher for IHDCP binder transactions.
//
// The byte-buffer cases allocate one scratch buffer holding both the input
// and output halves back-to-back (in | out), so a single free() releases
// both. 2*size is guarded against overflow before allocating.
//
// Fixes vs. the original: HDCP_DECRYPT now checks the status of
// data.read(inData, size) (previously ignored, unlike HDCP_ENCRYPT, so a
// short/malformed parcel fed uninitialized heap memory to decrypt), and
// HDCP_ENCRYPT_NATIVE checks the GraphicBuffer unflatten status for the
// same reason.
status_t BnHDCP::onTransact(
        uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
    switch (code) {
        case HDCP_SET_OBSERVER:
        {
            CHECK_INTERFACE(IHDCP, data, reply);
            sp<IHDCPObserver> observer =
                interface_cast<IHDCPObserver>(data.readStrongBinder());
            reply->writeInt32(setObserver(observer));
            return OK;
        }

        case HDCP_INIT_ASYNC:
        {
            CHECK_INTERFACE(IHDCP, data, reply);
            const char *host = data.readCString();
            unsigned port = data.readInt32();
            reply->writeInt32(initAsync(host, port));
            return OK;
        }

        case HDCP_SHUTDOWN_ASYNC:
        {
            CHECK_INTERFACE(IHDCP, data, reply);
            reply->writeInt32(shutdownAsync());
            return OK;
        }

        case HDCP_GET_CAPS:
        {
            CHECK_INTERFACE(IHDCP, data, reply);
            reply->writeInt32(getCaps());
            return OK;
        }

        case HDCP_ENCRYPT:
        {
            CHECK_INTERFACE(IHDCP, data, reply);

            size_t size = data.readInt32();
            void *inData = NULL;
            // watch out for overflow in 2 * size
            if (size <= SIZE_MAX / 2) {
                inData = malloc(2 * size);
            }
            if (inData == NULL) {
                reply->writeInt32(ERROR_OUT_OF_RANGE);
                return OK;
            }

            // Output half lives directly after the input half.
            void *outData = (uint8_t *)inData + size;
            status_t err = data.read(inData, size);
            if (err != OK) {
                free(inData);
                reply->writeInt32(err);
                return OK;
            }

            uint32_t streamCTR = data.readInt32();
            uint64_t inputCTR;
            err = encrypt(inData, size, streamCTR, &inputCTR, outData);

            reply->writeInt32(err);
            if (err == OK) {
                reply->writeInt64(inputCTR);
                reply->write(outData, size);
            }

            free(inData);
            inData = outData = NULL;
            return OK;
        }

        case HDCP_ENCRYPT_NATIVE:
        {
            CHECK_INTERFACE(IHDCP, data, reply);

            sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
            // BUGFIX: the unflatten status was previously ignored; reject
            // malformed parcels instead of encrypting from an undefined
            // buffer.
            status_t err = data.read(*graphicBuffer);
            if (err != OK) {
                reply->writeInt32(err);
                return OK;
            }
            size_t offset = data.readInt32();
            size_t size = data.readInt32();
            uint32_t streamCTR = data.readInt32();

            void *outData = NULL;
            uint64_t inputCTR;
            err = ERROR_OUT_OF_RANGE;
            outData = malloc(size);
            if (outData != NULL) {
                err = encryptNative(graphicBuffer, offset, size,
                        streamCTR, &inputCTR, outData);
            }

            reply->writeInt32(err);
            if (err == OK) {
                reply->writeInt64(inputCTR);
                reply->write(outData, size);
            }

            free(outData);
            outData = NULL;
            return OK;
        }

        case HDCP_DECRYPT:
        {
            CHECK_INTERFACE(IHDCP, data, reply);

            size_t size = data.readInt32();
            size_t bufSize = 2 * size;
            // watch out for overflow: 2 * size must be strictly larger
            // (this also rejects size == 0)
            void *inData = NULL;
            if (bufSize > size) {
                inData = malloc(bufSize);
            }
            if (inData == NULL) {
                reply->writeInt32(ERROR_OUT_OF_RANGE);
                return OK;
            }

            void *outData = (uint8_t *)inData + size;
            // BUGFIX: check the read status, mirroring HDCP_ENCRYPT;
            // previously a failed read left inData uninitialized.
            status_t err = data.read(inData, size);
            if (err != OK) {
                free(inData);
                reply->writeInt32(err);
                return OK;
            }

            uint32_t streamCTR = data.readInt32();
            uint64_t inputCTR = data.readInt64();
            err = decrypt(inData, size, streamCTR, inputCTR, outData);

            reply->writeInt32(err);
            if (err == OK) {
                reply->write(outData, size);
            }

            free(inData);
            inData = outData = NULL;
            return OK;
        }

        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}
} // namespace android

@ -20,6 +20,7 @@
#include <binder/Parcel.h>
#include <binder/IMemory.h>
#include <media/IHDCP.h>
#include <media/IMediaCodecList.h>
#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
@ -39,6 +40,7 @@ enum {
CREATE = IBinder::FIRST_CALL_TRANSACTION,
CREATE_MEDIA_RECORDER,
CREATE_METADATA_RETRIEVER,
MAKE_HDCP,
ADD_BATTERY_DATA,
PULL_BATTERY_DATA,
LISTEN_FOR_REMOTE_DISPLAY,
@ -83,6 +85,14 @@ public:
return interface_cast<IMediaRecorder>(reply.readStrongBinder());
}
// Proxy: asks the media player service to create an HDCP session object.
// createEncryptionModule selects the encryption (true) vs decryption (false)
// flavor; it travels as an int32 on the wire.
virtual sp<IHDCP> makeHDCP(bool createEncryptionModule) {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
data.writeInt32(createEncryptionModule);
remote()->transact(MAKE_HDCP, data, &reply);
return interface_cast<IHDCP>(reply.readStrongBinder());
}
virtual void addBatteryData(uint32_t params) {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
@ -151,6 +161,13 @@ status_t BnMediaPlayerService::onTransact(
reply->writeStrongBinder(IInterface::asBinder(retriever));
return NO_ERROR;
} break;
case MAKE_HDCP: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
bool createEncryptionModule = data.readInt32();
sp<IHDCP> hdcp = makeHDCP(createEncryptionModule);
reply->writeStrongBinder(IInterface::asBinder(hdcp));
return NO_ERROR;
} break;
case ADD_BATTERY_DATA: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
uint32_t params = data.readInt32();

@ -0,0 +1,120 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <binder/IInterface.h>
#include <media/hardware/HDCPAPI.h>
#include <media/stagefright/foundation/ABase.h>
#include <ui/GraphicBuffer.h>
namespace android {
// Callback interface through which the HDCP implementation reports
// asynchronous events (init completion, errors, out-of-band messages).
struct IHDCPObserver : public IInterface {
DECLARE_META_INTERFACE(HDCPObserver);
// Delivers an event; "obj" optionally carries an extra payload parcel
// (may be NULL / empty).
virtual void notify(
int msg, int ext1, int ext2, const Parcel *obj) = 0;
private:
DISALLOW_EVIL_CONSTRUCTORS(IHDCPObserver);
};
// Binder interface to an HDCP (High-bandwidth Digital Content Protection)
// session, used by the Miracast/Wi-Fi Display sender. A session is created
// either as an encryption module or a decryption module; the CHECKed
// split is enforced by the implementation (see HDCP in libmediaplayerservice).
struct IHDCP : public IInterface {
DECLARE_META_INTERFACE(HDCP);
// Called to specify the observer that receives asynchronous notifications
// from the HDCP implementation to signal completion/failure of asynchronous
// operations (such as initialization) or out of band events.
virtual status_t setObserver(const sp<IHDCPObserver> &observer) = 0;
// Request to setup an HDCP session with the specified host listening
// on the specified port.
virtual status_t initAsync(const char *host, unsigned port) = 0;
// Request to shutdown the active HDCP session.
virtual status_t shutdownAsync() = 0;
// Returns the capability bitmask of this HDCP session.
// Possible return values (please refer to HDCAPAPI.h):
// HDCP_CAPS_ENCRYPT: mandatory, meaning the HDCP module can encrypt
// from an input byte-array buffer to an output byte-array buffer
// HDCP_CAPS_ENCRYPT_NATIVE: the HDCP module supports encryption from
// a native buffer to an output byte-array buffer. The format of the
// input native buffer is specific to vendor's encoder implementation.
// It is the same format as that used by the encoder when
// "storeMetaDataInBuffers" extension is enabled on its output port.
virtual uint32_t getCaps() = 0;
// ENCRYPTION only:
// Encrypt data according to the HDCP spec. "size" bytes of data are
// available at "inData" (virtual address), "size" may not be a multiple
// of 128 bits (16 bytes). An equal number of encrypted bytes should be
// written to the buffer at "outData" (virtual address).
// This operation is to be synchronous, i.e. this call does not return
// until outData contains size bytes of encrypted data.
// streamCTR will be assigned by the caller (to 0 for the first PES stream,
// 1 for the second and so on)
// inputCTR _will_be_maintained_by_the_callee_ for each PES stream.
virtual status_t encrypt(
const void *inData, size_t size, uint32_t streamCTR,
uint64_t *outInputCTR, void *outData) = 0;
// Encrypt data according to the HDCP spec. "size" bytes of data starting
// at location "offset" are available in "buffer" (buffer handle). "size"
// may not be a multiple of 128 bits (16 bytes). An equal number of
// encrypted bytes should be written to the buffer at "outData" (virtual
// address). This operation is to be synchronous, i.e. this call does not
// return until outData contains size bytes of encrypted data.
// streamCTR will be assigned by the caller (to 0 for the first PES stream,
// 1 for the second and so on)
// inputCTR _will_be_maintained_by_the_callee_ for each PES stream.
virtual status_t encryptNative(
const sp<GraphicBuffer> &graphicBuffer,
size_t offset, size_t size, uint32_t streamCTR,
uint64_t *outInputCTR, void *outData) = 0;
// DECRYPTION only:
// Decrypt data according to the HDCP spec.
// "size" bytes of encrypted data are available at "inData"
// (virtual address), "size" may not be a multiple of 128 bits (16 bytes).
// An equal number of decrypted bytes should be written to the buffer
// at "outData" (virtual address).
// This operation is to be synchronous, i.e. this call does not return
// until outData contains size bytes of decrypted data.
// Both streamCTR and inputCTR will be provided by the caller.
virtual status_t decrypt(
const void *inData, size_t size,
uint32_t streamCTR, uint64_t inputCTR,
void *outData) = 0;
private:
DISALLOW_EVIL_CONSTRUCTORS(IHDCP);
};
// Server-side (native) stub for IHDCPObserver; unmarshals incoming
// transactions and dispatches them to the local implementation.
struct BnHDCPObserver : public BnInterface<IHDCPObserver> {
virtual status_t onTransact(
uint32_t code, const Parcel &data, Parcel *reply,
uint32_t flags = 0);
};
// Server-side (native) stub for IHDCP; unmarshals incoming transactions
// and dispatches them to the local implementation.
struct BnHDCP : public BnInterface<IHDCP> {
virtual status_t onTransact(
uint32_t code, const Parcel &data, Parcel *reply,
uint32_t flags = 0);
};
} // namespace android

@ -33,6 +33,7 @@
namespace android {
class IMediaPlayer;
struct IHDCP;
class IMediaCodecList;
struct IMediaHTTPService;
class IMediaRecorder;
@ -51,6 +52,7 @@ public:
virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client,
audio_session_t audioSessionId = AUDIO_SESSION_ALLOCATE,
const std::string opPackage = "") = 0;
virtual sp<IHDCP> makeHDCP(bool createEncryptionModule) = 0;
virtual sp<IMediaCodecList> getCodecList() const = 0;
// Connects to a remote display.

@ -3,11 +3,13 @@ cc_library_shared {
srcs: [
"ActivityManager.cpp",
"DeathNotifier.cpp",
"HDCP.cpp",
"MediaPlayerFactory.cpp",
"MediaPlayerService.cpp",
"MediaRecorderClient.cpp",
"MetadataRetrieverClient.cpp",
"StagefrightMetadataRetriever.cpp",
"RemoteDisplay.cpp",
"StagefrightRecorder.cpp",
"TestPlayerStub.cpp",
],
@ -38,6 +40,7 @@ cc_library_shared {
"libnetd_client",
"libpowermanager",
"libstagefright",
"libstagefright_wfd",
"libstagefright_foundation",
"libstagefright_httplive",
"libutils",
@ -59,6 +62,7 @@ cc_library_shared {
include_dirs: [
"frameworks/av/media/libstagefright/rtsp",
"frameworks/av/media/libstagefright/webm",
"frameworks/av/media/libstagefright/wifi-display",
],
local_include_dirs: ["include"],

@ -0,0 +1,175 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "HDCP"
#include <utils/Log.h>
#include "HDCP.h"
#include <media/stagefright/foundation/ADebug.h>
#include <dlfcn.h>
namespace android {
// Loads the vendor HDCP library and instantiates either the encryption or
// the decryption module, depending on createEncryptionModule. On any
// failure mHDCPModule stays NULL and every subsequent call returns NO_INIT.
HDCP::HDCP(bool createEncryptionModule)
    : mIsEncryptionModule(createEncryptionModule),
      mLibHandle(NULL),
      mHDCPModule(NULL) {
    mLibHandle = dlopen("libstagefright_hdcp.so", RTLD_NOW);

    if (mLibHandle == NULL) {
        ALOGE("Unable to locate libstagefright_hdcp.so");
        return;
    }

    typedef HDCPModule *(*CreateHDCPModuleFunc)(
            void *, HDCPModule::ObserverFunc);

    // Pick the factory symbol matching the requested module flavor.
    const char *entrySymbol = mIsEncryptionModule
        ? "createHDCPModule"
        : "createHDCPModuleForDecryption";

    CreateHDCPModuleFunc createHDCPModule =
        (CreateHDCPModuleFunc)dlsym(mLibHandle, entrySymbol);

    if (createHDCPModule == NULL) {
        // BUGFIX: report the symbol that was actually looked up; the old
        // message always named 'createHDCPModule', which was misleading
        // when the decryption factory was missing.
        ALOGE("Unable to find symbol '%s'.", entrySymbol);
    } else if ((mHDCPModule = createHDCPModule(
                    this, &HDCP::ObserveWrapper)) == NULL) {
        ALOGE("createHDCPModule failed.");
    }
}
// Tears down the session. Order matters: the module must be destroyed
// before its hosting library is dlclose'd, since the module's code lives
// inside that library.
HDCP::~HDCP() {
Mutex::Autolock autoLock(mLock);
if (mHDCPModule != NULL) {
delete mHDCPModule;
mHDCPModule = NULL;
}
if (mLibHandle != NULL) {
dlclose(mLibHandle);
mLibHandle = NULL;
}
}
// Records the observer that will receive asynchronous HDCP notifications.
status_t HDCP::setObserver(const sp<IHDCPObserver> &observer) {
    Mutex::Autolock lock(mLock);

    // Without a loaded vendor module there is nothing to observe.
    if (mHDCPModule == NULL) {
        return NO_INIT;
    }

    mObserver = observer;
    return OK;
}
// Starts asynchronous session setup against host:port; completion is
// reported through the registered observer.
status_t HDCP::initAsync(const char *host, unsigned port) {
    Mutex::Autolock lock(mLock);

    return (mHDCPModule == NULL)
        ? NO_INIT
        : mHDCPModule->initAsync(host, port);
}
// Requests asynchronous shutdown of the active session.
status_t HDCP::shutdownAsync() {
    Mutex::Autolock lock(mLock);

    return (mHDCPModule == NULL)
        ? NO_INIT
        : mHDCPModule->shutdownAsync();
}
// Returns the module's capability bitmask.
// NOTE(review): when no module is loaded this returns NO_INIT (a negative
// status) through a uint32_t return type — preserved as-is since callers
// may rely on that sentinel; confirm before changing.
uint32_t HDCP::getCaps() {
    Mutex::Autolock lock(mLock);

    if (mHDCPModule == NULL) {
        return NO_INIT;
    }

    return mHDCPModule->getCaps();
}
// Forwards an encrypt request to the vendor module.
// Aborts (CHECK) if this session was created for decryption.
status_t HDCP::encrypt(
        const void *inData, size_t size, uint32_t streamCTR,
        uint64_t *outInputCTR, void *outData) {
    Mutex::Autolock lock(mLock);

    CHECK(mIsEncryptionModule);

    if (mHDCPModule == NULL) {
        // Never hand back an uninitialized counter.
        *outInputCTR = 0;
        return NO_INIT;
    }

    return mHDCPModule->encrypt(inData, size, streamCTR, outInputCTR, outData);
}
// Forwards a native-buffer encrypt request to the vendor module, passing the
// raw buffer handle. Aborts (CHECK) if this session was created for
// decryption.
status_t HDCP::encryptNative(
        const sp<GraphicBuffer> &graphicBuffer,
        size_t offset, size_t size, uint32_t streamCTR,
        uint64_t *outInputCTR, void *outData) {
    Mutex::Autolock lock(mLock);

    CHECK(mIsEncryptionModule);

    if (mHDCPModule == NULL) {
        // Never hand back an uninitialized counter.
        *outInputCTR = 0;
        return NO_INIT;
    }

    return mHDCPModule->encryptNative(graphicBuffer->handle,
                    offset, size, streamCTR, outInputCTR, outData);
}
// Forwards a decrypt request to the vendor module. Aborts (CHECK) if this
// session was created for encryption.
status_t HDCP::decrypt(
        const void *inData, size_t size,
        uint32_t streamCTR, uint64_t outInputCTR, void *outData) {
    Mutex::Autolock lock(mLock);

    CHECK(!mIsEncryptionModule);

    if (mHDCPModule == NULL) {
        return NO_INIT;
    }

    return mHDCPModule->decrypt(inData, size, streamCTR, outInputCTR, outData);
}
// static trampoline handed to the vendor module as its C-style observer
// callback; "me" is the HDCP instance passed at module creation.
void HDCP::ObserveWrapper(void *me, int msg, int ext1, int ext2) {
    static_cast<HDCP *>(me)->observe(msg, ext1, ext2);
}
// Relays a module event to the registered observer, if any.
void HDCP::observe(int msg, int ext1, int ext2) {
    Mutex::Autolock lock(mLock);

    if (mObserver == NULL) {
        return;
    }

    mObserver->notify(msg, ext1, ext2, NULL /* obj */);
}
} // namespace android

@ -0,0 +1,66 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HDCP_H_
#define HDCP_H_
#include <media/IHDCP.h>
#include <utils/Mutex.h>
namespace android {
// Concrete IHDCP implementation: dlopen's the vendor library
// libstagefright_hdcp.so and delegates every call to the HDCPModule it
// creates. All entry points are serialized on mLock.
struct HDCP : public BnHDCP {
explicit HDCP(bool createEncryptionModule);
virtual ~HDCP();
virtual status_t setObserver(const sp<IHDCPObserver> &observer);
virtual status_t initAsync(const char *host, unsigned port);
virtual status_t shutdownAsync();
virtual uint32_t getCaps();
// Encryption-flavor sessions only (CHECKed in the implementation).
virtual status_t encrypt(
const void *inData, size_t size, uint32_t streamCTR,
uint64_t *outInputCTR, void *outData);
virtual status_t encryptNative(
const sp<GraphicBuffer> &graphicBuffer,
size_t offset, size_t size, uint32_t streamCTR,
uint64_t *outInputCTR, void *outData);
// Decryption-flavor sessions only (CHECKed in the implementation).
virtual status_t decrypt(
const void *inData, size_t size,
uint32_t streamCTR, uint64_t outInputCTR, void *outData);
private:
// Guards all members below; every public method takes it.
Mutex mLock;
// true => encryption module, false => decryption module.
bool mIsEncryptionModule;
// dlopen handle for libstagefright_hdcp.so (NULL if load failed).
void *mLibHandle;
// Vendor module instance; NULL until successfully created.
HDCPModule *mHDCPModule;
sp<IHDCPObserver> mObserver;
// C-style callback trampoline registered with the vendor module.
static void ObserveWrapper(void *me, int msg, int ext1, int ext2);
void observe(int msg, int ext1, int ext2);
DISALLOW_EVIL_CONSTRUCTORS(HDCP);
};
} // namespace android
#endif // HDCP_H_

@ -82,6 +82,8 @@
#include "TestPlayerStub.h"
#include "nuplayer/NuPlayerDriver.h"
#include "HDCP.h"
#include "RemoteDisplay.h"
static const int kDumpLockRetries = 50;
static const int kDumpLockSleepUs = 20000;
@ -504,13 +506,18 @@ sp<IMediaCodecList> MediaPlayerService::getCodecList() const {
return MediaCodecList::getLocalInstance();
}
// Factory for server-side HDCP sessions; the returned object is exported
// to the caller over binder.
sp<IHDCP> MediaPlayerService::makeHDCP(bool createEncryptionModule) {
return new HDCP(createEncryptionModule);
}
// Creates a Wi-Fi Display (Miracast) sender session listening on "iface".
// Requires the CONTROL_WIFI_DISPLAY permission; returns NULL if the caller
// lacks it.
//
// NOTE(review): the dump interleaved the pre-revert stub (commented-out
// parameters, ALOGE "no longer supported", return NULL) with the restored
// implementation, producing invalid C++ (two parameter lists, unreachable
// returns). Reconstructed here as the restored, post-revert version.
sp<IRemoteDisplay> MediaPlayerService::listenForRemoteDisplay(
        const String16 &opPackageName,
        const sp<IRemoteDisplayClient>& client, const String8& iface) {
    if (!checkPermission("android.permission.CONTROL_WIFI_DISPLAY")) {
        return NULL;
    }

    return new RemoteDisplay(opPackageName, client, iface.string());
}
status_t MediaPlayerService::AudioOutput::dump(int fd, const Vector<String16>& args) const

@ -242,6 +242,7 @@ public:
const std::string opPackageName);
virtual sp<IMediaCodecList> getCodecList() const;
virtual sp<IHDCP> makeHDCP(bool createEncryptionModule);
virtual sp<IRemoteDisplay> listenForRemoteDisplay(const String16 &opPackageName,
const sp<IRemoteDisplayClient>& client, const String8& iface);

@ -0,0 +1,66 @@
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "RemoteDisplay.h"
#include "source/WifiDisplaySource.h"
#include <media/IRemoteDisplayClient.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/ANetworkSession.h>
namespace android {
// Spins up the Wi-Fi Display sender: a dedicated looper thread and network
// session hosting a WifiDisplaySource that listens on "iface".
RemoteDisplay::RemoteDisplay(
const String16 &opPackageName,
const sp<IRemoteDisplayClient> &client,
const char *iface)
: mLooper(new ALooper),
mNetSession(new ANetworkSession) {
mLooper->setName("wfd_looper");
mSource = new WifiDisplaySource(opPackageName, mNetSession, client);
// Handler must be registered before the looper starts delivering messages.
mLooper->registerHandler(mSource);
mNetSession->start();
mLooper->start();
mSource->start(iface);
}
// Intentionally empty: teardown happens in dispose(), not here.
RemoteDisplay::~RemoteDisplay() {
}
// Suspends streaming by delegating to the Wi-Fi Display source.
status_t RemoteDisplay::pause() {
    const status_t result = mSource->pause();
    return result;
}
// Resumes streaming by delegating to the Wi-Fi Display source.
status_t RemoteDisplay::resume() {
    const status_t result = mSource->resume();
    return result;
}
// Tears the session down. Order matters: stop and release the source
// before stopping the looper/network session it runs on.
status_t RemoteDisplay::dispose() {
mSource->stop();
mSource.clear();
mLooper->stop();
mNetSession->stop();
return OK;
}
} // namespace android

@ -0,0 +1,59 @@
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef REMOTE_DISPLAY_H_
#define REMOTE_DISPLAY_H_
#include <media/IMediaPlayerService.h>
#include <media/IRemoteDisplay.h>
#include <media/stagefright/foundation/ABase.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
namespace android {
struct ALooper;
struct ANetworkSession;
class IRemoteDisplayClient;
struct WifiDisplaySource;
// Binder-exported wrapper around a WifiDisplaySource: owns the looper and
// network session that drive one Wi-Fi Display (Miracast) sender session.
struct RemoteDisplay : public BnRemoteDisplay {
RemoteDisplay(
const String16 &opPackageName,
const sp<IRemoteDisplayClient> &client,
const char *iface);
virtual status_t pause();
virtual status_t resume();
virtual status_t dispose();
protected:
virtual ~RemoteDisplay();
private:
// NOTE(review): mNetLooper is never touched in RemoteDisplay.cpp as shown
// here — looks vestigial; confirm before removing.
sp<ALooper> mNetLooper;
sp<ALooper> mLooper;
sp<ANetworkSession> mNetSession;
sp<WifiDisplaySource> mSource;
DISALLOW_EVIL_CONSTRUCTORS(RemoteDisplay);
};
} // namespace android
#endif // REMOTE_DISPLAY_H_

@ -0,0 +1,51 @@
// Miracast (Wi-Fi Display) sender library: media pipeline (Converter,
// MediaPuller, PlaybackSession), TS packetization and RTP transport for
// WifiDisplaySource.
cc_library_shared {
name: "libstagefright_wfd",
srcs: [
"MediaSender.cpp",
"Parameters.cpp",
"rtp/RTPSender.cpp",
"source/Converter.cpp",
"source/MediaPuller.cpp",
"source/PlaybackSession.cpp",
"source/RepeaterSource.cpp",
"source/TSPacketizer.cpp",
"source/WifiDisplaySource.cpp",
"VideoFormats.cpp",
],
// Non-exported headers from stagefright internals and OMX.
include_dirs: [
"frameworks/av/media/libstagefright",
"frameworks/native/include/media/openmax",
"frameworks/native/include/media/hardware",
"frameworks/av/media/libstagefright/mpeg2ts",
],
shared_libs: [
"libbinder",
"libcutils",
"liblog",
"libmedia",
"libstagefright",
"libstagefright_foundation",
"libui",
"libgui",
"libutils",
],
cflags: [
"-Wno-multichar",
"-Werror",
"-Wall",
],
sanitize: {
misc_undefined: [
"signed-integer-overflow",
],
cfi: true,
diag: {
cfi: true,
},
},
}

@ -0,0 +1,519 @@
/*
* Copyright 2013, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaSender"
#include <utils/Log.h>
#include "MediaSender.h"
#include "rtp/RTPSender.h"
#include "source/TSPacketizer.h"
#include "include/avc_utils.h"
#include <media/IHDCP.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/ANetworkSession.h>
#include <ui/GraphicBuffer.h>
namespace android {
// Sender-side mux/transport front end: owns per-track RTPSenders (or one
// TS sender) and reports status through "notify".
MediaSender::MediaSender(
const sp<ANetworkSession> &netSession,
const sp<AMessage> &notify)
: mNetSession(netSession),
mNotify(notify),
mMode(MODE_UNDEFINED),
mGeneration(0),
mPrevTimeUs(-1ll),
mInitDoneCount(0),
mLogFile(NULL) {
// Debug hook: uncomment to dump the muxed TS stream to disk.
// mLogFile = fopen("/data/misc/log.ts", "wb");
}
// Closes the optional TS dump file if it was ever opened.
MediaSender::~MediaSender() {
    if (mLogFile == NULL) {
        return;
    }

    fclose(mLogFile);
    mLogFile = NULL;
}
// Attaches an HDCP encryption session. Only legal before a transport mode
// has been chosen by initAsync().
status_t MediaSender::setHDCP(const sp<IHDCP> &hdcp) {
    const bool alreadyConfigured = (mMode != MODE_UNDEFINED);
    if (alreadyConfigured) {
        return INVALID_OPERATION;
    }

    mHDCP = hdcp;
    return OK;
}
// Registers a track prior to initAsync(); returns its index, or
// INVALID_OPERATION once a transport mode has been chosen.
ssize_t MediaSender::addTrack(const sp<AMessage> &format, uint32_t flags) {
    if (mMode != MODE_UNDEFINED) {
        return INVALID_OPERATION;
    }

    AString mime;
    CHECK(format->findString("mime", &mime));

    TrackInfo entry;
    entry.mFormat = format;
    entry.mFlags = flags;
    entry.mPacketizerTrackIndex = -1;
    entry.mIsAudio = !strncasecmp("audio/", mime.c_str(), 6);

    const size_t newIndex = mTrackInfos.size();
    mTrackInfos.push_back(entry);

    return newIndex;
}
// Sets up transport. Two mutually-exclusive modes:
//   trackIndex < 0  -> MODE_TRANSPORT_STREAM: all tracks muxed into one
//                      MPEG-TS and carried by a single RTPSender.
//   trackIndex >= 0 -> MODE_ELEMENTARY_STREAMS: one RTPSender per track,
//                      called once per track.
// Completion is reported asynchronously via kWhatSenderNotify; the local
// RTP port is returned through *localRTPPort.
status_t MediaSender::initAsync(
ssize_t trackIndex,
const char *remoteHost,
int32_t remoteRTPPort,
RTPSender::TransportMode rtpMode,
int32_t remoteRTCPPort,
RTPSender::TransportMode rtcpMode,
int32_t *localRTPPort) {
if (trackIndex < 0) {
if (mMode != MODE_UNDEFINED) {
return INVALID_OPERATION;
}
uint32_t flags = 0;
if (mHDCP != NULL) {
// XXX Determine proper HDCP version.
flags |= TSPacketizer::EMIT_HDCP20_DESCRIPTOR;
}
mTSPacketizer = new TSPacketizer(flags);
// Register every known track with the packetizer up front.
status_t err = OK;
for (size_t i = 0; i < mTrackInfos.size(); ++i) {
TrackInfo *info = &mTrackInfos.editItemAt(i);
ssize_t packetizerTrackIndex =
mTSPacketizer->addTrack(info->mFormat);
if (packetizerTrackIndex < 0) {
// Negative index doubles as the error code.
err = packetizerTrackIndex;
break;
}
info->mPacketizerTrackIndex = packetizerTrackIndex;
}
if (err == OK) {
sp<AMessage> notify = new AMessage(kWhatSenderNotify, this);
notify->setInt32("generation", mGeneration);
mTSSender = new RTPSender(mNetSession, notify);
looper()->registerHandler(mTSSender);
err = mTSSender->initAsync(
remoteHost,
remoteRTPPort,
rtpMode,
remoteRTCPPort,
rtcpMode,
localRTPPort);
if (err != OK) {
looper()->unregisterHandler(mTSSender->id());
mTSSender.clear();
}
}
if (err != OK) {
// Roll back the packetizer track assignments on failure.
for (size_t i = 0; i < mTrackInfos.size(); ++i) {
TrackInfo *info = &mTrackInfos.editItemAt(i);
info->mPacketizerTrackIndex = -1;
}
mTSPacketizer.clear();
return err;
}
mMode = MODE_TRANSPORT_STREAM;
// Only the single TS sender needs to report init-done.
mInitDoneCount = 1;
return OK;
}
if (mMode == MODE_TRANSPORT_STREAM) {
return INVALID_OPERATION;
}
if ((size_t)trackIndex >= mTrackInfos.size()) {
return -ERANGE;
}
TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
// Reject double-initialization of the same track.
if (info->mSender != NULL) {
return INVALID_OPERATION;
}
sp<AMessage> notify = new AMessage(kWhatSenderNotify, this);
notify->setInt32("generation", mGeneration);
notify->setSize("trackIndex", trackIndex);
info->mSender = new RTPSender(mNetSession, notify);
looper()->registerHandler(info->mSender);
status_t err = info->mSender->initAsync(
remoteHost,
remoteRTPPort,
rtpMode,
remoteRTCPPort,
rtcpMode,
localRTPPort);
if (err != OK) {
looper()->unregisterHandler(info->mSender->id());
info->mSender.clear();
return err;
}
if (mMode == MODE_UNDEFINED) {
// First per-track init: expect an init-done from every track.
mInitDoneCount = mTrackInfos.size();
}
mMode = MODE_ELEMENTARY_STREAMS;
return OK;
}
// Queues one access unit for transmission.
// In MODE_ELEMENTARY_STREAMS it goes straight to the track's RTPSender.
// In MODE_TRANSPORT_STREAM it is buffered, and the mux loop below drains
// tracks in presentation-time order, packetizing into TS and sending only
// while every track has data (to keep the mux time-ordered).
status_t MediaSender::queueAccessUnit(
size_t trackIndex, const sp<ABuffer> &accessUnit) {
if (mMode == MODE_UNDEFINED) {
return INVALID_OPERATION;
}
if (trackIndex >= mTrackInfos.size()) {
return -ERANGE;
}
if (mMode == MODE_TRANSPORT_STREAM) {
TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
info->mAccessUnits.push_back(accessUnit);
mTSPacketizer->extractCSDIfNecessary(info->mPacketizerTrackIndex);
for (;;) {
// Find the track whose oldest buffered unit has the smallest
// timestamp; bail out if any track has nothing buffered.
ssize_t minTrackIndex = -1;
int64_t minTimeUs = -1ll;
for (size_t i = 0; i < mTrackInfos.size(); ++i) {
// NOTE: shadows the outer "info" deliberately (scoped to loop).
const TrackInfo &info = mTrackInfos.itemAt(i);
if (info.mAccessUnits.empty()) {
minTrackIndex = -1;
minTimeUs = -1ll;
break;
}
int64_t timeUs;
const sp<ABuffer> &accessUnit = *info.mAccessUnits.begin();
CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
if (minTrackIndex < 0 || timeUs < minTimeUs) {
minTrackIndex = i;
minTimeUs = timeUs;
}
}
if (minTrackIndex < 0) {
return OK;
}
// Pop the earliest unit, packetize to TS and hand to the TS sender.
TrackInfo *info = &mTrackInfos.editItemAt(minTrackIndex);
sp<ABuffer> accessUnit = *info->mAccessUnits.begin();
info->mAccessUnits.erase(info->mAccessUnits.begin());
sp<ABuffer> tsPackets;
status_t err = packetizeAccessUnit(
minTrackIndex, accessUnit, &tsPackets);
if (err == OK) {
if (mLogFile != NULL) {
fwrite(tsPackets->data(), 1, tsPackets->size(), mLogFile);
}
int64_t timeUs;
CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
tsPackets->meta()->setInt64("timeUs", timeUs);
// 33 = RTP payload type for MPEG-2 transport stream.
err = mTSSender->queueBuffer(
tsPackets,
33 /* packetType */,
RTPSender::PACKETIZATION_TRANSPORT_STREAM);
}
if (err != OK) {
return err;
}
}
}
// Elementary-stream mode: dynamic payload types 96 (audio) / 97 (video).
TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
return info->mSender->queueBuffer(
accessUnit,
info->mIsAudio ? 96 : 97 /* packetType */,
info->mIsAudio
? RTPSender::PACKETIZATION_AAC : RTPSender::PACKETIZATION_H264);
}
void MediaSender::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatSenderNotify:
{
int32_t generation;
CHECK(msg->findInt32("generation", &generation));
if (generation != mGeneration) {
break;
}
onSenderNotify(msg);
break;
}
default:
TRESPASS();
}
}
// Handles a notification from one of the owned RTPSenders and translates it
// into the corresponding client-facing notification.
void MediaSender::onSenderNotify(const sp<AMessage> &msg) {
    int32_t what;
    CHECK(msg->findInt32("what", &what));

    switch (what) {
        case RTPSender::kWhatInitDone:
        {
            --mInitDoneCount;

            int32_t err;
            CHECK(msg->findInt32("err", &err));

            if (err != OK) {
                // First failure wins: report it and bump the generation so
                // late notifications from the old senders are ignored.
                notifyInitDone(err);
                ++mGeneration;
                break;
            }

            // Only report success once every sender has finished init.
            if (mInitDoneCount == 0) {
                notifyInitDone(OK);
            }
            break;
        }

        case RTPSender::kWhatError:
        {
            int32_t err;
            CHECK(msg->findInt32("err", &err));
            notifyError(err);
            break;
        }

        case kWhatNetworkStall:
        {
            size_t numBytesQueued;
            CHECK(msg->findSize("numBytesQueued", &numBytesQueued));
            notifyNetworkStall(numBytesQueued);
            break;
        }

        case kWhatInformSender:
        {
            // Forward the sender's latency statistics to the client.
            int64_t avgLatencyUs;
            CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs));

            int64_t maxLatencyUs;
            CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs));

            sp<AMessage> notify = mNotify->dup();
            notify->setInt32("what", kWhatInformSender);
            notify->setInt64("avgLatencyUs", avgLatencyUs);
            notify->setInt64("maxLatencyUs", maxLatencyUs);
            notify->post();
            break;
        }

        default:
            TRESPASS();
    }
}
// Posts kWhatInitDone with the given result to the client's notify message.
void MediaSender::notifyInitDone(status_t err) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", kWhatInitDone);
    msg->setInt32("err", err);
    msg->post();
}
// Posts kWhatError with the given error code to the client's notify message.
void MediaSender::notifyError(status_t err) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", kWhatError);
    msg->setInt32("err", err);
    msg->post();
}
// Posts kWhatNetworkStall, reporting how many bytes are backed up in the
// send queue, to the client's notify message.
void MediaSender::notifyNetworkStall(size_t numBytesQueued) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", kWhatNetworkStall);
    msg->setSize("numBytesQueued", numBytesQueued);
    msg->post();
}
// Converts one access unit into transport stream packets, optionally
// HDCP-encrypting video payloads first.
//
// If HDCP is active and this is a video track, the payload is encrypted in
// place (via encryptNative() when the unit lives in a gralloc buffer,
// otherwise via encrypt()), and a 16-byte HDCP private data blob carrying
// the streamCTR and the 64-bit inputCTR — split into groups terminated by
// marker bits, PES-timestamp style — is attached to the packetized output.
status_t MediaSender::packetizeAccessUnit(
        size_t trackIndex,
        sp<ABuffer> accessUnit,
        sp<ABuffer> *tsPackets) {
    const TrackInfo &info = mTrackInfos.itemAt(trackIndex);

    uint32_t flags = 0;

    bool isHDCPEncrypted = false;
    uint64_t inputCTR;
    uint8_t HDCP_private_data[16];

    // Only prepend SPS/PPS manually on video IDR frames when the track was
    // configured for it.
    bool manuallyPrependSPSPPS =
        !info.mIsAudio
            && (info.mFlags & FLAG_MANUALLY_PREPEND_SPS_PPS)
            && IsIDR(accessUnit);

    if (mHDCP != NULL && !info.mIsAudio) {
        isHDCPEncrypted = true;

        if (manuallyPrependSPSPPS) {
            // SPS/PPS must be part of the encrypted payload, so prepend
            // them here instead of letting the packetizer do it later.
            accessUnit = mTSPacketizer->prependCSD(
                    info.mPacketizerTrackIndex, accessUnit);
        }

        status_t err;
        native_handle_t* handle;
        if (accessUnit->meta()->findPointer("handle", (void**)&handle)
                && handle != NULL) {
            // Zero-copy path: the access unit lives in a gralloc buffer
            // described by "handle" plus a byte range.
            int32_t rangeLength, rangeOffset;
            sp<AMessage> notify;
            CHECK(accessUnit->meta()->findInt32("rangeOffset", &rangeOffset));
            CHECK(accessUnit->meta()->findInt32("rangeLength", &rangeLength));
            CHECK(accessUnit->meta()->findMessage("notify", &notify)
                    && notify != NULL);
            CHECK_GE((int32_t)accessUnit->size(), rangeLength);

            // Wrap the native handle in a 1-high Y8 GraphicBuffer so the
            // HDCP module can read it; ownership stays with the producer.
            sp<GraphicBuffer> grbuf(new GraphicBuffer(
                    rangeOffset + rangeLength /* width */, 1 /* height */,
                    HAL_PIXEL_FORMAT_Y8, 1 /* layerCount */,
                    GRALLOC_USAGE_HW_VIDEO_ENCODER,
                    rangeOffset + rangeLength /* stride */, handle,
                    false /* keepOwnership */));

            err = mHDCP->encryptNative(
                    grbuf, rangeOffset, rangeLength,
                    trackIndex  /* streamCTR */,
                    &inputCTR,
                    accessUnit->data());
            // Tell the producer the gralloc buffer may be reused.
            notify->post();
        } else {
            // Regular path: encrypt the buffer contents in place.
            err = mHDCP->encrypt(
                    accessUnit->data(), accessUnit->size(),
                    trackIndex  /* streamCTR */,
                    &inputCTR,
                    accessUnit->data());
        }

        if (err != OK) {
            ALOGE("Failed to HDCP-encrypt media data (err %d)",
                  err);

            return err;
        }

        // Pack streamCTR (== trackIndex, 32 bit) and inputCTR (64 bit)
        // into the 16-byte private data blob; each value is split into
        // 7-bit groups whose carrier bytes end in a marker bit of 1.
        HDCP_private_data[0] = 0x00;

        HDCP_private_data[1] =
            (((trackIndex >> 30) & 3) << 1) | 1;

        HDCP_private_data[2] = (trackIndex >> 22) & 0xff;

        HDCP_private_data[3] =
            (((trackIndex >> 15) & 0x7f) << 1) | 1;

        HDCP_private_data[4] = (trackIndex >> 7) & 0xff;

        HDCP_private_data[5] =
            ((trackIndex & 0x7f) << 1) | 1;

        HDCP_private_data[6] = 0x00;

        HDCP_private_data[7] =
            (((inputCTR >> 60) & 0x0f) << 1) | 1;

        HDCP_private_data[8] = (inputCTR >> 52) & 0xff;

        HDCP_private_data[9] =
            (((inputCTR >> 45) & 0x7f) << 1) | 1;

        HDCP_private_data[10] = (inputCTR >> 37) & 0xff;

        HDCP_private_data[11] =
            (((inputCTR >> 30) & 0x7f) << 1) | 1;

        HDCP_private_data[12] = (inputCTR >> 22) & 0xff;

        HDCP_private_data[13] =
            (((inputCTR >> 15) & 0x7f) << 1) | 1;

        HDCP_private_data[14] = (inputCTR >> 7) & 0xff;

        HDCP_private_data[15] =
            ((inputCTR & 0x7f) << 1) | 1;

        flags |= TSPacketizer::IS_ENCRYPTED;
    } else if (manuallyPrependSPSPPS) {
        flags |= TSPacketizer::PREPEND_SPS_PPS_TO_IDR_FRAMES;
    }

    // Emit PCR and PAT/PMT at most once every 100ms (wall-clock based).
    int64_t timeUs = ALooper::GetNowUs();
    if (mPrevTimeUs < 0ll || mPrevTimeUs + 100000ll <= timeUs) {
        flags |= TSPacketizer::EMIT_PCR;
        flags |= TSPacketizer::EMIT_PAT_AND_PMT;

        mPrevTimeUs = timeUs;
    }

    mTSPacketizer->packetize(
            info.mPacketizerTrackIndex,
            accessUnit,
            tsPackets,
            flags,
            !isHDCPEncrypted ? NULL : HDCP_private_data,
            !isHDCPEncrypted ? 0 : sizeof(HDCP_private_data),
            info.mIsAudio ? 2 : 0 /* numStuffingBytes */);

    return OK;
}
} // namespace android

@ -0,0 +1,132 @@
/*
* Copyright 2013, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MEDIA_SENDER_H_
#define MEDIA_SENDER_H_
#include "rtp/RTPSender.h"
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AHandler.h>
#include <utils/Errors.h>
#include <utils/Vector.h>
namespace android {
struct ABuffer;
struct ANetworkSession;
struct AMessage;
struct IHDCP;
struct TSPacketizer;
// This class facilitates sending of data from one or more media tracks
// through one or more RTP channels, either providing a 1:1 mapping from
// track to RTP channel or muxing all tracks into a single RTP channel and
// using transport stream encapsulation.
// Optionally the (video) data is encrypted using the provided hdcp object.
struct MediaSender : public AHandler {
    // Notification "what" codes posted to the client's notify message.
    enum {
        kWhatInitDone,       // all RTP channels finished (err carries result)
        kWhatError,          // a sender reported an error
        kWhatNetworkStall,   // send queue backed up (numBytesQueued)
        kWhatInformSender,   // latency statistics (avg/maxLatencyUs)
    };

    MediaSender(
            const sp<ANetworkSession> &netSession,
            const sp<AMessage> &notify);

    // Optional: video payloads will be HDCP-encrypted before packetization.
    status_t setHDCP(const sp<IHDCP> &hdcp);

    enum FlagBits {
        FLAG_MANUALLY_PREPEND_SPS_PPS = 1,
    };
    ssize_t addTrack(const sp<AMessage> &format, uint32_t flags);

    // If trackIndex == -1, initialize for transport stream muxing.
    status_t initAsync(
            ssize_t trackIndex,
            const char *remoteHost,
            int32_t remoteRTPPort,
            RTPSender::TransportMode rtpMode,
            int32_t remoteRTCPPort,
            RTPSender::TransportMode rtcpMode,
            int32_t *localRTPPort);

    status_t queueAccessUnit(
            size_t trackIndex, const sp<ABuffer> &accessUnit);

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~MediaSender();

private:
    enum {
        kWhatSenderNotify,
    };

    enum Mode {
        MODE_UNDEFINED,           // before a successful initAsync()
        MODE_TRANSPORT_STREAM,    // all tracks muxed into one TS/RTP channel
        MODE_ELEMENTARY_STREAMS,  // one RTP channel per track
    };

    // Per-track bookkeeping.
    struct TrackInfo {
        sp<AMessage> mFormat;
        uint32_t mFlags;
        sp<RTPSender> mSender;               // only in elementary-stream mode
        List<sp<ABuffer> > mAccessUnits;     // pending units (TS mode only)
        ssize_t mPacketizerTrackIndex;
        bool mIsAudio;
    };

    sp<ANetworkSession> mNetSession;
    sp<AMessage> mNotify;

    sp<IHDCP> mHDCP;

    Mode mMode;
    // Bumped when init fails so stale sender notifications are dropped.
    int32_t mGeneration;

    Vector<TrackInfo> mTrackInfos;

    // Transport-stream-mode state.
    sp<TSPacketizer> mTSPacketizer;
    sp<RTPSender> mTSSender;
    int64_t mPrevTimeUs;  // time of the last PCR/PAT/PMT emission

    // Number of senders that still have to report kWhatInitDone.
    size_t mInitDoneCount;

    // Optional raw TS dump for debugging; may be NULL.
    FILE *mLogFile;

    void onSenderNotify(const sp<AMessage> &msg);

    void notifyInitDone(status_t err);
    void notifyError(status_t err);
    void notifyNetworkStall(size_t numBytesQueued);

    status_t packetizeAccessUnit(
            size_t trackIndex,
            sp<ABuffer> accessUnit,
            sp<ABuffer> *tsPackets);

    DISALLOW_EVIL_CONSTRUCTORS(MediaSender);
};
} // namespace android
#endif // MEDIA_SENDER_H_

@ -0,0 +1,92 @@
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "Parameters.h"
#include <media/stagefright/MediaErrors.h>
namespace android {
// static
// Factory: parses a block of "name: value\r\n" pairs and returns the
// resulting Parameters object, or NULL if the input is malformed.
sp<Parameters> Parameters::Parse(const char *data, size_t size) {
    sp<Parameters> result = new Parameters;
    return result->parse(data, size) == OK ? result : NULL;
}
// Construction is private; instances are obtained via the Parse() factory.
Parameters::Parameters() {}

Parameters::~Parameters() {}
// Parses a block of "name: value" pairs, one per CRLF-terminated line.
// Names are trimmed and lowercased; values are trimmed.  Returns
// ERROR_MALFORMED if a line has no ':' or an empty name.
status_t Parameters::parse(const char *data, size_t size) {
    size_t i = 0;
    while (i < size) {
        size_t nameStart = i;
        // Scan up to the ':' separating name from value.
        while (i < size && data[i] != ':') {
            ++i;
        }

        if (i == size || i == nameStart) {
            return ERROR_MALFORMED;
        }

        AString name(&data[nameStart], i - nameStart);
        name.trim();
        name.tolower();

        ++i;

        size_t valueStart = i;

        // The value extends to the next CRLF (or the end of the input).
        while (i + 1 < size && (data[i] != '\r' || data[i + 1] != '\n')) {
            ++i;
        }

        AString value(&data[valueStart], i - valueStart);
        value.trim();

        mDict.add(name, value);

        // Skip any run of CRLF pairs (tolerates blank lines).
        while (i + 1 < size && data[i] == '\r' && data[i + 1] == '\n') {
            i += 2;
        }
    }

    return OK;
}
bool Parameters::findParameter(const char *name, AString *value) const {
AString key = name;
key.tolower();
ssize_t index = mDict.indexOfKey(key);
if (index < 0) {
value->clear();
return false;
}
*value = mDict.valueAt(index);
return true;
}
} // namespace android

@ -0,0 +1,41 @@
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AString.h>
#include <utils/KeyedVector.h>
#include <utils/RefBase.h>
namespace android {
// An immutable set of parsed "name: value\r\n" pairs.  Lookup is
// case-insensitive (names are lowercased at parse time).
struct Parameters : public RefBase {
    // Returns NULL if the data is malformed.
    static sp<Parameters> Parse(const char *data, size_t size);

    bool findParameter(const char *name, AString *value) const;

protected:
    virtual ~Parameters();

private:
    // name -> value, names stored in lower case.
    KeyedVector<AString, AString> mDict;

    Parameters();
    status_t parse(const char *data, size_t size);

    DISALLOW_EVIL_CONSTRUCTORS(Parameters);
};
} // namespace android

@ -0,0 +1,550 @@
/*
* Copyright 2013, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "VideoFormats"
#include <utils/Log.h>
#include "VideoFormats.h"
#include <media/stagefright/foundation/ADebug.h>
namespace android {
// static
// Static capability table, indexed by [ResolutionType][bit-index].
// Row fields: { width, height, framesPerSecond, interlaced, profile, level };
// profile/level start out 0 here and are maintained per instance in
// mConfigs.  All-zero rows are unassigned table slots.
const VideoFormats::config_t VideoFormats::mResolutionTable[][32] = {
    {
        // CEA Resolutions
        { 640, 480, 60, false, 0, 0},
        { 720, 480, 60, false, 0, 0},
        { 720, 480, 60, true, 0, 0},
        { 720, 576, 50, false, 0, 0},
        { 720, 576, 50, true, 0, 0},
        { 1280, 720, 30, false, 0, 0},
        { 1280, 720, 60, false, 0, 0},
        { 1920, 1080, 30, false, 0, 0},
        { 1920, 1080, 60, false, 0, 0},
        { 1920, 1080, 60, true, 0, 0},
        { 1280, 720, 25, false, 0, 0},
        { 1280, 720, 50, false, 0, 0},
        { 1920, 1080, 25, false, 0, 0},
        { 1920, 1080, 50, false, 0, 0},
        { 1920, 1080, 50, true, 0, 0},
        { 1280, 720, 24, false, 0, 0},
        { 1920, 1080, 24, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
    },
    {
        // VESA Resolutions
        { 800, 600, 30, false, 0, 0},
        { 800, 600, 60, false, 0, 0},
        { 1024, 768, 30, false, 0, 0},
        { 1024, 768, 60, false, 0, 0},
        { 1152, 864, 30, false, 0, 0},
        { 1152, 864, 60, false, 0, 0},
        { 1280, 768, 30, false, 0, 0},
        { 1280, 768, 60, false, 0, 0},
        { 1280, 800, 30, false, 0, 0},
        { 1280, 800, 60, false, 0, 0},
        { 1360, 768, 30, false, 0, 0},
        { 1360, 768, 60, false, 0, 0},
        { 1366, 768, 30, false, 0, 0},
        { 1366, 768, 60, false, 0, 0},
        { 1280, 1024, 30, false, 0, 0},
        { 1280, 1024, 60, false, 0, 0},
        { 1400, 1050, 30, false, 0, 0},
        { 1400, 1050, 60, false, 0, 0},
        { 1440, 900, 30, false, 0, 0},
        { 1440, 900, 60, false, 0, 0},
        { 1600, 900, 30, false, 0, 0},
        { 1600, 900, 60, false, 0, 0},
        { 1600, 1200, 30, false, 0, 0},
        { 1600, 1200, 60, false, 0, 0},
        { 1680, 1024, 30, false, 0, 0},
        { 1680, 1024, 60, false, 0, 0},
        { 1680, 1050, 30, false, 0, 0},
        { 1680, 1050, 60, false, 0, 0},
        { 1920, 1200, 30, false, 0, 0},
        { 1920, 1200, 60, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
    },
    {
        // HH Resolutions
        { 800, 480, 30, false, 0, 0},
        { 800, 480, 60, false, 0, 0},
        { 854, 480, 30, false, 0, 0},
        { 854, 480, 60, false, 0, 0},
        { 864, 480, 30, false, 0, 0},
        { 864, 480, 60, false, 0, 0},
        { 640, 360, 30, false, 0, 0},
        { 640, 360, 60, false, 0, 0},
        { 960, 540, 30, false, 0, 0},
        { 960, 540, 60, false, 0, 0},
        { 848, 480, 30, false, 0, 0},
        { 848, 480, 60, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
        { 0, 0, 0, false, 0, 0},
    }
};
// Starts from the static capability table with every resolution disabled,
// then enables the default native mode (CEA index 0, i.e. 640x480 p60).
VideoFormats::VideoFormats() {
    memcpy(mConfigs, mResolutionTable, sizeof(mConfigs));

    for (uint32_t &enabledMask : mResolutionEnabled) {
        enabledMask = 0;
    }

    setNativeResolution(RESOLUTION_CEA, 0);  // default to 640x480 p60
}
// Records the device's native (preferred) resolution and also marks it as
// supported.  Aborts (CHECK) on an invalid type/index combination.
void VideoFormats::setNativeResolution(ResolutionType type, size_t index) {
    CHECK_LT(type, kNumResolutionTypes);
    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));

    mNativeType = type;
    mNativeIndex = index;

    setResolutionEnabled(type, index);
}
// Returns the native resolution previously set via setNativeResolution()
// (or parsed from a format spec).
void VideoFormats::getNativeResolution(
        ResolutionType *type, size_t *index) const {
    *type = mNativeType;
    *index = mNativeIndex;
}
void VideoFormats::disableAll() {
for (size_t i = 0; i < kNumResolutionTypes; ++i) {
mResolutionEnabled[i] = 0;
for (size_t j = 0; j < 32; j++) {
mConfigs[i][j].profile = mConfigs[i][j].level = 0;
}
}
}
void VideoFormats::enableAll() {
for (size_t i = 0; i < kNumResolutionTypes; ++i) {
mResolutionEnabled[i] = 0xffffffff;
for (size_t j = 0; j < 32; j++) {
mConfigs[i][j].profile = (1ul << PROFILE_CBP);
mConfigs[i][j].level = (1ul << LEVEL_31);
}
}
}
// Enables every resolution whose "score" (width * height * fps, doubled
// for progressive modes) does not exceed that of the given maximum
// resolution, stamping each one with the supplied profile/level.
void VideoFormats::enableResolutionUpto(
        ResolutionType type, size_t index,
        ProfileType profile, LevelType level) {
    size_t width, height, fps, score;
    bool interlaced;
    if (!GetConfiguration(type, index, &width, &height,
            &fps, &interlaced)) {
        ALOGE("Maximum resolution not found!");
        return;
    }
    // Progressive counts double: (!interlaced + 1) is 2 for p, 1 for i.
    score = width * height * fps * (!interlaced + 1);
    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
        for (size_t j = 0; j < 32; j++) {
            if (GetConfiguration((ResolutionType)i, j,
                    &width, &height, &fps, &interlaced)
                && score >= width * height * fps * (!interlaced + 1)) {
                setResolutionEnabled((ResolutionType)i, j);
                setProfileLevel((ResolutionType)i, j, profile, level);
            }
        }
    }
}
// Toggles a single resolution.  Enabling also (re)sets the entry to the
// baseline capability (CBP / level 3.1); disabling wipes profile and level.
void VideoFormats::setResolutionEnabled(
        ResolutionType type, size_t index, bool enabled) {
    CHECK_LT(type, kNumResolutionTypes);
    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));

    const uint32_t bit = 1ul << index;
    config_t &config = mConfigs[type][index];

    if (!enabled) {
        mResolutionEnabled[type] &= ~bit;
        config.profile = 0;
        config.level = 0;
        return;
    }

    mResolutionEnabled[type] |= bit;
    config.profile = (1ul << PROFILE_CBP);
    config.level = (1ul << LEVEL_31);
}
// Overwrites the profile/level capability for one resolution entry with
// single-bit masks (one bit per enum value).
void VideoFormats::setProfileLevel(
        ResolutionType type, size_t index,
        ProfileType profile, LevelType level) {
    CHECK_LT(type, kNumResolutionTypes);
    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));

    mConfigs[type][index].profile = (1ul << profile);
    mConfigs[type][index].level = (1ul << level);
}
// Reports the highest profile bit and highest level bit recorded for the
// given resolution.  If neither was ever set, logs an error and falls back
// to CBP / level 3.1.
void VideoFormats::getProfileLevel(
        ResolutionType type, size_t index,
        ProfileType *profile, LevelType *level) const{
    CHECK_LT(type, kNumResolutionTypes);
    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));

    int i, bestProfile = -1, bestLevel = -1;

    // Highest set bit wins.
    for (i = 0; i < kNumProfileTypes; ++i) {
        if (mConfigs[type][index].profile & (1ul << i)) {
            bestProfile = i;
        }
    }

    for (i = 0; i < kNumLevelTypes; ++i) {
        if (mConfigs[type][index].level & (1ul << i)) {
            bestLevel = i;
        }
    }

    if (bestProfile == -1 || bestLevel == -1) {
        ALOGE("Profile or level not set for resolution type %d, index %zu",
              type, index);
        bestProfile = PROFILE_CBP;
        bestLevel = LEVEL_31;
    }

    *profile = (ProfileType) bestProfile;
    *level = (LevelType) bestLevel;
}
// Returns true iff the enabled-bit for the given resolution is set.
bool VideoFormats::isResolutionEnabled(
        ResolutionType type, size_t index) const {
    CHECK_LT(type, kNumResolutionTypes);
    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));

    return (mResolutionEnabled[type] & (1ul << index)) != 0;
}
// static
// Looks up the static capability table.  Returns false for an out-of-range
// index or an unpopulated (all-zero) slot.  Any of the output pointers may
// be NULL when the caller only wants an existence check.
bool VideoFormats::GetConfiguration(
        ResolutionType type,
        size_t index,
        size_t *width, size_t *height, size_t *framesPerSecond,
        bool *interlaced) {
    CHECK_LT(type, kNumResolutionTypes);

    if (index >= 32) {
        return false;
    }

    const config_t &entry = mResolutionTable[type][index];

    if (entry.width == 0) {
        // Unassigned table slot.
        return false;
    }

    if (width != NULL) {
        *width = entry.width;
    }

    if (height != NULL) {
        *height = entry.height;
    }

    if (framesPerSecond != NULL) {
        *framesPerSecond = entry.framesPerSecond;
    }

    if (interlaced != NULL) {
        *interlaced = entry.interlaced;
    }

    return true;
}
// Parses one H.264-codec entry of a wfd_video_formats value:
// "<profile> <level> <CEA-mask> <VESA-mask> <HH-mask>".  Every resolution
// bit present in a mask is enabled; for each entry the highest advertised
// profile wins, and within the same profile the highest level wins.
bool VideoFormats::parseH264Codec(const char *spec) {
    unsigned profile, level, res[3];

    if (sscanf(
            spec,
            "%02x %02x %08X %08X %08X",
            &profile,
            &level,
            &res[0],
            &res[1],
            &res[2]) != 5) {
        return false;
    }

    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
        for (size_t j = 0; j < 32; ++j) {
            if (res[i] & (1ul << j)){
                mResolutionEnabled[i] |= (1ul << j);
                if (profile > mConfigs[i][j].profile) {
                    // prefer higher profile (even if level is lower)
                    mConfigs[i][j].profile = profile;
                    mConfigs[i][j].level = level;
                } else if (profile == mConfigs[i][j].profile &&
                           level > mConfigs[i][j].level) {
                    mConfigs[i][j].level = level;
                }
            }
        }
    }

    return true;
}
// static
// Maps the wfd profile/level enums onto the corresponding H.264 profile_idc
// / level_idc codes and constraint-set flag bytes.  Output pointers may be
// NULL.  Always returns true for in-range enums (enforced by the CHECKs).
bool VideoFormats::GetProfileLevel(
        ProfileType profile, LevelType level, unsigned *profileIdc,
        unsigned *levelIdc, unsigned *constraintSet) {
    CHECK_LT(profile, kNumProfileTypes);
    CHECK_LT(level, kNumLevelTypes);

    // H.264 profile_idc per ProfileType.
    static const unsigned kProfileIDC[kNumProfileTypes] = {
        66,   // PROFILE_CBP
        100,  // PROFILE_CHP
    };

    // H.264 level_idc per LevelType.
    static const unsigned kLevelIDC[kNumLevelTypes] = {
        31,  // LEVEL_31
        32,  // LEVEL_32
        40,  // LEVEL_40
        41,  // LEVEL_41
        42,  // LEVEL_42
    };

    // constraint_set flag byte per ProfileType.
    static const unsigned kConstraintSet[kNumProfileTypes] = {
        0xc0,  // PROFILE_CBP
        0x0c,  // PROFILE_CHP
    };

    if (profileIdc != NULL) {
        *profileIdc = kProfileIDC[profile];
    }

    if (levelIdc != NULL) {
        *levelIdc = kLevelIDC[level];
    }

    if (constraintSet != NULL) {
        *constraintSet = kConstraintSet[profile];
    }

    return true;
}
// Parses a complete wfd_video_formats value: "<native> <pref-disp-mode> "
// followed by one or more H.264-codec entries (60 chars each including the
// ", " separator).  A sink advertising an out-of-range native resolution is
// only warned about — the native value is currently ignored.  Returns false
// only when the two-byte header is syntactically broken.
bool VideoFormats::parseFormatSpec(const char *spec) {
    CHECK_EQ(kNumResolutionTypes, 3);

    disableAll();

    unsigned native, dummy;
    size_t size = strlen(spec);
    size_t offset = 0;

    if (sscanf(spec, "%02x %02x ", &native, &dummy) != 2) {
        return false;
    }

    offset += 6; // skip native and preferred-display-mode-supported
    CHECK_LE(offset + 58, size);  // at least one codec entry must follow

    while (offset < size) {
        parseH264Codec(spec + offset);
        offset += 60; // skip H.264-codec + ", "
    }

    // native byte: low 3 bits select the resolution set, the rest the index.
    mNativeIndex = native >> 3;
    mNativeType = (ResolutionType)(native & 7);

    bool success;
    if (mNativeType >= kNumResolutionTypes) {
        success = false;
    } else {
        success = GetConfiguration(
                mNativeType, mNativeIndex, NULL, NULL, NULL, NULL);
    }

    if (!success) {
        ALOGW("sink advertised an illegal native resolution, fortunately "
              "this value is ignored for the time being...");
    }

    return true;
}
// Renders this object's capabilities as a wfd_video_formats value.  For an
// M4 message (forM4Message) the "native" byte is forced to zero.
AString VideoFormats::getFormatSpec(bool forM4Message) const {
    CHECK_EQ(kNumResolutionTypes, 3);

    // wfd_video_formats:
    // 1 byte "native"
    // 1 byte "preferred-display-mode-supported" 0 or 1
    // one or more avc codec structures
    //   1 byte profile
    //   1 byte level
    //   4 byte CEA mask
    //   4 byte VESA mask
    //   4 byte HH mask
    //   1 byte latency
    //   2 byte min-slice-slice
    //   2 byte slice-enc-params
    //   1 byte framerate-control-support
    //   max-hres (none or 2 byte)
    //   max-vres (none or 2 byte)

    return AStringPrintf(
            "%02x 00 %02x %02x %08x %08x %08x 00 0000 0000 00 none none",
            forM4Message ? 0x00 : ((mNativeIndex << 3) | mNativeType),
            mConfigs[mNativeType][mNativeIndex].profile,
            mConfigs[mNativeType][mNativeIndex].level,
            mResolutionEnabled[0],
            mResolutionEnabled[1],
            mResolutionEnabled[2]);
}
// static
// Chooses the highest-scoring resolution enabled by BOTH sink and source
// (score = width * height * fps, doubled for progressive modes), then picks
// the lesser of the two sides' profile and level for that resolution.
// Returns false when the capability sets don't intersect at all.
bool VideoFormats::PickBestFormat(
        const VideoFormats &sinkSupported,
        const VideoFormats &sourceSupported,
        ResolutionType *chosenType,
        size_t *chosenIndex,
        ProfileType *chosenProfile,
        LevelType *chosenLevel) {
#if 0
    // Support for the native format is a great idea, the spec includes
    // these features, but nobody supports it and the tests don't validate it.

    ResolutionType nativeType;
    size_t nativeIndex;
    sinkSupported.getNativeResolution(&nativeType, &nativeIndex);
    if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) {
        if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) {
            ALOGI("Choosing sink's native resolution");
            *chosenType = nativeType;
            *chosenIndex = nativeIndex;
            return true;
        }
    } else {
        ALOGW("Sink advertised native resolution that it doesn't "
              "actually support... ignoring");
    }

    sourceSupported.getNativeResolution(&nativeType, &nativeIndex);
    if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) {
        if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) {
            ALOGI("Choosing source's native resolution");
            *chosenType = nativeType;
            *chosenIndex = nativeIndex;
            return true;
        }
    } else {
        ALOGW("Source advertised native resolution that it doesn't "
              "actually support... ignoring");
    }
#endif

    bool first = true;
    uint32_t bestScore = 0;
    size_t bestType = 0;
    size_t bestIndex = 0;
    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
        for (size_t j = 0; j < 32; ++j) {
            size_t width, height, framesPerSecond;
            bool interlaced;
            // The static table is packed from index 0, so the first empty
            // slot ends the scan of this resolution set.
            if (!GetConfiguration(
                    (ResolutionType)i,
                    j,
                    &width, &height, &framesPerSecond, &interlaced)) {
                break;
            }

            // Only consider modes both sides advertise.
            if (!sinkSupported.isResolutionEnabled((ResolutionType)i, j)
                    || !sourceSupported.isResolutionEnabled(
                        (ResolutionType)i, j)) {
                continue;
            }

            ALOGV("type %zu, index %zu, %zu x %zu %c%zu supported",
                  i, j, width, height, interlaced ? 'i' : 'p', framesPerSecond);

            uint32_t score = width * height * framesPerSecond;
            if (!interlaced) {
                // Progressive beats interlaced at equal pixel rate.
                score *= 2;
            }

            if (first || score > bestScore) {
                bestScore = score;
                bestType = i;
                bestIndex = j;

                first = false;
            }
        }
    }

    if (first) {
        return false;
    }

    *chosenType = (ResolutionType)bestType;
    *chosenIndex = bestIndex;

    // Pick the best profile/level supported by both sink and source.
    ProfileType srcProfile, sinkProfile;
    LevelType srcLevel, sinkLevel;
    sourceSupported.getProfileLevel(
            (ResolutionType)bestType, bestIndex,
            &srcProfile, &srcLevel);
    sinkSupported.getProfileLevel(
            (ResolutionType)bestType, bestIndex,
            &sinkProfile, &sinkLevel);
    *chosenProfile = srcProfile < sinkProfile ? srcProfile : sinkProfile;
    *chosenLevel = srcLevel < sinkLevel ? srcLevel : sinkLevel;

    return true;
}

@ -0,0 +1,125 @@
/*
* Copyright 2013, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef VIDEO_FORMATS_H_
#define VIDEO_FORMATS_H_
#include <media/stagefright/foundation/ABase.h>
#include <stdint.h>
namespace android {
struct AString;
// This class encapsulates that video resolution capabilities of a wfd source
// or sink as outlined in the wfd specs. Currently three sets of resolutions
// are specified, each of which supports up to 32 resolutions.
// In addition to its capabilities each sink/source also publishes its
// "native" resolution, presumably one that is preferred among all others
// because it wouldn't require any scaling and directly corresponds to the
// display capabilities/pixels.
struct VideoFormats {
    VideoFormats();

    // One capability-table entry.  profile/level are bitmasks with one bit
    // per ProfileType/LevelType enum value (0 == not set).
    struct config_t {
        size_t width, height, framesPerSecond;
        bool interlaced;
        unsigned char profile, level;
    };

    enum ProfileType {
        PROFILE_CBP = 0,
        PROFILE_CHP,
        kNumProfileTypes,
    };

    enum LevelType {
        LEVEL_31 = 0,
        LEVEL_32,
        LEVEL_40,
        LEVEL_41,
        LEVEL_42,
        kNumLevelTypes,
    };

    // The three resolution sets of the wfd spec, 32 slots each.
    enum ResolutionType {
        RESOLUTION_CEA,
        RESOLUTION_VESA,
        RESOLUTION_HH,
        kNumResolutionTypes,
    };

    void setNativeResolution(ResolutionType type, size_t index);
    void getNativeResolution(ResolutionType *type, size_t *index) const;

    void disableAll();
    void enableAll();
    // Enables every mode up to (and including) the given one's pixel rate.
    void enableResolutionUpto(
            ResolutionType type, size_t index,
            ProfileType profile, LevelType level);

    void setResolutionEnabled(
            ResolutionType type, size_t index, bool enabled = true);

    bool isResolutionEnabled(ResolutionType type, size_t index) const;

    void setProfileLevel(
            ResolutionType type, size_t index,
            ProfileType profile, LevelType level);

    void getProfileLevel(
            ResolutionType type, size_t index,
            ProfileType *profile, LevelType *level) const;

    // Looks up the static capability table; output pointers may be NULL.
    static bool GetConfiguration(
            ResolutionType type, size_t index,
            size_t *width, size_t *height, size_t *framesPerSecond,
            bool *interlaced);

    // Maps the enums above to H.264 profile_idc/level_idc/constraint flags.
    static bool GetProfileLevel(
            ProfileType profile, LevelType level,
            unsigned *profileIdc, unsigned *levelIdc,
            unsigned *constraintSet);

    // Serialization to/from the wfd_video_formats string representation.
    bool parseFormatSpec(const char *spec);
    AString getFormatSpec(bool forM4Message = false) const;

    // Intersects sink and source capabilities and picks the best mode.
    static bool PickBestFormat(
            const VideoFormats &sinkSupported,
            const VideoFormats &sourceSupported,
            ResolutionType *chosenType,
            size_t *chosenIndex,
            ProfileType *chosenProfile,
            LevelType *chosenLevel);

private:
    bool parseH264Codec(const char *spec);

    // The native (preferred) mode of this device.
    ResolutionType mNativeType;
    size_t mNativeIndex;

    // One enabled-bit per table slot, per resolution set.
    uint32_t mResolutionEnabled[kNumResolutionTypes];
    static const config_t mResolutionTable[kNumResolutionTypes][32];
    // Per-instance copy of the table carrying profile/level state.
    config_t mConfigs[kNumResolutionTypes][32];

    DISALLOW_EVIL_CONSTRUCTORS(VideoFormats);
};
} // namespace android
#endif // VIDEO_FORMATS_H_

@ -0,0 +1,49 @@
/*
* Copyright 2013, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef RTP_BASE_H_
#define RTP_BASE_H_
namespace android {
// Shared constants and helpers for the RTP sender/receiver classes.
struct RTPBase {
    enum PacketizationMode {
        PACKETIZATION_TRANSPORT_STREAM,  // MPEG-2 TS payload
        PACKETIZATION_H264,              // H.264 elementary stream
        PACKETIZATION_AAC,               // AAC elementary stream
        PACKETIZATION_NONE,              // buffer sent as-is
    };

    enum TransportMode {
        TRANSPORT_UNDEFINED,
        TRANSPORT_NONE,
        TRANSPORT_UDP,
        TRANSPORT_TCP,
        TRANSPORT_TCP_INTERLEAVED,
    };

    // Really UDP _payload_ size
    const unsigned int kMaxUDPPacketSize = 1472;   // 1472 good, 1473 bad on Android@Home

    // Returns a random even port suitable as a local RTP port (RTCP
    // conventionally uses port + 1).
    static int32_t PickRandomRTPPort();
};
} // namespace android
#endif // RTP_BASE_H_

@ -0,0 +1,808 @@
/*
* Copyright 2013, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "RTPSender"
#include <utils/Log.h>
#include "RTPSender.h"
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/ANetworkSession.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
#include "include/avc_utils.h"
namespace android {
// All real work happens in initAsync(); the constructor just records the
// network session / client notification and zeroes the protocol state.
RTPSender::RTPSender(
        const sp<ANetworkSession> &netSession,
        const sp<AMessage> &notify)
    : mNetSession(netSession),
      mNotify(notify),
      mRTPMode(TRANSPORT_UNDEFINED),
      mRTCPMode(TRANSPORT_UNDEFINED),
      mRTPSessionID(0),
      mRTCPSessionID(0),
      mRTPConnected(false),
      mRTCPConnected(false),
      mLastNTPTime(0),
      mLastRTPTime(0),
      mNumRTPSent(0),
      mNumRTPOctetsSent(0),
      mNumSRsSent(0),
      mRTPSeqNo(0),
      mHistorySize(0) {
}
// Tears down any network sessions still open (RTCP first, then RTP).
RTPSender::~RTPSender() {
    if (mRTCPSessionID != 0) {
        mNetSession->destroySession(mRTCPSessionID);
        mRTCPSessionID = 0;
    }

    if (mRTPSessionID != 0) {
        mNetSession->destroySession(mRTPSessionID);
        mRTPSessionID = 0;
    }
}
// static
// Picks a random even port in [1024, 65534) for use as the local RTP port;
// the matching RTCP port is conventionally port + 1, hence the even/odd
// pairing and the exclusive upper bound.
//
// BUG FIX: the previous float-scaled formula,
//     (int32_t)(((float)(kRange + 1) * rand()) / RAND_MAX) * 2 + 1024,
// produced (kRange + 1) * 2 + 1024 == 65536 when rand() == RAND_MAX (and
// 65534 just below it) — both outside the documented range, and 65536 is
// not even a valid port.  Integer modulo keeps every result in range.
int32_t RTPBase::PickRandomRTPPort() {
    // Number of even candidates in [1024, 65534).
    static const size_t kRange = (65534 - 1024) / 2;
    return (int32_t)(rand() % kRange) * 2 + 1024;
}
// Binds local RTP (and optionally RTCP) sockets and connects them to the
// remote endpoint.
//
// Local ports are chosen at random (RTP even, RTCP = RTP + 1) and the whole
// pair is retried with fresh ports until both sessions are created.  For
// pure-UDP transports initialization completes synchronously and
// kWhatInitDone is posted right away; for TCP the connection completion is
// reported later through the session notifications.
status_t RTPSender::initAsync(
        const char *remoteHost,
        int32_t remoteRTPPort,
        TransportMode rtpMode,
        int32_t remoteRTCPPort,
        TransportMode rtcpMode,
        int32_t *outLocalRTPPort) {
    // Reject double-init and nonsensical transport selections.
    if (mRTPMode != TRANSPORT_UNDEFINED
            || rtpMode == TRANSPORT_UNDEFINED
            || rtpMode == TRANSPORT_NONE
            || rtcpMode == TRANSPORT_UNDEFINED) {
        return INVALID_OPERATION;
    }

    CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED);
    CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED);

    // RTCP mode and remote RTCP port must agree (both present or both absent).
    if ((rtcpMode == TRANSPORT_NONE && remoteRTCPPort >= 0)
            || (rtcpMode != TRANSPORT_NONE && remoteRTCPPort < 0)) {
        return INVALID_OPERATION;
    }

    sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, this);

    sp<AMessage> rtcpNotify;
    if (remoteRTCPPort >= 0) {
        rtcpNotify = new AMessage(kWhatRTCPNotify, this);
    }

    CHECK_EQ(mRTPSessionID, 0);
    CHECK_EQ(mRTCPSessionID, 0);

    int32_t localRTPPort;

    for (;;) {
        localRTPPort = PickRandomRTPPort();

        status_t err;
        if (rtpMode == TRANSPORT_UDP) {
            err = mNetSession->createUDPSession(
                    localRTPPort,
                    remoteHost,
                    remoteRTPPort,
                    rtpNotify,
                    &mRTPSessionID);
        } else {
            CHECK_EQ(rtpMode, TRANSPORT_TCP);
            err = mNetSession->createTCPDatagramSession(
                    localRTPPort,
                    remoteHost,
                    remoteRTPPort,
                    rtpNotify,
                    &mRTPSessionID);
        }

        if (err != OK) {
            // Port unavailable; retry with a different random port.
            continue;
        }

        if (remoteRTCPPort < 0) {
            // No RTCP requested; the RTP session alone is enough.
            break;
        }

        // RTCP uses the adjacent (odd) port.
        if (rtcpMode == TRANSPORT_UDP) {
            err = mNetSession->createUDPSession(
                    localRTPPort + 1,
                    remoteHost,
                    remoteRTCPPort,
                    rtcpNotify,
                    &mRTCPSessionID);
        } else {
            CHECK_EQ(rtcpMode, TRANSPORT_TCP);
            err = mNetSession->createTCPDatagramSession(
                    localRTPPort + 1,
                    remoteHost,
                    remoteRTCPPort,
                    rtcpNotify,
                    &mRTCPSessionID);
        }

        if (err == OK) {
            break;
        }

        // Couldn't get the adjacent RTCP port; drop the RTP session and
        // retry the whole pair with new ports.
        mNetSession->destroySession(mRTPSessionID);
        mRTPSessionID = 0;
    }

    // UDP sessions need no connection handshake.
    if (rtpMode == TRANSPORT_UDP) {
        mRTPConnected = true;
    }

    if (rtcpMode == TRANSPORT_UDP) {
        mRTCPConnected = true;
    }

    mRTPMode = rtpMode;
    mRTCPMode = rtcpMode;
    *outLocalRTPPort = localRTPPort;

    if (mRTPMode == TRANSPORT_UDP
            && (mRTCPMode == TRANSPORT_UDP || mRTCPMode == TRANSPORT_NONE)) {
        // Fully connected already; report success immediately.
        notifyInitDone(OK);
    }

    return OK;
}
// Dispatches one buffer to the packetization path matching the requested
// mode.  Any mode not listed below (e.g. PACKETIZATION_AAC) is a
// programming error and trips TRESPASS().
status_t RTPSender::queueBuffer(
        const sp<ABuffer> &buffer, uint8_t packetType, PacketizationMode mode) {
    status_t err;

    if (mode == PACKETIZATION_NONE) {
        err = queueRawPacket(buffer, packetType);
    } else if (mode == PACKETIZATION_TRANSPORT_STREAM) {
        err = queueTSPackets(buffer, packetType);
    } else if (mode == PACKETIZATION_H264) {
        err = queueAVCBuffer(buffer, packetType);
    } else {
        TRESPASS();
    }

    return err;
}
// Packetizes a buffer that is already a complete RTP payload: prepends the
// fixed 12-byte RTP header and sends it. The payload must fit into a single
// UDP packet alongside the header.
status_t RTPSender::queueRawPacket(
        const sp<ABuffer> &packet, uint8_t packetType) {
    CHECK_LE(packet->size(), kMaxUDPPacketSize - 12);

    int64_t timeUs;
    CHECK(packet->meta()->findInt64("timeUs", &timeUs));

    sp<ABuffer> udpPacket = new ABuffer(12 + packet->size());

    // Stash the sequence number for retransmission lookups (parseTSFB).
    udpPacket->setInt32Data(mRTPSeqNo);

    uint8_t *rtp = udpPacket->data();
    rtp[0] = 0x80;        // V=2, no padding, no extension, no CSRCs
    rtp[1] = packetType;  // M=0, payload type

    rtp[2] = (mRTPSeqNo >> 8) & 0xff;
    rtp[3] = mRTPSeqNo & 0xff;
    ++mRTPSeqNo;

    // Media time converted to the 90 kHz RTP clock (us * 90000 / 1e6).
    uint32_t rtpTime = (timeUs * 9) / 100ll;

    rtp[4] = rtpTime >> 24;
    rtp[5] = (rtpTime >> 16) & 0xff;
    rtp[6] = (rtpTime >> 8) & 0xff;
    rtp[7] = rtpTime & 0xff;

    // SSRC.
    rtp[8] = kSourceID >> 24;
    rtp[9] = (kSourceID >> 16) & 0xff;
    rtp[10] = (kSourceID >> 8) & 0xff;
    rtp[11] = kSourceID & 0xff;

    memcpy(&rtp[12], packet->data(), packet->size());

    return sendRTPPacket(
            udpPacket,
            true /* storeInHistory */,
            true /* timeValid */,
            ALooper::GetNowUs());
}
// Splits an MPEG transport stream (a whole number of 188-byte TS packets)
// across as many RTP packets as needed, packing at most
// kMaxNumTSPacketsPerRTPPacket TS packets into each one.
status_t RTPSender::queueTSPackets(
        const sp<ABuffer> &tsPackets, uint8_t packetType) {
    CHECK_EQ(0u, tsPackets->size() % 188);

    int64_t timeUs;
    CHECK(tsPackets->meta()->findInt64("timeUs", &timeUs));

    size_t srcOffset = 0;
    while (srcOffset < tsPackets->size()) {
        sp<ABuffer> udpPacket =
            new ABuffer(12 + kMaxNumTSPacketsPerRTPPacket * 188);

        // Stash the sequence number for retransmission lookups (parseTSFB).
        udpPacket->setInt32Data(mRTPSeqNo);

        uint8_t *rtp = udpPacket->data();
        rtp[0] = 0x80;        // V=2, no padding, no extension, no CSRCs
        rtp[1] = packetType;

        rtp[2] = (mRTPSeqNo >> 8) & 0xff;
        rtp[3] = mRTPSeqNo & 0xff;
        ++mRTPSeqNo;

        // NOTE: the RTP timestamp is derived from the wallclock send time
        // (on the 90 kHz RTP clock), not from the media timestamp.
        int64_t nowUs = ALooper::GetNowUs();
        uint32_t rtpTime = (nowUs * 9) / 100ll;

        rtp[4] = rtpTime >> 24;
        rtp[5] = (rtpTime >> 16) & 0xff;
        rtp[6] = (rtpTime >> 8) & 0xff;
        rtp[7] = rtpTime & 0xff;

        // SSRC.
        rtp[8] = kSourceID >> 24;
        rtp[9] = (kSourceID >> 16) & 0xff;
        rtp[10] = (kSourceID >> 8) & 0xff;
        rtp[11] = kSourceID & 0xff;

        size_t numTSPackets = (tsPackets->size() - srcOffset) / 188;
        if (numTSPackets > kMaxNumTSPacketsPerRTPPacket) {
            numTSPackets = kMaxNumTSPacketsPerRTPPacket;
        }

        memcpy(&rtp[12], tsPackets->data() + srcOffset, numTSPackets * 188);

        udpPacket->setRange(0, 12 + numTSPackets * 188);

        srcOffset += numTSPackets * 188;
        bool isLastPacket = (srcOffset == tsPackets->size());

        // The media timestamp only applies to the final fragment.
        status_t err = sendRTPPacket(
                udpPacket,
                true /* storeInHistory */,
                isLastPacket /* timeValid */,
                timeUs);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}
// Packetizes an H.264 access unit per RFC 3984 (non-interleaved mode).
// NAL units are aggregated into STAP-A packets where several fit, emitted
// as single-NAL-unit packets when one fills a packet by itself, and
// fragmented into FU-A packets when a NAL unit exceeds the maximum packet
// size. The RTP marker bit is set on the final packet of the access unit.
status_t RTPSender::queueAVCBuffer(
        const sp<ABuffer> &accessUnit, uint8_t packetType) {
    int64_t timeUs;
    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));

    // Media time on the 90 kHz RTP clock, shared by all packets of this AU.
    uint32_t rtpTime = (timeUs * 9 / 100ll);

    List<sp<ABuffer> > packets;

    sp<ABuffer> out = new ABuffer(kMaxUDPPacketSize);
    size_t outBytesUsed = 12;  // Placeholder for RTP header.

    const uint8_t *data = accessUnit->data();
    size_t size = accessUnit->size();
    const uint8_t *nalStart;
    size_t nalSize;
    while (getNextNALUnit(
                &data, &size, &nalStart, &nalSize,
                true /* startCodeFollows */) == OK) {
        // In a STAP-A each NAL unit costs 2 length bytes, plus one STAP-A
        // type byte for the first unit in the packet.
        size_t bytesNeeded = nalSize + 2;
        if (outBytesUsed == 12) {
            ++bytesNeeded;
        }

        if (outBytesUsed + bytesNeeded > out->capacity()) {
            bool emitSingleNALPacket = false;

            if (outBytesUsed == 12
                    && outBytesUsed + nalSize <= out->capacity()) {
                // We haven't emitted anything into the current packet yet and
                // this NAL unit fits into a single-NAL-unit-packet while
                // it wouldn't have fit as part of a STAP-A packet.

                memcpy(out->data() + outBytesUsed, nalStart, nalSize);
                outBytesUsed += nalSize;

                emitSingleNALPacket = true;
            }

            if (outBytesUsed > 12) {
                // Flush the packet under construction.
                out->setRange(0, outBytesUsed);
                packets.push_back(out);
                out = new ABuffer(kMaxUDPPacketSize);
                outBytesUsed = 12;  // Placeholder for RTP header
            }

            if (emitSingleNALPacket) {
                continue;
            }
        }

        if (outBytesUsed + bytesNeeded <= out->capacity()) {
            // Append this NAL unit to the STAP-A under construction:
            // [STAP-A type byte (first unit only)][16-bit size][NAL data].
            uint8_t *dst = out->data() + outBytesUsed;

            if (outBytesUsed == 12) {
                *dst++ = 24;  // STAP-A header
            }

            *dst++ = (nalSize >> 8) & 0xff;
            *dst++ = nalSize & 0xff;
            memcpy(dst, nalStart, nalSize);

            outBytesUsed += bytesNeeded;
            continue;
        }

        // This single NAL unit does not fit into a single RTP packet,
        // we need to emit an FU-A.

        CHECK_EQ(outBytesUsed, 12u);

        uint8_t nalType = nalStart[0] & 0x1f;
        uint8_t nri = (nalStart[0] >> 5) & 3;

        // Skip the original NAL header byte; the receiver reconstructs it
        // from the FU indicator/header bytes below.
        size_t srcOffset = 1;
        while (srcOffset < nalSize) {
            size_t copy = out->capacity() - outBytesUsed - 2;
            if (copy > nalSize - srcOffset) {
                copy = nalSize - srcOffset;
            }

            uint8_t *dst = out->data() + outBytesUsed;
            dst[0] = (nri << 5) | 28;  // FU indicator, type 28 = FU-A

            dst[1] = nalType;  // FU header

            if (srcOffset == 1) {
                dst[1] |= 0x80;  // S bit: first fragment
            }

            if (srcOffset + copy == nalSize) {
                dst[1] |= 0x40;  // E bit: last fragment
            }

            memcpy(&dst[2], nalStart + srcOffset, copy);
            srcOffset += copy;

            out->setRange(0, outBytesUsed + copy + 2);

            packets.push_back(out);
            out = new ABuffer(kMaxUDPPacketSize);
            outBytesUsed = 12;  // Placeholder for RTP header
        }
    }

    if (outBytesUsed > 12) {
        out->setRange(0, outBytesUsed);
        packets.push_back(out);
    }

    // Now fill in the real RTP header of every packet and send them out.
    while (!packets.empty()) {
        sp<ABuffer> out = *packets.begin();
        packets.erase(packets.begin());

        // Stash the sequence number for retransmission lookups.
        out->setInt32Data(mRTPSeqNo);

        bool last = packets.empty();

        uint8_t *dst = out->data();
        dst[0] = 0x80;  // V=2, no padding, no extension, no CSRCs

        dst[1] = packetType;
        if (last) {
            dst[1] |= 1 << 7;  // M-bit
        }

        dst[2] = (mRTPSeqNo >> 8) & 0xff;
        dst[3] = mRTPSeqNo & 0xff;
        ++mRTPSeqNo;

        dst[4] = rtpTime >> 24;
        dst[5] = (rtpTime >> 16) & 0xff;
        dst[6] = (rtpTime >> 8) & 0xff;
        dst[7] = rtpTime & 0xff;
        dst[8] = kSourceID >> 24;
        dst[9] = (kSourceID >> 16) & 0xff;
        dst[10] = (kSourceID >> 8) & 0xff;
        dst[11] = kSourceID & 0xff;

        status_t err = sendRTPPacket(out, true /* storeInHistory */);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}
// Sends a fully formed RTP packet over the RTP session, updates the
// sender-report statistics and optionally keeps the packet in a bounded
// history list so it can be retransmitted on an incoming NACK
// (see parseTSFB()).
status_t RTPSender::sendRTPPacket(
        const sp<ABuffer> &buffer, bool storeInHistory,
        bool timeValid, int64_t timeUs) {
    CHECK(mRTPConnected);

    status_t err = mNetSession->sendRequest(
            mRTPSessionID, buffer->data(), buffer->size(),
            timeValid, timeUs);

    if (err != OK) {
        return err;
    }

    // Bookkeeping used when composing sender reports.
    mLastNTPTime = GetNowNTP();
    mLastRTPTime = U32_AT(buffer->data() + 4);  // RTP timestamp field

    ++mNumRTPSent;
    mNumRTPOctetsSent += buffer->size() - 12;  // payload bytes only

    if (storeInHistory) {
        // Keep at most kMaxHistorySize packets, evicting the oldest first.
        if (mHistorySize == kMaxHistorySize) {
            mHistory.erase(mHistory.begin());
        } else {
            ++mHistorySize;
        }
        mHistory.push_back(buffer);
    }

    return OK;
}
// static
// Returns the current wallclock time in 64-bit NTP timestamp format:
// upper 32 bits are whole seconds since the NTP epoch (Jan 1, 1900),
// lower 32 bits are the fractional second scaled to 2^32.
uint64_t RTPSender::GetNowNTP() {
    struct timeval tv;
    gettimeofday(&tv, NULL /* timezone */);

    // Microseconds since the Unix epoch...
    uint64_t ntpUs = tv.tv_sec * 1000000ll + tv.tv_usec;

    // ...shifted to the NTP epoch: 70 years, 17 of which had leap days.
    ntpUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;

    uint64_t seconds = ntpUs / 1000000ll;
    uint64_t fraction = ((1ll << 32) * (ntpUs % 1000000ll)) / 1000000ll;

    return (seconds << 32) | fraction;
}
void RTPSender::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatRTPNotify:
case kWhatRTCPNotify:
onNetNotify(msg->what() == kWhatRTPNotify, msg);
break;
default:
TRESPASS();
}
}
void RTPSender::onNetNotify(bool isRTP, const sp<AMessage> &msg) {
int32_t reason;
CHECK(msg->findInt32("reason", &reason));
switch (reason) {
case ANetworkSession::kWhatError:
{
int32_t sessionID;
CHECK(msg->findInt32("sessionID", &sessionID));
int32_t err;
CHECK(msg->findInt32("err", &err));
int32_t errorOccuredDuringSend;
CHECK(msg->findInt32("send", &errorOccuredDuringSend));
AString detail;
CHECK(msg->findString("detail", &detail));
ALOGE("An error occurred during %s in session %d "
"(%d, '%s' (%s)).",
errorOccuredDuringSend ? "send" : "receive",
sessionID,
err,
detail.c_str(),
strerror(-err));
mNetSession->destroySession(sessionID);
if (sessionID == mRTPSessionID) {
mRTPSessionID = 0;
} else if (sessionID == mRTCPSessionID) {
mRTCPSessionID = 0;
}
if (!mRTPConnected
|| (mRTPMode != TRANSPORT_NONE && !mRTCPConnected)) {
// We haven't completed initialization, attach the error
// to the notification instead.
notifyInitDone(err);
break;
}
notifyError(err);
break;
}
case ANetworkSession::kWhatDatagram:
{
sp<ABuffer> data;
CHECK(msg->findBuffer("data", &data));
if (isRTP) {
ALOGW("Huh? Received data on RTP connection...");
} else {
onRTCPData(data);
}
break;
}
case ANetworkSession::kWhatConnected:
{
int32_t sessionID;
CHECK(msg->findInt32("sessionID", &sessionID));
if (isRTP) {
CHECK_EQ(mRTPMode, TRANSPORT_TCP);
CHECK_EQ(sessionID, mRTPSessionID);
mRTPConnected = true;
} else {
CHECK_EQ(mRTCPMode, TRANSPORT_TCP);
CHECK_EQ(sessionID, mRTCPSessionID);
mRTCPConnected = true;
}
if (mRTPConnected
&& (mRTCPMode == TRANSPORT_NONE || mRTCPConnected)) {
notifyInitDone(OK);
}
break;
}
case ANetworkSession::kWhatNetworkStall:
{
size_t numBytesQueued;
CHECK(msg->findSize("numBytesQueued", &numBytesQueued));
notifyNetworkStall(numBytesQueued);
break;
}
default:
TRESPASS();
}
}
// Walks a (possibly compound) RTCP datagram, dispatching each constituent
// packet by its packet-type byte. Returns an error as soon as a malformed
// or unsupported packet is encountered.
status_t RTPSender::onRTCPData(const sp<ABuffer> &buffer) {
    const uint8_t *data = buffer->data();
    size_t size = buffer->size();

    while (size > 0) {
        if (size < 8) {
            // Too short to be a valid RTCP header
            return ERROR_MALFORMED;
        }

        if ((data[0] >> 6) != 2) {
            // Unsupported version.
            return ERROR_UNSUPPORTED;
        }

        if (data[0] & 0x20) {
            // Padding present.

            size_t paddingLength = data[size - 1];

            if (paddingLength + 12 > size) {
                // If we removed this much padding we'd end up with something
                // that's too short to be a valid RTP header.
                return ERROR_MALFORMED;
            }

            size -= paddingLength;
        }

        // Length field counts 32-bit words minus one, excluding nothing;
        // convert to bytes including the 4-byte header word.
        size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4;

        if (size < headerLength) {
            // Only received a partial packet?
            return ERROR_MALFORMED;
        }

        switch (data[1]) {
            case 200:  // SR
            case 201:  // RR
                parseReceiverReport(data, headerLength);
                break;

            case 202:  // SDES
            case 203:  // BYE
                break;

            case 204:  // APP
                parseAPP(data, headerLength);
                break;

            case 205:  // TSFB (transport layer specific feedback)
                parseTSFB(data, headerLength);
                break;

            case 206:  // PSFB (payload specific feedback)
                // hexdump(data, headerLength);
                break;

            default:
            {
                ALOGW("Unknown RTCP packet type %u of size %zu",
                      (unsigned)data[1], headerLength);
                break;
            }
        }

        data += headerLength;
        size -= headerLength;
    }

    return OK;
}
// Handles an RTCP sender/receiver report. Only the "fraction lost" field
// (byte 12, expressed in 1/256ths) is consumed, and it is merely logged.
status_t RTPSender::parseReceiverReport(
        const uint8_t *data, size_t /* size */) {
    const float lostFraction = data[12] / 256.0f;

    ALOGI("lost %.2f %% of packets during report interval.",
          100.0f * lostFraction);

    return OK;
}
// Handles a transport-layer feedback packet (RTCP type 205). Only generic
// NACKs (FMT 1) are supported: each 4-byte FCI entry names a lost sequence
// number plus a 16-bit bitmask (BLP) of subsequent losses. Any requested
// packet still present in the history list is retransmitted (without being
// stored in the history again).
status_t RTPSender::parseTSFB(const uint8_t *data, size_t size) {
    if ((data[0] & 0x1f) != 1) {
        return ERROR_UNSUPPORTED;  // We only support NACK for now.
    }

    uint32_t srcId = U32_AT(&data[8]);
    if (srcId != kSourceID) {
        return ERROR_MALFORMED;
    }

    for (size_t i = 12; i < size; i += 4) {
        uint16_t seqNo = U16_AT(&data[i]);
        uint16_t blp = U16_AT(&data[i + 2]);

        List<sp<ABuffer> >::iterator it = mHistory.begin();
        bool foundSeqNo = false;
        while (it != mHistory.end()) {
            const sp<ABuffer> &buffer = *it;

            // Each history entry carries its sequence number in
            // int32Data(), stashed when the packet was first sent.
            uint16_t bufferSeqNo = buffer->int32Data() & 0xffff;

            bool retransmit = false;
            if (bufferSeqNo == seqNo) {
                retransmit = true;
            } else if (blp != 0) {
                // BLP bit i set means seqNo + i + 1 was lost as well;
                // clear each bit as we satisfy it.
                for (size_t i = 0; i < 16; ++i) {
                    if ((blp & (1 << i))
                        && (bufferSeqNo == ((seqNo + i + 1) & 0xffff))) {
                        blp &= ~(1 << i);

                        retransmit = true;
                    }
                }
            }

            if (retransmit) {
                ALOGV("retransmitting seqNo %d", bufferSeqNo);

                CHECK_EQ((status_t)OK,
                         sendRTPPacket(buffer, false /* storeInHistory */));

                if (bufferSeqNo == seqNo) {
                    foundSeqNo = true;
                }

                // Stop early once everything requested has been resent.
                if (foundSeqNo && blp == 0) {
                    break;
                }
            }

            ++it;
        }

        if (!foundSeqNo || blp != 0) {
            // Some requested packets have already been evicted.
            ALOGI("Some sequence numbers were no longer available for "
                  "retransmission (seqNo = %d, foundSeqNo = %d, blp = 0x%04x)",
                  seqNo, foundSeqNo, blp);

            if (!mHistory.empty()) {
                int32_t earliest = (*mHistory.begin())->int32Data() & 0xffff;
                int32_t latest = (*--mHistory.end())->int32Data() & 0xffff;

                ALOGI("have seq numbers from %d - %d", earliest, latest);
            }
        }
    }

    return OK;
}
// Handles an RTCP APP packet. The only recognized application-defined
// packet carries the name "late" (at offset 8) followed by two big-endian
// 64-bit latency figures (average and maximum, in microseconds), which are
// forwarded to the client via kWhatInformSender.
status_t RTPSender::parseAPP(const uint8_t *data, size_t size) {
    static const size_t late_offset = 8;
    static const char late_string[] = "late";
    // Payload layout: "late" name, then avg latency, then max latency.
    static const size_t avgLatencyUs_offset = late_offset + sizeof(late_string) - 1;
    static const size_t maxLatencyUs_offset = avgLatencyUs_offset + sizeof(int64_t);

    if ((size >= (maxLatencyUs_offset + sizeof(int64_t)))
            && !memcmp(late_string, &data[late_offset], sizeof(late_string) - 1)) {
        int64_t avgLatencyUs = (int64_t)U64_AT(&data[avgLatencyUs_offset]);
        int64_t maxLatencyUs = (int64_t)U64_AT(&data[maxLatencyUs_offset]);

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatInformSender);
        notify->setInt64("avgLatencyUs", avgLatencyUs);
        notify->setInt64("maxLatencyUs", maxLatencyUs);
        notify->post();
    }

    return OK;
}
// Posts a kWhatInitDone notification with the given result code to the
// client-supplied observer message.
void RTPSender::notifyInitDone(status_t err) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("err", err);
    msg->setInt32("what", kWhatInitDone);
    msg->post();
}
// Posts a kWhatError notification carrying |err| to the client.
void RTPSender::notifyError(status_t err) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("err", err);
    msg->setInt32("what", kWhatError);
    msg->post();
}
// Posts a kWhatNetworkStall notification with the current send-queue depth.
void RTPSender::notifyNetworkStall(size_t numBytesQueued) {
    sp<AMessage> msg = mNotify->dup();
    msg->setSize("numBytesQueued", numBytesQueued);
    msg->setInt32("what", kWhatNetworkStall);
    msg->post();
}
} // namespace android

@ -0,0 +1,119 @@
/*
* Copyright 2013, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef RTP_SENDER_H_
#define RTP_SENDER_H_
#include "RTPBase.h"
#include <media/stagefright/foundation/AHandler.h>
namespace android {
struct ABuffer;
struct ANetworkSession;
// An object of this class facilitates sending of media data over an RTP
// channel. The channel is established over a UDP or TCP connection depending
// on which "TransportMode" was chosen. In addition different RTP packetization
// schemes are supported such as "Transport Stream Packets over RTP",
// or "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)"
struct RTPSender : public RTPBase, public AHandler {
    // "what" codes of notifications posted through the client's mNotify.
    enum {
        kWhatInitDone,       // init finished; "err" holds the result
        kWhatError,          // transport error after init; "err"
        kWhatNetworkStall,   // send queue backing up; "numBytesQueued"
        kWhatInformSender,   // receiver latency feedback; "avgLatencyUs"/"maxLatencyUs"
    };
    RTPSender(
            const sp<ANetworkSession> &netSession,
            const sp<AMessage> &notify);

    // Creates the RTP (and optional RTCP) sessions towards the remote host;
    // may complete asynchronously for TCP transports (see kWhatInitDone).
    status_t initAsync(
            const char *remoteHost,
            int32_t remoteRTPPort,
            TransportMode rtpMode,
            int32_t remoteRTCPPort,
            TransportMode rtcpMode,
            int32_t *outLocalRTPPort);

    // Packetizes and sends one buffer according to |mode|.
    status_t queueBuffer(
            const sp<ABuffer> &buffer,
            uint8_t packetType,
            PacketizationMode mode);

protected:
    virtual ~RTPSender();
    virtual void onMessageReceived(const sp<AMessage> &msg);

private:
    // Internal message codes distinguishing RTP vs. RTCP session events.
    enum {
        kWhatRTPNotify,
        kWhatRTCPNotify,
    };

    // How many 188-byte TS packets fit in one RTP packet after the header.
    const unsigned int kMaxNumTSPacketsPerRTPPacket = (kMaxUDPPacketSize - 12) / 188;
    // Upper bound on packets retained for NACK-driven retransmission.
    const unsigned int kMaxHistorySize = 1024;
    // Fixed SSRC written into every outgoing RTP packet.
    const unsigned int kSourceID = 0xdeadbeef;

    sp<ANetworkSession> mNetSession;
    sp<AMessage> mNotify;
    TransportMode mRTPMode;
    TransportMode mRTCPMode;
    int32_t mRTPSessionID;   // 0 while no RTP session exists
    int32_t mRTCPSessionID;  // 0 while no RTCP session exists
    bool mRTPConnected;
    bool mRTCPConnected;

    // Statistics used for sender reports.
    uint64_t mLastNTPTime;
    uint32_t mLastRTPTime;
    uint32_t mNumRTPSent;
    uint32_t mNumRTPOctetsSent;
    uint32_t mNumSRsSent;

    uint32_t mRTPSeqNo;

    // Recently sent packets kept for retransmission; bounded by
    // kMaxHistorySize and tracked by mHistorySize.
    List<sp<ABuffer> > mHistory;
    size_t mHistorySize;

    // Current wallclock time in 64-bit NTP timestamp format.
    static uint64_t GetNowNTP();

    status_t queueRawPacket(const sp<ABuffer> &tsPackets, uint8_t packetType);
    status_t queueTSPackets(const sp<ABuffer> &tsPackets, uint8_t packetType);
    status_t queueAVCBuffer(const sp<ABuffer> &accessUnit, uint8_t packetType);

    status_t sendRTPPacket(
            const sp<ABuffer> &packet, bool storeInHistory,
            bool timeValid = false, int64_t timeUs = -1ll);

    void onNetNotify(bool isRTP, const sp<AMessage> &msg);

    // RTCP packet parsing helpers.
    status_t onRTCPData(const sp<ABuffer> &data);
    status_t parseReceiverReport(const uint8_t *data, size_t size);
    status_t parseTSFB(const uint8_t *data, size_t size);
    status_t parseAPP(const uint8_t *data, size_t size);

    void notifyInitDone(status_t err);
    void notifyError(status_t err);
    void notifyNetworkStall(size_t numBytesQueued);

    DISALLOW_EVIL_CONSTRUCTORS(RTPSender);
};
} // namespace android
#endif // RTP_SENDER_H_

@ -0,0 +1,821 @@
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "Converter"
#include <utils/Log.h>
#include "Converter.h"
#include "MediaPuller.h"
#include "include/avc_utils.h"
#include <cutils/properties.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <arpa/inet.h>
#include <OMX_Video.h>
namespace android {
// Initializes the converter from the requested output format. Whether the
// source is video, H.264 video or raw PCM audio is derived from the mime
// type; PCM audio bypasses MediaCodec entirely (see
// feedRawAudioInputBuffers()).
Converter::Converter(
        const sp<AMessage> &notify,
        const sp<ALooper> &codecLooper,
        const sp<AMessage> &outputFormat,
        uint32_t flags)
    : mNotify(notify),
      mCodecLooper(codecLooper),
      mOutputFormat(outputFormat),
      mFlags(flags),
      mIsVideo(false),
      mIsH264(false),
      mIsPCMAudio(false),
      mNeedToManuallyPrependSPSPPS(false),
      mDoMoreWorkPending(false)
#if ENABLE_SILENCE_DETECTION
      ,mFirstSilentFrameUs(-1ll)
      ,mInSilentMode(false)
#endif
      ,mPrevVideoBitrate(-1)
      ,mNumFramesToDrop(0)
      ,mEncodingSuspended(false)
    {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (!strncasecmp("video/", mime.c_str(), 6)) {
        mIsVideo = true;

        mIsH264 = !strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC);
    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime.c_str())) {
        mIsPCMAudio = true;
    }
}
// Releases the encoder (if one exists) and drops any queued input along
// with the cached encoder buffer lists. Safe to call repeatedly.
void Converter::releaseEncoder() {
    if (mEncoder != NULL) {
        mEncoder->release();
        mEncoder.clear();

        mInputBufferQueue.clear();
        mEncoderInputBuffers.clear();
        mEncoderOutputBuffers.clear();
    }
}
Converter::~Converter() {
    // releaseEncoder() must already have run (via kWhatShutdown);
    // destroying the converter with a live encoder is a programming error.
    CHECK(mEncoder == NULL);
}
void Converter::shutdownAsync() {
ALOGV("shutdown");
(new AMessage(kWhatShutdown, this))->post();
}
// Creates and configures the encoder; on failure any partially constructed
// encoder state is torn down before the error is returned.
status_t Converter::init() {
    const status_t err = initEncoder();

    if (err == OK) {
        return OK;
    }

    releaseEncoder();
    return err;
}
// Returns the encoder's input surface; only valid in surface-input mode.
sp<IGraphicBufferProducer> Converter::getGraphicBufferProducer() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    return mGraphicBufferProducer;
}

// Number of input buffers the encoder allocated (0 for PCM passthrough).
size_t Converter::getInputBufferCount() const {
    return mEncoderInputBuffers.size();
}

// The (possibly encoder-updated) output format message.
sp<AMessage> Converter::getOutputFormat() const {
    return mOutputFormat;
}

// True if SPS/PPS must be prepended to IDR frames in software because the
// encoder refused the "prepend-sps-pps-to-idr-frames" option.
bool Converter::needToManuallyPrependSPSPPS() const {
    return mNeedToManuallyPrependSPSPPS;
}
// static
int32_t Converter::GetInt32Property(
const char *propName, int32_t defaultValue) {
char val[PROPERTY_VALUE_MAX];
if (property_get(propName, val, NULL)) {
char *end;
unsigned long x = strtoul(val, &end, 10);
if (*end == '\0' && end > val && x > 0) {
return x;
}
}
return defaultValue;
}
// Creates and configures the MediaCodec encoder according to mOutputFormat.
// Raw PCM audio needs no encoder, so this returns early in that case.
// Video encoders are first asked to prepend SPS/PPS to IDR frames
// themselves; if that fails, prepending is emulated later in doMoreWork()
// via prependCSD().
status_t Converter::initEncoder() {
    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));

    bool isAudio = !strncasecmp(outputMIME.c_str(), "audio/", 6);

    if (!mIsPCMAudio) {
        mEncoder = MediaCodec::CreateByType(
                mCodecLooper, outputMIME.c_str(), true /* encoder */);

        if (mEncoder == NULL) {
            return ERROR_UNSUPPORTED;
        }
    }

    if (mIsPCMAudio) {
        // PCM passthrough: no codec to configure.
        return OK;
    }

    // Bitrates are tunable via system properties for experimentation.
    int32_t audioBitrate = GetInt32Property("media.wfd.audio-bitrate", 128000);
    int32_t videoBitrate = GetInt32Property("media.wfd.video-bitrate", 5000000);
    mPrevVideoBitrate = videoBitrate;

    ALOGI("using audio bitrate of %d bps, video bitrate of %d bps",
          audioBitrate, videoBitrate);

    if (isAudio) {
        mOutputFormat->setInt32("bitrate", audioBitrate);
    } else {
        mOutputFormat->setInt32("bitrate", videoBitrate);
        mOutputFormat->setInt32("bitrate-mode", OMX_Video_ControlRateConstant);
        mOutputFormat->setInt32("frame-rate", 30);
        mOutputFormat->setInt32("i-frame-interval", 15);  // Iframes every 15 secs

        // Configure encoder to use intra macroblock refresh mode
        mOutputFormat->setInt32("intra-refresh-mode", OMX_VIDEO_IntraRefreshCyclic);

        int width, height, mbs;
        if (!mOutputFormat->findInt32("width", &width)
                || !mOutputFormat->findInt32("height", &height)) {
            return ERROR_UNSUPPORTED;
        }

        // Update macroblocks in a cyclic fashion with 10% of all MBs within
        // frame gets updated at one time. It takes about 10 frames to
        // completely update a whole video frame. If the frame rate is 30,
        // it takes about 333 ms in the best case (if next frame is not an IDR)
        // to recover from a lost/corrupted packet.
        mbs = (((width + 15) / 16) * ((height + 15) / 16) * 10) / 100;
        mOutputFormat->setInt32("intra-refresh-CIR-mbs", mbs);
    }

    ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

    mNeedToManuallyPrependSPSPPS = false;

    status_t err = NO_INIT;

    if (!isAudio) {
        // First attempt: ask the encoder to prepend SPS/PPS itself.
        sp<AMessage> tmp = mOutputFormat->dup();
        tmp->setInt32("prepend-sps-pps-to-idr-frames", 1);

        err = mEncoder->configure(
                tmp,
                NULL /* nativeWindow */,
                NULL /* crypto */,
                MediaCodec::CONFIGURE_FLAG_ENCODE);

        if (err == OK) {
            // Encoder supported prepending SPS/PPS, we don't need to emulate
            // it.
            mOutputFormat = tmp;
        } else {
            mNeedToManuallyPrependSPSPPS = true;

            ALOGI("We going to manually prepend SPS and PPS to IDR frames.");
        }
    }

    if (err != OK) {
        // We'll get here for audio or if we failed to configure the encoder
        // to automatically prepend SPS/PPS in the case of video.

        err = mEncoder->configure(
                mOutputFormat,
                NULL /* nativeWindow */,
                NULL /* crypto */,
                MediaCodec::CONFIGURE_FLAG_ENCODE);
    }

    if (err != OK) {
        return err;
    }

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        err = mEncoder->createInputSurface(&mGraphicBufferProducer);

        if (err != OK) {
            return err;
        }
    }

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    err = mEncoder->getInputBuffers(&mEncoderInputBuffers);

    if (err != OK) {
        return err;
    }

    err = mEncoder->getOutputBuffers(&mEncoderOutputBuffers);

    if (err != OK) {
        return err;
    }

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        // No input-side pulling in surface mode; start output polling now.
        scheduleDoMoreWork();
    }

    return OK;
}
// Posts a kWhatError notification carrying |err| to the client.
void Converter::notifyError(status_t err) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("err", err);
    msg->setInt32("what", kWhatError);
    msg->post();
}
// static
// An access unit counts as silence iff every payload byte is zero.
bool Converter::IsSilence(const sp<ABuffer> &accessUnit) {
    const uint8_t *data = accessUnit->data();
    const size_t size = accessUnit->size();

    for (size_t i = 0; i < size; ++i) {
        if (data[i] != 0) {
            return false;
        }
    }

    return true;
}
// Central looper dispatch: handles incoming media from the MediaPuller,
// encoder activity, IDR requests, shutdown, frame dropping, secure output
// buffer release and encode suspension.
void Converter::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatMediaPullerNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (!mIsPCMAudio && mEncoder == NULL) {
                // Late delivery after shutdown: just release the underlying
                // media buffer (if any) and ignore the message.
                ALOGV("got msg '%s' after encoder shutdown.",
                      msg->debugString().c_str());

                if (what == MediaPuller::kWhatAccessUnit) {
                    sp<ABuffer> accessUnit;
                    CHECK(msg->findBuffer("accessUnit", &accessUnit));

                    accessUnit->setMediaBufferBase(NULL);
                }
                break;
            }

            if (what == MediaPuller::kWhatEOS) {
                // A NULL queue entry signals EOS downstream.
                mInputBufferQueue.push_back(NULL);

                feedEncoderInputBuffers();

                scheduleDoMoreWork();
            } else {
                CHECK_EQ(what, MediaPuller::kWhatAccessUnit);

                sp<ABuffer> accessUnit;
                CHECK(msg->findBuffer("accessUnit", &accessUnit));

                if (mNumFramesToDrop > 0 || mEncodingSuspended) {
                    if (mNumFramesToDrop > 0) {
                        --mNumFramesToDrop;
                        ALOGI("dropping frame.");
                    }

                    accessUnit->setMediaBufferBase(NULL);
                    break;
                }

#if 0
                MediaBuffer *mbuf =
                    (MediaBuffer *)(accessUnit->getMediaBufferBase());
                if (mbuf != NULL) {
                    ALOGI("queueing mbuf %p", mbuf);
                    mbuf->release();
                }
#endif

#if ENABLE_SILENCE_DETECTION
                // After 10 s of all-zero audio, stop forwarding frames
                // until non-silent audio shows up again.
                if (!mIsVideo) {
                    if (IsSilence(accessUnit)) {
                        if (mInSilentMode) {
                            break;
                        }

                        int64_t nowUs = ALooper::GetNowUs();

                        if (mFirstSilentFrameUs < 0ll) {
                            mFirstSilentFrameUs = nowUs;
                        } else if (nowUs >= mFirstSilentFrameUs + 10000000ll) {
                            mInSilentMode = true;
                            ALOGI("audio in silent mode now.");
                            break;
                        }
                    } else {
                        if (mInSilentMode) {
                            ALOGI("audio no longer in silent mode.");
                        }
                        mInSilentMode = false;
                        mFirstSilentFrameUs = -1ll;
                    }
                }
#endif

                mInputBufferQueue.push_back(accessUnit);

                feedEncoderInputBuffers();

                scheduleDoMoreWork();
            }
            break;
        }

        case kWhatEncoderActivity:
        {
#if 0
            int64_t whenUs;
            if (msg->findInt64("whenUs", &whenUs)) {
                int64_t nowUs = ALooper::GetNowUs();
                ALOGI("[%s] kWhatEncoderActivity after %lld us",
                      mIsVideo ? "video" : "audio", nowUs - whenUs);
            }
#endif

            mDoMoreWorkPending = false;

            if (mEncoder == NULL) {
                break;
            }

            status_t err = doMoreWork();

            if (err != OK) {
                notifyError(err);
            } else {
                scheduleDoMoreWork();
            }
            break;
        }

        case kWhatRequestIDRFrame:
        {
            if (mEncoder == NULL) {
                break;
            }

            if (mIsVideo) {
                ALOGV("requesting IDR frame");
                mEncoder->requestIDRFrame();
            }
            break;
        }

        case kWhatShutdown:
        {
            ALOGI("shutting down %s encoder", mIsVideo ? "video" : "audio");

            releaseEncoder();

            AString mime;
            CHECK(mOutputFormat->findString("mime", &mime));
            ALOGI("encoder (%s) shut down.", mime.c_str());

            sp<AMessage> notify = mNotify->dup();
            notify->setInt32("what", kWhatShutdownCompleted);
            notify->post();
            break;
        }

        case kWhatDropAFrame:
        {
            ++mNumFramesToDrop;
            break;
        }

        case kWhatReleaseOutputBuffer:
        {
            // Posted by MediaSender once HDCP is done with a secure output
            // buffer (see doMoreWork()); hand it back to the codec.
            if (mEncoder != NULL) {
                size_t bufferIndex;
                // NOTE(review): only the low 32 bits of a size_t are written
                // here on LP64; matches upstream, but the upper bits of
                // bufferIndex are then unspecified — worth confirming.
                CHECK(msg->findInt32("bufferIndex", (int32_t*)&bufferIndex));
                CHECK(bufferIndex < mEncoderOutputBuffers.size());
                mEncoder->releaseOutputBuffer(bufferIndex);
            }
            break;
        }

        case kWhatSuspendEncoding:
        {
            int32_t suspend;
            CHECK(msg->findInt32("suspend", &suspend));

            mEncodingSuspended = suspend;

            if (mFlags & FLAG_USE_SURFACE_INPUT) {
                // In surface mode the codec drops frames itself.
                sp<AMessage> params = new AMessage;
                params->setInt32("drop-input-frames",suspend);
                mEncoder->setParameters(params);
            }
            break;
        }

        default:
            TRESPASS();
    }
}
// Arranges for doMoreWork() to run (via kWhatEncoderActivity) the next time
// the encoder reports activity. At most one request is kept outstanding,
// tracked by mDoMoreWorkPending.
void Converter::scheduleDoMoreWork() {
    if (mIsPCMAudio) {
        // There's no encoder involved in this case.
        return;
    }

    if (mDoMoreWorkPending) {
        return;
    }

    mDoMoreWorkPending = true;

#if 1
    // Reuse a cached template message; dup() per request.
    if (mEncoderActivityNotify == NULL) {
        mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, this);
    }
    mEncoder->requestActivityNotification(mEncoderActivityNotify->dup());
#else
    // Debug variant: timestamp the request to measure notification latency.
    sp<AMessage> notify = new AMessage(kWhatEncoderActivity, this);
    notify->setInt64("whenUs", ALooper::GetNowUs());
    mEncoder->requestActivityNotification(notify);
#endif
}
// PCM passthrough path: no encoder is involved, the raw audio is reframed
// directly into Wifi-Display LPCM access units and posted via mNotify.
status_t Converter::feedRawAudioInputBuffers() {
    // Split incoming PCM audio into buffers of 6 AUs of 80 audio frames each
    // and add a 4 byte header according to the wifi display specs.
    while (!mInputBufferQueue.empty()) {
        sp<ABuffer> buffer = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());

        // Byte-swap the 16-bit samples to big-endian in place.
        int16_t *ptr = (int16_t *)buffer->data();
        int16_t *stop = (int16_t *)(buffer->data() + buffer->size());
        while (ptr < stop) {
            *ptr = htons(*ptr);
            ++ptr;
        }

        static const size_t kFrameSize = 2 * sizeof(int16_t);  // stereo
        static const size_t kFramesPerAU = 80;
        static const size_t kNumAUsPerPESPacket = 6;

        if (mPartialAudioAU != NULL) {
            // Top up the partially filled AU left over from the previous
            // buffer. A full AU holds 4 header bytes plus
            // kNumAUsPerPESPacket * kFramesPerAU * kFrameSize payload bytes.
            size_t bytesMissingForFullAU =
                kNumAUsPerPESPacket * kFramesPerAU * kFrameSize
                - mPartialAudioAU->size() + 4;

            size_t copy = buffer->size();
            if(copy > bytesMissingForFullAU) {
                copy = bytesMissingForFullAU;
            }

            memcpy(mPartialAudioAU->data() + mPartialAudioAU->size(),
                   buffer->data(),
                   copy);

            mPartialAudioAU->setRange(0, mPartialAudioAU->size() + copy);

            buffer->setRange(buffer->offset() + copy, buffer->size() - copy);

            // Advance the buffer's timestamp past the consumed samples
            // (48 kHz sample clock).
            int64_t timeUs;
            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

            int64_t copyUs = (int64_t)((copy / kFrameSize) * 1E6 / 48000.0);
            timeUs += copyUs;
            buffer->meta()->setInt64("timeUs", timeUs);

            if (bytesMissingForFullAU == copy) {
                // AU completed, hand it off.
                sp<AMessage> notify = mNotify->dup();
                notify->setInt32("what", kWhatAccessUnit);
                notify->setBuffer("accessUnit", mPartialAudioAU);
                notify->post();

                mPartialAudioAU.clear();
            }
        }

        while (buffer->size() > 0) {
            sp<ABuffer> partialAudioAU =
                new ABuffer(
                        4
                        + kNumAUsPerPESPacket * kFrameSize * kFramesPerAU);

            // Build the 4-byte LPCM header preceding the payload.
            uint8_t *ptr = partialAudioAU->data();
            ptr[0] = 0xa0;  // 10100000b
            ptr[1] = kNumAUsPerPESPacket;
            ptr[2] = 0;  // reserved, audio _emphasis_flag = 0

            static const unsigned kQuantizationWordLength = 0;  // 16-bit
            static const unsigned kAudioSamplingFrequency = 2;  // 48Khz
            static const unsigned kNumberOfAudioChannels = 1;  // stereo

            ptr[3] = (kQuantizationWordLength << 6)
                    | (kAudioSamplingFrequency << 3)
                    | kNumberOfAudioChannels;

            size_t copy = buffer->size();
            if (copy > partialAudioAU->size() - 4) {
                copy = partialAudioAU->size() - 4;
            }

            memcpy(&ptr[4], buffer->data(), copy);

            partialAudioAU->setRange(0, 4 + copy);
            buffer->setRange(buffer->offset() + copy, buffer->size() - copy);

            // The AU inherits the timestamp of its first sample; the source
            // buffer's timestamp then advances past the consumed samples.
            int64_t timeUs;
            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

            partialAudioAU->meta()->setInt64("timeUs", timeUs);

            int64_t copyUs = (int64_t)((copy / kFrameSize) * 1E6 / 48000.0);
            timeUs += copyUs;
            buffer->meta()->setInt64("timeUs", timeUs);

            if (copy == partialAudioAU->capacity() - 4) {
                // Filled a whole AU in one go, emit it immediately.
                sp<AMessage> notify = mNotify->dup();
                notify->setInt32("what", kWhatAccessUnit);
                notify->setBuffer("accessUnit", partialAudioAU);
                notify->post();

                partialAudioAU.clear();
                continue;
            }

            // Keep the partially filled AU for the next incoming buffer.
            mPartialAudioAU = partialAudioAU;
        }
    }

    return OK;
}
// Moves queued access units into available encoder input buffers. A NULL
// queue entry signals EOS. PCM audio bypasses the encoder entirely via
// feedRawAudioInputBuffers().
status_t Converter::feedEncoderInputBuffers() {
    if (mIsPCMAudio) {
        return feedRawAudioInputBuffers();
    }

    while (!mInputBufferQueue.empty()
            && !mAvailEncoderInputIndices.empty()) {
        sp<ABuffer> buffer = *mInputBufferQueue.begin();
        mInputBufferQueue.erase(mInputBufferQueue.begin());

        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0ll;
        uint32_t flags = 0;

        if (buffer != NULL) {
            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

            memcpy(mEncoderInputBuffers.itemAt(bufferIndex)->data(),
                   buffer->data(),
                   buffer->size());

            // Transfer ownership of the underlying MediaBuffer (if any) to
            // the encoder input buffer so it is released when the codec is
            // done with it.
            MediaBuffer *mediaBuffer =
                (MediaBuffer *)(buffer->getMediaBufferBase());
            if (mediaBuffer != NULL) {
                mEncoderInputBuffers.itemAt(bufferIndex)->setMediaBufferBase(
                        mediaBuffer);

                buffer->setMediaBufferBase(NULL);
            }
        } else {
            // NULL entry: signal end-of-stream to the encoder.
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, (buffer == NULL) ? 0 : buffer->size(),
                timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}
// Returns a copy of |accessUnit| with the cached codec specific data
// (mCSD0) prepended, carrying over the original timestamp. Used to emulate
// SPS/PPS prepending when the encoder can't do it itself.
sp<ABuffer> Converter::prependCSD(const sp<ABuffer> &accessUnit) const {
    CHECK(mCSD0 != NULL);

    const size_t csdSize = mCSD0->size();
    sp<ABuffer> merged = new ABuffer(csdSize + accessUnit->size());

    memcpy(merged->data(), mCSD0->data(), csdSize);
    memcpy(merged->data() + csdSize, accessUnit->data(), accessUnit->size());

    int64_t timeUs;
    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
    merged->meta()->setInt64("timeUs", timeUs);

    return merged;
}
// Drives the encoder: feeds any queued input (unless input comes from a
// surface) and drains all currently available output, forwarding each
// encoded access unit — or codec config data — via mNotify. -EAGAIN
// ("no output available yet") maps to OK; other errors are returned.
status_t Converter::doMoreWork() {
    status_t err;

    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        // Collect every currently free input buffer, then fill them.
        for (;;) {
            size_t bufferIndex;
            err = mEncoder->dequeueInputBuffer(&bufferIndex);

            if (err != OK) {
                break;
            }

            mAvailEncoderInputIndices.push_back(bufferIndex);
        }

        feedEncoderInputBuffers();
    }

    for (;;) {
        size_t bufferIndex;
        size_t offset;
        size_t size;
        int64_t timeUs;
        uint32_t flags;
        native_handle_t* handle = NULL;
        err = mEncoder->dequeueOutputBuffer(
                &bufferIndex, &offset, &size, &timeUs, &flags);

        if (err != OK) {
            if (err == INFO_FORMAT_CHANGED) {
                continue;
            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
                continue;
            }

            if (err == -EAGAIN) {
                // No more output right now; not an error.
                err = OK;
            }
            break;
        }

        if (flags & MediaCodec::BUFFER_FLAG_EOS) {
            sp<AMessage> notify = mNotify->dup();
            notify->setInt32("what", kWhatEOS);
            notify->post();
        } else {
#if 0
            if (mIsVideo) {
                int32_t videoBitrate = GetInt32Property(
                        "media.wfd.video-bitrate", 5000000);

                setVideoBitrate(videoBitrate);
            }
#endif

            sp<ABuffer> buffer;
            sp<MediaCodecBuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex);

            // A non-NULL "handle" in the buffer meta marks protected
            // output that lives behind a native handle rather than in the
            // buffer's own data.
            if (outbuf->meta()->findPointer("handle", (void**)&handle) &&
                    handle != NULL) {
                int32_t rangeLength, rangeOffset;
                CHECK(outbuf->meta()->findInt32("rangeOffset", &rangeOffset));
                CHECK(outbuf->meta()->findInt32("rangeLength", &rangeLength));
                outbuf->meta()->setPointer("handle", NULL);

                // MediaSender will post the following message when HDCP
                // is done, to release the output buffer back to encoder.
                sp<AMessage> notify(new AMessage(kWhatReleaseOutputBuffer, this));
                notify->setInt32("bufferIndex", bufferIndex);

                buffer = new ABuffer(
                        rangeLength > (int32_t)size ? rangeLength : size);
                buffer->meta()->setPointer("handle", handle);
                buffer->meta()->setInt32("rangeOffset", rangeOffset);
                buffer->meta()->setInt32("rangeLength", rangeLength);
                buffer->meta()->setMessage("notify", notify);
            } else {
                buffer = new ABuffer(size);
            }

            buffer->meta()->setInt64("timeUs", timeUs);

            ALOGV("[%s] time %lld us (%.2f secs)",
                  mIsVideo ? "video" : "audio", (long long)timeUs, timeUs / 1E6);

            memcpy(buffer->data(), outbuf->base() + offset, size);

            if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
                // Codec config (e.g. SPS/PPS): remember it instead of
                // forwarding it as an access unit.
                if (!handle) {
                    if (mIsH264) {
                        mCSD0 = buffer;
                    }
                    mOutputFormat->setBuffer("csd-0", buffer);
                }
            } else {
                if (mNeedToManuallyPrependSPSPPS
                        && mIsH264
                        && (mFlags & FLAG_PREPEND_CSD_IF_NECESSARY)
                        && IsIDR(buffer)) {
                    buffer = prependCSD(buffer);
                }

                sp<AMessage> notify = mNotify->dup();
                notify->setInt32("what", kWhatAccessUnit);
                notify->setBuffer("accessUnit", buffer);
                notify->post();
            }
        }

        if (!handle) {
            // Non-protected buffers can go back to the codec immediately;
            // protected ones are released later via kWhatReleaseOutputBuffer.
            mEncoder->releaseOutputBuffer(bufferIndex);
        }

        if (flags & MediaCodec::BUFFER_FLAG_EOS) {
            break;
        }
    }

    return err;
}
void Converter::requestIDRFrame() {
(new AMessage(kWhatRequestIDRFrame, this))->post();
}
void Converter::dropAFrame() {
// Unsupported in surface input mode.
CHECK(!(mFlags & FLAG_USE_SURFACE_INPUT));
(new AMessage(kWhatDropAFrame, this))->post();
}
void Converter::suspendEncoding(bool suspend) {
sp<AMessage> msg = new AMessage(kWhatSuspendEncoding, this);
msg->setInt32("suspend", suspend);
msg->post();
}
// Returns the bitrate most recently applied to the video encoder.
// NOTE(review): reads mPrevVideoBitrate without synchronization -- assumed
// to be called from the same looper thread that updates it; confirm.
int32_t Converter::getVideoBitrate() const {
    return mPrevVideoBitrate;
}
// Applies a new target bitrate to the video encoder.  No-op for audio,
// before the encoder exists, or when the bitrate is unchanged.
void Converter::setVideoBitrate(int32_t bitRate) {
    if (!mIsVideo || mEncoder == NULL || bitRate == mPrevVideoBitrate) {
        return;
    }

    sp<AMessage> params = new AMessage;
    params->setInt32("video-bitrate", bitRate);
    mEncoder->setParameters(params);

    mPrevVideoBitrate = bitRate;
}
} // namespace android

@ -0,0 +1,157 @@
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CONVERTER_H_
#define CONVERTER_H_
#include <media/stagefright/foundation/AHandler.h>
namespace android {
struct ABuffer;
class IGraphicBufferProducer;
struct MediaCodec;
class MediaCodecBuffer;
#define ENABLE_SILENCE_DETECTION 0
// Utility class that receives media access units and converts them into
// media access unit of a different format.
// Right now this'll convert raw video into H.264 and raw audio into AAC.
struct Converter : public AHandler {
    // Notifications posted to the observer handler passed to the constructor.
    enum {
        kWhatAccessUnit,
        kWhatEOS,
        kWhatError,
        kWhatShutdownCompleted,
    };

    // Flags accepted by the constructor.
    enum FlagBits {
        FLAG_USE_SURFACE_INPUT = 1,        // encoder reads frames from a surface
        FLAG_PREPEND_CSD_IF_NECESSARY = 2, // prepend SPS/PPS to IDR frames
    };
    Converter(const sp<AMessage> &notify,
            const sp<ALooper> &codecLooper,
            const sp<AMessage> &outputFormat,
            uint32_t flags = 0);

    status_t init();

    // Only meaningful in surface-input mode.
    sp<IGraphicBufferProducer> getGraphicBufferProducer();

    size_t getInputBufferCount() const;

    sp<AMessage> getOutputFormat() const;

    bool needToManuallyPrependSPSPPS() const;

    // Queues one raw access unit for encoding.
    void feedAccessUnit(const sp<ABuffer> &accessUnit);
    void signalEOS();

    void requestIDRFrame();

    // Unsupported in surface-input mode.
    void dropAFrame();
    void suspendEncoding(bool suspend);

    void shutdownAsync();

    int32_t getVideoBitrate() const;
    void setVideoBitrate(int32_t bitrate);

    // Reads an int32 system property, returning |defaultValue| if unset.
    static int32_t GetInt32Property(const char *propName, int32_t defaultValue);

    enum {
        // MUST not conflict with private enums below.
        kWhatMediaPullerNotify = 'pulN',
    };

protected:
    virtual ~Converter();
    virtual void onMessageReceived(const sp<AMessage> &msg);

private:
    // Internal message ids.
    enum {
        kWhatDoMoreWork,
        kWhatRequestIDRFrame,
        kWhatSuspendEncoding,
        kWhatShutdown,
        kWhatEncoderActivity,
        kWhatDropAFrame,
        kWhatReleaseOutputBuffer,
    };

    sp<AMessage> mNotify;             // observer handler
    sp<ALooper> mCodecLooper;         // looper hosting the MediaCodec
    sp<AMessage> mOutputFormat;
    uint32_t mFlags;                  // bitmask of FlagBits
    bool mIsVideo;
    bool mIsH264;
    bool mIsPCMAudio;                 // audio carried as PCM (presumably pass-through) -- confirm
    bool mNeedToManuallyPrependSPSPPS;

    sp<MediaCodec> mEncoder;
    sp<AMessage> mEncoderActivityNotify;

    sp<IGraphicBufferProducer> mGraphicBufferProducer;

    Vector<sp<MediaCodecBuffer> > mEncoderInputBuffers;
    Vector<sp<MediaCodecBuffer> > mEncoderOutputBuffers;

    List<size_t> mAvailEncoderInputIndices;   // free encoder input slots
    List<sp<ABuffer> > mInputBufferQueue;     // pending raw access units

    sp<ABuffer> mCSD0;                // cached codec specific data (csd-0)

    bool mDoMoreWorkPending;

#if ENABLE_SILENCE_DETECTION
    int64_t mFirstSilentFrameUs;
    bool mInSilentMode;
#endif

    sp<ABuffer> mPartialAudioAU;      // carry-over for partially filled audio AUs

    int32_t mPrevVideoBitrate;        // last bitrate applied to the encoder

    int32_t mNumFramesToDrop;
    bool mEncodingSuspended;

    status_t initEncoder();
    void releaseEncoder();

    status_t feedEncoderInputBuffers();

    void scheduleDoMoreWork();
    status_t doMoreWork();

    void notifyError(status_t err);

    // Packetizes raw PCM audio data available in mInputBufferQueue
    // into a format suitable for transport stream inclusion and
    // notifies the observer.
    status_t feedRawAudioInputBuffers();

    static bool IsSilence(const sp<ABuffer> &accessUnit);

    sp<ABuffer> prependCSD(const sp<ABuffer> &accessUnit) const;

    DISALLOW_EVIL_CONSTRUCTORS(Converter);
};
} // namespace android
#endif // CONVERTER_H_

@ -0,0 +1,224 @@
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaPuller"
#include <utils/Log.h>
#include "MediaPuller.h"
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
namespace android {
// Wraps |source| and repeatedly pulls access units from it, posting each
// one to |notify|.  Whether the source is audio is derived from its
// MIME type prefix.
MediaPuller::MediaPuller(
        const sp<MediaSource> &source, const sp<AMessage> &notify)
    : mSource(source),
      mNotify(notify),
      mPullGeneration(0),
      mIsAudio(false),
      mPaused(false) {
    sp<MetaData> meta = source->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    // "audio/..." => audio track, anything else treated as video.
    mIsAudio = !strncasecmp(mime, "audio/", 6);
}
// Trivial destructor; all members are sp<>-managed.
MediaPuller::~MediaPuller() = default;
// Posts |msg| and blocks for the response, returning either the transport
// error or the "err" value carried in the reply (OK when absent).
status_t MediaPuller::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    const status_t postErr = msg->postAndAwaitResponse(&response);

    if (postErr != OK) {
        return postErr;
    }

    status_t err;
    if (!response->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}
// Synchronously starts the underlying source and the pull loop.
status_t MediaPuller::start() {
    sp<AMessage> startMsg = new AMessage(kWhatStart, this);
    return postSynchronouslyAndReturnError(startMsg);
}
void MediaPuller::stopAsync(const sp<AMessage> &notify) {
sp<AMessage> msg = new AMessage(kWhatStop, this);
msg->setMessage("notify", notify);
msg->post();
}
void MediaPuller::pause() {
(new AMessage(kWhatPause, this))->post();
}
void MediaPuller::resume() {
(new AMessage(kWhatResume, this))->post();
}
// Handler loop: starts/stops the source, and on every kWhatPull reads one
// buffer from the source, wraps it in an ABuffer and forwards it to the
// observer before scheduling the next pull.
void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            status_t err;
            if (mIsAudio) {
                // This atrocity causes AudioSource to deliver absolute
                // systemTime() based timestamps (off by 1 us).
                sp<MetaData> params = new MetaData;
                params->setInt64(kKeyTime, 1ll);
                err = mSource->start(params.get());
            } else {
                err = mSource->start();
                if (err != OK) {
                    ALOGE("source failed to start w/ err %d", err);
                }
            }

            if (err == OK) {
                // Kick off the pull loop.
                schedulePull();
            }

            // Reply synchronously with the start result.
            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            sp<MetaData> meta = mSource->getFormat();
            const char *tmp;
            CHECK(meta->findCString(kKeyMIMEType, &tmp));
            AString mime = tmp;

            ALOGI("MediaPuller(%s) stopping.", mime.c_str());
            mSource->stop();
            ALOGI("MediaPuller(%s) stopped.", mime.c_str());

            // Invalidate any in-flight kWhatPull messages.
            ++mPullGeneration;

            sp<AMessage> notify;
            CHECK(msg->findMessage("notify", &notify));
            notify->post();
            break;
        }

        case kWhatPull:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            // Stale pull from before a stop -- ignore.
            if (generation != mPullGeneration) {
                break;
            }

            MediaBuffer *mbuf;
            status_t err = mSource->read(&mbuf);

            if (mPaused) {
                // While paused we keep draining the source but drop the data.
                if (err == OK) {
                    mbuf->release();
                    mbuf = NULL;
                }

                schedulePull();
                break;
            }

            if (err != OK) {
                if (err == ERROR_END_OF_STREAM) {
                    ALOGI("stream ended.");
                } else {
                    ALOGE("error %d reading stream.", err);
                }

                sp<AMessage> notify = mNotify->dup();
                notify->setInt32("what", kWhatEOS);
                notify->post();
            } else {
                int64_t timeUs;
                CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));

                // Copy the payload into an ABuffer for the observer.
                sp<ABuffer> accessUnit = new ABuffer(mbuf->range_length());

                memcpy(accessUnit->data(),
                       (const uint8_t *)mbuf->data() + mbuf->range_offset(),
                       mbuf->range_length());

                accessUnit->meta()->setInt64("timeUs", timeUs);

                if (mIsAudio) {
                    mbuf->release();
                    mbuf = NULL;
                } else {
                    // video encoder will release MediaBuffer when done
                    // with underlying data.
                    accessUnit->setMediaBufferBase(mbuf);
                }

                sp<AMessage> notify = mNotify->dup();

                notify->setInt32("what", kWhatAccessUnit);
                notify->setBuffer("accessUnit", accessUnit);
                notify->post();

                if (mbuf != NULL) {
                    ALOGV("posted mbuf %p", mbuf);
                }

                schedulePull();
            }
            break;
        }

        case kWhatPause:
        {
            mPaused = true;
            break;
        }

        case kWhatResume:
        {
            mPaused = false;
            break;
        }

        default:
            TRESPASS();
    }
}
void MediaPuller::schedulePull() {
sp<AMessage> msg = new AMessage(kWhatPull, this);
msg->setInt32("generation", mPullGeneration);
msg->post();
}
} // namespace android

@ -0,0 +1,68 @@
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MEDIA_PULLER_H_
#define MEDIA_PULLER_H_
#include <media/stagefright/foundation/AHandler.h>
namespace android {
struct MediaSource;
// Pulls access units from a MediaSource on its own looper and forwards
// each one to an observer handler.
struct MediaPuller : public AHandler {
    // Notifications posted to the observer.
    enum {
        kWhatEOS,
        kWhatAccessUnit
    };

    MediaPuller(const sp<MediaSource> &source, const sp<AMessage> &notify);

    // Synchronously starts the source; returns its error code.
    status_t start();

    // Asynchronously stops; |notify| is posted once the source is stopped.
    void stopAsync(const sp<AMessage> &notify);

    // While paused, buffers are still read from the source but discarded.
    void pause();
    void resume();

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~MediaPuller();

private:
    // Internal message ids.
    enum {
        kWhatStart,
        kWhatStop,
        kWhatPull,
        kWhatPause,
        kWhatResume,
    };

    sp<MediaSource> mSource;
    sp<AMessage> mNotify;        // observer handler
    int32_t mPullGeneration;     // bumped on stop to invalidate in-flight pulls
    bool mIsAudio;               // derived from the source's MIME type
    bool mPaused;

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();

    DISALLOW_EVIL_CONSTRUCTORS(MediaPuller);
};
} // namespace android
#endif // MEDIA_PULLER_H_

File diff suppressed because it is too large Load Diff

@ -0,0 +1,176 @@
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef PLAYBACK_SESSION_H_
#define PLAYBACK_SESSION_H_
#include "MediaSender.h"
#include "VideoFormats.h"
#include "WifiDisplaySource.h"
#include <utils/String16.h>
namespace android {
struct ABuffer;
struct IHDCP;
class IGraphicBufferProducer;
struct MediaPuller;
struct MediaSource;
struct MediaSender;
struct NuMediaExtractor;
// Encapsulates the state of an RTP/RTCP session in the context of wifi
// display.
struct WifiDisplaySource::PlaybackSession : public AHandler {
    // |path|, when non-NULL, selects playback of a local media file instead
    // of mirroring the display.
    PlaybackSession(
            const String16 &opPackageName,
            const sp<ANetworkSession> &netSession,
            const sp<AMessage> &notify,
            const struct in_addr &interfaceAddr,
            const sp<IHDCP> &hdcp,
            const char *path = NULL);

    // Sets up packetizer, tracks and the RTP/RTCP transport towards the sink.
    status_t init(
            const char *clientIP,
            int32_t clientRtp,
            RTPSender::TransportMode rtpMode,
            int32_t clientRtcp,
            RTPSender::TransportMode rtcpMode,
            bool enableAudio,
            bool usePCMAudio,
            bool enableVideo,
            VideoFormats::ResolutionType videoResolutionType,
            size_t videoResolutionIndex,
            VideoFormats::ProfileType videoProfileType,
            VideoFormats::LevelType videoLevelType);

    void destroyAsync();

    int32_t getRTPPort() const;

    // Liveness bookkeeping used by the source's reaper.
    int64_t getLastLifesignUs() const;
    void updateLiveness();

    status_t play();
    status_t finishPlay();
    status_t pause();

    sp<IGraphicBufferProducer> getSurfaceTexture();

    void requestIDRFrame();

    // Notifications posted to the observer.
    enum {
        kWhatSessionDead,
        kWhatBinaryData,
        kWhatSessionEstablished,
        kWhatSessionDestroyed,
    };

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~PlaybackSession();

private:
    struct Track;

    // Internal message ids.
    enum {
        kWhatMediaPullerNotify,
        kWhatConverterNotify,
        kWhatTrackNotify,
        kWhatUpdateSurface,
        kWhatPause,
        kWhatResume,
        kWhatMediaSenderNotify,
        kWhatPullExtractorSample,
    };

    String16 mOpPackageName;
    sp<ANetworkSession> mNetSession;
    sp<AMessage> mNotify;             // observer handler
    in_addr mInterfaceAddr;
    sp<IHDCP> mHDCP;                  // non-NULL when the link is encrypted
    AString mMediaPath;               // non-empty => file playback mode

    sp<MediaSender> mMediaSender;
    int32_t mLocalRTPPort;

    bool mWeAreDead;
    bool mPaused;

    int64_t mLastLifesignUs;

    sp<IGraphicBufferProducer> mProducer;

    KeyedVector<size_t, sp<Track> > mTracks;
    ssize_t mVideoTrackIndex;

    int64_t mPrevTimeUs;

    // Extractor state, used only in file playback mode.
    sp<NuMediaExtractor> mExtractor;
    KeyedVector<size_t, size_t> mExtractorTrackToInternalTrack;
    bool mPullExtractorPending;
    int32_t mPullExtractorGeneration;
    int64_t mFirstSampleTimeRealUs;
    int64_t mFirstSampleTimeUs;

    status_t setupMediaPacketizer(bool enableAudio, bool enableVideo);

    status_t setupPacketizer(
            bool enableAudio,
            bool usePCMAudio,
            bool enableVideo,
            VideoFormats::ResolutionType videoResolutionType,
            size_t videoResolutionIndex,
            VideoFormats::ProfileType videoProfileType,
            VideoFormats::LevelType videoLevelType);

    // NOTE(review): "contraintSet" looks like a typo for "constraintSet";
    // renaming here would diverge from the definition -- fix both together.
    status_t addSource(
            bool isVideo,
            const sp<MediaSource> &source,
            bool isRepeaterSource,
            bool usePCMAudio,
            unsigned profileIdc,
            unsigned levelIdc,
            unsigned contraintSet,
            size_t *numInputBuffers);

    status_t addVideoSource(
            VideoFormats::ResolutionType videoResolutionType,
            size_t videoResolutionIndex,
            VideoFormats::ProfileType videoProfileType,
            VideoFormats::LevelType videoLevelType);

    status_t addAudioSource(bool usePCMAudio);

    status_t onMediaSenderInitialized();

    void notifySessionDead();

    void schedulePullExtractor();
    void onPullExtractor();

    void onSinkFeedback(const sp<AMessage> &msg);

    DISALLOW_EVIL_CONSTRUCTORS(PlaybackSession);
};
} // namespace android
#endif // PLAYBACK_SESSION_H_

@ -0,0 +1,219 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "RepeaterSource"
#include <utils/Log.h>
#include "RepeaterSource.h"
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MetaData.h>
namespace android {
// Wraps |source| and re-delivers its most recent buffer at a fixed
// |rateHz|, repeating frames when the source is slower than the target
// rate.
RepeaterSource::RepeaterSource(const sp<MediaSource> &source, double rateHz)
    : mStarted(false),
      mSource(source),
      mRateHz(rateHz),
      mBuffer(NULL),
      mResult(OK),
      mLastBufferUpdateUs(-1ll),
      mStartTimeUs(-1ll),      // -1 => timebase not anchored yet
      mFrameCount(0) {
}
// The source must have been stopped (or never started) before destruction.
RepeaterSource::~RepeaterSource() {
    CHECK(!mStarted);
}
// Returns the current target delivery rate in frames per second.
// NOTE(review): reads mRateHz without taking mLock (the setter locks);
// presumably benign for a double read on supported targets -- confirm.
double RepeaterSource::getFrameRate() const {
    return mRateHz;
}
// Changes the target delivery rate.  The timebase is re-anchored so the
// next frame keeps the timestamp it would have received at the old rate.
void RepeaterSource::setFrameRate(double rateHz) {
    Mutex::Autolock autoLock(mLock);

    if (rateHz == mRateHz) {
        return;
    }

    if (mStartTimeUs >= 0ll) {
        // Advance the anchor to where the next frame would have landed,
        // then restart the frame count from zero at the new rate.
        mStartTimeUs += (mFrameCount * 1000000ll) / mRateHz;
        mFrameCount = 0;
    }

    mRateHz = rateHz;
}
// Starts the wrapped source, spins up the repeater looper and kicks off
// the background read loop.
status_t RepeaterSource::start(MetaData *params) {
    CHECK(!mStarted);

    status_t err = mSource->start(params);

    if (err != OK) {
        return err;
    }

    // Reset delivery state.
    mBuffer = NULL;
    mResult = OK;
    mStartTimeUs = -1ll;
    mFrameCount = 0;

    mLooper = new ALooper;
    mLooper->setName("repeater_looper");
    mLooper->start();

    // The reflector must be registered before the first postRead().
    mReflector = new AHandlerReflector<RepeaterSource>(this);
    mLooper->registerHandler(mReflector);

    postRead();

    mStarted = true;

    return OK;
}
// Stops the wrapped source, tears down the looper and releases the cached
// buffer.  Returns the wrapped source's stop() result.
status_t RepeaterSource::stop() {
    CHECK(mStarted);

    ALOGV("stopping");

    // Stop the source first so the read loop's pending read unblocks.
    status_t err = mSource->stop();

    if (mLooper != NULL) {
        mLooper->stop();
        mLooper.clear();

        mReflector.clear();
    }

    if (mBuffer != NULL) {
        ALOGV("releasing mbuf %p", mBuffer);
        mBuffer->release();
        mBuffer = NULL;
    }

    ALOGV("stopped");

    mStarted = false;

    return err;
}
// Forwards the wrapped source's format unchanged.
sp<MetaData> RepeaterSource::getFormat() {
    return mSource->getFormat();
}
// Delivers the most recent source buffer, paced at mRateHz: blocks for the
// very first buffer, then sleeps until each subsequent frame's deadline and
// re-delivers (repeats) whatever buffer is current.  Seeking is not
// supported.
status_t RepeaterSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    int64_t seekTimeUs;
    ReadOptions::SeekMode seekMode;
    CHECK(options == NULL || !options->getSeekTo(&seekTimeUs, &seekMode));

    for (;;) {
        int64_t bufferTimeUs = -1ll;

        if (mStartTimeUs < 0ll) {
            // Not anchored yet (first frame, or just left dormancy):
            // wait until the background loop produces a buffer or an error.
            Mutex::Autolock autoLock(mLock);
            while ((mLastBufferUpdateUs < 0ll || mBuffer == NULL)
                    && mResult == OK) {
                mCondition.wait(mLock);
            }

            ALOGV("now resuming.");
            mStartTimeUs = ALooper::GetNowUs();
            bufferTimeUs = mStartTimeUs;
        } else {
            // Sleep until this frame's deadline in the fixed-rate timeline.
            bufferTimeUs = mStartTimeUs + (mFrameCount * 1000000ll) / mRateHz;

            int64_t nowUs = ALooper::GetNowUs();
            int64_t delayUs = bufferTimeUs - nowUs;

            if (delayUs > 0ll) {
                usleep(delayUs);
            }
        }

        bool stale = false;

        {
            Mutex::Autolock autoLock(mLock);
            if (mResult != OK) {
                CHECK(mBuffer == NULL);
                return mResult;
            }

#if SUSPEND_VIDEO_IF_IDLE
            int64_t nowUs = ALooper::GetNowUs();
            if (nowUs - mLastBufferUpdateUs > 1000000ll) {
                // No fresh frame for over a second: go dormant instead of
                // repeating a stale frame.
                mLastBufferUpdateUs = -1ll;
                stale = true;
            } else
#endif
            {
                // Hand out an extra reference to the cached buffer, stamped
                // with this frame's paced timestamp.
                mBuffer->add_ref();
                *buffer = mBuffer;
                (*buffer)->meta_data()->setInt64(kKeyTime, bufferTimeUs);
                ++mFrameCount;
            }
        }

        if (!stale) {
            break;
        }

        // Dormant: drop the timebase anchor and wait for a fresh frame.
        mStartTimeUs = -1ll;
        mFrameCount = 0;
        ALOGV("now dormant");
    }

    return OK;
}
void RepeaterSource::postRead() {
(new AMessage(kWhatRead, mReflector))->post();
}
// Background read loop: pulls the next buffer from the wrapped source,
// swaps it into mBuffer under the lock, wakes any blocked read(), and
// re-posts itself until the source errors out.
void RepeaterSource::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatRead:
        {
            MediaBuffer *buffer;
            status_t err = mSource->read(&buffer);

            ALOGV("read mbuf %p", buffer);

            Mutex::Autolock autoLock(mLock);
            // Replace the previously cached buffer.
            if (mBuffer != NULL) {
                mBuffer->release();
                mBuffer = NULL;
            }
            mBuffer = buffer;
            mResult = err;
            mLastBufferUpdateUs = ALooper::GetNowUs();

            mCondition.broadcast();

            if (err == OK) {
                postRead();
            }
            break;
        }

        default:
            TRESPASS();
    }
}
// Wakes a read() that went dormant: refreshes the buffer-update timestamp
// and signals the condition, but only when a cached buffer exists.
void RepeaterSource::wakeUp() {
    ALOGV("wakeUp");
    Mutex::Autolock autoLock(mLock);

    if (mLastBufferUpdateUs >= 0ll || mBuffer == NULL) {
        return;
    }

    mLastBufferUpdateUs = ALooper::GetNowUs();
    mCondition.broadcast();
}
} // namespace android

@ -0,0 +1,67 @@
#ifndef REPEATER_SOURCE_H_
#define REPEATER_SOURCE_H_
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AHandlerReflector.h>
#include <media/stagefright/MediaSource.h>
#define SUSPEND_VIDEO_IF_IDLE 0
namespace android {
// This MediaSource delivers frames at a constant rate by repeating buffers
// if necessary.
struct RepeaterSource : public MediaSource {
    RepeaterSource(const sp<MediaSource> &source, double rateHz);

    virtual status_t start(MetaData *params);
    virtual status_t stop();
    virtual sp<MetaData> getFormat();

    // Blocks until the first buffer arrives, then paces (and repeats)
    // delivery of the most recent buffer at the configured rate.
    virtual status_t read(
            MediaBuffer **buffer, const ReadOptions *options);

    // Handler entry point for the internal read loop (via AHandlerReflector).
    void onMessageReceived(const sp<AMessage> &msg);

    // If RepeaterSource is currently dormant, because SurfaceFlinger didn't
    // send updates in a while, this is its wakeup call.
    void wakeUp();

    double getFrameRate() const;
    void setFrameRate(double rateHz);

protected:
    virtual ~RepeaterSource();

private:
    enum {
        kWhatRead,
    };

    Mutex mLock;
    Condition mCondition;          // signaled when a new buffer/error arrives

    bool mStarted;
    sp<MediaSource> mSource;       // wrapped upstream source
    double mRateHz;                // target delivery rate, frames per second

    sp<ALooper> mLooper;
    sp<AHandlerReflector<RepeaterSource> > mReflector;

    MediaBuffer *mBuffer;          // most recent buffer read from mSource
    status_t mResult;              // last read result from mSource
    int64_t mLastBufferUpdateUs;   // when mBuffer was refreshed; -1 => dormant
    int64_t mStartTimeUs;          // timebase anchor; -1 => not anchored
    int32_t mFrameCount;           // frames emitted since mStartTimeUs

    void postRead();

    DISALLOW_EVIL_CONSTRUCTORS(RepeaterSource);
};
} // namespace android
#endif // REPEATER_SOURCE_H_

File diff suppressed because it is too large Load Diff

@ -0,0 +1,94 @@
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef TS_PACKETIZER_H_
#define TS_PACKETIZER_H_
#include <media/stagefright/foundation/ABase.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
#include <utils/Vector.h>
namespace android {
struct ABuffer;
struct AMessage;
// Forms the packets of a transport stream given access units.
// Emits metadata tables (PAT and PMT) and timestamp stream (PCR) based
// on flags.
struct TSPacketizer : public RefBase {
    // Constructor flags: which HDCP content-protection descriptor (if any)
    // to emit in the program map table.
    enum {
        EMIT_HDCP20_DESCRIPTOR = 1,
        EMIT_HDCP21_DESCRIPTOR = 2,
    };
    explicit TSPacketizer(uint32_t flags);

    // Returns trackIndex or error.
    ssize_t addTrack(const sp<AMessage> &format);

    // Per-call flags for packetize().
    enum {
        EMIT_PAT_AND_PMT = 1,
        EMIT_PCR = 2,
        IS_ENCRYPTED = 4,
        PREPEND_SPS_PPS_TO_IDR_FRAMES = 8,
    };
    // Converts one access unit into a chain of 188-byte TS packets,
    // optionally preceded by PAT/PMT and PCR packets per |flags|.
    status_t packetize(
            size_t trackIndex, const sp<ABuffer> &accessUnit,
            sp<ABuffer> *packets,
            uint32_t flags,
            const uint8_t *PES_private_data, size_t PES_private_data_len,
            size_t numStuffingBytes = 0);

    status_t extractCSDIfNecessary(size_t trackIndex);

    // XXX to be removed once encoder config option takes care of this for
    // encrypted mode.
    sp<ABuffer> prependCSD(
            size_t trackIndex, const sp<ABuffer> &accessUnit) const;

protected:
    virtual ~TSPacketizer();

private:
    // Fixed PIDs used for the generated stream.
    enum {
        kPID_PMT = 0x100,
        kPID_PCR = 0x1000,
    };

    struct Track;

    uint32_t mFlags;                  // constructor flags (HDCP descriptors)
    Vector<sp<Track> > mTracks;

    Vector<sp<ABuffer> > mProgramInfoDescriptors;

    // Continuity counters for the PSI packets.
    unsigned mPATContinuityCounter;
    unsigned mPMTContinuityCounter;

    uint32_t mCrcTable[256];          // lookup table for crc32()

    void initCrcTable();
    uint32_t crc32(const uint8_t *start, size_t size) const;

    DISALLOW_EVIL_CONSTRUCTORS(TSPacketizer);
};
} // namespace android
#endif // TS_PACKETIZER_H_

@ -0,0 +1,278 @@
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef WIFI_DISPLAY_SOURCE_H_
#define WIFI_DISPLAY_SOURCE_H_
#include "VideoFormats.h"
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/ANetworkSession.h>
#include <netinet/in.h>
#include <utils/String16.h>
namespace android {
struct AReplyToken;
struct IHDCP;
class IRemoteDisplayClient;
struct ParsedMessage;
// Represents the RTSP server acting as a wifi display source.
// Manages incoming connections, sets up Playback sessions as necessary.
struct WifiDisplaySource : public AHandler {
    static const unsigned kWifiDisplayDefaultPort = 7236;

    // |path|, when non-NULL, plays a local media file instead of mirroring.
    WifiDisplaySource(
            const String16 &opPackageName,
            const sp<ANetworkSession> &netSession,
            const sp<IRemoteDisplayClient> &client,
            const char *path = NULL);

    // Starts/stops the RTSP server on the given network interface.
    status_t start(const char *iface);
    status_t stop();

    status_t pause();
    status_t resume();

protected:
    virtual ~WifiDisplaySource();
    virtual void onMessageReceived(const sp<AMessage> &msg);

private:
    struct PlaybackSession;
    struct HDCPObserver;

    // RTSP session state machine.
    enum State {
        INITIALIZED,
        AWAITING_CLIENT_CONNECTION,
        AWAITING_CLIENT_SETUP,
        AWAITING_CLIENT_PLAY,
        ABOUT_TO_PLAY,
        PLAYING,
        PLAYING_TO_PAUSED,
        PAUSED,
        PAUSED_TO_PLAYING,
        AWAITING_CLIENT_TEARDOWN,
        STOPPING,
        STOPPED,
    };

    // Internal message ids.
    enum {
        kWhatStart,
        kWhatRTSPNotify,
        kWhatStop,
        kWhatPause,
        kWhatResume,
        kWhatReapDeadClients,
        kWhatPlaybackSessionNotify,
        kWhatKeepAlive,
        kWhatHDCPNotify,
        kWhatFinishStop2,
        kWhatTeardownTriggerTimedOut,
    };

    // Key identifying an outstanding RTSP request awaiting its response.
    struct ResponseID {
        int32_t mSessionID;
        int32_t mCSeq;

        bool operator<(const ResponseID &other) const {
            return mSessionID < other.mSessionID
                || (mSessionID == other.mSessionID
                        && mCSeq < other.mCSeq);
        }
    };

    typedef status_t (WifiDisplaySource::*HandleRTSPResponseFunc)(
            int32_t sessionID, const sp<ParsedMessage> &msg);

    static const int64_t kReaperIntervalUs = 1000000ll;

    // We request that the dongle send us a "TEARDOWN" in order to
    // perform an orderly shutdown. We're willing to wait up to 2 secs
    // for this message to arrive, after that we'll force a disconnect
    // instead.
    // NOTE(review): "Timeou" looks like a typo for "Timeout"; renaming
    // would break the .cpp that references it -- fix both together.
    static const int64_t kTeardownTriggerTimeouSecs = 2;

    static const int64_t kPlaybackSessionTimeoutSecs = 30;

    static const int64_t kPlaybackSessionTimeoutUs =
        kPlaybackSessionTimeoutSecs * 1000000ll;

    static const AString sUserAgent;

    String16 mOpPackageName;

    State mState;
    VideoFormats mSupportedSourceVideoFormats;
    sp<ANetworkSession> mNetSession;
    sp<IRemoteDisplayClient> mClient;
    AString mMediaPath;              // non-empty => file playback mode
    struct in_addr mInterfaceAddr;
    int32_t mSessionID;              // RTSP server session

    sp<AReplyToken> mStopReplyID;

    AString mWfdClientRtpPorts;
    int32_t mChosenRTPPort;  // extracted from "wfd_client_rtp_ports"

    // Capabilities negotiated with the sink (M3/M4 exchange).
    bool mSinkSupportsVideo;
    VideoFormats mSupportedSinkVideoFormats;

    VideoFormats::ResolutionType mChosenVideoResolutionType;
    size_t mChosenVideoResolutionIndex;
    VideoFormats::ProfileType mChosenVideoProfile;
    VideoFormats::LevelType mChosenVideoLevel;

    bool mSinkSupportsAudio;
    bool mUsingPCMAudio;

    int32_t mClientSessionID;

    // Only a single client at a time is supported.
    struct ClientInfo {
        AString mRemoteIP;
        AString mLocalIP;
        int32_t mLocalPort;
        int32_t mPlaybackSessionID;
        sp<PlaybackSession> mPlaybackSession;
    };
    ClientInfo mClientInfo;

    bool mReaperPending;

    int32_t mNextCSeq;

    KeyedVector<ResponseID, HandleRTSPResponseFunc> mResponseHandlers;

    // HDCP specific section >>>>
    bool mUsingHDCP;
    bool mIsHDCP2_0;
    int32_t mHDCPPort;
    sp<IHDCP> mHDCP;
    sp<HDCPObserver> mHDCPObserver;

    bool mHDCPInitializationComplete;
    bool mSetupTriggerDeferred;

    bool mPlaybackSessionEstablished;

    status_t makeHDCP();
    // <<<< HDCP specific section

    // RTSP source->sink requests (Wifi Display spec message names).
    status_t sendM1(int32_t sessionID);
    status_t sendM3(int32_t sessionID);
    status_t sendM4(int32_t sessionID);

    enum TriggerType {
        TRIGGER_SETUP,
        TRIGGER_TEARDOWN,
        TRIGGER_PAUSE,
        TRIGGER_PLAY,
    };

    // M5
    status_t sendTrigger(int32_t sessionID, TriggerType triggerType);

    status_t sendM16(int32_t sessionID);

    status_t onReceiveM1Response(
            int32_t sessionID, const sp<ParsedMessage> &msg);
    status_t onReceiveM3Response(
            int32_t sessionID, const sp<ParsedMessage> &msg);
    status_t onReceiveM4Response(
            int32_t sessionID, const sp<ParsedMessage> &msg);
    status_t onReceiveM5Response(
            int32_t sessionID, const sp<ParsedMessage> &msg);
    status_t onReceiveM16Response(
            int32_t sessionID, const sp<ParsedMessage> &msg);

    void registerResponseHandler(
            int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func);

    status_t onReceiveClientData(const sp<AMessage> &msg);

    // Handlers for sink->source RTSP requests.
    status_t onOptionsRequest(
            int32_t sessionID,
            int32_t cseq,
            const sp<ParsedMessage> &data);

    status_t onSetupRequest(
            int32_t sessionID,
            int32_t cseq,
            const sp<ParsedMessage> &data);

    status_t onPlayRequest(
            int32_t sessionID,
            int32_t cseq,
            const sp<ParsedMessage> &data);

    status_t onPauseRequest(
            int32_t sessionID,
            int32_t cseq,
            const sp<ParsedMessage> &data);

    status_t onTeardownRequest(
            int32_t sessionID,
            int32_t cseq,
            const sp<ParsedMessage> &data);

    status_t onGetParameterRequest(
            int32_t sessionID,
            int32_t cseq,
            const sp<ParsedMessage> &data);

    status_t onSetParameterRequest(
            int32_t sessionID,
            int32_t cseq,
            const sp<ParsedMessage> &data);

    void sendErrorResponse(
            int32_t sessionID,
            const char *errorDetail,
            int32_t cseq);

    // NOTE(review): default of -1ll for an int32_t parameter is suspicious
    // (harmless narrowing of a constant) -- presumably meant -1.
    static void AppendCommonResponse(
            AString *response, int32_t cseq, int32_t playbackSessionID = -1ll);

    void scheduleReaper();
    void scheduleKeepAlive(int32_t sessionID);

    int32_t makeUniquePlaybackSessionID() const;

    sp<PlaybackSession> findPlaybackSession(
            const sp<ParsedMessage> &data, int32_t *playbackSessionID) const;

    // Multi-stage shutdown helpers.
    void finishStop();
    void disconnectClientAsync();
    void disconnectClient2();
    void finishStopAfterDisconnectingClient();
    void finishStop2();

    void finishPlay();

    DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySource);
};
} // namespace android
#endif // WIFI_DISPLAY_SOURCE_H_
Loading…
Cancel
Save