OMX init
This commit is contained in:
124
include/media/hardware/CryptoAPI.h
Normal file
124
include/media/hardware/CryptoAPI.h
Normal file
@@ -0,0 +1,124 @@
|
||||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#include <media/stagefright/MediaErrors.h>
|
||||
#include <utils/Errors.h>
|
||||
#include <utils/Vector.h>
|
||||
|
||||
#ifndef CRYPTO_API_H_
|
||||
|
||||
#define CRYPTO_API_H_
|
||||
|
||||
namespace android {
|
||||
|
||||
struct AString;
|
||||
struct CryptoPlugin;
|
||||
|
||||
// Factory interface implemented by a vendor crypto shared library.
// The library's exported createCryptoFactory() entry point (declared at the
// bottom of this header) returns one of these; the framework then uses it to
// probe scheme support and to instantiate CryptoPlugin objects.
struct CryptoFactory {
    CryptoFactory() {}
    virtual ~CryptoFactory() {}

    // Returns true if this factory can create a plugin for the DRM scheme
    // identified by the 16-byte UUID.
    virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) const = 0;

    // Instantiates a plugin for the given scheme UUID. "data"/"size" carry
    // opaque, scheme-specific initialization data. On success, *plugin is set
    // to the new instance.
    // NOTE(review): ownership of *plugin presumably transfers to the caller —
    // confirm against the framework's usage.
    virtual status_t createPlugin(
            const uint8_t uuid[16], const void *data, size_t size,
            CryptoPlugin **plugin) = 0;

private:
    // Non-copyable (declared, not defined — pre-C++11 idiom used throughout
    // this header instead of "= delete").
    CryptoFactory(const CryptoFactory &);
    CryptoFactory &operator=(const CryptoFactory &);
};
|
||||
|
||||
// Per-scheme decryption interface created by CryptoFactory::createPlugin.
struct CryptoPlugin {
    // Cipher mode applied to the encrypted portion of each subsample.
    enum Mode {
        kMode_Unencrypted = 0,
        kMode_AES_CTR = 1,
        kMode_AES_WV = 2,
        kMode_AES_CBC = 3,
    };

    // A subsample is a run of clear bytes followed by a run of encrypted
    // bytes; a full sample is described by an array of SubSamples.
    struct SubSample {
        uint32_t mNumBytesOfClearData;
        uint32_t mNumBytesOfEncryptedData;
    };

    struct Pattern {
        // Number of blocks to be encrypted in the pattern. If zero, pattern
        // encryption is inoperative.
        uint32_t mEncryptBlocks;

        // Number of blocks to be skipped (left clear) in the pattern. If zero,
        // pattern encryption is inoperative.
        uint32_t mSkipBlocks;
    };

    CryptoPlugin() {}
    virtual ~CryptoPlugin() {}

    // If this method returns false, a non-secure decoder will be used to
    // decode the data after decryption. The decrypt API below will have
    // to support insecure decryption of the data (secure = false) for
    // media data of the given mime type.
    virtual bool requiresSecureDecoderComponent(const char *mime) const = 0;

    // To implement resolution constraints, the crypto plugin needs to know
    // the resolution of the video being decrypted. The media player should
    // call this method when the resolution is determined and any time it
    // is subsequently changed.
    // Default implementation is a no-op for plugins without constraints.
    virtual void notifyResolution(uint32_t /* width */, uint32_t /* height */) {}

    // A MediaDrm session may be associated with a MediaCrypto session. The
    // associated MediaDrm session is used to load decryption keys
    // into the crypto/drm plugin. The keys are then referenced by key-id
    // in the 'key' parameter to the decrypt() method.
    // Should return NO_ERROR on success, ERROR_DRM_SESSION_NOT_OPENED if
    // the session is not opened and a code from MediaErrors.h otherwise.
    // Default implementation reports the association as unsupported.
    virtual status_t setMediaDrmSession(const Vector<uint8_t> & /*sessionId */) {
        return ERROR_UNSUPPORTED;
    }

    // If the error returned falls into the range
    // ERROR_DRM_VENDOR_MIN..ERROR_DRM_VENDOR_MAX, errorDetailMsg should be
    // filled in with an appropriate string.
    // At the java level these special errors will then trigger a
    // MediaCodec.CryptoException that gives clients access to both
    // the error code and the errorDetailMsg.
    // Returns a non-negative result to indicate the number of bytes written
    // to the dstPtr, or a negative result to indicate an error.
    virtual ssize_t decrypt(
            bool secure,
            const uint8_t key[16],
            const uint8_t iv[16],
            Mode mode,
            const Pattern &pattern,
            const void *srcPtr,
            const SubSample *subSamples, size_t numSubSamples,
            void *dstPtr,
            AString *errorDetailMsg) = 0;

private:
    // Non-copyable.
    CryptoPlugin(const CryptoPlugin &);
    CryptoPlugin &operator=(const CryptoPlugin &);
};
|
||||
|
||||
} // namespace android
|
||||
|
||||
// Entry point that a vendor crypto shared library must export with C linkage;
// returns the library's CryptoFactory instance.
extern "C" {
    extern android::CryptoFactory *createCryptoFactory();
}
|
||||
|
||||
#endif // CRYPTO_API_H_
|
||||
163
include/media/hardware/HDCPAPI.h
Normal file
163
include/media/hardware/HDCPAPI.h
Normal file
@@ -0,0 +1,163 @@
|
||||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HDCP_API_H_
|
||||
#define HDCP_API_H_
|
||||
|
||||
#include <utils/Errors.h>
|
||||
#include <cutils/native_handle.h>
|
||||
|
||||
namespace android {
|
||||
|
||||
// Two different kinds of modules are covered under the same HDCPModule
|
||||
// structure below, a module either implements decryption or encryption.
|
||||
struct HDCPModule {
    // Asynchronous event callback: (cookie, msg, ext1, ext2), where cookie is
    // the value passed to the constructor and msg is one of the enum values
    // below.
    typedef void (*ObserverFunc)(void *cookie, int msg, int ext1, int ext2);

    // The msg argument in calls to the observer notification function.
    enum {
        // Sent in response to a call to "HDCPModule::initAsync" once
        // initialization has either been successfully completed,
        // i.e. the HDCP session is now fully setup (AKE, Locality Check,
        // SKE and any authentication with repeaters completed) or failed.
        // ext1 should be a suitable error code (status_t), ext2 is
        // unused for ENCRYPTION and in the case of HDCP_INITIALIZATION_COMPLETE
        // holds the local TCP port the module is listening on.
        HDCP_INITIALIZATION_COMPLETE,
        HDCP_INITIALIZATION_FAILED,

        // Sent upon completion of a call to "HDCPModule::shutdownAsync".
        // ext1 should be a suitable error code, ext2 is unused.
        HDCP_SHUTDOWN_COMPLETE,
        HDCP_SHUTDOWN_FAILED,

        HDCP_UNAUTHENTICATED_CONNECTION,
        HDCP_UNAUTHORIZED_CONNECTION,
        HDCP_REVOKED_CONNECTION,
        // NOTE(review): "EXECEEDED" is a misspelling of "EXCEEDED"; kept as-is
        // because renaming the enumerator would break existing implementations.
        HDCP_TOPOLOGY_EXECEEDED,
        HDCP_UNKNOWN_ERROR,

        // DECRYPTION only: Indicates that a client has successfully connected,
        // a secure session established and the module is ready to accept
        // future calls to "decrypt".
        HDCP_SESSION_ESTABLISHED,
    };

    // HDCPModule capability bit masks
    enum {
        // HDCP_CAPS_ENCRYPT: mandatory, meaning the HDCP module can encrypt
        // from an input byte-array buffer to an output byte-array buffer
        HDCP_CAPS_ENCRYPT = (1 << 0),
        // HDCP_CAPS_ENCRYPT_NATIVE: the HDCP module supports encryption from
        // a native buffer to an output byte-array buffer. The format of the
        // input native buffer is specific to vendor's encoder implementation.
        // It is the same format as that used by the encoder when
        // "storeMetaDataInBuffers" extension is enabled on its output port.
        HDCP_CAPS_ENCRYPT_NATIVE = (1 << 1),
    };

    // Module can call the notification function to signal completion/failure
    // of asynchronous operations (such as initialization) or out of band
    // events.
    HDCPModule(void * /*cookie*/, ObserverFunc /*observerNotify*/) {};

    virtual ~HDCPModule() {};

    // ENCRYPTION: Request to setup an HDCP session with the host specified
    // by addr and listening on the specified port.
    // DECRYPTION: Request to setup an HDCP session, addr is the interface
    // address the module should bind its socket to. port will be 0.
    // The module will pick the port to listen on itself and report its choice
    // in the "ext2" argument of the HDCP_INITIALIZATION_COMPLETE callback.
    virtual status_t initAsync(const char *addr, unsigned port) = 0;

    // Request to shutdown the active HDCP session.
    virtual status_t shutdownAsync() = 0;

    // Returns the capability bitmask of this HDCP session.
    // Default implementation advertises the mandatory byte-array encryption
    // capability only.
    virtual uint32_t getCaps() {
        return HDCP_CAPS_ENCRYPT;
    }

    // ENCRYPTION only:
    // Encrypt data according to the HDCP spec. "size" bytes of data are
    // available at "inData" (virtual address), "size" may not be a multiple
    // of 128 bits (16 bytes). An equal number of encrypted bytes should be
    // written to the buffer at "outData" (virtual address).
    // This operation is to be synchronous, i.e. this call does not return
    // until outData contains size bytes of encrypted data.
    // streamCTR will be assigned by the caller (to 0 for the first PES stream,
    // 1 for the second and so on)
    // inputCTR _will_be_maintained_by_the_callee_ for each PES stream.
    // Default implementation rejects the call (module does not encrypt).
    virtual status_t encrypt(
            const void * /*inData*/, size_t /*size*/, uint32_t /*streamCTR*/,
            uint64_t * /*outInputCTR*/, void * /*outData*/) {
        return INVALID_OPERATION;
    }

    // Encrypt data according to the HDCP spec. "size" bytes of data starting
    // at location "offset" are available in "buffer" (buffer handle). "size"
    // may not be a multiple of 128 bits (16 bytes). An equal number of
    // encrypted bytes should be written to the buffer at "outData" (virtual
    // address). This operation is to be synchronous, i.e. this call does not
    // return until outData contains size bytes of encrypted data.
    // streamCTR will be assigned by the caller (to 0 for the first PES stream,
    // 1 for the second and so on)
    // inputCTR _will_be_maintained_by_the_callee_ for each PES stream.
    // Default implementation rejects the call (see HDCP_CAPS_ENCRYPT_NATIVE).
    virtual status_t encryptNative(
            buffer_handle_t /*buffer*/, size_t /*offset*/, size_t /*size*/,
            uint32_t /*streamCTR*/, uint64_t * /*outInputCTR*/, void * /*outData*/) {
        return INVALID_OPERATION;
    }

    // DECRYPTION only:
    // Decrypt data according to the HDCP spec.
    // "size" bytes of encrypted data are available at "inData"
    // (virtual address), "size" may not be a multiple of 128 bits (16 bytes).
    // An equal number of decrypted bytes should be written to the buffer
    // at "outData" (virtual address).
    // This operation is to be synchronous, i.e. this call does not return
    // until outData contains size bytes of decrypted data.
    // Both streamCTR and inputCTR will be provided by the caller.
    // Default implementation rejects the call (module does not decrypt).
    virtual status_t decrypt(
            const void * /*inData*/, size_t /*size*/,
            uint32_t /*streamCTR*/, uint64_t /*inputCTR*/,
            void * /*outData*/) {
        return INVALID_OPERATION;
    }

private:
    // Non-copyable.
    HDCPModule(const HDCPModule &);
    HDCPModule &operator=(const HDCPModule &);
};
|
||||
|
||||
} // namespace android
|
||||
|
||||
// A shared library exporting the following methods should be included to
|
||||
// support HDCP functionality. The shared library must be called
|
||||
// "libstagefright_hdcp.so", it will be dynamically loaded into the
|
||||
// mediaserver process.
|
||||
// C-linkage entry points the HDCP shared library must export; each returns a
// new HDCPModule that will report events via the supplied observer/cookie.
extern "C" {
    // Create a module for ENCRYPTION.
    extern android::HDCPModule *createHDCPModule(
            void *cookie, android::HDCPModule::ObserverFunc);

    // Create a module for DECRYPTION.
    extern android::HDCPModule *createHDCPModuleForDecryption(
            void *cookie, android::HDCPModule::ObserverFunc);
}
|
||||
|
||||
#endif // HDCP_API_H_
|
||||
|
||||
561
include/media/hardware/HardwareAPI.h
Normal file
561
include/media/hardware/HardwareAPI.h
Normal file
@@ -0,0 +1,561 @@
|
||||
/*
|
||||
* Copyright (C) 2009 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HARDWARE_API_H_
|
||||
|
||||
#define HARDWARE_API_H_
|
||||
|
||||
#include <media/hardware/OMXPluginBase.h>
|
||||
#include <media/hardware/MetadataBufferType.h>
|
||||
#include <cutils/native_handle.h>
|
||||
#include <utils/RefBase.h>
|
||||
|
||||
#include "VideoAPI.h"
|
||||
|
||||
#include <OMX_Component.h>
|
||||
|
||||
struct ANativeWindowBuffer;
|
||||
|
||||
namespace android {
|
||||
|
||||
// This structure is used to enable Android native buffer use for either
|
||||
// graphic buffers or secure buffers.
|
||||
//
|
||||
// TO CONTROL ANDROID GRAPHIC BUFFER USAGE:
|
||||
//
|
||||
// A pointer to this struct is passed to the OMX_SetParameter when the extension
|
||||
// index for the 'OMX.google.android.index.enableAndroidNativeBuffers' extension
|
||||
// is given.
|
||||
//
|
||||
// When Android native buffer use is disabled for a port (the default state),
|
||||
// the OMX node should operate as normal, and expect UseBuffer calls to set its
|
||||
// buffers. This is the mode that will be used when CPU access to the buffer is
|
||||
// required.
|
||||
//
|
||||
// When Android native buffer use has been enabled for a given port, the video
|
||||
// color format for the port is to be interpreted as an Android pixel format
|
||||
// rather than an OMX color format. Enabling Android native buffers may also
|
||||
// change how the component receives the native buffers. If store-metadata-mode
|
||||
// is enabled on the port, the component will receive the buffers as specified
|
||||
// in the section below. Otherwise, unless the node supports the
|
||||
// 'OMX.google.android.index.useAndroidNativeBuffer2' extension, it should
|
||||
// expect to receive UseAndroidNativeBuffer calls (via OMX_SetParameter) rather
|
||||
// than UseBuffer calls for that port.
|
||||
//
|
||||
// TO CONTROL ANDROID SECURE BUFFER USAGE:
|
||||
//
|
||||
// A pointer to this struct is passed to the OMX_SetParameter when the extension
|
||||
// index for the 'OMX.google.android.index.allocateNativeHandle' extension
|
||||
// is given.
|
||||
//
|
||||
// When native handle use is disabled for a port (the default state),
|
||||
// the OMX node should operate as normal, and expect AllocateBuffer calls to
|
||||
// return buffer pointers. This is the mode that will be used for non-secure
|
||||
// buffers if component requires allocate buffers instead of use buffers.
|
||||
//
|
||||
// When native handle use has been enabled for a given port, the component
|
||||
// shall allocate native_buffer_t objects that can be passed between
|
||||
// processes using binder. This is the mode that will be used for secure buffers.
|
||||
// When an OMX component allocates native handle for buffers, it must close and
|
||||
// delete that handle when it frees those buffers. Even though pBuffer will point
|
||||
// to a native handle, nFilledLength, nAllocLength and nOffset will correspond
|
||||
// to the data inside the opaque buffer.
|
||||
struct EnableAndroidNativeBuffersParams {
    OMX_U32 nSize;             // size of this struct (standard OMX header field)
    OMX_VERSIONTYPE nVersion;  // OMX spec version (standard OMX header field)
    OMX_U32 nPortIndex;        // port this setting applies to
    OMX_BOOL enable;
};

// Identical layout reused for the 'allocateNativeHandle' extension (see the
// "TO CONTROL ANDROID SECURE BUFFER USAGE" section above).
typedef struct EnableAndroidNativeBuffersParams AllocateNativeHandleParams;
|
||||
|
||||
// A pointer to this struct is passed to OMX_SetParameter() when the extension index
|
||||
// "OMX.google.android.index.storeMetaDataInBuffers" or
|
||||
// "OMX.google.android.index.storeANWBufferInMetadata" is given.
|
||||
//
|
||||
// When meta data is stored in the video buffers passed between OMX clients
|
||||
// and OMX components, interpretation of the buffer data is up to the
|
||||
// buffer receiver, and the data may or may not be the actual video data, but
|
||||
// some information helpful for the receiver to locate the actual data.
|
||||
// The buffer receiver thus needs to know how to interpret what is stored
|
||||
// in these buffers, with mechanisms pre-determined externally. How to
|
||||
// interpret the meta data is outside of the scope of this parameter.
|
||||
//
|
||||
// Currently, this is used to pass meta data from video source (camera component, for instance) to
|
||||
// video encoder to avoid memcpying of input video frame data, as well as to pass dynamic output
|
||||
// buffer to video decoder. To do this, bStoreMetaData is set to OMX_TRUE.
|
||||
//
|
||||
// If bStoreMetaData is set to false, real YUV frame data will be stored in input buffers, and
|
||||
// the output buffers contain either real YUV frame data, or are themselves native handles as
|
||||
// directed by enable/use-android-native-buffer parameter settings.
|
||||
// In addition, if no OMX_SetParameter() call is made on a port with the corresponding extension
|
||||
// index, the component should not assume that the client is not using metadata mode for the port.
|
||||
//
|
||||
// If the component supports this using the "OMX.google.android.index.storeANWBufferInMetadata"
|
||||
// extension and bStoreMetaData is set to OMX_TRUE, data is passed using the VideoNativeMetadata
|
||||
// layout as defined below. Each buffer will be accompanied by a fence. The fence must signal
|
||||
// before the buffer can be used (e.g. read from or written into). When returning such buffer to
|
||||
// the client, component must provide a new fence that must signal before the returned buffer can
|
||||
// be used (e.g. read from or written into). The component owns the incoming fenceFd, and must close
|
||||
// it when fence has signaled. The client will own and close the returned fence file descriptor.
|
||||
//
|
||||
// If the component supports this using the "OMX.google.android.index.storeMetaDataInBuffers"
|
||||
// extension and bStoreMetaData is set to OMX_TRUE, data is passed using VideoGrallocMetadata
|
||||
// (the layout of which is the VideoGrallocMetadata defined below). Camera input can be also passed
|
||||
// as "CameraSource", the layout of which is vendor dependent.
|
||||
//
|
||||
// Metadata buffers are registered with the component using UseBuffer calls, or can be allocated
|
||||
// by the component for encoder-metadata-output buffers.
|
||||
struct StoreMetaDataInBuffersParams {
    OMX_U32 nSize;             // size of this struct (standard OMX header field)
    OMX_VERSIONTYPE nVersion;  // OMX spec version (standard OMX header field)
    OMX_U32 nPortIndex;        // port this setting applies to
    OMX_BOOL bStoreMetaData;   // OMX_TRUE => buffers carry metadata, not raw frames
};
|
||||
|
||||
// Meta data buffer layout used to transport output frames to the decoder for
|
||||
// dynamic buffer handling.
|
||||
struct VideoGrallocMetadata {
    MetadataBufferType eType; // must be kMetadataBufferTypeGrallocSource
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
    // Pointer-sized field so the struct layout matches across the 32/64-bit
    // process boundary.
    OMX_PTR pHandle;
#else
    buffer_handle_t pHandle;
#endif
};

// Legacy name for VideoGrallocMetadata struct.
struct VideoDecoderOutputMetaData : public VideoGrallocMetadata {};
|
||||
|
||||
// Metadata layout for the "storeANWBufferInMetadata" mode: an ANativeWindow
// buffer plus an acquire fence (see extended comment above
// StoreMetaDataInBuffersParams for the fence ownership rules).
struct VideoNativeMetadata {
    MetadataBufferType eType; // must be kMetadataBufferTypeANWBuffer
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
    // Pointer-sized field so the struct layout matches across the 32/64-bit
    // process boundary.
    OMX_PTR pBuffer;
#else
    struct ANativeWindowBuffer* pBuffer;
#endif
    int nFenceFd;             // -1 if unused
};
|
||||
|
||||
// Meta data buffer layout for passing a native_handle to codec
|
||||
// Meta data buffer layout for passing a native_handle to codec
struct VideoNativeHandleMetadata {
    MetadataBufferType eType; // must be kMetadataBufferTypeNativeHandleSource

#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
    // Pointer-sized field so the struct layout matches across the 32/64-bit
    // process boundary.
    OMX_PTR pHandle;
#else
    native_handle_t *pHandle;
#endif
};
|
||||
|
||||
// A pointer to this struct is passed to OMX_SetParameter() when the extension
|
||||
// index "OMX.google.android.index.prepareForAdaptivePlayback" is given.
|
||||
//
|
||||
// This method is used to signal a video decoder, that the user has requested
|
||||
// seamless resolution change support (if bEnable is set to OMX_TRUE).
|
||||
// nMaxFrameWidth and nMaxFrameHeight are the dimensions of the largest
|
||||
// anticipated frames in the video. If bEnable is OMX_FALSE, no resolution
|
||||
// change is expected, and the nMaxFrameWidth/Height fields are unused.
|
||||
//
|
||||
// If the decoder supports dynamic output buffers, it may ignore this
|
||||
// request. Otherwise, it shall request resources in such a way so that it
|
||||
// avoids full port-reconfiguration (due to output port-definition change)
|
||||
// during resolution changes.
|
||||
//
|
||||
// DO NOT USE THIS STRUCTURE AS IT WILL BE REMOVED. INSTEAD, IMPLEMENT
|
||||
// METADATA SUPPORT FOR VIDEO DECODERS.
|
||||
struct PrepareForAdaptivePlaybackParams {
    OMX_U32 nSize;             // size of this struct (standard OMX header field)
    OMX_VERSIONTYPE nVersion;  // OMX spec version (standard OMX header field)
    OMX_U32 nPortIndex;        // port this setting applies to
    OMX_BOOL bEnable;          // OMX_TRUE => request seamless resolution change
    OMX_U32 nMaxFrameWidth;    // largest anticipated width; unused if !bEnable
    OMX_U32 nMaxFrameHeight;   // largest anticipated height; unused if !bEnable
};
|
||||
|
||||
// A pointer to this struct is passed to OMX_SetParameter when the extension
|
||||
// index for the 'OMX.google.android.index.useAndroidNativeBuffer' extension is
|
||||
// given. This call will only be performed if a prior call was made with the
|
||||
// 'OMX.google.android.index.enableAndroidNativeBuffers' extension index,
|
||||
// enabling use of Android native buffers.
|
||||
struct UseAndroidNativeBufferParams {
    OMX_U32 nSize;             // size of this struct (standard OMX header field)
    OMX_VERSIONTYPE nVersion;  // OMX spec version (standard OMX header field)
    OMX_U32 nPortIndex;        // port this buffer is registered with
    OMX_PTR pAppPrivate;
    OMX_BUFFERHEADERTYPE **bufferHeader;
    // Reference member: this struct must be constructed in place by the
    // caller (it is not default-constructible or assignable).
    const sp<ANativeWindowBuffer>& nativeBuffer;
};
|
||||
|
||||
// A pointer to this struct is passed to OMX_GetParameter when the extension
|
||||
// index for the 'OMX.google.android.index.getAndroidNativeBufferUsage'
|
||||
// extension is given. The usage bits returned from this query will be used to
|
||||
// allocate the Gralloc buffers that get passed to the useAndroidNativeBuffer
|
||||
// command.
|
||||
struct GetAndroidNativeBufferUsageParams {
    OMX_U32 nSize;              // IN
    OMX_VERSIONTYPE nVersion;   // IN
    OMX_U32 nPortIndex;         // IN
    OMX_U32 nUsage;             // OUT: gralloc usage bits for buffer allocation
};
|
||||
|
||||
// An enum OMX_COLOR_FormatAndroidOpaque to indicate an opaque colorformat
|
||||
// is declared in media/stagefright/openmax/OMX_IVCommon.h
|
||||
// This will inform the encoder that the actual
|
||||
// colorformat will be relayed by the GRalloc Buffers.
|
||||
// OMX_COLOR_FormatAndroidOpaque = 0x7F000001,
|
||||
|
||||
// A pointer to this struct is passed to OMX_SetParameter when the extension
|
||||
// index for the 'OMX.google.android.index.prependSPSPPSToIDRFrames' extension
|
||||
// is given.
|
||||
// A successful result indicates that future IDR frames will be prefixed by
|
||||
// SPS/PPS.
|
||||
// Note: unlike most extension params in this header, this one carries no
// nPortIndex field.
struct PrependSPSPPSToIDRFramesParams {
    OMX_U32 nSize;             // size of this struct (standard OMX header field)
    OMX_VERSIONTYPE nVersion;  // OMX spec version (standard OMX header field)
    OMX_BOOL bEnable;          // OMX_TRUE => prefix future IDR frames with SPS/PPS
};
|
||||
|
||||
// A pointer to this struct is passed to OMX_GetParameter when the extension
|
||||
// index for the 'OMX.google.android.index.describeColorFormat'
|
||||
// extension is given. This method can be called from any component state
|
||||
// other than invalid. The color-format, frame width/height, and stride/
|
||||
// slice-height parameters are ones that are associated with a raw video
|
||||
// port (input or output), but the stride/slice height parameters may be
|
||||
// incorrect. bUsingNativeBuffers is OMX_TRUE if native android buffers will
|
||||
// be used (while specifying this color format).
|
||||
//
|
||||
// The component shall fill out the MediaImage structure that
|
||||
// corresponds to the described raw video format, and the potentially corrected
|
||||
// stride and slice-height info.
|
||||
//
|
||||
// The behavior is slightly different if bUsingNativeBuffers is OMX_TRUE,
|
||||
// though most implementations can ignore this difference. When using native buffers,
|
||||
// the component may change the configured color format to an optimized format.
|
||||
// Additionally, when allocating these buffers for flexible usecase, the framework
|
||||
// will set the SW_READ/WRITE_OFTEN usage flags. In this case (if bUsingNativeBuffers
|
||||
// is OMX_TRUE), the component shall fill out the MediaImage information for the
|
||||
// scenario when these SW-readable/writable buffers are locked using gralloc_lock.
|
||||
// Note, that these buffers may also be locked using gralloc_lock_ycbcr, which must
|
||||
// be supported for vendor-specific formats.
|
||||
//
|
||||
// For non-YUV packed planar/semiplanar image formats, or if bUsingNativeBuffers
|
||||
// is OMX_TRUE and the component does not support this color format with native
|
||||
// buffers, the component shall set mNumPlanes to 0, and mType to MEDIA_IMAGE_TYPE_UNKNOWN.
|
||||
|
||||
// @deprecated: use DescribeColorFormat2Params
|
||||
struct DescribeColorFormat2Params;

// @deprecated: use DescribeColorFormat2Params
struct DescribeColorFormatParams {
    OMX_U32 nSize;             // size of this struct (standard OMX header field)
    OMX_VERSIONTYPE nVersion;  // OMX spec version (standard OMX header field)
    // input: parameters from OMX_VIDEO_PORTDEFINITIONTYPE
    OMX_COLOR_FORMATTYPE eColorFormat;
    OMX_U32 nFrameWidth;
    OMX_U32 nFrameHeight;
    OMX_U32 nStride;
    OMX_U32 nSliceHeight;
    OMX_BOOL bUsingNativeBuffers;

    // output: fill out the MediaImage fields
    MediaImage sMediaImage;

    // Converting constructor from the v2 params; explicit so the downgrade
    // never happens implicitly.
    explicit DescribeColorFormatParams(const DescribeColorFormat2Params&); // for internal use only
};
|
||||
|
||||
// A pointer to this struct is passed to OMX_GetParameter when the extension
|
||||
// index for the 'OMX.google.android.index.describeColorFormat2'
|
||||
// extension is given. This is operationally the same as DescribeColorFormatParams
|
||||
// but can be used for HDR and RGBA/YUVA formats.
|
||||
struct DescribeColorFormat2Params {
    OMX_U32 nSize;             // size of this struct (standard OMX header field)
    OMX_VERSIONTYPE nVersion;  // OMX spec version (standard OMX header field)
    // input: parameters from OMX_VIDEO_PORTDEFINITIONTYPE
    OMX_COLOR_FORMATTYPE eColorFormat;
    OMX_U32 nFrameWidth;
    OMX_U32 nFrameHeight;
    OMX_U32 nStride;
    OMX_U32 nSliceHeight;
    OMX_BOOL bUsingNativeBuffers;

    // output: fill out the MediaImage2 fields
    MediaImage2 sMediaImage;

    // Populates this struct from the deprecated v1 params.
    void initFromV1(const DescribeColorFormatParams&); // for internal use only
};
|
||||
|
||||
// A pointer to this struct is passed to OMX_SetParameter or OMX_GetParameter
|
||||
// when the extension index for the
|
||||
// 'OMX.google.android.index.configureVideoTunnelMode' extension is given.
|
||||
// If the extension is supported then tunneled playback mode should be supported
|
||||
// by the codec. If bTunneled is set to OMX_TRUE then the video decoder should
|
||||
// operate in "tunneled" mode and output its decoded frames directly to the
|
||||
// sink. In this case nAudioHwSync is the HW SYNC ID of the audio HAL Output
|
||||
// stream to sync the video with. If bTunneled is set to OMX_FALSE, "tunneled"
|
||||
// mode should be disabled and nAudioHwSync should be ignored.
|
||||
// OMX_GetParameter is used to query tunneling configuration. bTunneled should
|
||||
// return whether decoder is operating in tunneled mode, and if it is,
|
||||
// pSidebandWindow should contain the codec allocated sideband window handle.
|
||||
struct ConfigureVideoTunnelModeParams {
    OMX_U32 nSize;              // IN
    OMX_VERSIONTYPE nVersion;   // IN
    OMX_U32 nPortIndex;         // IN
    OMX_BOOL bTunneled;         // IN/OUT: enable request / current tunneled state
    OMX_U32 nAudioHwSync;       // IN: HW SYNC ID of the audio HAL output stream
    OMX_PTR pSidebandWindow;    // OUT: codec-allocated sideband window handle
};
|
||||
|
||||
// Color space description (aspects) parameters.
|
||||
// This is passed via OMX_SetConfig or OMX_GetConfig to video encoders and decoders when the
|
||||
// 'OMX.google.android.index.describeColorAspects' extension is given. Component SHALL behave
|
||||
// as described below if it supports this extension.
|
||||
//
|
||||
// bDataSpaceChanged and bRequestingDataSpace is assumed to be OMX_FALSE unless noted otherwise.
|
||||
//
|
||||
// VIDEO ENCODERS: the framework uses OMX_SetConfig to specify color aspects of the coded video.
|
||||
// This may happen:
|
||||
// a) before the component transitions to idle state
|
||||
// b) before the input frame is sent via OMX_EmptyThisBuffer in executing state
|
||||
// c) during execution, just before an input frame with a different color aspect information
|
||||
// is sent.
|
||||
//
|
||||
// The framework also uses OMX_GetConfig to
|
||||
// d) verify the color aspects that will be written to the stream
|
||||
// e) (optional) verify the color aspects that should be reported to the container for a
|
||||
// given dataspace/pixelformat received
|
||||
//
|
||||
// 1. Encoders SHOULD maintain an internal color aspect state, initialized to Unspecified values.
|
||||
// This represents the values that will be written into the bitstream.
|
||||
// 2. Upon OMX_SetConfig, they SHOULD update their internal state to the aspects received
|
||||
// (including Unspecified values). For specific aspect values that are not supported by the
|
||||
// codec standard, encoders SHOULD substitute Unspecified values; or they MAY use a suitable
|
||||
// alternative (e.g. to suggest the use of BT.709 EOTF instead of SMPTE 240M.)
|
||||
// 3. OMX_GetConfig SHALL return the internal state (values that will be written).
|
||||
// 4. OMX_SetConfig SHALL always succeed before receiving the first frame. It MAY fail afterwards,
|
||||
// but only if the configured values would change AND the component does not support updating the
|
||||
// color information to those values mid-stream. If component supports updating a portion of
|
||||
// the color information, those values should be updated in the internal state, and OMX_SetConfig
|
||||
// SHALL succeed. Otherwise, the internal state SHALL remain intact and OMX_SetConfig SHALL fail
|
||||
// with OMX_ErrorUnsupportedSettings.
|
||||
// 5. When the framework receives an input frame with an unexpected dataspace, it will query
|
||||
// encoders for the color aspects that should be reported to the container using OMX_GetConfig
|
||||
// with bDataSpaceChanged set to OMX_TRUE, and nPixelFormat/nDataSpace containing the new
|
||||
// format/dataspace values. This allows vendors to use extended dataspace during capture and
|
||||
// composition (e.g. screenrecord) - while performing color-space conversion inside the encoder -
|
||||
// and encode and report a different color-space information in the bitstream/container.
|
||||
// sColorAspects contains the requested color aspects by the client for reference, which may
|
||||
// include aspects not supported by the encoding. This is used together with guidance for
|
||||
// dataspace selection; see 6. below.
|
||||
//
|
||||
// VIDEO DECODERS: the framework uses OMX_SetConfig to specify the default color aspects to use
|
||||
// for the video.
|
||||
// This may happen:
|
||||
// a) before the component transitions to idle state
|
||||
// b) during execution, when the resolution or the default color aspects change.
|
||||
//
|
||||
// The framework also uses OMX_GetConfig to
|
||||
// c) get the final color aspects reported by the coded bitstream after taking the default values
|
||||
// into account.
|
||||
//
|
||||
// 1. Decoders should maintain two color aspect states - the default state as reported by the
|
||||
// framework, and the coded state as reported by the bitstream - as each state can change
|
||||
// independently from the other.
|
||||
// 2. Upon OMX_SetConfig, it SHALL update its default state regardless of whether such aspects
|
||||
// could be supplied by the component bitstream. (E.g. it should blindly support all enumeration
|
||||
// values, even unknown ones, and the Other value). This SHALL always succeed.
|
||||
// 3. Upon OMX_GetConfig, the component SHALL return the final color aspects by replacing
|
||||
// Unspecified coded values with the default values. This SHALL always succeed.
|
||||
// 4. Whenever the component processes color aspect information in the bitstream even with an
|
||||
// Unspecified value, it SHOULD update its internal coded state with that information just before
|
||||
// the frame with the new information would be outputted, and the component SHALL signal an
|
||||
// OMX_EventPortSettingsChanged event with data2 set to the extension index.
|
||||
// NOTE: Component SHOULD NOT signal a separate event purely for color aspect change, if it occurs
|
||||
// together with a port definition (e.g. size) or crop change.
|
||||
// 5. If the aspects a component encounters in the bitstream cannot be represented with enumeration
|
||||
// values as defined below, the component SHALL set those aspects to Other. Restricted values in
|
||||
// the bitstream SHALL be treated as defined by the relevant bitstream specifications/standards,
|
||||
// or as Unspecified, if not defined.
|
||||
//
|
||||
// BOTH DECODERS AND ENCODERS: the framework uses OMX_GetConfig during idle and executing state to
|
||||
// f) (optional) get guidance for the dataspace to set for given color aspects, by setting
|
||||
// bRequestingDataSpace to OMX_TRUE. The component SHALL return OMX_ErrorUnsupportedSettings
|
||||
// IF it does not support this request.
|
||||
//
|
||||
// 6. This is an information request that can happen at any time, independent of the normal
|
||||
// configuration process. This allows vendors to use extended dataspace during capture, playback
|
||||
// and composition - while performing color-space conversion inside the component. Component
|
||||
// SHALL set the desired dataspace into nDataSpace. Otherwise, it SHALL return
|
||||
// OMX_ErrorUnsupportedSettings to let the framework choose a nearby standard dataspace.
|
||||
//
|
||||
// 6.a. For encoders, this query happens before the first frame is received using surface encoding.
|
||||
// This allows the encoder to use a specific dataspace for the color aspects (e.g. because the
|
||||
// device supports additional dataspaces, or because it wants to perform color-space extension
|
||||
// to facilitate a more optimal rendering/capture pipeline.).
|
||||
//
|
||||
// 6.b. For decoders, this query happens before the first frame, and every time the color aspects
|
||||
// change, while using surface buffers. This allows the decoder to use a specific dataspace for
|
||||
// the color aspects (e.g. because the device supports additional dataspaces, or because it wants
|
||||
// to perform color-space extension by inline color-space conversion to facilitate a more optimal
|
||||
// rendering pipeline.).
|
||||
//
|
||||
// Note: the size of sAspects may increase in the future by additional fields.
|
||||
// Implementations SHOULD NOT require a certain size.
|
||||
struct DescribeColorAspectsParams {
|
||||
OMX_U32 nSize; // IN
|
||||
OMX_VERSIONTYPE nVersion; // IN
|
||||
OMX_U32 nPortIndex; // IN
|
||||
OMX_BOOL bRequestingDataSpace; // IN
|
||||
OMX_BOOL bDataSpaceChanged; // IN
|
||||
OMX_U32 nPixelFormat; // IN
|
||||
OMX_U32 nDataSpace; // OUT
|
||||
ColorAspects sAspects; // IN/OUT
|
||||
};
|
||||
|
||||
// HDR color description parameters.
|
||||
// This is passed via OMX_SetConfig or OMX_GetConfig to video encoders and decoders when the
|
||||
// 'OMX.google.android.index.describeHDRStaticInfo' extension is given and an HDR stream
|
||||
// is detected. Component SHALL behave as described below if it supports this extension.
|
||||
//
|
||||
// Currently, only Static Metadata Descriptor Type 1 support is required.
|
||||
//
|
||||
// VIDEO ENCODERS: the framework uses OMX_SetConfig to specify the HDR static information of the
|
||||
// coded video.
|
||||
// This may happen:
|
||||
// a) before the component transitions to idle state
|
||||
// b) before the input frame is sent via OMX_EmptyThisBuffer in executing state
|
||||
// c) during execution, just before an input frame with a different HDR static
|
||||
// information is sent.
|
||||
//
|
||||
// The framework also uses OMX_GetConfig to
|
||||
// d) verify the HDR static information that will be written to the stream.
|
||||
//
|
||||
// 1. Encoders SHOULD maintain an internal HDR static info data, initialized to Unspecified values.
|
||||
// This represents the values that will be written into the bitstream.
|
||||
// 2. Upon OMX_SetConfig, they SHOULD update their internal state to the info received
|
||||
// (including Unspecified values). For specific parameters that are not supported by the
|
||||
// codec standard, encoders SHOULD substitute Unspecified values. NOTE: no other substitution
|
||||
// is allowed.
|
||||
// 3. OMX_GetConfig SHALL return the internal state (values that will be written).
|
||||
// 4. OMX_SetConfig SHALL always succeed before receiving the first frame if the encoder is
|
||||
// configured into an HDR compatible profile. It MAY fail with OMX_ErrorUnsupportedSettings error
|
||||
// code if it is not configured into such a profile, OR if the configured values would change
|
||||
// AND the component does not support updating the HDR static information mid-stream. If the
|
||||
// component supports updating a portion of the information, those values should be updated in
|
||||
// the internal state, and OMX_SetConfig SHALL succeed. Otherwise, the internal state SHALL
|
||||
// remain intact.
|
||||
//
|
||||
// VIDEO DECODERS: the framework uses OMX_SetConfig to specify the default HDR static information
|
||||
// to use for the video.
|
||||
// a) This only happens if the client supplies this information, in which case it occurs before
|
||||
// the component transitions to idle state.
|
||||
// b) This may also happen subsequently if the default HDR static information changes.
|
||||
//
|
||||
// The framework also uses OMX_GetConfig to
|
||||
// c) get the final HDR static information reported by the coded bitstream after taking the
|
||||
// default values into account.
|
||||
//
|
||||
// 1. Decoders should maintain two HDR static information structures - the default values as
|
||||
// reported by the framework, and the coded values as reported by the bitstream - as each
|
||||
// structure can change independently from the other.
|
||||
// 2. Upon OMX_SetConfig, it SHALL update its default structure regardless of whether such static
|
||||
// parameters could be supplied by the component bitstream. (E.g. it should blindly support all
|
||||
// parameter values, even seemingly illegal ones). This SHALL always succeed.
|
||||
// Note: The descriptor ID used in sInfo may change in subsequent calls. (although for now only
|
||||
// Type 1 support is required.)
|
||||
// 3. Upon OMX_GetConfig, the component SHALL return the final HDR static information by replacing
|
||||
// Unspecified coded values with the default values. This SHALL always succeed. This may be
|
||||
// provided using any supported descriptor ID (currently only Type 1) with the goal of expressing
|
||||
// the most of the available static information.
|
||||
// 4. Whenever the component processes HDR static information in the bitstream even ones with
|
||||
// Unspecified parameters, it SHOULD update its internal coded structure with that information
|
||||
// just before the frame with the new information would be outputted, and the component SHALL
|
||||
// signal an OMX_EventPortSettingsChanged event with data2 set to the extension index.
|
||||
// NOTE: Component SHOULD NOT signal a separate event purely for HDR static info change, if it
|
||||
// occurs together with a port definition (e.g. size), color aspect or crop change.
|
||||
// 5. If certain parameters of the HDR static information encountered in the bitstream cannot be
|
||||
// represented using sInfo, the component SHALL use the closest representation.
|
||||
//
|
||||
// Note: the size of sInfo may increase in the future by supporting additional descriptor types.
|
||||
// Implementations SHOULD NOT require a certain size.
|
||||
struct DescribeHDRStaticInfoParams {
|
||||
OMX_U32 nSize; // IN
|
||||
OMX_VERSIONTYPE nVersion; // IN
|
||||
OMX_U32 nPortIndex; // IN
|
||||
HDRStaticInfo sInfo; // IN/OUT
|
||||
};
|
||||
|
||||
// HDR10+ metadata configuration.
|
||||
//
|
||||
// nParamSize: size of the storage starting at nValue (must be at least 1 and at most
|
||||
// MAX_HDR10PLUSINFO_SIZE). This field must not be modified by the component.
|
||||
// nParamSizeUsed: size of the actual HDR10+ metadata starting at nValue. For OMX_SetConfig,
|
||||
// it must not be modified by the component. For OMX_GetConfig, the component
|
||||
// should put the actual size of the retrieved config in this field (and in
|
||||
// case where nParamSize is smaller than nParamSizeUsed, the component should
|
||||
// still update nParamSizeUsed without actually copying the metadata to nValue).
|
||||
// nValue: storage of the HDR10+ metadata conforming to the user_data_registered_itu_t_t35()
|
||||
// syntax of SEI message for ST 2094-40.
|
||||
//
|
||||
// This is passed via OMX_SetConfig or OMX_GetConfig to video encoders and decoders when the
|
||||
// 'OMX.google.android.index.describeHDR10PlusInfo' extension is given. In general, this config
|
||||
// is associated with a particular frame. A typical sequence of usage is as follows:
|
||||
//
|
||||
// a) OMX_SetConfig associates the config with the next input buffer sent in OMX_EmptyThisBuffer
|
||||
// (input A);
|
||||
// b) The component sends OMX_EventConfigUpdate to notify the client that there is a config
|
||||
// update on the output port that is associated with the next output buffer that's about to
|
||||
// be sent via FillBufferDone callback (output A);
|
||||
// c) The client, upon receiving the OMX_EventConfigUpdate, calls OMX_GetConfig to retrieve
|
||||
// the config and associates it with output A.
|
||||
//
|
||||
// All config updates will be retrieved in the order reported, and the client is required to
|
||||
// call OMX_GetConfig for each OMX_EventConfigUpdate for this config. Note that the order of
|
||||
// OMX_EventConfigUpdate relative to FillBufferDone callback determines which output frame
|
||||
// the config should be associated with, the actual OMX_GetConfig for the config could happen
|
||||
// before or after the component calls the FillBufferDone callback.
|
||||
//
|
||||
// Depending on the video codec type (in particular, whether the codec uses in-band or out-of-
|
||||
// band HDR10+ metadata), the component shall behave as detailed below:
|
||||
//
|
||||
// VIDEO DECODERS:
|
||||
// 1) If the codec utilizes out-of-band HDR10+ metadata, the decoder must support the sequence
|
||||
// a) ~ c) outlined above;
|
||||
// 2) If the codec utilizes in-band HDR10+ metadata, OMX_SetConfig for this config should be
|
||||
// ignored (as the metadata is embedded in the input buffer), while the notification and
|
||||
// retrieval of the config on the output as outlined in b) & c) must be supported.
|
||||
//
|
||||
// VIDEO ENCODERS:
|
||||
// 1) If the codec utilizes out-of-band HDR10+ metadata, the encoder must support the sequence
|
||||
// a) ~ c) outlined above;
|
||||
// 2) If the codec utilizes in-band HDR10+ metadata, OMX_SetConfig for this config outlined in
|
||||
// a) must be supported. The notification as outlined in b) must not be sent, and the
|
||||
// retrieval of the config via OMX_GetConfig should be ignored (as the metadata is embedded
|
||||
// in the output buffer).
|
||||
|
||||
#define MAX_HDR10PLUSINFO_SIZE 1024
|
||||
struct DescribeHDR10PlusInfoParams {
|
||||
OMX_U32 nSize; // IN
|
||||
OMX_VERSIONTYPE nVersion; // IN
|
||||
OMX_U32 nPortIndex; // IN
|
||||
OMX_U32 nParamSize; // IN
|
||||
OMX_U32 nParamSizeUsed; // IN/OUT
|
||||
OMX_U8 nValue[1]; // IN/OUT
|
||||
};
|
||||
|
||||
} // namespace android
|
||||
|
||||
extern android::OMXPluginBase *createOMXPlugin();
|
||||
|
||||
#endif // HARDWARE_API_H_
|
||||
149
include/media/hardware/MetadataBufferType.h
Normal file
149
include/media/hardware/MetadataBufferType.h
Normal file
@@ -0,0 +1,149 @@
|
||||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef METADATA_BUFFER_TYPE_H
|
||||
#define METADATA_BUFFER_TYPE_H
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
namespace android {
|
||||
#endif
|
||||
|
||||
/*
|
||||
* MetadataBufferType defines the type of the metadata buffers that
|
||||
* can be passed to video encoder component for encoding, via Stagefright
|
||||
* media recording framework. To see how to work with the metadata buffers
|
||||
* in media recording framework, please consult HardwareAPI.h
|
||||
*
|
||||
* The creator of metadata buffers and video encoder share common knowledge
|
||||
* on what is actually being stored in these metadata buffers, and
|
||||
* how the information can be used by the video encoder component
|
||||
* to locate the actual pixel data as the source input for video
|
||||
* encoder, plus whatever other information that is necessary. Stagefright
|
||||
* media recording framework does not need to know anything specific about the
|
||||
* metadata buffers, except for receving each individual metadata buffer
|
||||
* as the source input, making a copy of the metadata buffer, and passing the
|
||||
* copy via OpenMAX API to the video encoder component.
|
||||
*
|
||||
* The creator of the metadata buffers must ensure that the first
|
||||
* 4 bytes in every metadata buffer indicates its buffer type,
|
||||
* and the rest of the metadata buffer contains the
|
||||
* actual metadata information. When a video encoder component receives
|
||||
* a metadata buffer, it uses the first 4 bytes in that buffer to find
|
||||
* out the type of the metadata buffer, and takes action appropriate
|
||||
* to that type of metadata buffers (for instance, locate the actual
|
||||
* pixel data input and then encoding the input data to produce a
|
||||
* compressed output buffer).
|
||||
*
|
||||
* The following shows the layout of a metadata buffer,
|
||||
* where buffer type is a 4-byte field of MetadataBufferType,
|
||||
* and the payload is the metadata information.
|
||||
*
|
||||
* --------------------------------------------------------------
|
||||
* | buffer type | payload |
|
||||
* --------------------------------------------------------------
|
||||
*
|
||||
*/
|
||||
// Discriminator stored in the first 4 bytes of every metadata buffer; the
// remainder of the buffer is the type-specific payload described below.
typedef enum {

    /*
     * kMetadataBufferTypeCameraSource is used to indicate that
     * the source of the metadata buffer is the camera component.
     */
    kMetadataBufferTypeCameraSource = 0,

    /*
     * kMetadataBufferTypeGrallocSource is used to indicate that
     * the payload of the metadata buffers can be interpreted as
     * a buffer_handle_t.
     * So in this case, the metadata that the encoder receives
     * will have a byte stream that consists of two parts:
     * 1. First, there is an integer indicating that it is a GRAlloc
     * source (kMetadataBufferTypeGrallocSource)
     * 2. This is followed by the buffer_handle_t that is a handle to the
     * GRalloc buffer. The encoder needs to interpret this GRalloc handle
     * and encode the frames.
     * --------------------------------------------------------------
     * |  kMetadataBufferTypeGrallocSource | buffer_handle_t buffer |
     * --------------------------------------------------------------
     *
     * See the VideoGrallocMetadata structure.
     */
    kMetadataBufferTypeGrallocSource = 1,

    /*
     * kMetadataBufferTypeANWBuffer is used to indicate that
     * the payload of the metadata buffers can be interpreted as
     * an ANativeWindowBuffer, and that a fence is provided.
     *
     * In this case, the metadata will have a byte stream that consists of three parts:
     * 1. First, there is an integer indicating that the metadata
     * contains an ANativeWindowBuffer (kMetadataBufferTypeANWBuffer)
     * 2. This is followed by the pointer to the ANativeWindowBuffer.
     * Codec must not free this buffer as it does not actually own this buffer.
     * 3. Finally, there is an integer containing a fence file descriptor.
     * The codec must wait on the fence before encoding or decoding into this
     * buffer. When the buffer is returned, codec must replace this file descriptor
     * with a new fence, that will be waited on before the buffer is replaced
     * (encoder) or read (decoder).
     * ---------------------------------
     * |  kMetadataBufferTypeANWBuffer |
     * ---------------------------------
     * |  ANativeWindowBuffer *buffer  |
     * ---------------------------------
     * |  int fenceFd                  |
     * ---------------------------------
     *
     * See the VideoNativeMetadata structure.
     */
    kMetadataBufferTypeANWBuffer = 2,

    /*
     * kMetadataBufferTypeNativeHandleSource is used to indicate that
     * the payload of the metadata buffers can be interpreted as
     * a native_handle_t.
     *
     * In this case, the metadata that the encoder receives
     * will have a byte stream that consists of two parts:
     * 1. First, there is an integer indicating that the metadata contains a
     * native handle (kMetadataBufferTypeNativeHandleSource).
     * 2. This is followed by a pointer to native_handle_t. The encoder needs
     * to interpret this native handle and encode the frame. The encoder must
     * not free this native handle as it does not actually own this native
     * handle. The handle will be freed after the encoder releases the buffer
     * back to camera.
     * ----------------------------------------------------------------
     * |  kMetadataBufferTypeNativeHandleSource | native_handle_t* nh |
     * ----------------------------------------------------------------
     *
     * See the VideoNativeHandleMetadata structure.
     */
    kMetadataBufferTypeNativeHandleSource = 3,

    /* This value is used by framework, but is never used inside a metadata buffer */
    kMetadataBufferTypeInvalid = -1,

    // Add more here...

} MetadataBufferType;
|
||||
|
||||
#ifdef __cplusplus
|
||||
} // namespace android
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif // METADATA_BUFFER_TYPE_H
|
||||
59
include/media/hardware/OMXPluginBase.h
Normal file
59
include/media/hardware/OMXPluginBase.h
Normal file
@@ -0,0 +1,59 @@
|
||||
/*
|
||||
* Copyright (C) 2009 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef OMX_PLUGIN_BASE_H_
|
||||
|
||||
#define OMX_PLUGIN_BASE_H_
|
||||
|
||||
#include <sys/types.h>
|
||||
|
||||
#include <OMX_Component.h>
|
||||
|
||||
#include <utils/String8.h>
|
||||
#include <utils/Vector.h>
|
||||
|
||||
namespace android {
|
||||
|
||||
struct OMXPluginBase {
|
||||
OMXPluginBase() {}
|
||||
virtual ~OMXPluginBase() {}
|
||||
|
||||
virtual OMX_ERRORTYPE makeComponentInstance(
|
||||
const char *name,
|
||||
const OMX_CALLBACKTYPE *callbacks,
|
||||
OMX_PTR appData,
|
||||
OMX_COMPONENTTYPE **component) = 0;
|
||||
|
||||
virtual OMX_ERRORTYPE destroyComponentInstance(
|
||||
OMX_COMPONENTTYPE *component) = 0;
|
||||
|
||||
virtual OMX_ERRORTYPE enumerateComponents(
|
||||
OMX_STRING name,
|
||||
size_t size,
|
||||
OMX_U32 index) = 0;
|
||||
|
||||
virtual OMX_ERRORTYPE getRolesOfComponent(
|
||||
const char *name,
|
||||
Vector<String8> *roles) = 0;
|
||||
|
||||
private:
|
||||
OMXPluginBase(const OMXPluginBase &);
|
||||
OMXPluginBase &operator=(const OMXPluginBase &);
|
||||
};
|
||||
|
||||
} // namespace android
|
||||
|
||||
#endif // OMX_PLUGIN_BASE_H_
|
||||
344
include/media/hardware/VideoAPI.h
Normal file
344
include/media/hardware/VideoAPI.h
Normal file
@@ -0,0 +1,344 @@
|
||||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_API_H_
|
||||
|
||||
#define VIDEO_API_H_
|
||||
|
||||
namespace android {
|
||||
|
||||
/**
 * Structure describing a media image (frame).
 * Currently only supporting YUV.
 * @deprecated Use MediaImage2 instead.
 */
struct MediaImage {
    enum Type {
        MEDIA_IMAGE_TYPE_UNKNOWN = 0,
        MEDIA_IMAGE_TYPE_YUV,
    };

    enum PlaneIndex {
        Y = 0,
        U,
        V,
        MAX_NUM_PLANES
    };

    Type mType;
    uint32_t mNumPlanes;   // number of planes
    uint32_t mWidth;       // width of largest plane (unpadded, as in nFrameWidth)
    uint32_t mHeight;      // height of largest plane (unpadded, as in nFrameHeight)
    uint32_t mBitDepth;    // useable bit depth

    // Per-plane memory layout.
    struct PlaneInfo {
        uint32_t mOffset;           // offset of first pixel of the plane in bytes
                                    // from buffer offset
        uint32_t mColInc;           // column increment in bytes
        uint32_t mRowInc;           // row increment in bytes
        uint32_t mHorizSubsampling; // subsampling compared to the largest plane
        uint32_t mVertSubsampling;  // subsampling compared to the largest plane
    };
    PlaneInfo mPlane[MAX_NUM_PLANES];
};
|
||||
|
||||
/**
|
||||
* Structure describing a media image (frame)
|
||||
*/
|
||||
struct __attribute__ ((__packed__)) MediaImage2 {
|
||||
enum Type : uint32_t {
|
||||
MEDIA_IMAGE_TYPE_UNKNOWN = 0,
|
||||
MEDIA_IMAGE_TYPE_YUV,
|
||||
MEDIA_IMAGE_TYPE_YUVA,
|
||||
MEDIA_IMAGE_TYPE_RGB,
|
||||
MEDIA_IMAGE_TYPE_RGBA,
|
||||
MEDIA_IMAGE_TYPE_Y,
|
||||
};
|
||||
|
||||
enum PlaneIndex : uint32_t {
|
||||
Y = 0,
|
||||
U = 1,
|
||||
V = 2,
|
||||
R = 0,
|
||||
G = 1,
|
||||
B = 2,
|
||||
A = 3,
|
||||
MAX_NUM_PLANES = 4,
|
||||
};
|
||||
|
||||
Type mType;
|
||||
uint32_t mNumPlanes; // number of planes
|
||||
uint32_t mWidth; // width of largest plane (unpadded, as in nFrameWidth)
|
||||
uint32_t mHeight; // height of largest plane (unpadded, as in nFrameHeight)
|
||||
uint32_t mBitDepth; // useable bit depth (always MSB)
|
||||
uint32_t mBitDepthAllocated; // bits per component (must be 8 or 16)
|
||||
|
||||
struct __attribute__ ((__packed__)) PlaneInfo {
|
||||
uint32_t mOffset; // offset of first pixel of the plane in bytes
|
||||
// from buffer offset
|
||||
int32_t mColInc; // column increment in bytes
|
||||
int32_t mRowInc; // row increment in bytes
|
||||
uint32_t mHorizSubsampling; // subsampling compared to the largest plane
|
||||
uint32_t mVertSubsampling; // subsampling compared to the largest plane
|
||||
};
|
||||
PlaneInfo mPlane[MAX_NUM_PLANES];
|
||||
|
||||
void initFromV1(const MediaImage&); // for internal use only
|
||||
};
|
||||
|
||||
static_assert(sizeof(MediaImage2::PlaneInfo) == 20, "wrong struct size");
|
||||
static_assert(sizeof(MediaImage2) == 104, "wrong struct size");
|
||||
|
||||
/**
 * Aspects of color.
 */

// NOTE: this structure is expected to grow in the future if new color aspects are
// added to codec bitstreams. OMX component should not require a specific nSize
// though could verify that nSize is at least the size of the structure at the
// time of implementation. All new fields will be added at the end of the structure
// ensuring backward compatibility.
struct __attribute__ ((__packed__, aligned(alignof(uint32_t)))) ColorAspects {
    // this is in sync with the range values in graphics.h
    enum Range : uint32_t {
        RangeUnspecified,
        RangeFull,
        RangeLimited,
        RangeOther = 0xff,
    };

    enum Primaries : uint32_t {
        PrimariesUnspecified,
        PrimariesBT709_5,       // Rec.ITU-R BT.709-5 or equivalent
        PrimariesBT470_6M,      // Rec.ITU-R BT.470-6 System M or equivalent
        PrimariesBT601_6_625,   // Rec.ITU-R BT.601-6 625 or equivalent
        PrimariesBT601_6_525,   // Rec.ITU-R BT.601-6 525 or equivalent
        PrimariesGenericFilm,   // Generic Film
        PrimariesBT2020,        // Rec.ITU-R BT.2020 or equivalent
        PrimariesOther = 0xff,
    };

    // this partially in sync with the transfer values in graphics.h prior to the transfers
    // unlikely to be required by Android section
    enum Transfer : uint32_t {
        TransferUnspecified,
        TransferLinear,         // Linear transfer characteristics
        TransferSRGB,           // sRGB or equivalent
        TransferSMPTE170M,      // SMPTE 170M or equivalent (e.g. BT.601/709/2020)
        TransferGamma22,        // Assumed display gamma 2.2
        TransferGamma28,        // Assumed display gamma 2.8
        TransferST2084,         // SMPTE ST 2084 for 10/12/14/16 bit systems
        TransferHLG,            // ARIB STD-B67 hybrid-log-gamma

        // transfers unlikely to be required by Android
        TransferSMPTE240M = 0x40, // SMPTE 240M
        TransferXvYCC,            // IEC 61966-2-4
        TransferBT1361,           // Rec.ITU-R BT.1361 extended gamut
        TransferST428,            // SMPTE ST 428-1
        TransferOther = 0xff,
    };

    enum MatrixCoeffs : uint32_t {
        MatrixUnspecified,
        MatrixBT709_5,          // Rec.ITU-R BT.709-5 or equivalent
        MatrixBT470_6M,         // KR=0.30, KB=0.11 or equivalent
        MatrixBT601_6,          // Rec.ITU-R BT.601-6 625 or equivalent
        MatrixSMPTE240M,        // SMPTE 240M or equivalent
        MatrixBT2020,           // Rec.ITU-R BT.2020 non-constant luminance
        MatrixBT2020Constant,   // Rec.ITU-R BT.2020 constant luminance
        MatrixOther = 0xff,
    };

    // this is in sync with the standard values in graphics.h
    enum Standard : uint32_t {
        StandardUnspecified,
        StandardBT709,                  // PrimariesBT709_5 and MatrixBT709_5
        StandardBT601_625,              // PrimariesBT601_6_625 and MatrixBT601_6
        StandardBT601_625_Unadjusted,   // PrimariesBT601_6_625 and KR=0.222, KB=0.071
        StandardBT601_525,              // PrimariesBT601_6_525 and MatrixBT601_6
        StandardBT601_525_Unadjusted,   // PrimariesBT601_6_525 and MatrixSMPTE240M
        StandardBT2020,                 // PrimariesBT2020 and MatrixBT2020
        StandardBT2020Constant,         // PrimariesBT2020 and MatrixBT2020Constant
        StandardBT470M,                 // PrimariesBT470_6M and MatrixBT470_6M
        StandardFilm,                   // PrimariesGenericFilm and KR=0.253, KB=0.068
        StandardOther = 0xff,
    };

    Range mRange;                // IN/OUT
    Primaries mPrimaries;        // IN/OUT
    Transfer mTransfer;          // IN/OUT
    MatrixCoeffs mMatrixCoeffs;  // IN/OUT
};

// Lock in the wire-compatible layout.
static_assert(sizeof(ColorAspects) == 16, "wrong struct size");
|
||||
|
||||
/**
 * HDR Metadata.
 */

// HDR Static Metadata Descriptor as defined by CTA-861-3.
struct __attribute__ ((__packed__)) HDRStaticInfo {
    // Static_Metadata_Descriptor_ID
    enum ID : uint8_t {
        kType1 = 0, // Static Metadata Type 1
    } mID;

    // Chromaticity coordinate, in units of 0.00002 (per CTA-861-3).
    struct __attribute__ ((__packed__)) Primaries1 {
        uint16_t x;
        uint16_t y;
    };

    // Static Metadata Descriptor Type 1
    struct __attribute__ ((__packed__)) Type1 {
        Primaries1 mR;                      // display primary 0
        Primaries1 mG;                      // display primary 1
        Primaries1 mB;                      // display primary 2
        Primaries1 mW;                      // white point
        uint16_t mMaxDisplayLuminance;      // in cd/m^2
        uint16_t mMinDisplayLuminance;      // in 0.0001 cd/m^2
        uint16_t mMaxContentLightLevel;     // in cd/m^2
        uint16_t mMaxFrameAverageLightLevel; // in cd/m^2
    };

    // Descriptor payload, discriminated by mID.
    union {
        Type1 sType1;
    };
};

// Lock in the packed wire-compatible layout.
static_assert(sizeof(HDRStaticInfo::Primaries1) == 4, "wrong struct size");
static_assert(sizeof(HDRStaticInfo::Type1) == 24, "wrong struct size");
static_assert(sizeof(HDRStaticInfo) == 25, "wrong struct size");
|
||||
|
||||
#ifdef STRINGIFY_ENUMS
|
||||
|
||||
// Human-readable name for a MediaImage::Type; |def| for unrecognized values.
inline static const char *asString(MediaImage::Type i, const char *def = "??") {
    switch (i) {
        case MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN:
            return "Unknown";
        case MediaImage::MEDIA_IMAGE_TYPE_YUV:
            return "YUV";
        default:
            return def;
    }
}
|
||||
|
||||
// Human-readable name for a MediaImage::PlaneIndex; |def| for unrecognized values.
inline static const char *asString(MediaImage::PlaneIndex i, const char *def = "??") {
    switch (i) {
        case MediaImage::Y:
            return "Y";
        case MediaImage::U:
            return "U";
        case MediaImage::V:
            return "V";
        default:
            return def;
    }
}
|
||||
|
||||
// Human-readable name for a MediaImage2::Type; |def| for unrecognized values.
inline static const char *asString(MediaImage2::Type i, const char *def = "??") {
    switch (i) {
        case MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN:
            return "Unknown";
        case MediaImage2::MEDIA_IMAGE_TYPE_YUV:
            return "YUV";
        case MediaImage2::MEDIA_IMAGE_TYPE_YUVA:
            return "YUVA";
        case MediaImage2::MEDIA_IMAGE_TYPE_RGB:
            return "RGB";
        case MediaImage2::MEDIA_IMAGE_TYPE_RGBA:
            return "RGBA";
        case MediaImage2::MEDIA_IMAGE_TYPE_Y:
            return "Y";
        default:
            return def;
    }
}
|
||||
|
||||
// Single-character name for plane |i| of an image of type |j| (e.g. 'U' for
// plane 1 of a YUV image); |def| for unknown types or out-of-range planes.
inline static char asChar2(
        MediaImage2::PlaneIndex i, MediaImage2::Type j, char def = '?') {
    const char *names = asString(j, NULL);
    if (j != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN && names != NULL && i < strlen(names)) {
        return names[i];
    }
    return def;  // unknown image type or plane index past the name string
}
|
||||
|
||||
// Returns a human-readable name for a ColorAspects::Range value, or |def| if
// the value is not one of the known constants.
inline static const char *asString(ColorAspects::Range i, const char *def = "??") {
    if (i == ColorAspects::RangeUnspecified) {
        return "Unspecified";
    }
    if (i == ColorAspects::RangeFull) {
        return "Full";
    }
    if (i == ColorAspects::RangeLimited) {
        return "Limited";
    }
    if (i == ColorAspects::RangeOther) {
        return "Other";
    }
    return def;
}
|
||||
|
||||
// Returns a human-readable name for a ColorAspects::Primaries value, or |def|
// if the value is not one of the known constants.
inline static const char *asString(ColorAspects::Primaries i, const char *def = "??") {
    const char *name = def;
    switch (i) {
        case ColorAspects::PrimariesUnspecified: name = "Unspecified"; break;
        case ColorAspects::PrimariesBT709_5:     name = "BT709_5";     break;
        case ColorAspects::PrimariesBT470_6M:    name = "BT470_6M";    break;
        case ColorAspects::PrimariesBT601_6_625: name = "BT601_6_625"; break;
        case ColorAspects::PrimariesBT601_6_525: name = "BT601_6_525"; break;
        case ColorAspects::PrimariesGenericFilm: name = "GenericFilm"; break;
        case ColorAspects::PrimariesBT2020:      name = "BT2020";      break;
        case ColorAspects::PrimariesOther:       name = "Other";       break;
        default: break;  // leave |def| in place for unrecognized values
    }
    return name;
}
|
||||
|
||||
// Returns a human-readable name for a ColorAspects::Transfer value, or |def|
// if the value is not one of the known constants.
inline static const char *asString(ColorAspects::Transfer i, const char *def = "??") {
    const char *name = def;
    switch (i) {
        case ColorAspects::TransferUnspecified: name = "Unspecified"; break;
        case ColorAspects::TransferLinear:      name = "Linear";      break;
        case ColorAspects::TransferSRGB:        name = "SRGB";        break;
        case ColorAspects::TransferSMPTE170M:   name = "SMPTE170M";   break;
        case ColorAspects::TransferGamma22:     name = "Gamma22";     break;
        case ColorAspects::TransferGamma28:     name = "Gamma28";     break;
        case ColorAspects::TransferST2084:      name = "ST2084";      break;
        case ColorAspects::TransferHLG:         name = "HLG";         break;
        case ColorAspects::TransferSMPTE240M:   name = "SMPTE240M";   break;
        case ColorAspects::TransferXvYCC:       name = "XvYCC";       break;
        case ColorAspects::TransferBT1361:      name = "BT1361";      break;
        case ColorAspects::TransferST428:       name = "ST428";       break;
        case ColorAspects::TransferOther:       name = "Other";       break;
        default: break;  // leave |def| in place for unrecognized values
    }
    return name;
}
|
||||
|
||||
// Returns a human-readable name for a ColorAspects::MatrixCoeffs value, or
// |def| if the value is not one of the known constants.
inline static const char *asString(ColorAspects::MatrixCoeffs i, const char *def = "??") {
    const char *name = def;
    switch (i) {
        case ColorAspects::MatrixUnspecified:    name = "Unspecified";    break;
        case ColorAspects::MatrixBT709_5:        name = "BT709_5";        break;
        case ColorAspects::MatrixBT470_6M:       name = "BT470_6M";       break;
        case ColorAspects::MatrixBT601_6:        name = "BT601_6";        break;
        case ColorAspects::MatrixSMPTE240M:      name = "SMPTE240M";      break;
        case ColorAspects::MatrixBT2020:         name = "BT2020";         break;
        case ColorAspects::MatrixBT2020Constant: name = "BT2020Constant"; break;
        case ColorAspects::MatrixOther:          name = "Other";          break;
        default: break;  // leave |def| in place for unrecognized values
    }
    return name;
}
|
||||
|
||||
// Returns a human-readable name for a ColorAspects::Standard value, or |def|
// if the value is not one of the known constants.
inline static const char *asString(ColorAspects::Standard i, const char *def = "??") {
    const char *name = def;
    switch (i) {
        case ColorAspects::StandardUnspecified:          name = "Unspecified";          break;
        case ColorAspects::StandardBT709:                name = "BT709";                break;
        case ColorAspects::StandardBT601_625:            name = "BT601_625";            break;
        case ColorAspects::StandardBT601_625_Unadjusted: name = "BT601_625_Unadjusted"; break;
        case ColorAspects::StandardBT601_525:            name = "BT601_525";            break;
        case ColorAspects::StandardBT601_525_Unadjusted: name = "BT601_525_Unadjusted"; break;
        case ColorAspects::StandardBT2020:               name = "BT2020";               break;
        case ColorAspects::StandardBT2020Constant:       name = "BT2020Constant";       break;
        case ColorAspects::StandardBT470M:               name = "BT470M";               break;
        case ColorAspects::StandardFilm:                 name = "Film";                 break;
        case ColorAspects::StandardOther:                name = "Other";                break;
        default: break;  // leave |def| in place for unrecognized values
    }
    return name;
}
|
||||
|
||||
#endif
|
||||
|
||||
} // namespace android
|
||||
|
||||
#endif // VIDEO_API_H_
|
||||
Reference in New Issue
Block a user