Asi*_*hid 3 android video-encoding video-streaming h.264 android-mediacodec
I am trying to stream video captured directly from an Android device's camera. So far I can grab each frame from the camera's onPreviewFrame(byte[] data, Camera camera) callback, encode it, and then successfully decode it and render it to a surface. I use Android's MediaCodec for both encoding and decoding. However, the video's colors and orientation are wrong [rotated 90 degrees]. After searching for a while I found this YV12toYUV420PackedSemiPlanar function. If I run it on the raw camera data before passing it to the encoder, the colors come out right, but the video is still rotated 90 degrees.
public static byte[] YV12toYUV420PackedSemiPlanar(final byte[] input, final int width, final int height) {
    final int frameSize = width * height;
    final int qFrameSize = frameSize / 4;
    byte[] output = new byte[input.length];
    System.arraycopy(input, 0, output, 0, frameSize); // Y
    for (int i = 0; i < qFrameSize; i++) {
        output[frameSize + i*2]     = input[frameSize + i + qFrameSize]; // Cb (U)
        output[frameSize + i*2 + 1] = input[frameSize + i];              // Cr (V)
    }
    return output;
}

Then, after calling YV12toYUV420PackedSemiPlanar, I applied this rotateYUV420Degree90 function. The orientation and colors now look right, but the output video is badly distorted.
private byte[] rotateYUV420Degree90(byte[] data, int imageWidth, int imageHeight)
{
    byte[] yuv = new byte[imageWidth * imageHeight * 3 / 2];
    // Rotate the Y luma
    int i = 0;
    for (int x = 0; x < imageWidth; x++)
    {
        for (int y = imageHeight - 1; y >= 0; y--)
        {
            yuv[i] = data[y * imageWidth + x];
            i++;
        }
    }
    // Rotate the U and V color components (assumes interleaved, semi-planar chroma)
    i = imageWidth * imageHeight * 3 / 2 - 1;
    for (int x = imageWidth - 1; x > 0; x = x - 2)
    {
        for (int y = 0; y < imageHeight / 2; y++)
        {
            yuv[i] = data[(imageWidth * imageHeight) + (y * imageWidth) + x];
            i--;
            yuv[i] = data[(imageWidth * imageHeight) + (y * imageWidth) + (x - 1)];
            i--;
        }
    }
    return yuv;
}
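Two things are worth checking here before suspecting the color math. First, this rotate helper indexes the chroma as interleaved pairs, so it must run on the semi-planar output of YV12toYUV420PackedSemiPlanar, not on the raw planar YV12 buffer. Second, a 90-degree rotation turns a 320x240 frame into a 240x320 one; if the encoder is still configured as 320x240, every row wraps and the picture shears, which would look exactly like heavy distortion. A minimal sketch of the order and sizes I would expect to work (this is my assumption, not code from the post; fmt is illustrative):

// Convert first, then rotate, then use the swapped size everywhere downstream.
byte[] nv12    = YV12toYUV420PackedSemiPlanar(data, 320, 240); // semi-planar, still 320x240
byte[] rotated = rotateYUV420Degree90(nv12, 320, 240);         // geometry is now 240x320
// ...and the encoder must be created for the rotated size:
MediaFormat fmt = MediaFormat.createVideoFormat("video/avc", 240, 320);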

Since I know very little about color formats and camera data, I cannot figure out what I am doing wrong. Here is my complete code; please take a look and help me find my mistake.
Thanks in advance.
public class MainActivity extends Activity implements SurfaceHolder.Callback {
    Camera mCamera;
    FileOutputStream fos;
    File mVideoFile;
    MediaCodec mMediaCodec;
    ByteBuffer[] inputBuffers;
    ByteBuffer[] outputBuffers;
    MySurfaceView cameraSurfaceView;
    SurfaceView decodedSurfaceView;
    LinearLayout ll;
    RelativeLayout rl;
    Button btn;
    boolean mPreviewRunning = false;
    boolean firstTime = true;
    boolean isRunning = false;
    public static final String ENCODING = "h264";
    private PlayerThread mPlayer = null;
    Handler handler = null;
    public static byte[] SPS = null;
    public static byte[] PPS = null;
    public static int frameID = 0;
    BlockingQueue<Frame> queue = new ArrayBlockingQueue<Frame>(100);

    private static class Frame
    {
        public int id;
        public byte[] frameData;

        public Frame(int id)
        {
            this.id = id;
        }
    }
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        ll = new LinearLayout(getApplicationContext());
        ll.setOrientation(LinearLayout.VERTICAL);
        cameraSurfaceView = new MySurfaceView(getApplicationContext());
        if(ENCODING.equalsIgnoreCase("h264"))
        {
            cameraSurfaceView.setLayoutParams(new android.widget.FrameLayout.LayoutParams(320, 240));
        }
        else if(ENCODING.equalsIgnoreCase("h263"))
        {
            cameraSurfaceView.setLayoutParams(new android.widget.FrameLayout.LayoutParams(352, 288));
        }
        ll.addView(cameraSurfaceView);
        initCodec();
        setContentView(ll);
    }
    @Override
    protected void onPause() {
        super.onPause();
        mPreviewRunning = false;
        if(cameraSurfaceView != null && cameraSurfaceView.isEnabled())
            cameraSurfaceView.setEnabled(false);
        cameraSurfaceView = null;
        if(mCamera != null)
        {
            mCamera.stopPreview();
            mCamera.release();
        }
        mMediaCodec.stop();
        mMediaCodec.release();
        mMediaCodec = null;
        // Kill the process only after the codec has been released
        System.exit(0);
    }
    private void initCodec() {
        MediaFormat mediaFormat = null;
        if(mMediaCodec != null)
        {
            mMediaCodec.stop();
            mMediaCodec.release();
            mMediaCodec = null;
        }
        if(ENCODING.equalsIgnoreCase("h264"))
        {
            mMediaCodec = MediaCodec.createEncoderByType("video/avc");
            mediaFormat = MediaFormat.createVideoFormat("video/avc", 320, 240);
        }
        else if(ENCODING.equalsIgnoreCase("h263"))
        {
            mMediaCodec = MediaCodec.createEncoderByType("video/3gpp");
            mediaFormat = MediaFormat.createVideoFormat("video/3gpp", 352, 288);
        }
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 125000);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
        // KEY_SAMPLE_RATE and KEY_CHANNEL_COUNT are audio keys; a video encoder ignores them.
        mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, 8000);
        mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
        try
        {
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
            mMediaCodec.configure(mediaFormat,
                    null,
                    null,
                    MediaCodec.CONFIGURE_FLAG_ENCODE);
            frameID = 0;
            mMediaCodec.start();
        }
        catch(Exception e)
        {
            Toast.makeText(getApplicationContext(), "mediaformat error", Toast.LENGTH_LONG).show();
            e.printStackTrace();
        }
    }
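    // Hard-coding COLOR_FormatYUV420SemiPlanar is a common source of wrong colors,
    // because not every encoder supports it; some only accept planar YUV420. A hedged
    // sketch of querying what the device's encoder actually supports (the helper name
    // selectColorFormat is mine, not from the post; the MediaCodecList calls are the
    // pre-API-21 style the rest of this code uses):
    private static int selectColorFormat(String mime) {
        for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (!info.isEncoder())
                continue;
            for (String type : info.getSupportedTypes()) {
                if (!type.equalsIgnoreCase(mime))
                    continue;
                MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(mime);
                for (int fmt : caps.colorFormats) {
                    if (fmt == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar
                            || fmt == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
                        return fmt; // first YUV420 flavor this encoder accepts
                    }
                }
            }
        }
        return -1; // nothing suitable; the device may need a vendor-specific format
    }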
    /**========================================================================*/
    /** Returns the starting index of the first occurrence of the match array in the
        source array, searching forward from startIndex; returns -1 if not found. */
    public static int find(byte[] source, byte[] match, int startIndex)
    {
        if(source == null || match == null)
        {
            Log.d("EncodeDecode", "ERROR in find : null");
            return -1;
        }
        if(source.length == 0 || match.length == 0)
        {
            Log.d("EncodeDecode", "ERROR in find : length 0");
            return -1;
        }
        int ret = -1;
        int spos = startIndex;
        int mpos = 0;
        byte m = match[mpos];
        for( ; spos < source.length; spos++ )
        {
            if(m == source[spos])
            {
                // starting match
                if(mpos == 0)
                    ret = spos;
                // finishing match
                else if(mpos == match.length - 1)
                    return ret;
                mpos++;
                m = match[mpos];
            }
            else
            {
                // Mismatch part-way through the pattern: back up so the search restarts
                // one byte after the failed starting position, otherwise overlapping
                // candidates (e.g. runs of 0x00 in start codes) are missed.
                if(mpos > 0)
                    spos = ret;
                ret = -1;
                mpos = 0;
                m = match[mpos];
            }
        }
        return ret;
    }
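    // Example use (a sketch; outData is whatever buffer is being scanned):
    //   byte[] spsHeader = {0x00, 0x00, 0x00, 0x01, 0x67}; // Annex-B start code + SPS NAL type
    //   int spsIndex = find(outData, spsHeader, 0);        // -1 when no SPS is present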
    /**========================================================================*/
    /** For H264 encoding, this function will retrieve SPS & PPS from the given data
        and will insert them into the global SPS & PPS arrays. */
    public static void getSPS_PPS(byte[] data, int startingIndex)
    {
        byte[] spsHeader = {0x00, 0x00, 0x00, 0x01, 0x67};
        byte[] ppsHeader = {0x00, 0x00, 0x00, 0x01, 0x68};
        byte[] frameHeader = {0x00, 0x00, 0x00, 0x01};
        int spsStartingIndex = -1;
        int nextFrameStartingIndex = -1;
        int ppsStartingIndex = -1;
        spsStartingIndex = find(data, spsHeader, startingIndex);
        Log.d("EncodeDecode", "spsStartingIndex: " + spsStartingIndex);
        if(spsStartingIndex >= 0)
        {
            nextFrameStartingIndex = find(data, frameHeader, spsStartingIndex + 1);
            int spsLength = 0;
            if(nextFrameStartingIndex >= 0)
                spsLength = nextFrameStartingIndex - spsStartingIndex;
            else
                spsLength = data.length - spsStartingIndex;
            if(spsLength > 0)
            {
                SPS = new byte[spsLength];
                System.arraycopy(data, spsStartingIndex, SPS, 0, spsLength);
            }
        }
        ppsStartingIndex = find(data, ppsHeader, startingIndex);
        Log.d("EncodeDecode", "ppsStartingIndex: " + ppsStartingIndex);
        if(ppsStartingIndex >= 0)
        {
            nextFrameStartingIndex = find(data, frameHeader, ppsStartingIndex + 1);
            int ppsLength = 0;
            if(nextFrameStartingIndex >= 0)
                ppsLength = nextFrameStartingIndex - ppsStartingIndex;
            else
                ppsLength = data.length - ppsStartingIndex;
            if(ppsLength > 0)
            {
                PPS = new byte[ppsLength];
                System.arraycopy(data, ppsStartingIndex, PPS, 0, ppsLength);
            }
        }
    }
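    // Aside (my assumption, not in the original post): scanning for start codes is not
    // strictly necessary, because MediaCodec marks its first output buffer with
    // BUFFER_FLAG_CODEC_CONFIG, and that buffer carries "00 00 00 01 SPS 00 00 00 01 PPS".
    // Inside the output loop of encode() below, it could be handled like this:
    //   if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
    //       byte[] configBytes = new byte[bufferInfo.size];
    //       outBuffer.get(configBytes);
    //       getSPS_PPS(configBytes, 0); // reuse the existing parser to split SPS and PPS
    //   }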
    /**========================================================================*/
    /** Prints the byte array in hex */
    private void printByteArray(byte[] array)
    {
        StringBuilder sb1 = new StringBuilder();
        for (byte b : array)
        {
            sb1.append(String.format("%02X ", b));
        }
        Log.d("EncodeDecode", sb1.toString());
    }
    public static byte[] YV12toYUV420PackedSemiPlanar(final byte[] input, final int width, final int height) {
        /*
         * COLOR_TI_FormatYUV420PackedSemiPlanar is NV12.
         * We convert by putting the corresponding U and V bytes together (interleaved).
         */
        final int frameSize = width * height;
        final int qFrameSize = frameSize / 4;
        byte[] output = new byte[input.length];
        System.arraycopy(input, 0, output, 0, frameSize); // Y
        for (int i = 0; i < qFrameSize; i++) {
            output[frameSize + i*2]     = input[frameSize + i + qFrameSize]; // Cb (U)
            output[frameSize + i*2 + 1] = input[frameSize + i];              // Cr (V)
        }
        return output;
    }
    private byte[] rotateYUV420Degree90(byte[] data, int imageWidth, int imageHeight)
    {
        byte[] yuv = new byte[imageWidth * imageHeight * 3 / 2];
        // Rotate the Y luma
        int i = 0;
        for (int x = 0; x < imageWidth; x++)
        {
            for (int y = imageHeight - 1; y >= 0; y--)
            {
                yuv[i] = data[y * imageWidth + x];
                i++;
            }
        }
        // Rotate the U and V color components (assumes interleaved, semi-planar chroma)
        i = imageWidth * imageHeight * 3 / 2 - 1;
        for (int x = imageWidth - 1; x > 0; x = x - 2)
        {
            for (int y = 0; y < imageHeight / 2; y++)
            {
                yuv[i] = data[(imageWidth * imageHeight) + (y * imageWidth) + x];
                i--;
                yuv[i] = data[(imageWidth * imageHeight) + (y * imageWidth) + (x - 1)];
                i--;
            }
        }
        return yuv;
    }
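    // For reference, here is what I would expect a clean 90-degree clockwise rotation
    // of an NV12 (semi-planar) frame to look like. The name rotateNV12Degree90 is mine
    // and this is a sketch rather than a drop-in replacement. The key difference from
    // the version above: the chroma pairs are walked in the same geometric order as the
    // luma, so the (U,V) byte order cannot end up swapped, and the output must be
    // treated as (height x width) afterwards.
    public static byte[] rotateNV12Degree90(byte[] input, int width, int height) {
        byte[] output = new byte[width * height * 3 / 2];
        int i = 0;
        // Y plane: source column x, read bottom to top, becomes destination row x.
        for (int x = 0; x < width; x++)
            for (int y = height - 1; y >= 0; y--)
                output[i++] = input[y * width + x];
        // Chroma plane: move whole (U,V) pairs the same way at half resolution.
        final int chromaBase = width * height;
        for (int x = 0; x < width; x += 2)
            for (int y = height / 2 - 1; y >= 0; y--) {
                output[i++] = input[chromaBase + y * width + x];     // U
                output[i++] = input[chromaBase + y * width + x + 1]; // V
            }
        return output; // the result is (height x width), e.g. 240x320 from a 320x240 input
    }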
    /**========================================================================*/
    /** When the camera receives a frame, this function is called with the frame data as
        its parameter. It encodes the given data and stores the encoded frame in the queue. */
    private void encode(byte[] data)
    {
        Log.d("EncodeDecode", "ENCODE FUNCTION CALLED");
        inputBuffers = mMediaCodec.getInputBuffers();
        outputBuffers = mMediaCodec.getOutputBuffers();
        int inputBufferIndex = mMediaCodec.dequeueInputBuffer(0);
        if (inputBufferIndex >= 0)
        {
            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            //inputBuffer.put(data);
            // color right, but rotated
            byte[] output = YV12toYUV420PackedSemiPlanar(data, 320, 240);
            inputBuffer.put(output);
            // color almost right, orientation ok but distorted
            /*byte[] output = YV12toYUV420PackedSemiPlanar(data,320,240);
            output = rotateYUV420Degree90(output,320,240);
            inputBuffer.put(output);*/
            // Queue exactly the number of bytes written; a real presentation timestamp
            // (e.g. System.nanoTime()/1000) is advisable instead of 0.
            mMediaCodec.queueInputBuffer(inputBufferIndex, 0 /* offset */, output.length, 0 /* timeUs */, 0);
            Log.d("EncodeDecode", "InputBuffer queued");
        }
        else
        {
            Log.d("EncodeDecode", "inputBufferIndex < 0, returning");
            return;
        }
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
        Log.d("EncodeDecode", "outputBufferIndex = " + outputBufferIndex);
        do
        {
            if (outputBufferIndex >= 0)
            {
                Frame frame = new Frame(frameID);
                ByteBuffer outBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                byte idrFrameType = 0x65;
                int dataLength = 0;
                outBuffer.get(outData);
                // If SPS & PPS are not ready yet, try to extract them from this buffer
                if(ENCODING.equalsIgnoreCase("h264") && ((SPS == null || SPS.length == 0) || (PPS == null || PPS.length == 0)))
                    getSPS_PPS(outData, 0);
                dataLength = outData.length;
                // If the frame is an IDR frame, prepend SPS & PPS to the actual frame data
                if(ENCODING.equalsIgnoreCase("h264") && outData[4] == idrFrameType)
                {
                    int totalDataLength = dataLength + SPS.length + PPS.length;
                    frame.frameData = new byte[totalDataLength];
                    System.arraycopy(SPS, 0, frame.frameData, 0, SPS.length);
                    System.arraycopy(PPS, 0, frame.frameData, SPS.length, PPS.length);
                    System.arraycopy(outData, 0, frame.frameData, SPS.length + PPS.length, dataLength);
                }
                else
                {
                    frame.frameData = new byte[dataLength];
                    System.arraycopy(outData, 0, frame.frameData, 0, dataLength);
                }
                // for testing
                Log.d("EncodeDecode", "Frame no :: " + frameID + " :: frameSize:: " + frame.frameData.length + " :: ");
                printByteArray(frame.frameData);
                // If the encoding type is h264, enqueue the frame only once SPS & PPS are ready;
                // if the encoding type is h263, enqueue it right away.
                if((ENCODING.equalsIgnoreCase("h264") && SPS != null && PPS != null && SPS.length != 0 && PPS.length != 0) || ENCODING.equalsIgnoreCase("h263"))
                {
                    Log.d("EncodeDecode", "enqueueing frame no: " + frameID);
                    try
                    {
                        queue.put(frame);
                    }
                    catch(InterruptedException e)
                    {
                        Log.e("EncodeDecode", "interrupted while waiting");
                        e.printStackTrace();
                    }
                    catch(NullPointerException e)
                    {
                        Log.e("EncodeDecode", "frame is null");
                        e.printStackTrace();
                    }
                    catch(IllegalArgumentException e)
                    {
                        Log.e("EncodeDecode", "problem inserting in the queue");
                        e.printStackTrace();
                    }
                    Log.d("EncodeDecode", "frame enqueued. queue size now: " + queue.size());
                    if(firstTime)
                    {
                        Log.d("EncodeDecode", "adding a surface to layout for decoder");
                        SurfaceView sv = new SurfaceView(getApplicationContext());
                        handler = new Handler();
                        sv.getHolder().addCallback(MainActivity.this);
                        sv.setLayoutParams(new android.widget.FrameLayout.LayoutParams(320, 240));
                        ll.addView(sv, 1);
                        MainActivity.this.setContentView(ll);
                        firstTime = false;
                    }
                }
                frameID++;
                mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
            else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
            {
                outputBuffers = mMediaCodec.getOutputBuffers();
                Log.e("EncodeDecode", "output buffer of encoder : info changed");
            }
            else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)
            {
                Log.e("EncodeDecode", "output buffer of encoder : format changed");
            }
            else
            {
                // This branch is normally MediaCodec.INFO_TRY_AGAIN_LATER (-1):
                // no output is ready yet, which ends the do-while loop below.
                Log.e("EncodeDecode", "unknown value of outputBufferIndex : " + outputBufferIndex);
                //printByteArray(data);
            }
        } while (outputBufferIndex >= 0);
    }
    private class MySurfaceView extends SurfaceView implements SurfaceHolder.Callback
    {
        SurfaceHolder holder;

        public MySurfaceView(Context context) {
            super(context);
            holder = this.getHolder();
            holder.addCallback(this);
        }

        public MySurfaceView(Context context, AttributeSet attrs) {
            super(context, attrs);
            holder = this.getHolder();
            holder.addCallback(this);
        }
        public void surfaceCreated(SurfaceHolder holder) {
            try
            {
                try
                {
                    if(mCamera == null)
                        mCamera = Camera.open();
                    mCamera.setDisplayOrientation(90);
                    Log.d("EncodeDecode", "Camera opened");
                }
                catch (Exception e)
                {
                    Log.d("EncodeDecode", "Camera open failed");
                    e.printStackTrace();
                }
                Camera.Parameters p = mCamera.getParameters();
                if(ENCODING.equalsIgnoreCase("h264"))
                    p.setPreviewSize(320, 240);
                else if(ENCODING.equalsIgnoreCase("h263"))
                    p.setPreviewSize(352, 288);
                mCamera.setParameters(p);
                mCamera.setPreviewDisplay(holder);
                mCamera.setPreviewCallback(new PreviewCallback()
                {
                    @Override
                    public void onPreviewFrame(byte[] data, Camera camera)
                    {
                        Log.d("EncodeDecode", "onPreviewFrame, calling encode function");
                        encode(data);
                    }
                });
                // ...
I ran into the same problem in portrait mode when I built an app that broadcasts camera frames live over RTMP, but I was able to solve it by using a TextureView. I did not rotate the frames on the sender side at all; instead, on the receiver side, I rotated the TextureView that the media player renders into and resized it.
I coded it as follows.
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <TextureView
        android:id="@+id/videoView"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</FrameLayout>
private void updateTextureViewSize(int viewWidth, int viewHeight) {
    int pivotPointX = viewWidth / 2;
    int pivotPointY = viewHeight / 2;
    Matrix matrix = new Matrix();
    if (isLandscapeOrientation) {
        matrix.setScale(1.0f, 1.0f, pivotPointX, pivotPointY);
        videoView.setTransform(matrix);
        videoView.setLayoutParams(new FrameLayout.LayoutParams(viewWidth, viewHeight));
    } else {
        matrix.setScale(1.0f, 1.0f, pivotPointX, pivotPointY);
        videoView.setRotation(90);
        videoView.setTranslationX(-viewWidth / 2);
        videoView.setTranslationY(-viewHeight / 2);
        videoView.setLayoutParams(new FrameLayout.LayoutParams(viewWidth * 2, viewHeight * 2));
    }
}
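If I read the portrait branch right, the idea is that rotating the view by 90 degrees swaps its on-screen width and height, so the view is laid out at twice the screen size and shifted back by half a screen so that the rotated surface still covers the whole display; the exact factor presumably depends on the device's aspect ratio.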
private TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        Surface s = new Surface(surface);
        if (mMediaPlayer != null) {
            mMediaPlayer.setSurface(s);
            DisplayMetrics displaymetrics = new DisplayMetrics();
            getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
            int sh = displaymetrics.heightPixels;
            int sw = displaymetrics.widthPixels;
            updateTextureViewSize(sw, sh);
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }
};
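For completeness, the listener still has to be attached to the view, presumably in onCreate() after setContentView(); this wiring is my assumption and is not shown in the answer:

TextureView videoView = (TextureView) findViewById(R.id.videoView);
videoView.setSurfaceTextureListener(surfaceTextureListener);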