I am receiving H.264 data from a server and want to decode the stream on Android using MediaCodec and a TextureView. I read the data from the server, parse out the SPS, the PPS, and the video frame data, then pass that data to MediaCodec, but the call dequeueOutputBuffer(info, 100000) always returns -1 and I get a dequeueOutputBuffer timeout.
Please help; I have been stuck on this problem for three weeks.
Here is the code used to decode the video frames.
public class H264PlayerActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener {
private TextureView m_surface;// View that contains the Surface Texture
private H264Provider provider;// Object that connects to our server and gets H264 frames
private MediaCodec m_codec;// Media decoder
// private DecodeFramesTask m_frameTask;// AsyncTask that takes H264 frames and uses the decoder to update the Surface Texture
// the channel used to receive the partner's video
private ZMQ.Socket subscriber = null;
private ZMQ.Context context;
// thread handling the video reception
// byte[] byte_SPSPPS = null;
//byte[] byte_Frame = null;
public static String stringSubscribe=null;
public static String myIpAcquisition=null;
public static byte[] byte_SPSPPS = null;
public static byte[] byte_Frame = null;
boolean isIframe = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.h264player_activity);
Bundle extras = getIntent().getExtras();
if(extras!=null)
{
stringSubscribe=extras.getString("stringSubscribe");
myIpAcquisition=(extras.getString("myIpAcquisition"));
}
// Get a reference to the TextureView in the UI
m_surface = (TextureView) findViewById(R.id.textureView);
// Add this class as a call back so we can catch the events from the Surface Texture
m_surface.setSurfaceTextureListener(this);
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
@Override
// Invoked when a TextureView's SurfaceTexture is ready for use
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
// when the surface is ready, we make a H264Provider object; its constructor starts a thread that logs into our server and starts receiving frames
provider = new H264Provider(stringSubscribe, myIpAcquisition,byte_SPSPPS,byte_Frame);
}
@Override
// Invoked when the SurfaceTexture's buffers size changed
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
// Invoked when the specified SurfaceTexture is about to be destroyed
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
// Invoked when the specified SurfaceTexture is updated through updateTexImage()
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
private class H264Provider {
String stringSubscribe = "";
String myIpAcquisition = "";
byte[] byte_SPSPPS = null;
byte[] byte_Frame = null;
H264Provider(String stringSubscribe, String myIpAcquisition, byte[] byte_SPS, byte[] byte_Frame) {
this.stringSubscribe = stringSubscribe;
this.myIpAcquisition = myIpAcquisition;
this.byte_SPSPPS = byte_SPS;
this.byte_Frame = byte_Frame;
System.out.println(" subscriber client started");
//SetUpConnection setup=new SetUpConnection();
// setup.execute();
PlayerThread mPlayer = new PlayerThread();
mPlayer.start();
}
void release(){
// close ØMQ socket
subscriber.close();
//terminate 0MQ context
context.term();
}
byte[] getCSD( ) {
return byte_SPSPPS;
}
byte[] nextFrame( ) {
return byte_Frame;
}
private class PlayerThread extends Thread
{
public PlayerThread()
{
System.out.println(" subscriber client started");
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
@Override
public void run() {
/******************************************ZMQ****************************/
// Prepare our context and subscriber (assign the activity fields so release() can close them later)
context = ZMQ.context(1);
//create 0MQ socket
subscriber = context.socket(ZMQ.SUB);
//create outgoing connection from socket
String address = "tcp://" + myIpAcquisition + ":xxxx";
boolean connected = subscriber.connect(address);
subscriber.setHWM(20);// the number of messages to queue
Log.e("zmq_tag", "connect " + connected);
//boolean bbbb = subscriber.setSocketOpt(zmq.ZMQ.ZMQ_SNDHWM, 1);
subscriber.subscribe(stringSubscribe.getBytes(ZMQ.CHARSET));
Log.e("zmq_tag", " zmq stringSubscribe " + stringSubscribe);
boolean bRun = true;
while (bRun) {
ZMsg msg = ZMsg.recvMsg(subscriber);
String string_SPS = null;
String string_PPS = null;
String SPSPPS = null;
String string_Frame = null;
if (msg != null) {
// create a video message out of the zmq message
VideoMessage oVideoMsg = VideoMessage.fromZMsg(msg);
// wait until get Iframe
String szInfoPublisher = new String(oVideoMsg.szInfoPublisher);
Log.e("zmq_tag", "szInfoPublisher " + szInfoPublisher);
if (szInfoPublisher.contains("0000000167")) {
isIframe = true;
String[] split_IFrame = szInfoPublisher.split("0000000165");
String SPS__PPS = split_IFrame[0];
String [] split_SPSPPS=SPS__PPS.split("0000000167");
SPSPPS="0000000167" + split_SPSPPS[1];
Log.e("zmq_tag", "SPS+PPS " + SPSPPS);
String iFrame = "0000000165" + split_IFrame[1];
Log.e("zmq_tag", "IFrame " + iFrame);
string_Frame = iFrame;
} else {
if ((szInfoPublisher.contains("0000000161")||szInfoPublisher.contains("0000000141")) && isIframe) {
if (szInfoPublisher.contains("0000000161"))
{
String[] split_IFrame = szInfoPublisher.split("0000000161");
String newMSG = "0000000161" + split_IFrame[1];
Log.e("zmq_tag", " P Frame " + newMSG);
string_Frame = newMSG;
} else
if (szInfoPublisher.contains("0000000141"))
{
String[] split_IFrame = szInfoPublisher.split("0000000141");
String newMSG = "0000000141" + split_IFrame[1];
Log.e("zmq_tag", " P Frame " + newMSG);
string_Frame = newMSG;
}
} else {
isIframe = false;
}
}
}
if (SPSPPS != null) {
byte_SPSPPS = SPSPPS.getBytes();
Log.e("zmq_tag", " byte_SPSPPS " + new String(byte_SPSPPS));
}
if (string_Frame != null) {
byte_Frame = string_Frame.getBytes();
Log.e("zmq_tag", " byte_Frame " + new String(byte_Frame));
}
if(SPSPPS != null) {
// Create the format settings for the MediaCodec
MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080);// MIMETYPE: a two-part identifier for file formats and format contents
// Set the PPS and SPS frame
format.setByteBuffer("csd-0", ByteBuffer.wrap(byte_SPSPPS));
// Set the buffer size
format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 100000);
try {
// Get an instance of MediaCodec and give it its Mime type
m_codec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
// Configure the Codec
m_codec.configure(format, new Surface(m_surface.getSurfaceTexture()), null, 0);
// Start the codec
m_codec.start();
// Loop, getting frames and decoding them with the codec
while (!Thread.interrupted()) {
// Get the next frame
byte[] frame = byte_Frame;
Log.e("zmq_tag", " frame " + new String(frame));
// Now we need to give it to the Codec to decode into the surface
// Get the input buffer from the decoder
int inputIndex = m_codec.dequeueInputBuffer(1);// timeout in microseconds; 1 µs makes this call effectively non-blocking
Log.e("zmq_tag", "inputIndex " + inputIndex);
// If the buffer number is valid use the buffer with that index
if (inputIndex >= 0) {
ByteBuffer buffer =m_codec.getInputBuffer(inputIndex);
buffer.put(frame);
// tell the decoder to process the frame
m_codec.queueInputBuffer(inputIndex, 0, frame.length, 0, 0);
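// NOTE: the fourth argument of queueInputBuffer is presentationTimeUs; queuing every frame with 0 gives the decoder no timeline.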
}
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int outputIndex = m_codec.dequeueOutputBuffer(info, 100000);
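// A return value of -1 here is MediaCodec.INFO_TRY_AGAIN_LATER: no decoded frame became available within the 100000 µs timeout.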
Log.e("zmq_tag", "outputIndex " + outputIndex);
if (outputIndex >= 0) {
m_codec.releaseOutputBuffer(outputIndex, true);
}
// wait for the next frame to be ready, our server makes a frame every 250ms
try {
Thread.sleep(250);
} catch (Exception e) {
e.printStackTrace();
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
// close ØMQ socket
subscriber.close();
//terminate 0MQ context
context.term();
}
}
}
}
Sorry, I can't post comments yet, but I noticed some possible errors in your code:
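The main suspects, judging from the code above: (1) the H.264 payload is round-tripped through java.lang.String (new String(...) and later getBytes()), which corrupts arbitrary binary data, so whatever reaches the decoder is no longer a valid Annex-B stream; (2) csd-0 is filled with that text rather than the raw SPS/PPS bytes; (3) every input buffer is queued with presentationTimeUs = 0. Under those conditions dequeueOutputBuffer never has a decodable frame and keeps returning -1. Below is a minimal sketch, not tested against your server, of splitting an Annex-B byte stream into NAL units while keeping everything as byte[]; the class name and helpers are illustrative only:

import java.util.ArrayList;
import java.util.List;

// Illustrative helper: split an Annex-B H.264 byte stream into NAL units
// without ever converting the binary data to a String.
public final class NalSplitter {

    // Offsets of every 00 00 00 01 start code in the buffer.
    static List<Integer> startCodeOffsets(byte[] data) {
        List<Integer> offsets = new ArrayList<>();
        for (int i = 0; i + 3 < data.length; i++) {
            if (data[i] == 0 && data[i + 1] == 0
                    && data[i + 2] == 0 && data[i + 3] == 1) {
                offsets.add(i);
            }
        }
        return offsets;
    }

    // Each returned NAL unit keeps its 4-byte start code, which is the
    // form MediaCodec accepts both for csd-0 and for regular input buffers.
    static List<byte[]> split(byte[] data) {
        List<Integer> offsets = startCodeOffsets(data);
        List<byte[]> nals = new ArrayList<>();
        for (int i = 0; i < offsets.size(); i++) {
            int from = offsets.get(i);
            int to = (i + 1 < offsets.size()) ? offsets.get(i + 1) : data.length;
            byte[] nal = new byte[to - from];
            System.arraycopy(data, from, nal, 0, nal.length);
            nals.add(nal);
        }
        return nals;
    }

    // NAL unit type is the low 5 bits of the byte after the start code:
    // 7 = SPS, 8 = PPS, 5 = IDR slice, 1 = non-IDR slice.
    static int nalType(byte[] nal) {
        return nal[4] & 0x1F;
    }
}

With the units kept as byte[], the SPS and PPS can go into csd-0/csd-1 (or simply be queued as the first input buffers), each frame should be queued with an increasing presentationTimeUs, and -1 from dequeueOutputBuffer becomes the normal "try again later" signal that stops once the decoder has enough valid input.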