当前位置: 首页 > 知识库问答 >
问题:

从图像制作mp4视频的android应用程序

慕容俭
2023-03-14

我用ffmpeg库为android转换了一个图像到mp4视频,但这需要太长时间,我想避免第三方库,如果可能的话。请帮我解决我的问题。

谢谢你。

共有1个答案

杜河
2023-03-14

基于这里的一个存储库,请参见我编写的代码:

TextureRenderer.kt

/**
 * Draws a [Bitmap] as a full-viewport textured quad with GLES 2.0.
 *
 * Must be constructed and used on a thread with a current EGL context
 * (the constructor issues GL calls). One instance owns one GL program,
 * two buffer objects and one texture handle; [draw] re-uploads the bitmap
 * each call, so it is suitable for frame-by-frame encoding.
 */
class TextureRenderer {
    // Pass-through vertex shader: forwards UVs and applies the MVP matrix.
    private val vertexShaderCode =
            "precision highp float;\n" +
                    "attribute vec3 vertexPosition;\n" +
                    "attribute vec2 uvs;\n" +
                    "varying vec2 varUvs;\n" +
                    "uniform mat4 mvp;\n" +
                    "\n" +
                    "void main()\n" +
                    "{\n" +
                    "\tvarUvs = uvs;\n" +
                    "\tgl_Position = mvp * vec4(vertexPosition, 1.0);\n" +
                    "}"

    // Samples the bound 2D texture at the interpolated UV.
    private val fragmentShaderCode =
            "precision mediump float;\n" +
                    "\n" +
                    "varying vec2 varUvs;\n" +
                    "uniform sampler2D texSampler;\n" +
                    "\n" +
                    "void main()\n" +
                    "{\t\n" +
                    "\tgl_FragColor = texture2D(texSampler, varUvs);\n" +
                    "}"

    // Full-screen quad in NDC, interleaved as x, y, z, u, v per vertex.
    private var vertices = floatArrayOf(
            // x, y, z, u, v
            -1.0f, -1.0f, 0.0f, 0f, 0f,
            -1.0f, 1.0f, 0.0f, 0f, 1f,
            1.0f, 1.0f, 0.0f, 1f, 1f,
            1.0f, -1.0f, 0.0f, 1f, 0f
    )

    // Two triangles covering the quad.
    private var indices = intArrayOf(
            2, 1, 0, 0, 3, 2
    )

    private var program: Int
    private var vertexHandle: Int = 0
    private var bufferHandles = IntArray(2)   // [0] = vertex buffer, [1] = index buffer
    private var uvsHandle: Int = 0
    private var mvpHandle: Int = 0
    private var samplerHandle: Int = 0
    private val textureHandle = IntArray(1)

    private var vertexBuffer: FloatBuffer = ByteBuffer.allocateDirect(vertices.size * 4).run {
        order(ByteOrder.nativeOrder())
        asFloatBuffer().apply {
            put(vertices)
            position(0)
        }
    }

    private var indexBuffer: IntBuffer = ByteBuffer.allocateDirect(indices.size * 4).run {
        order(ByteOrder.nativeOrder())
        asIntBuffer().apply {
            put(indices)
            position(0)
        }
    }

    init {
        // Create and link the program, failing fast with the driver's log so a
        // broken shader doesn't just silently render nothing.
        val vertexShader: Int = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode)
        val fragmentShader: Int = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode)
        program = GLES20.glCreateProgram().also {
            GLES20.glAttachShader(it, vertexShader)
            GLES20.glAttachShader(it, fragmentShader)
            GLES20.glLinkProgram(it)
            val linkStatus = IntArray(1)
            GLES20.glGetProgramiv(it, GLES20.GL_LINK_STATUS, linkStatus, 0)
            if (linkStatus[0] == 0)
                throw RuntimeException("Program link failed: " + GLES20.glGetProgramInfoLog(it))
            vertexHandle = GLES20.glGetAttribLocation(it, "vertexPosition")
            uvsHandle = GLES20.glGetAttribLocation(it, "uvs")
            mvpHandle = GLES20.glGetUniformLocation(it, "mvp")
            samplerHandle = GLES20.glGetUniformLocation(it, "texSampler")
        }
        // Initialize buffers
        GLES20.glGenBuffers(2, bufferHandles, 0)
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0])
        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertices.size * 4, vertexBuffer, GLES20.GL_DYNAMIC_DRAW)
        GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1])
        GLES20.glBufferData(GLES20.GL_ELEMENT_ARRAY_BUFFER, indices.size * 4, indexBuffer, GLES20.GL_DYNAMIC_DRAW)
        // Init texture handle
        GLES20.glGenTextures(1, textureHandle, 0)
        // Ensure I can draw transparent stuff that overlaps properly
        GLES20.glEnable(GLES20.GL_BLEND)
        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA)
    }

    /**
     * Compiles one shader stage of [type] from [shaderCode].
     *
     * @throws RuntimeException if compilation fails, with the GL info log.
     */
    private fun loadShader(type: Int, shaderCode: String): Int {
        return GLES20.glCreateShader(type).also { shader ->
            GLES20.glShaderSource(shader, shaderCode)
            GLES20.glCompileShader(shader)
            val status = IntArray(1)
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, status, 0)
            if (status[0] == 0) {
                val log = GLES20.glGetShaderInfoLog(shader)
                GLES20.glDeleteShader(shader)
                throw RuntimeException("Shader compilation failed: $log")
            }
        }
    }

    /**
     * Uploads [bitmap] as the current texture and draws it over the whole
     * viewport, transformed by [mvpMatrix].
     */
    fun draw(viewportWidth: Int, viewportHeight: Int, bitmap: Bitmap, mvpMatrix: FloatArray) {
        // Set the clear color BEFORE clearing — glClear uses the color that is
        // current at the time of the call, not one set afterwards.
        GLES20.glClearColor(0f, 0f, 0f, 0f)
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT or GLES20.GL_DEPTH_BUFFER_BIT)
        GLES20.glViewport(0, 0, viewportWidth, viewportHeight)
        GLES20.glUseProgram(program)
        // Pass transformations to shader
        GLES20.glUniformMatrix4fv(mvpHandle, 1, false, mvpMatrix, 0)
        // Prepare texture for drawing
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0])
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1)
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST)
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST)
        // Prepare buffers with vertices and indices & draw
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0])
        GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1])
        GLES20.glEnableVertexAttribArray(vertexHandle)
        GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 4 * 5, 0)
        GLES20.glEnableVertexAttribArray(uvsHandle)
        GLES20.glVertexAttribPointer(uvsHandle, 2, GLES20.GL_FLOAT, false, 4 * 5, 3 * 4)
        // NOTE(review): GL_UNSIGNED_INT indices in ES 2.0 require the
        // OES_element_index_uint extension — presumably fine on target devices,
        // but GL_UNSIGNED_SHORT would be universally supported. TODO confirm.
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_INT, 0)
    }
}

TimeLapseEncoder.kt

/**
 * Encodes a sequence of [Bitmap] frames into an H.264/MP4 file without
 * third-party libraries, using MediaCodec's input [Surface] fed by OpenGL.
 *
 * Usage: [prepareForEncoding] once, [encodeFrame] per image (with the
 * per-frame delay in milliseconds), then [finishEncoding]. All calls must
 * come from the same thread (the EGL context is bound to it). Not thread-safe.
 */
class TimeLapseEncoder {
    private var renderer: TextureRenderer? = null

    // MediaCodec and encoding configuration
    private var encoder: MediaCodec? = null
    private var muxer: MediaMuxer? = null
    private var mime = "video/avc"
    private var trackIndex = -1
    private var presentationTimeUs = 0L
    private var frameRate = 30.0
    private val timeoutUs = 10000L
    private val bufferInfo = MediaCodec.BufferInfo()
    private var size: Size? = null

    // EGL
    private var eglDisplay: EGLDisplay? = null
    private var eglContext: EGLContext? = null
    private var eglSurface: EGLSurface? = null

    // Surface provided by MediaCodec and used to get data produced by OpenGL
    private var surface: Surface? = null

    /**
     * Sets up the codec, EGL surface, muxer and renderer.
     *
     * @return true on success; false on any failure (resources are released).
     */
    fun prepareForEncoding(outVideoFilePath: String, bitmapWidth: Int, bitmapHeight: Int): Boolean {
        try {
            encoder = MediaCodec.createEncoderByType(mime)
            // Try to find supported size by checking the resolution of first supplied image
            // This could also be set manually as parameter to TimeLapseEncoder
            size = getBestSupportedResolution(encoder!!, mime, Size(bitmapWidth, bitmapHeight))
            val format = getFormat(size!!)
            encoder!!.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
            // Prepare surface
            initEgl()
            // Switch to executing state - we're ready to encode
            encoder!!.start()
            // Prepare muxer
            muxer = MediaMuxer(outVideoFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
            renderer = TextureRenderer()
            return true
        } catch (e: Exception) {
            releaseEncoder()
            return false
        }
    }

    /**
     * Renders [bitmap] into the encoder surface as one frame shown for
     * [delay] milliseconds.
     *
     * @return true on success; false on failure (resources are released).
     */
    fun encodeFrame(bitmap: Bitmap, delay: Int): Boolean {
        return try {
            frameRate = 1000.0 / delay
            // Pull any pending output first so the codec never stalls on a
            // full output queue.
            drainEncoder(false)
            renderer!!.draw(size!!.width, size!!.height, bitmap, getMvp())
            EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, presentationTimeUs * 1000)
            EGL14.eglSwapBuffers(eglDisplay, eglSurface)
            true
        } catch (e: Exception) {
            releaseEncoder()
            false
        }
    }

    /**
     * Signals end-of-stream, drains the remaining output and finalizes the
     * file. Always releases all resources, even on failure.
     */
    fun finishEncoding(): Boolean {
        return try {
            drainEncoder(true)
            true
        } catch (e: Exception) {
            false
        } finally {
            releaseEncoder()
        }
    }

    /**
     * Returns [preferredResolution] if the codec supports it, otherwise the
     * closest supported size from a list of common H.264 resolutions
     * (compared by pixel count, then aspect ratio), or null if none fit.
     */
    private fun getBestSupportedResolution(mediaCodec: MediaCodec, mime: String, preferredResolution: Size): Size? {
        // First check if exact combination supported
        if (mediaCodec.codecInfo.getCapabilitiesForType(mime)
                        .videoCapabilities.isSizeSupported(preferredResolution.width, preferredResolution.height))
            return preferredResolution
        // I prefer similar resolution with similar aspect
        val pix = preferredResolution.width * preferredResolution.height
        val preferredAspect = preferredResolution.width.toFloat() / preferredResolution.height.toFloat()
        // I try the resolutions suggested by docs for H.264 and VP8
        // https://developer.android.com/guide/topics/media/media-formats#video-encoding
        // TODO: find more supported resolutions
        val resolutions = arrayListOf(
                Size(176, 144), Size(320, 240), Size(320, 180),
                Size(640, 360), Size(720, 480), Size(1280, 720),
                Size(1920, 1080)
        )
        resolutions.sortWith(compareBy({ pix - it.width * it.height },
                // First compare by aspect
                {
                    val aspect = if (it.width < it.height) it.width.toFloat() / it.height.toFloat()
                    else it.height.toFloat() / it.width.toFloat()
                    (preferredAspect - aspect).absoluteValue
                }))
        for (size in resolutions) {
            if (mediaCodec.codecInfo.getCapabilitiesForType(mime)
                            .videoCapabilities.isSizeSupported(size.width, size.height)
            )
                return size
        }
        return null
    }

    // Builds the codec input format; COLOR_FormatSurface because frames come
    // from the EGL surface, not from input buffers.
    private fun getFormat(size: Size): MediaFormat {
        val format = MediaFormat.createVideoFormat(mime, size.width, size.height)
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface)
        format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000)
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 60)
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 15)

        return format
    }

    // Creates a recordable EGL window surface over the codec's input surface
    // and makes its ES2 context current on this thread.
    private fun initEgl() {
        surface = encoder!!.createInputSurface()
        eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY)
        if (eglDisplay == EGL14.EGL_NO_DISPLAY)
            throw RuntimeException("eglDisplay == EGL14.EGL_NO_DISPLAY: " + GLUtils.getEGLErrorString(EGL14.eglGetError()))
        val version = IntArray(2)
        if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1))
            throw RuntimeException("eglInitialize(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()))
        val attribList = intArrayOf(
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                // Required so the surface can feed a video encoder
                EGLExt.EGL_RECORDABLE_ANDROID, 1,
                EGL14.EGL_NONE
        )
        val configs = arrayOfNulls<EGLConfig>(1)
        val nConfigs = IntArray(1)
        EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.size, nConfigs, 0)
        var err = EGL14.eglGetError()
        if (err != EGL14.EGL_SUCCESS)
            throw RuntimeException(GLUtils.getEGLErrorString(err))
        val ctxAttribs = intArrayOf(EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE)
        eglContext = EGL14.eglCreateContext(eglDisplay, configs[0], EGL14.EGL_NO_CONTEXT, ctxAttribs, 0)
        err = EGL14.eglGetError()
        if (err != EGL14.EGL_SUCCESS)
            throw RuntimeException(GLUtils.getEGLErrorString(err))
        val surfaceAttribs = intArrayOf(EGL14.EGL_NONE)
        eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, configs[0], surface, surfaceAttribs, 0)
        err = EGL14.eglGetError()
        if (err != EGL14.EGL_SUCCESS)
            throw RuntimeException(GLUtils.getEGLErrorString(err))
        if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext))
            throw RuntimeException("eglMakeCurrent(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()))
    }

    /**
     * Pulls encoded output from the codec and writes it to the muxer.
     * With [endOfStream] true, signals EOS first and loops until the codec
     * emits its end-of-stream buffer.
     */
    private fun drainEncoder(endOfStream: Boolean) {
        if (endOfStream)
            encoder!!.signalEndOfInputStream()
        while (true) {
            val outBufferId = encoder!!.dequeueOutputBuffer(bufferInfo, timeoutUs)
            if (outBufferId >= 0) {
                val encodedBuffer = encoder!!.getOutputBuffer(outBufferId)!!
                // The codec-config buffer (SPS/PPS) is already carried by the
                // track format added on INFO_OUTPUT_FORMAT_CHANGED; it must not
                // be muxed again as a sample, and must not consume a timestamp.
                if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    // MediaMuxer is ignoring KEY_FRAMERATE, so I set it manually here
                    // to achieve the desired frame rate
                    bufferInfo.presentationTimeUs = presentationTimeUs
                    muxer!!.writeSampleData(trackIndex, encodedBuffer, bufferInfo)
                    presentationTimeUs += (1000000.0 / frameRate).toLong()
                }
                encoder!!.releaseOutputBuffer(outBufferId, false)
                // Are we finished here?
                if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
                    break
            } else if (outBufferId == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (!endOfStream)
                    break
                // End of stream, but still no output available. Try again.
            } else if (outBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                trackIndex = muxer!!.addTrack(encoder!!.outputFormat)
                muxer!!.start()
            }
        }
    }

    // Flips Y: Android bitmaps are top-down while GL texture coords are bottom-up.
    private fun getMvp(): FloatArray {
        val mvp = FloatArray(16)
        Matrix.setIdentityM(mvp, 0)
        Matrix.scaleM(mvp, 0, 1f, -1f, 1f)
        return mvp
    }

    /** Releases codec, EGL and muxer; safe to call after a partial setup. */
    private fun releaseEncoder() {
        // stop() throws IllegalStateException if the codec never reached the
        // executing state (e.g. prepareForEncoding failed before start()), so
        // guard it — cleanup after a failed setup must not itself crash.
        try {
            encoder?.stop()
        } catch (e: IllegalStateException) {
            // not started or already in error state; release() below is enough
        }
        encoder?.release()
        encoder = null
        releaseEgl()
        // Same for the muxer: stop() before start() throws.
        try {
            muxer?.stop()
        } catch (e: IllegalStateException) {
            // never started (no output format was ever produced)
        }
        muxer?.release()
        muxer = null
        size = null
        trackIndex = -1
        presentationTimeUs = 0L
    }

    // Tears down the EGL surface/context and releases the codec input surface.
    private fun releaseEgl() {
        if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglDestroySurface(eglDisplay, eglSurface)
            EGL14.eglDestroyContext(eglDisplay, eglContext)
            EGL14.eglReleaseThread()
            EGL14.eglTerminate(eglDisplay)
        }
        surface?.release()
        surface = null
        eglDisplay = EGL14.EGL_NO_DISPLAY
        eglContext = EGL14.EGL_NO_CONTEXT
        eglSurface = EGL14.EGL_NO_SURFACE
    }

}
// Example driver: encode bitmaps into an MP4 at outputPath.
val outputPath = ...
val videoFile = File(outputPath)
// Start from a clean slate and make sure the target directory exists.
if (videoFile.exists())
    videoFile.delete()
videoFile.parentFile!!.mkdirs()
val timeLapseEncoder = TimeLapseEncoder()
val width = ...
val height = ...
// Fixed: the original line had a stray trailing ')' (syntax error).
timeLapseEncoder.prepareForEncoding(outputPath, width, height)
val bitmap = ...
val delay = ... // in ms, of this specific frame
// Call encodeFrame once per image, then finalize the file.
timeLapseEncoder.encodeFrame(bitmap, delay)
timeLapseEncoder.finishEncoding()
 类似资料:
  • 问题内容: 此代码有什么问题? 我在项目中有一个Raw文件(mp4视频文件), 当我这样做时,然后我从SDcard文件获取的文件不相同,因此无法加载视频:( 您是否有另一种自动将原始文件复制到 SDCard 的方法? 谢谢 问题答案: 如果使用InputStream进行读取,请使用OutputStream进行写入,即BufferedOutputStream包装的FileOutputStream。另外,

  • 我正在玩 gstreamer 命令行界面,在从PNG图像创建mp4视频文件时卡住了。你们能帮忙解决这个问题吗? 使用下面的命令,我从摄像机创建了PNG图像: gst-launch-1.0.exe -v ksvideosrc ! 队列 decodebin ! 视频转换 ! pngenc ! 多文件接收器位置="frame d.png" 我可以使用以下命令播放PNG图像: gst-launch-1.0 -v multi

  • 很长一段时间以来,我一直试图从assets文件夹中为不同类型的文件创建一个跨应用程序共享意图,但始终存在一个问题。我提出了许多不同类型的解决方案,例如: 尝试使用自定义内容提供商(此处或此处…) 尝试将文件保存在本地存储器上(此处示例) 和其他人,但由于它没有工作,我没有链接了 现在,这是我的工作解决方案,用于仅将音频文件共享给WhatsApp和Messenger,所有其他应用程序都失败了)。 我

  • 我想将两个图像(名称未按顺序编号)合并到一个视频中,但结果仅包括第一个图像的视频。 我将感谢任何帮助! ffmpeg.exe -r 1/5 -i image_x.png -r 1/5 -i image_y.png -codec:v libx264 -vf fps=50 video.mp4 输出: 输入0,png管道,来自'C:\Users\khj\Desktop\SpeakerLayoutTool\Images\

  • 谁能告诉我如何在android中组合图像并生成一个mp4文件,并将视频文件存储在SDCard中?