微信公众号搜"智元新知"关注
微信扫一扫可直接关注哦!

如何修复MediaMuxer生成的损坏的Mp4文件?

如何解决如何修复MediaMuxer生成的损坏的Mp4文件?

我使用 MediaMuxer 和 MediaCodec 生成mp4视频。

当我调用 mMediaMuxer.stop()

后,该视频可播放

但是,当用户在我来得及调用stop()方法之前就退出应用程序时,我就会留下一个无法播放的大mp4文件

有没有办法修复此mp4文件,使其可以播放?

编辑

这是一个损坏的mp4文件的示例

我能够使用this online tool修复文件,但是该工具要求上传未损坏的视频作为参考。

这是我用作参考的未损坏的mp4视频。当我上传此视频时,该工具修复了损坏的mp4文件

因此可以修复文件,但是他们是如何做到的?

如果有用,这是我用来生成损坏的和未损坏的代码

package com.tolotra.images_to_video

import android.content.ContentValues.TAG
import android.content.Context
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.media.*
import android.opengl.*
import android.util.Log
import android.util.TimingLogger
import android.view.Surface
import java.io.File
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.FloatBuffer
import java.nio.IntBuffer
import java.text.SimpleDateFormat
import java.util.*


/**
 * Encodes a sequence of Bitmaps into an H.264/MP4 video using a
 * Surface-input MediaCodec encoder, an EGL/GLES2 renderer and MediaMuxer.
 *
 * Usage: [setup] once, [Feed] per frame, [finish] to finalize the file.
 *
 * NOTE(review): MediaMuxer only writes the MP4 'moov' atom in stop(); if the
 * process dies before [finish] runs, the output file is unplayable. If that
 * matters, consider fragmented MP4 or MPEG-TS instead.
 *
 * All scraper-corrupted Android identifiers (Mediamuxer, EGLdisplay, glutils,
 * MetaDATA_KEY_*, ...) and dropped call arguments have been restored to the
 * documented framework signatures.
 */
class VideoBuilder(applicationContext: Context) {

    // Monotonically increasing frame counter, used only for profiling logs.
    private var frameId: Long = 0
    private lateinit var muxer: MediaMuxer
    private lateinit var glTool: OverlayRenderer
    private lateinit var encoder: MediaCodec
    private lateinit var outVideoFilePath: String
    private var context = applicationContext
    // Muxer track index; -1 until INFO_OUTPUT_FORMAT_CHANGED adds the track.
    private var trackIndex: Int = 0
    private lateinit var bufferInfo: MediaCodec.BufferInfo
    private var eglContext: EGLContext? = null
    private var eglDisplay: EGLDisplay? = null
    private var eglSurface: EGLSurface? = null
    private lateinit var surface: Surface

    // Timeout for dequeueOutputBuffer, in microseconds.
    val timeoutUs = 10000L
    val frameRate = 5
    // Running presentation timestamp (microseconds) of the next sample.
    var presentationTimeUs: Long = 0

    /**
     * Creates the encoder, binds an EGL context to its input surface and
     * opens the MP4 muxer. Must be called once before [Feed].
     */
    fun setup() {
        encoder = createEncoder()
        initInputSurface(encoder)
        encoder.start()

        outVideoFilePath = getScreenshotPath("tolotra-screen-recoder-${Date().time}.mp4")
        muxer = MediaMuxer(outVideoFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)

        glTool = OverlayRenderer()
        glTool.initGl()
    }

    /**
     * Renders [bitmap] as the next video frame.
     *
     * @param timelapse duration (microseconds) between this frame and the
     *        previous frame; advances the presentation timestamp.
     */
    fun Feed(bitmap: Bitmap, timelapse: Long) {

        frameId++
        Log.d("Feed_PROFILE", "Feed frame:$frameId")
        val timings = TimingLogger("Feed_PROFILE", "Feed frame:$frameId")
        // Get encoded data and feed it to the muxer.
        drainEncoder(encoder, muxer, false, timelapse)

        timings.addSplit("drainEncoder done")
        // Render the bitmap/texture with OpenGL here.
        glTool.render(bitmap)
        timings.addSplit("render done")

        // Set the frame timestamp with the EGL extension (expects nanoseconds).
        EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, presentationTimeUs * 1000)

        // Swapping buffers feeds the encoder with the frame produced by OpenGL.
        EGL14.eglSwapBuffers(eglDisplay, eglSurface)

        timings.dumpToLog()
    }

    /**
     * Drains the last encoded data, finalizes the MP4 (muxer.stop() writes
     * the 'moov' atom), releases all resources and logs file metadata.
     */
    fun finish() {
        Log.d(TAG, "Finishing")

        // Drain last encoded data and finalize the video file.
        // BUG FIX: the muxer argument was missing from this call.
        drainEncoder(encoder, muxer, true, 0)
        _cleanUp(encoder, muxer)

        val file = File(outVideoFilePath)

        // File size in KiB.
        val file_size = (file.length() / 1024).toInt()
        val retriever = MediaMetadataRetriever()
        retriever.setDataSource(outVideoFilePath)
        val width =
            retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)
        val height =
            retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)
        val rotation =
            retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION)

        val bitRate =
            retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE)

        // METADATA_KEY_DURATION is in milliseconds; convert to microseconds.
        val duration =
            java.lang.Long.valueOf(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)) * 1000

        Log.d("Result", "bitrate $bitRate duration $duration  fileSize $file_size ")
    }

    /**
     * Returns a writable path for [fileName] inside the app's external cache,
     * preferring a "Screen Recorder" subdirectory when it can be created.
     */
    fun getScreenshotPath(fileName: String): String {
        val externalDir: String = context.externalCacheDir!!.path
        val sDir: String = externalDir + File.separator + "Screen Recorder"
        val dir = File(sDir)
        // Fall back to the cache root if the subdirectory cannot be created.
        val dirPath: String = if (dir.exists() || dir.mkdir()) {
            sDir + File.separator + fileName
        } else {
            externalDir + File.separator + fileName
        }
        Log.d("Mp4 file path", "Path: $dirPath")

        return dirPath
    }

    /**
     * Creates and configures (but does not start) an H.264 encoder that takes
     * its input from a Surface (COLOR_FormatSurface).
     */
    fun createEncoder(): MediaCodec {

        bufferInfo = MediaCodec.BufferInfo()
        val MIME = "video/avc"
        val encoder = MediaCodec.createEncoderByType(MIME)
        val width = 320
        val heigh = 512
        val format = MediaFormat.createVideoFormat(MIME, width, heigh)
        format.setInteger(
            MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
        )
//        format.setInteger(MediaFormat.KEY_BIT_RATE, 2_000_000)
        format.setInteger(MediaFormat.KEY_BIT_RATE, 350_000)
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 45)
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5)

        // configure(format, surface, crypto, flags): no output surface and no
        // crypto for an encoder.
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        trackIndex = -1
        return encoder
    }

    /**
     * Pulls all currently available encoded samples out of [encoder] and
     * writes them to [muxer], stamping each sample with the manually tracked
     * [presentationTimeUs] (advanced by [timelapseUs] per sample).
     *
     * The muxer is started lazily on INFO_OUTPUT_FORMAT_CHANGED, because the
     * real output format (with SPS/PPS) only exists after encoding begins.
     *
     * @param endOfStream true to signal EOS and drain until the encoder
     *        emits BUFFER_FLAG_END_OF_STREAM.
     */
    fun drainEncoder(
        encoder: MediaCodec, muxer: MediaMuxer, endOfStream: Boolean, timelapseUs: Long
    ) {
        if (endOfStream)
            encoder.signalEndOfInputStream()

        while (true) {
            val outBufferId = encoder.dequeueOutputBuffer(bufferInfo, timeoutUs)

            if (outBufferId >= 0) {
                val encodedBuffer = encoder.getOutputBuffer(outBufferId)

                // MediaMuxer ignores KEY_FRAME_RATE, so the timestamp is set
                // manually here to achieve the desired frame rate.
                bufferInfo.presentationTimeUs = presentationTimeUs
                if (encodedBuffer != null) {
                    muxer.writeSampleData(trackIndex, encodedBuffer, bufferInfo)
                }

                presentationTimeUs += timelapseUs

                encoder.releaseOutputBuffer(outBufferId, false)

                // Are we finished here?
                if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
                    break
            } else if (outBufferId == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (!endOfStream)
                    break

                // End of stream, but still no output available. Try again.
            } else if (outBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                trackIndex = muxer.addTrack(encoder.outputFormat)
                muxer.start()
            }
        }
    }

    /**
     * Creates an EGL display/context/window-surface bound to the encoder's
     * input Surface, so GLES rendering feeds the encoder directly.
     */
    private fun initInputSurface(encoder: MediaCodec) {

        val surface = encoder.createInputSurface()

        val eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY)
        if (eglDisplay == EGL14.EGL_NO_DISPLAY)
            throw RuntimeException(
                "eglDisplay == EGL14.EGL_NO_DISPLAY: "
                        + GLUtils.getEGLErrorString(EGL14.eglGetError())
            )

        // eglInitialize(dpy, major, majorOffset, minor, minorOffset).
        val version = IntArray(2)
        if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1))
            throw RuntimeException("eglInitialize(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()))

        // RGBA8888, GLES2-renderable, recordable config (required for
        // surfaces backed by a MediaCodec input surface).
        val attribList = intArrayOf(
            EGL14.EGL_RED_SIZE, 8,
            EGL14.EGL_GREEN_SIZE, 8,
            EGL14.EGL_BLUE_SIZE, 8,
            EGL14.EGL_ALPHA_SIZE, 8,
            EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
            EGLExt.EGL_RECORDABLE_ANDROID, 1,
            EGL14.EGL_NONE
        )
        val configs = arrayOfNulls<EGLConfig>(1)
        val nConfigs = IntArray(1)
        EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.size, nConfigs, 0)

        var err = EGL14.eglGetError()
        if (err != EGL14.EGL_SUCCESS)
            throw RuntimeException(GLUtils.getEGLErrorString(err))

        val ctxAttribs = intArrayOf(
            EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
            EGL14.EGL_NONE
        )
        val eglContext =
            EGL14.eglCreateContext(eglDisplay, configs[0], EGL14.EGL_NO_CONTEXT, ctxAttribs, 0)

        err = EGL14.eglGetError()
        if (err != EGL14.EGL_SUCCESS)
            throw RuntimeException(GLUtils.getEGLErrorString(err))

        val surfaceAttribs = intArrayOf(
            EGL14.EGL_NONE
        )
        val eglSurface =
            EGL14.eglCreateWindowSurface(eglDisplay, configs[0], surface, surfaceAttribs, 0)
        err = EGL14.eglGetError()
        if (err != EGL14.EGL_SUCCESS)
            throw RuntimeException(GLUtils.getEGLErrorString(err))

        if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext))
            throw RuntimeException("eglMakeCurrent(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()))

        this.eglSurface = eglSurface
        this.eglDisplay = eglDisplay
        this.eglContext = eglContext
        this.surface = surface
    }

    /**
     * Tears down EGL, the input Surface, the encoder and the muxer.
     * muxer.stop() is what writes the MP4 'moov' atom; until it runs the
     * output file cannot be played.
     */
    private fun _cleanUp(encoder: MediaCodec, muxer: MediaMuxer) {
        if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglDestroySurface(eglDisplay, eglSurface)
            EGL14.eglDestroyContext(eglDisplay, eglContext)
            EGL14.eglReleaseThread()
            EGL14.eglTerminate(eglDisplay)
        }
        // Guard against cleanup before setup() ran (lateinit not initialized).
        if (::surface.isInitialized) surface.release()
        eglDisplay = EGL14.EGL_NO_DISPLAY
        eglContext = EGL14.EGL_NO_CONTEXT
        eglSurface = EGL14.EGL_NO_SURFACE

        encoder.stop()
        encoder.release()

        muxer.stop()
        muxer.release()
    }


}

/**
 * Minimal GLES2 renderer that draws a single textured full-screen quad from a
 * Bitmap into the current EGL surface (the encoder's input surface).
 *
 * Scraper-corrupted GLES20/GLUtils/Matrix identifiers and dropped call
 * arguments have been restored to the documented framework signatures; the
 * vertex/index arrays were garbled in the listing and are reconstructed as
 * the standard quad this renderer's stride/offset math expects.
 */
class OverlayRenderer() {

    private val mvpMatrix = FloatArray(16)
    private val projectionMatrix = FloatArray(16)
    private val viewMatrix = FloatArray(16)

    // Vertex shader: passes UVs through and applies the MVP transform.
    private val vertexShaderCode =
        "precision highp float;\n" +
                "attribute vec3 vertexPosition;\n" +
                "attribute vec2 uvs;\n" +
                "varying vec2 varUvs;\n" +
                "uniform mat4 mvp;\n" +
                "\n" +
                "void main()\n" +
                "{\n" +
                "\tvarUvs = uvs;\n" +
                "\tgl_Position = mvp * vec4(vertexPosition,1.0);\n" +
                "}"

    // Fragment shader: plain texture sampling.
    private val fragmentShaderCode =
        "precision mediump float;\n" +
                "\n" +
                "varying vec2 varUvs;\n" +
                "uniform sampler2D texSampler;\n" +
                "\n" +
                "void main()\n" +
                "{\t\n" +
                "\tgl_FragColor = texture2D(texSampler,varUvs);\n" +
                "}"

    // Full-screen quad, 5 floats per vertex: x, y, z, u, v.
    // NOTE(review): reconstructed from the garbled listing — verify the UV
    // orientation matches the Y-flip applied in render().
    private var vertices = floatArrayOf(
        -1.0f, -1.0f, 0.0f, 0f, 0f,
        -1.0f, 1.0f, 0.0f, 0f, 1f,
        1.0f, 1.0f, 0.0f, 1f, 1f,
        1.0f, -1.0f, 0.0f, 1f, 0f
    )

    // Two triangles covering the quad: 6 indices, matching the count passed
    // to glDrawElements in render().
    private var indices = intArrayOf(
        2, 1, 0, 0, 3, 2
    )

    private var program: Int = 0
    private var vertexHandle: Int = 0
    private var bufferHandles = IntArray(2)
    private var uvsHandle: Int = 0
    private var mvpHandle: Int = 0
    private var samplerHandle: Int = 0
    private val textureHandle = IntArray(1)

    val viewportWidth = 320
    val viewportHeight = 486

    // Direct native-order buffers, as required by glBufferData.
    var vertexBuffer: FloatBuffer = ByteBuffer.allocateDirect(vertices.size * 4).run {
        order(ByteOrder.nativeOrder())
        asFloatBuffer().apply {
            put(vertices)
            position(0)
        }
    }

    var indexBuffer: IntBuffer = ByteBuffer.allocateDirect(indices.size * 4).run {
        order(ByteOrder.nativeOrder())
        asIntBuffer().apply {
            put(indices)
            position(0)
        }
    }

    /**
     * Uploads [bitmap] as a texture and draws it onto the quad.
     * Assumes initGl() has run and an EGL context is current on this thread.
     */
    fun render(bitmap: Bitmap) {

        Log.d("Bitmap", "width ${bitmap.width} height ${bitmap.height}")

        // Identity transform with a Y flip, since GL's origin is bottom-left
        // while Bitmap rows run top-down.
        val mvp = FloatArray(16)
        Matrix.setIdentityM(mvp, 0)
        Matrix.scaleM(mvp, 0, 1f, -1f, 1f)

        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT or GLES20.GL_DEPTH_BUFFER_BIT)
        GLES20.glClearColor(0f, 0f, 0f, 0f)

        GLES20.glViewport(0, 0, viewportWidth, viewportHeight)

        GLES20.glUseProgram(program)

        // Pass the transformation to the shader.
        GLES20.glUniformMatrix4fv(mvpHandle, 1, false, mvp, 0)

        // Prepare the texture for drawing.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0])
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1)

        // Upload the Bitmap pixels into the bound texture.
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0)

        GLES20.glTexParameteri(
            GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST
        )
        GLES20.glTexParameteri(
            GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST
        )

        // Bind vertex/index buffers and draw.
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0])
        GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1])

        // Stride is 5 floats (x,y,z,u,v) = 20 bytes; UVs start at byte 12.
        GLES20.glEnableVertexAttribArray(vertexHandle)
        GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 4 * 5, 0)

        GLES20.glEnableVertexAttribArray(uvsHandle)
        GLES20.glVertexAttribPointer(uvsHandle, 2, GLES20.GL_FLOAT, false, 4 * 5, 3 * 4)

        GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_INT, 0)
    }

    /**
     * Compiles the shaders, links the program, uploads the static quad
     * geometry and allocates the texture. Must run with a current EGL context.
     */
    fun initGl() {
        val vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER).also { shader ->
            GLES20.glShaderSource(shader, vertexShaderCode)
            GLES20.glCompileShader(shader)
        }

        val fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER).also { shader ->
            GLES20.glShaderSource(shader, fragmentShaderCode)
            GLES20.glCompileShader(shader)
        }

        // Link the program and resolve attribute/uniform locations.
        program = GLES20.glCreateProgram().also {
            GLES20.glAttachShader(it, vertexShader)
            GLES20.glAttachShader(it, fragmentShader)
            GLES20.glLinkProgram(it)

            vertexHandle = GLES20.glGetAttribLocation(it, "vertexPosition")
            uvsHandle = GLES20.glGetAttribLocation(it, "uvs")
            mvpHandle = GLES20.glGetUniformLocation(it, "mvp")
            samplerHandle = GLES20.glGetUniformLocation(it, "texSampler")
        }

        // Initialize vertex/index buffers.
        GLES20.glGenBuffers(2, bufferHandles, 0)

        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0])
        GLES20.glBufferData(
            GLES20.GL_ARRAY_BUFFER, vertices.size * 4, vertexBuffer, GLES20.GL_DYNAMIC_DRAW
        )

        GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1])
        GLES20.glBufferData(
            GLES20.GL_ELEMENT_ARRAY_BUFFER, indices.size * 4, indexBuffer, GLES20.GL_DYNAMIC_DRAW
        )

        // Init texture handle.
        GLES20.glGenTextures(1, textureHandle, 0)

        // Ensure transparent content that overlaps blends properly.
        GLES20.glEnable(GLES20.GL_BLEND)
        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA)
    }
}

解决方法

通常,MP4不是一种好的录制格式:样本表通常保存在内存中,直到关闭文件时才写入。因此,在断电或应用程序崩溃的情况下,您可能会丢失录音。如果使用MPEG-2传输流或分段(fragmented)MP4,则已写入的大部分媒体数据仍然可以播放。您的文件很可能仅包含MP4的"ftyp"和"mdat"原子,其中音频和视频交错存放。只要对视频流有一些有根据的猜测和了解,就有机会提取出音频和视频。https://fix.video 似乎可以做到这一点。

MyLayer

Fix.video会解析您的完好文件,提取音频和视频的配置参数,并利用正常文件中的信息重新创建大部分"moov"原子。缺失的样本表("stXX"原子)则通过解析您的"mdat"原子来重建:"mdat"原子内的视频块均以长度作为前缀,其余部分必然是AAC音频。

版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。

相关推荐


Selenium Web驱动程序和Java。元素在(x,y)点处不可单击。其他元素将获得点击?
Python-如何使用点“。” 访问字典成员?
Java 字符串是不可变的。到底是什么意思?
Java中的“ final”关键字如何工作?(我仍然可以修改对象。)
“loop:”在Java代码中。这是什么,为什么要编译?
java.lang.ClassNotFoundException:sun.jdbc.odbc.JdbcOdbcDriver发生异常。为什么?
这是用Java进行XML解析的最佳库。
Java的PriorityQueue的内置迭代器不会以任何特定顺序遍历数据结构。为什么?
如何在Java中聆听按键时移动图像。
Java“Program to an interface”。这是什么意思?