Newest 'x264' Questions - Stack Overflow

http://stackoverflow.com/questions/tagged/x264

Articles published on the site

  • Writing each I frame into a different .mp4 file and adding some additional logs

    8 June 2018, by MSD Paul

    When using the x264 encoder, how can I store each I frame (one per input .png frame) in a different .mp4 file?

    If I have 5 frames and encode them into 5 I frames, how do I write each one to a separate file?

    Another question: how can I add a few extra log messages or print statements to functions in the x264 code, so that they appear in the terminal alongside the encoder's other log output?
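
    A minimal sketch of one way to do the first part (assuming libx264's public API; loading each .png into the input picture is elided): set the GOP length to 1 so that every picture comes out as an IDR/I frame, then write each encoder output to its own file. This produces one raw .264 elementary stream per frame; wrapping each one in an .mp4 container needs a separate muxing step (ffmpeg can copy a raw .264 stream into an .mp4, for example). For the second part, code inside x264 itself can call the encoder's own internal x264_log( h, X264_LOG_INFO, "..." ) helper, which prints alongside the other encoder messages.

    #include <stdio.h>
    #include <x264.h>

    //sketch: encode n_frames I420 pictures, one IDR frame per output file
    //(file names are illustrative)
    int encode_each_frame_to_file(int n_frames, int w, int h)
    {
        x264_param_t param;
        x264_param_default_preset(&param, "medium", NULL);
        param.i_width = w;
        param.i_height = h;
        param.i_csp = X264_CSP_I420;
        param.i_keyint_max = 1; //GOP of 1: every frame is an IDR/I frame

        x264_t *enc = x264_encoder_open(&param);
        if (!enc) return -1;

        x264_picture_t pic_in, pic_out;
        x264_picture_alloc(&pic_in, X264_CSP_I420, w, h);

        for (int n = 0; n < n_frames; n++) {
            //copy the decoded .png pixels into pic_in.img.plane[0..2] here (elided)
            pic_in.i_pts = n;
            pic_in.i_type = X264_TYPE_IDR; //also force the picture type

            x264_nal_t *nals;
            int num_nals;
            int size = x264_encoder_encode(enc, &nals, &num_nals, &pic_in, &pic_out);
            if (size > 0) {
                char name[64];
                snprintf(name, sizeof(name), "frame_%03d.264", n);
                FILE *f = fopen(name, "wb");
                fwrite(nals[0].p_payload, 1, size, f); //NAL payloads are contiguous
                fclose(f);
            }
        }
        x264_picture_clean(&pic_in);
        x264_encoder_close(enc);
        return 0;
    }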

  • what x264_reference_hierarchy_reset means in x264 encoding

    7 June 2018, by MSD Paul

    What does x264_reference_hierarchy_reset( h ) mean?

    As far as I can tell, it is used for I, P, and B-ref frames, but not for plain B-frames.

    Does it signify that the current frame is not a reference frame? Can anyone explain this in detail?

  • An x264-encoded video only with P frames and a different video for I frame

    24 May 2018, by MSD Paul

    Suppose I have 5 frames and, using the x264 codec, I encode the first one as the I (reference) frame and all the others as P frames in one video.

    In this case, is it possible to have 2 videos, one containing only the I frame and the other containing only the 4 P frames?

    Decoding the 4 P frames requires the I frame from the first video, but can the videos be constructed this way?
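
    A hedged note: this should be possible if the two outputs are raw Annex B .264 elementary streams rather than finished .mp4 files, because Annex B streams can simply be concatenated before decoding (an .mp4 container carries its own index, so the P-frame file could not be played back on its own this way). A minimal sketch, with illustrative file names:

    #include <stdio.h>

    //append the contents of src_path onto dst
    static void append_file(FILE *dst, const char *src_path)
    {
        FILE *src = fopen(src_path, "rb");
        char buf[4096];
        size_t n;
        while ((n = fread(buf, 1, sizeof buf, src)) > 0)
            fwrite(buf, 1, n, dst);
        fclose(src);
    }

    int main(void)
    {
        //rebuild a decodable stream: the I frame must precede the P frames
        FILE *out = fopen("decodable.264", "wb");
        append_file(out, "iframe.264");  //video 1: the single I frame
        append_file(out, "pframes.264"); //video 2: the 4 P frames
        fclose(out);
        return 0;
    }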

  • RTMP Broadcast packet body structure for Twitch

    22 May 2018, by Dobby

    I'm currently working on a project similar to OBS, where I'm capturing screen data, encoding it with the x264 library, and then broadcasting it to a Twitch server.

    Currently, the servers are accepting the data, but no video is played: it buffers for a moment, then returns the error code "2000: network error".

    Like OBS Classic, I'm dividing each NAL provided by x264 by its type and then making the necessary changes to each:

    int frame_size = x264_encoder_encode(encoder, &nals, &num_nals, &pic_in, &pic_out);
    
        //sort the NAL's into their types and make necessary adjustments
    
        int timeOffset = int(pic_out.i_pts - pic_out.i_dts);
    
        timeOffset = htonl(timeOffset);//host to network translation, ensure the bytes are in the right format
        BYTE *timeOffsetAddr = ((BYTE*)&timeOffset) + 1;
    
        videoSection sect;
        bool foundFrame = false;
    
        uint8_t * spsPayload = NULL;
        int spsSize = 0;
    
        for (int i = 0; i < num_nals; i++) {
            //std::cout << "VideoEncoder: EncodedImages Size: " << encodedImages->size() << std::endl;
            x264_nal_t &nal = nals[i];
            //std::cout << "NAL is:" << nal.i_type << std::endl;
    
            //need to account for pps/sps, seems to always be the first frame sent
            if (nal.i_type == NAL_SPS) {
                spsSize = nal.i_payload;
                spsPayload = (uint8_t*)malloc(spsSize);
                memcpy(spsPayload, nal.p_payload, spsSize);
            } else if (nal.i_type == NAL_PPS){
                //pps always happens after sps
                if (spsPayload == NULL) {
                    std::cout << "VideoEncoder: critical error, sps not set" << std::endl;
                }
                uint8_t * payload = (uint8_t*)malloc(nal.i_payload + spsSize);
                memcpy(payload, spsPayload, spsSize);
                memcpy(payload + spsSize, nal.p_payload, nal.i_payload); //append the PPS after the SPS
                sect = { nal.i_payload + spsSize, payload, nal.i_type };
                encodedImages->push(sect);
            } else if (nal.i_type == NAL_SEI || nal.i_type == NAL_FILLER) { 
                //these need some bytes at the start removed
                BYTE *skip = nal.p_payload;
                while (*(skip++) != 0x1);
                int skipBytes = (int)(skip - nal.p_payload);
    
                int newPayloadSize = (nal.i_payload - skipBytes);
    
                uint8_t * payload = (uint8_t*)malloc(newPayloadSize);
                memcpy(payload, nal.p_payload + skipBytes, newPayloadSize);
                sect = { newPayloadSize, payload, nal.i_type };
                encodedImages->push(sect);
    
            } else if (nal.i_type == NAL_SLICE_IDR || nal.i_type == NAL_SLICE) { 
                //these packets need an additional section at the start
                BYTE *skip = nal.p_payload;
                while (*(skip++) != 0x1);
                int skipBytes = (int)(skip - nal.p_payload);
    
                std::vector<BYTE> bodyData;
                if (!foundFrame) {
                    if (nal.i_type == NAL_SLICE_IDR) { bodyData.push_back(0x17); } else { bodyData.push_back(0x27); } //add a 17 or a 27 as appropriate
                    bodyData.push_back(1);
                    bodyData.push_back(*timeOffsetAddr);
    
                    foundFrame = true;
                }
    
                //prepend the bodyData (if any) to the start-code-stripped NAL payload
                int newPayloadSize = (nal.i_payload - skipBytes);

                uint8_t * payload = (uint8_t*)malloc(newPayloadSize + bodyData.size());
                memcpy(payload, bodyData.data(), bodyData.size());
                memcpy(payload + bodyData.size(), nal.p_payload + skipBytes, newPayloadSize);
                int totalSize = newPayloadSize + (int)bodyData.size();
                sect = { totalSize, payload, nal.i_type };
                encodedImages->push(sect);
            } else {
                std::cout << "VideoEncoder: Nal type did not match expected" << std::endl;
                continue;
            }
        }
    

    The NAL payload data is then put into a struct, videoSection, in a queue buffer:

    //used to transfer encoded data
    struct videoSection {
        int frameSize;
        uint8_t* payload;
        int type;
    };
    

    After that, it is picked up by the broadcaster, a few more changes are made, and then I call RTMP_SendPacket():

    videoSection sect = encodedImages->front();
    encodedImages->pop();
    
    //std::cout << "Broadcaster: Frame Size: " << sect.frameSize << std::endl;
    
    //two methods of sending RTMP data, _sendpacket and _write. Using sendpacket for greater control
    
    RTMPPacket * packet;
    
    unsigned char* buf = (unsigned char*)sect.payload;
    
    int type = buf[0] & 0x1f; //mask off the low 5 bits: the NAL unit type
    int len = sect.frameSize;
    long timeOffset = GetTickCount() - rtmp_start_time;
    
    //assign space packet will need
    packet = (RTMPPacket *)malloc(sizeof(RTMPPacket)+RTMP_MAX_HEADER_SIZE + len + 9);
    memset(packet, 0, sizeof(RTMPPacket) + RTMP_MAX_HEADER_SIZE);
    
    packet->m_body = (char *)packet + sizeof(RTMPPacket) + RTMP_MAX_HEADER_SIZE;
    packet->m_nBodySize = len + 9;
    
    //std::cout << "Broadcaster: Packet Size: " << sizeof(RTMPPacket) + RTMP_MAX_HEADER_SIZE + len + 9 << std::endl;
    //std::cout << "Broadcaster: Packet Body Size: " << len + 9 << std::endl;
    
    //set body to point to the packetbody
    unsigned char *body = (unsigned char *)packet->m_body;
    memset(body, 0, len + 9);
    
    
    
    //NAL_SLICE_IDR represents keyframe
    //first element determines packet type
    body[0] = 0x27;//inter-frame h.264
    if (sect.type == NAL_SLICE_IDR) {
        body[0] = 0x17; //keyframe h.264
    }
    
    
    //-------------------------------------------------------------------------------
    //this section taken from https://stackoverflow.com/questions/25031759/using-x264-and-librtmp-to-send-live-camera-frame-but-the-flash-cant-show
    //in an effort to understand the packet format. It does not resolve my issues formatting the data for Twitch to play it
    
    //sets body to be NAL unit
    body[1] = 0x01;
    body[2] = 0x00;
    body[3] = 0x00;
    body[4] = 0x00;
    
    //>> is a shift right
    //shift len to the right, and AND it
    /*body[5] = (len >> 24) & 0xff;
    body[6] = (len >> 16) & 0xff;
    body[7] = (len >> 8) & 0xff;
    body[8] = (len) & 0xff;*/
    
    //end code sourced from https://stackoverflow.com/questions/25031759/using-x264-and-librtmp-to-send-live-camera-frame-but-the-flash-cant-show
    //-------------------------------------------------------------------------------
    
    //copy from buffer into rest of body
    memcpy(&body[9], buf, len);
    
    //DEBUG
    
    //save individual packet body to a file with name rtmp[packetnum]
    //determine why some packets do not have 0x27 or 0x17 at the start
    //still happening, makes no sense given the above code
    
    /*std::string fileLocation = "rtmp" + std::to_string(packCount++);
    std::cout << fileLocation << std::endl;
    const char * charConversion = fileLocation.c_str();
    
    FILE* saveFile = NULL;
    saveFile = fopen(charConversion, "w+b");//open as write and binary
    if (!fwrite(body, len + 9, 1, saveFile)) {
        std::cout << "VideoEncoder: Error while trying to write to file" << std::endl;
    }
    fclose(saveFile);*/
    
    //END DEBUG
    
    //other packet details
    packet->m_hasAbsTimestamp = 0;
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    if (rtmp != NULL) {
        packet->m_nInfoField2 = rtmp->m_stream_id;
    }
    packet->m_nChannel = 0x04;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet->m_nTimeStamp = timeOffset;
    
    //send the packet
    if (rtmp != NULL) {
        RTMP_SendPacket(rtmp, packet, TRUE);
    }
    

    I can see in the inspector that Twitch is receiving the data at a steady 3 kbps, so I'm sure something is wrong with how I'm adjusting the data before sending it. Can anyone advise me on what I'm doing wrong here?
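
    For reference, a hedged sketch of the body layout the FLV/RTMP specification expects for H.264 video (the helper names are illustrative, not part of the code above): every video message body starts with a frame-type/codec-id byte (0x17 for a keyframe, 0x27 for an inter frame), an AVCPacketType byte (0 = sequence header, 1 = NALU), and a 24-bit composition-time offset. An "AVC sequence header" body carrying the AVCDecoderConfigurationRecord built from the SPS and PPS must be sent once before any frames, and each subsequent NAL unit needs a 4-byte big-endian length prefix in place of its Annex B start code. A stream that lacks the sequence header or the length prefixes typically buffers briefly and then fails, which matches the symptom described.

    #include <cstdint>
    #include <vector>

    //sketch: FLV "AVC sequence header" body, sent once before any video frames;
    //sps/pps are start-code-stripped NAL units (sps[0] is the NAL header byte, e.g. 0x67)
    std::vector<uint8_t> makeSequenceHeaderBody(const uint8_t *sps, int spsSize,
                                                const uint8_t *pps, int ppsSize)
    {
        std::vector<uint8_t> body = { 0x17, 0x00, 0x00, 0x00, 0x00 }; //keyframe+AVC, type 0, cts 0
        body.push_back(0x01);                  //configurationVersion
        body.push_back(sps[1]);                //AVCProfileIndication
        body.push_back(sps[2]);                //profile_compatibility
        body.push_back(sps[3]);                //AVCLevelIndication
        body.push_back(0xff);                  //lengthSizeMinusOne = 3 (4-byte NALU lengths)
        body.push_back(0xe1);                  //one SPS follows
        body.push_back((spsSize >> 8) & 0xff);
        body.push_back(spsSize & 0xff);
        body.insert(body.end(), sps, sps + spsSize);
        body.push_back(0x01);                  //one PPS follows
        body.push_back((ppsSize >> 8) & 0xff);
        body.push_back(ppsSize & 0xff);
        body.insert(body.end(), pps, pps + ppsSize);
        return body;
    }

    //sketch: FLV "AVC NALU" body for one start-code-stripped NAL unit;
    //cts is the pts - dts composition-time offset in milliseconds
    std::vector<uint8_t> makeNaluBody(const uint8_t *nal, int nalSize,
                                      bool keyframe, int32_t cts)
    {
        std::vector<uint8_t> body;
        body.push_back(keyframe ? 0x17 : 0x27); //frame type + AVC codec id
        body.push_back(0x01);                   //AVCPacketType: 1 = NALU
        body.push_back((cts >> 16) & 0xff);     //24-bit big-endian composition time
        body.push_back((cts >> 8) & 0xff);
        body.push_back(cts & 0xff);
        body.push_back((nalSize >> 24) & 0xff); //4-byte NALU length prefix,
        body.push_back((nalSize >> 16) & 0xff); //replacing the Annex B start code
        body.push_back((nalSize >> 8) & 0xff);
        body.push_back(nalSize & 0xff);
        body.insert(body.end(), nal, nal + nalSize);
        return body;
    }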

  • gstreamer : Internal data error, in appsink "pull-sample" mode

    9 May 2018, by Amir Raza

    I am getting an "Internal data stream error" in appsink. My application reads .yuv data, encodes it, and writes the result to a buffer.

    Writing to a file worked, but when I changed the code to write to a buffer, it gives this error. It is only able to write a single packet (188 bytes).

    Output of the program:

     (ConsoleApplication6.exe:14432): GStreamer-WARNING **: Failed to load plugin 'C:\gstreamer\1.0\x86_64\lib\gstreamer-1.0\libgstopenh264.dll': 'C:\gstreamer\1.0\x86_64\lib\gstreamer-1.0\libgstopenh264.dll': The specified procedure could not be found.
        pipeline:  filesrc location=Transformers1080p.yuv blocksize=4147200 ! videoparse  width=1920 height=1080 framerate=60/1 ! videoconvert ! video/x-raw,format=I420,width=1920,height=1080,framerate=60/1 !  x264enc ! mpegtsmux ! queue !  appsink name = sink
        Now playing: Transformers1080p.yuv
        Running...
    
         on_new_sample_from_sink
    
         sample got of size = 188
        Error: Internal data stream error.
        Returned, stopping playback
        Deleting pipeline
    

    My code:

    #define _CRT_SECURE_NO_WARNINGS 1
    //#pragma warning(disable:4996)
    #include <gst/gst.h>
    #include <gst/audio/audio.h>
    #include <gst/app/gstappsrc.h>
    #include <gst/base/gstpushsrc.h>
    #include <gst/app/gstappsink.h>
    #include <gst/video/video.h>
    #include <gst/video/gstvideometa.h>
    #include <gst/video/video-overlay-composition.h>
    
    #include <stdio.h>
    #include <string.h>

    #include <iostream>
    #include <string>
    
    using namespace std;
    
    GstElement *SinkBuff;
    char *out_file_path;
    FILE *out_file;
    
    //gst-launch-1.0.exe -v filesrc location=Transformers1080p.yuv blocksize=4147200 !  
    //videoconvert ! video/x-raw,format=I420,width=1920,height=1080,framerate=60/1 !  
    //openh264enc ! mpegtsmux ! filesink location=final.ts
    
    
    static gboolean bus_call(GstBus     *bus, GstMessage *msg, gpointer    data)
    {
            GMainLoop *loop = (GMainLoop *)data;
    
            switch (GST_MESSAGE_TYPE(msg))
            {
            case GST_MESSAGE_EOS:
                    g_print("End of stream\n");
                    g_main_loop_quit(loop);
                    break;
    
            case GST_MESSAGE_ERROR:
              {
                    gchar  *debug;
                    GError *error;
    
                    gst_message_parse_error(msg, &error, &debug);
                    g_free(debug);
    
                    g_printerr("Error: %s\n", error->message);
                    g_error_free(error);
    
                    g_main_loop_quit(loop);
                    break;
              }
            default:
                    break;
        }
            return TRUE;
    }
    
    /* called when the appsink notifies us that there is a new buffer ready for
    * processing */
    static void  on_new_sample_from_sink(GstElement * elt, void *ptr)
    {
            guint size;
            GstBuffer *app_buffer, *buffer;
            GstElement *source;
            GstMapInfo map = { 0 };
            GstSample *sample;
            static GstClockTime timestamp = 0;
            printf("\n on_new_sample_from_sink \n ");
            /* get the buffer from appsink */
            g_signal_emit_by_name(SinkBuff, "pull-sample", &sample, NULL);
            if (sample)
            {
                    buffer = gst_sample_get_buffer(sample);
                    gst_buffer_map(buffer, &map, GST_MAP_READ);
    
                    printf("\n sample got of size = %u \n", (unsigned)map.size);
                    //write the whole mapped buffer (map.size bytes) to the file
                    fwrite((char *)map.data, 1, map.size, out_file);
    
                    gst_buffer_unmap(buffer, &map);
                    gst_sample_unref(sample);
            }
    }
    
    
    int main(int   argc, char *argv[])
    {
            GMainLoop *loop;
            int width, height;
    
            GstElement *pipeline;
            GError *error = NULL;
            GstBus *bus;
            char pipeline_desc[1024];
            out_file = fopen("output.ts", "wb");
    
    
            /* Initialisation */
            gst_init(&argc, &argv);
    
            // Create gstreamer loop
            loop = g_main_loop_new(NULL, FALSE);
    
            sprintf(
                    pipeline_desc,
                    " filesrc location=Transformers1080p.yuv blocksize=4147200 !"
                    " videoparse  width=1920 height=1080 framerate=60/1 !"
                    " videoconvert ! video/x-raw,format=I420,width=1920,height=1080,framerate=60/1 ! "
                    //" x264enc ! mpegtsmux ! filesink location=final.ts");
                    " x264enc ! mpegtsmux ! queue !  appsink name = sink");
    
    
            printf("pipeline: %s\n", pipeline_desc);
    
            /* Create gstreamer elements */
            pipeline = gst_parse_launch(pipeline_desc, &error);
    
            /* TODO: Handle recoverable errors. */
    
            if (!pipeline) {
                    g_printerr("Pipeline could not be created. Exiting.\n");
                    return -1;
            }
    
            /* get sink */
            SinkBuff = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
            g_object_set(G_OBJECT(SinkBuff), "emit-signals", TRUE, "sync", FALSE, NULL);
            g_signal_connect(SinkBuff, "new-sample", G_CALLBACK(on_new_sample_from_sink), NULL);
    
    
            /* Set up the pipeline */
            /* we add a message handler */
            bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
            gst_bus_add_watch(bus, bus_call, loop);
            gst_object_unref(bus);
    
            /* Set the pipeline to "playing" state*/
            g_print("Now playing: Transformers1080p.yuv \n");
            gst_element_set_state(pipeline, GST_STATE_PLAYING);
    
            /* Iterate */
            g_print("Running...\n");
            g_main_loop_run(loop);
    
            /* Out of the main loop, clean up nicely */
            g_print("Returned, stopping playback\n");
            gst_element_set_state(pipeline, GST_STATE_NULL);
    
            g_print("Deleting pipeline\n");
            gst_object_unref(GST_OBJECT(pipeline));
            fclose(out_file);
            g_main_loop_unref(loop);
    
    
            return 0;
    }
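
    One hedged observation about the callback above: GstAppSink's "new-sample" signal expects its handler to return a GstFlowReturn, but on_new_sample_from_sink is declared void, so the pipeline receives an undefined return value; if that value is taken as a flow error, the stream stops right after the first sample, which would match the single 188-byte packet followed by "Internal data stream error". A minimal sketch of a conforming handler (the diagnosis is an assumption, not a confirmed fix):

    static GstFlowReturn on_new_sample_from_sink(GstElement *elt, gpointer user_data)
    {
            GstSample *sample = NULL;
            GstMapInfo map = { 0 };

            /* pull the sample that appsink just announced */
            g_signal_emit_by_name(elt, "pull-sample", &sample, NULL);
            if (sample)
            {
                    GstBuffer *buffer = gst_sample_get_buffer(sample);
                    gst_buffer_map(buffer, &map, GST_MAP_READ);
                    fwrite(map.data, 1, map.size, out_file); /* write all map.size bytes */
                    gst_buffer_unmap(buffer, &map);
                    gst_sample_unref(sample);
            }
            return GST_FLOW_OK; /* keep the stream flowing */
    }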