profile
viewpoint

Ask questions: How can I use FFmpeg with H.264?

My config code `module.exports = { listenIp: '0.0.0.0', listenPort: 3000, sslCrt: './crt/domain.crt', sslKey: './crt/domain.key', mediasoup: { // Worker settings worker: { rtcMinPort: 10000, rtcMaxPort: 10100, logLevel: 'warn', logTags: [ 'info', 'ice', 'dtls', 'rtp', 'srtp', 'rtcp', // 'rtx', // 'bwe', // 'score', // 'simulcast', // 'svc' ], }, // Router settings router: { mediaCodecs: [ { kind : 'video', mimeType : 'video/h264', clockRate : 90000, parameters : { 'packetization-mode' : 1, 'profile-level-id' : '4d0032', 'level-asymmetry-allowed' : 1, 'x-google-start-bitrate' : 1000 } }, { kind : 'video', mimeType : 'video/h264', clockRate : 90000, parameters : { 'packetization-mode' : 1, 'profile-level-id' : '42e01f', 'level-asymmetry-allowed' : 1, 'x-google-start-bitrate' : 1000 } }, { kind : 'video', mimeType : 'video/h264', clockRate : 90000, parameters : { 'packetization-mode' : 1, 'profile-level-id' : '42001f', 'level-asymmetry-allowed' : 1, 'x-google-start-bitrate' : 1000 } }, { kind : 'video', mimeType : 'video/h264', clockRate : 90000, parameters : { 'packetization-mode' : 1, 'profile-level-id' : '640032', 'level-asymmetry-allowed' : 1, 'x-google-start-bitrate' : 1000 } }

    ]
},
// WebRtcTransport settings
webRtcTransport: {
  listenIps: [
    {
      ip: '192.168.125.128',
      announcedIp: null,
    }
  ],
  maxIncomingBitrate: 1500000,
  initialAvailableOutgoingBitrate: 1000000,
},
plainRtpTransport: {
  listenIp: '192.168.125.128',
  rtcpMux: false,
  comedia: true
}

}, }; `

My server code

`const mediasoup = require('mediasoup'); const fs = require('fs'); const config = require('./config');

const server = require('http').createServer();

class myServe { constructor(config) { this.config = config; this.createSocketServe(); this.runMediasoupWorker()

}
//创建socetIo;
async createSocketServe() {
    const io = require('socket.io')(server, {
        serveClient: false,
        path: '/server',
        log: false,
    });
    server.listen(this.config.listenPort);


    io.on('connection', (socket) => {
        console.log('client connected');
        socket.on('disconnect', () => {
            console.log('client disconnected');
        });

        socket.on('connect_error', (err) => {
            console.error('client connection error', err);
        });
        socket.on('getRouterRtpCapabilities', async (data, callback) => {
            //给前端提交服务器的路由信息
            callback(this.mediasoupRouter.rtpCapabilities);
        });
        //消费者请求
        socket.on('createConsumerTransport', async (data, callback) => {
            try {
                const { transport, params } = await this.createWebRtcTransport();
                this.consumerTransport = transport;
                callback(params);
            } catch (err) {
                console.error(err);
                callback({ error: err.message });
            }
        });

        socket.on('consume', async (data, callback) => {
            callback(await this.createConsumer(this.videoProducer, data.rtpCapabilities));
        });
        socket.on('connectConsumerTransport', async (data, callback) => {
            await this.consumerTransport.connect({ dtlsParameters: data.dtlsParameters });
            callback();
        });
        socket.on('resume', async (data, callback) => {
            await this.consumer.resume();
            callback();
        });

    })
}
//创建通道的函数
async createWebRtcTransport() {
    const {
        maxIncomingBitrate,
        initialAvailableOutgoingBitrate
    } = this.config.mediasoup.webRtcTransport;
    //在路由上创建前往后推的通道
    const transport = await this.mediasoupRouter.createWebRtcTransport({
        listenIps: this.config.mediasoup.webRtcTransport.listenIps,
        enableUdp: true,
        enableTcp: true,
        preferUdp: true,
        initialAvailableOutgoingBitrate,
    });
    // console.log(transport)
    if (maxIncomingBitrate) {
        try {
            await transport.setMaxIncomingBitrate(maxIncomingBitrate);
        } catch (error) {
        }
    }
    return {
        transport,//通道信息
        params: {
            id: transport.id,
            iceParameters: transport.iceParameters,
            iceCandidates: transport.iceCandidates,
            dtlsParameters: transport.dtlsParameters
        },
    };
}
//创建Mediasoup的工作线程
async runMediasoupWorker() {
    let worker = await mediasoup.createWorker({
        logLevel: this.config.mediasoup.worker.logLevel,
        logTags: this.config.mediasoup.worker.logTags,
        rtcMinPort: this.config.mediasoup.worker.rtcMinPort,
        rtcMaxPort: this.config.mediasoup.worker.rtcMaxPort,
    });
    worker.on('died', () => {
        console.error('mediasoup worker died, exiting in 2 seconds... [pid:%d]', worker.pid);
        setTimeout(() => process.exit(1), 2000);
    });
    //获取配置
    const mediaCodecs = this.config.mediasoup.router.mediaCodecs;
    //创建工作线程的路由
    this.mediasoupRouter = await worker.createRouter({ mediaCodecs });
    //创建rtsp的通道
    this.createRTSPTransport()
    console.log("路由创建完成");
}

async createConsumer(producer, rtpCapabilities) {
    //判断给定的 RTP 功能是否有效使用给定的生产者。,生产者 ID,潜在使用者的 RTP 功能
    if (!this.mediasoupRouter.canConsume(
        {
            producerId: producer.id,
            rtpCapabilities,
        })
    ) {
        console.error('can not consume');
        return;
    }
    let consumer;

    try {
        consumer = await this.consumerTransport.consume({
            producerId: producer.id,
            rtpCapabilities,
            paused: producer.kind === 'video',
        });
        this.consumer = consumer;
    } catch (error) {
        console.error('consume failed', error);
        return;
    }

    if (consumer.type === 'simulcast') {
        await consumer.setPreferredLayers({ spatialLayer: 2, temporalLayer: 2 });
    }

    return {
        producerId: producer.id,
        id: consumer.id,
        kind: consumer.kind,
        rtpParameters: consumer.rtpParameters,
        type: consumer.type,
        producerPaused: consumer.producerPaused
    };
}
//接收rtsp的流
async createRTSPTransport() {

    let videoTransport = await this.mediasoupRouter.createPlainTransport(this.config.mediasoup.plainRtpTransport);

    // Read the transport local RTP port.
    const videoRtpPort = videoTransport.tuple.localPort;
    console.log(videoRtpPort);
    // => 3501

    // Read the transport local RTCP port.
    const videoRtcpPort = videoTransport.rtcpTuple.localPort;
    console.log(videoRtcpPort);
    // => 2989


    let videoProducer = await videoTransport.produce(
        {
            kind: 'video',
            rtpParameters:
            {
                codecs:
                    [
                        {
                            mimeType     : 'video/h264',
                            clockRate    : 90000,
                            payloadType  : 104,
                            parameters :
                            {
                                'packetization-mode'      : 1,
                                'profile-level-id'        : '42001f',
                                'level-asymmetry-allowed' : 1,
                                'x-google-start-bitrate'  : 1000
                            }
                        }
                    ],
                encodings: [{ ssrc: 22222222 }]
            }
        });
        // await videoProducer.enableTraceEvent([ "rtp", "pli" ]);

        // videoProducer.on("trace", (trace) =>
        // {
        //   console.log(trace)
        // });
        

    this.videoProducer = videoProducer;
    console.log(`请在控制台调用推流:bash push.sh ${videoRtpPort} ${videoRtcpPort}`);


}

} let myserve = new myServe(config)`

My html code

`class appObj { constructor(video) { this.video=video;

    this.device;
    this.init();
    this.ws();
}
init() {
    if (typeof navigator.mediaDevices.getDisplayMedia === 'undefined') {
        console.log("err");
    }
    const handlerName = mediasoupClient.detectDevice();

    if (handlerName) {
        console.log("detected handler: %s", handlerName);
    } else {
        console.warn("no suitable handler found for current browser/device");
    }
    //创建mediasoup-client的终端
    this.device = new mediasoupClient.Device();
}
ws() {
    const opts = {
        path: '/server',
        transports: ['websocket'],
    };
    const serverUrl = `http://192.168.125.128:3000`;
    let socket = io(serverUrl, opts);
    this.socket = socket;
    socket.request = socketPromise(socket);
    socket.on('connect', async () => {
        console.log("连接成功")
        //连接成功,去流媒体服务器询问服务器的路由信息
       let routerRtpCapabilities = await socket.request('getRouterRtpCapabilities');
        //把路由信息给客户端
        await this.device.load({ routerRtpCapabilities })

        //通知后台创建生产者通道,把自己的流传向服务器
        // await this.createClientSendPipe();

        //通知后台把流传前台
        await this.subscribe()
        
    });
    const device = new mediasoupClient.Device();

}
//创建前端向后台发送的传输通道createSendTransport
async createClientSendPipe(){
    //创建生产者的连接通道,先向后台发生自己客户端的信息
    const data = await this.socket.request('createProducerTransport', {
        forceTcp: false,
        rtpCapabilities: this.device.rtpCapabilities,
    });
    //服务器传输通道创建成功,接收到服务器的通道信息
    if (data.error) {
        console.error(data.error);
        return;
      }
    //根据服务器的通道信息创建自己的通道信息
    const transport = this.device.createSendTransport(data);
    transport.on('connect', async ({ dtlsParameters }, callback, errback) => {
        //数据通道连接成功
        console.log("数据通道连接成功")
        //通道连接成功后,把该通道的信息发往后端
        this.socket.request('connectProducerTransport', { dtlsParameters })
            .then(callback)
            .catch(errback);
        
      });
      transport.on('produce', async ({ kind, rtpParameters }, callback, errback) => {
        console.log("produce")
        //指示传输将音频或视频轨道发送到 mediasoup 路由器
        try {
            const { id } = await this.socket.request('produce', {
              transportId: transport.id,
              kind,
              rtpParameters,
            });
            callback({ id });
          } catch (err) {
            errback(err);
          }
      });
    
      transport.on('connectionstatechange', (state) => {
        
        switch (state){
            case 'connecting':
                console.log("connectionstatechange:",state);
            break;

            case 'connected':
                console.log("connectionstatechange:",state);
            break;

            case 'failed':
                console.log("connectionstatechange:",state);
            break;

            default: break;
        }
      });
      //判断该设备是否支持摄像头
      if (!this.device.canProduce('video')) {
        console.error('该设备没有摄像头');
        return;
      }
      //获取该设备的视频

    let stream=await navigator.mediaDevices.getUserMedia({ video: true });
    const track = stream.getVideoTracks()[0];
    const params = { track };
    params.encodings = [
      { maxBitrate: 100000 },
      { maxBitrate: 300000 },
      { maxBitrate: 900000 },
    ];
    params.codecOptions = {
      videoGoogleStartBitrate : 1000
    };
    //把视频信息推入流
    this.producer = await transport.produce(params);

}
//创建后台向前台发送流的数据通道
async subscribe(){
    //向后台发送创建流通道的请求,消费者通道
    const data = await this.socket.request('createConsumerTransport', {
        forceTcp: false,
      });
      if (data.error) {
        console.error(data.error);
        return;
      }
    //根据服务器的通道信息创建自己的通道信息,后台往前发的通道
    const transport = this.device.createRecvTransport(data);
    transport.on('connect', ({ dtlsParameters }, callback, errback) => {
        console.log("已连接")
        this.socket.request('connectConsumerTransport', {
          transportId: transport.id,
          dtlsParameters
        })
          .then(callback)
          .catch(errback);
      });
    
      transport.on('connectionstatechange', async (state) => {
        switch (state) {
          case 'connecting':
            console.log("我在:",state)
            break;
    
          case 'connected':
            console.log("我在:",state)
            this.video.srcObject = await stream;
            await this.socket.request('resume');
            break;
    
          case 'failed':
            console.log("我在:",state)
            break;
    
          default: break;
        }
      });
      const stream = this.consume(transport);
    

}
async consume(transport) {
    const { rtpCapabilities } = this.device;
    console.log(rtpCapabilities)
    const data = await this.socket.request('consume', { rtpCapabilities });
    const {
      producerId,
      id,
      kind,
      rtpParameters,
    } = data;
  
    let codecOptions = {};
    const consumer = await transport.consume({
      id,
      producerId,
      kind,
      rtpParameters,
      codecOptions,
    });
    
    const stream = new MediaStream();
    stream.addTrack(consumer.track);
    return stream;
  }

}`

My FFmpeg command: ffmpeg -i "rtsp://admin:admin123@192.168.1.82:554/cam/realmonitor?channel=1&subtype=0&unicast=true&proto=Onvif" -an -c copy -ssrc 22222222 -payload_type 104 -f rtp "rtp://192.168.125.128:10098?rtcpport=10096"

My problem description

If I use VP8 it works very well. When I use H264 it also works in Chrome, but the stream quality is bad: the video keeps buffering and the picture only updates every ten seconds or more. What can I do?

versatica/mediasoup

Answer questions ibc

You know there is a forum, you read it in the issue template that you ignored. Please use the forum for questions. This is just for bugs.

useful!
source:https://uonfu.com/
answerer
Iñaki Baz Castillo ibc around.co Bilbao & Madrid https://inakibaz.me Passionate about new technologies, Open Source, modern web applications development, Node.js, C++, SIP protocol and, above all, Real-Time Communications.
Github User Rank List