@OKNOZA

How do I set up live HLS streaming?

I built nginx with nginx-rtmp-module and ffmpeg; the configuration file is as follows.
#user  nobody;
worker_processes  1;

#error_log  logs/error.log;
#error_log  logs/error.log  notice;
#error_log  logs/error.log  info;

#pid        logs/nginx.pid;


events {
    worker_connections  1024;
}



http {
    include       mime.types;
    default_type  application/octet-stream;

    #log_format  main  '$remote_addr - $remote_user [$time_local] "$request" '
    #                  '$status $body_bytes_sent "$http_referer" '
    #                  '"$http_user_agent" "$http_x_forwarded_for"';

    #access_log  logs/access.log  main;

    sendfile        on;
    #tcp_nopush     on;

    #keepalive_timeout  0;
    keepalive_timeout  65;

    #gzip  on;


# HTTP can be used for accessing RTMP stats
    server {

        listen      80;
        charset  UTF-8;

        location / {
            root   /usr/local/nginx/html;
            index  index.html index.htm;
        }

        # This URL provides RTMP statistics in XML
        location /stat {
            rtmp_stat all;

            # Use this stylesheet to view XML as web page
            # in browser
            rtmp_stat_stylesheet stat.xsl;
	    #allow 127.0.0.1;
        }

        location /stat.xsl {
            # XML stylesheet to view RTMP stats.
            # Copy stat.xsl wherever you want
            # and put the full directory path here
            root /usr/build/nginx-rtmp-module;
        }
	location /nclients {
	    proxy_pass http://127.0.0.1/stat;
#	    xslt_stylesheet /usr/local/nginx/html/nclients.xsl app='$arg_app' name='$arg_name';
	    add_header Refresh "3; $request_uri";
	}

        # rtmp control
        location /control {
            rtmp_control all;
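            # (Example, not in the original config:) the control module is
            # driven with plain HTTP requests, for instance something like
            #   curl "http://localhost/control/record/start?app=big&name=mystream"
            #   curl "http://localhost/control/drop/publisher?app=mytv&name=mystream"
            # See the nginx-rtmp-module control module docs for the exact
            # operations and parameters.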
        }

        location /hls {
            # Serve HLS fragments
            # alias /tmp/app;
	    alias /usr/local/nginx/html/video;

            expires -1;
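
            # (Suggestion, not in the original config:) serve playlists and
            # fragments with the MIME types HLS players expect; uncomment if
            # clients refuse to play the stream.
            # types {
            #     application/vnd.apple.mpegurl m3u8;
            #     video/mp2t ts;
            # }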
        }

    }
}

rtmp {

    server {

        listen 1935;

        chunk_size 4000;

        # TV mode: one publisher, many subscribers
        application mytv {

            # enable live streaming
            live on;

            # record first 1K of stream
            record all;
            record_path /tmp/av;
            record_max_size 1K;

            # append current timestamp to each flv
            record_unique on;

            # publish only from localhost
            allow publish 127.0.0.1;
            deny publish all;

            #allow play all;
        }

        # Transcoding (ffmpeg needed)
        application big {
            live on;

            # On every published stream run this command (ffmpeg)
            # with substitutions: $app/${app}, $name/${name} for application & stream name.
            #
            # This ffmpeg call receives the stream from this application &
            # reduces the resolution down to 320x180. The stream is then published to
            # the 'small' application (see below) under the same name.
            #
            # ffmpeg can do anything with the stream like video/audio
            # transcoding, resizing, altering container/codec params etc
            #
            # Multiple exec lines can be specified.

#            exec /usr/local/bin/ffmpeg -re -i rtmp://localhost:1935/$app/$name -vcodec libx264 -threads 0 -r 25 -g 50 -b 500k -bt 500k -s 320x180 -acodec mp3 -ar 44100 -ab 64k -f flv rtmp://localhost:1935/small/${name};
            exec /usr/local/bin/ffmpeg -re -i rtmp://localhost:1935/$app/$name -vcodec libx264 -threads 0 -r 25 -g 50 -b 500k -bt 500k -s 320x180 -acodec mp3 -ar 44100 -ab 64k -f flv rtmp://1.13577498.fme.ustream.tv/ustreamVideo/13577498/RrHUx94pJWjaEhPjnQYwUxcU2T2w8CaX;
# 2>>/tmp/ffmpeg.log;

#	    exec /usr/local/bin/ffmpeg -re -i rtmp://localhost:1935/$app/$name 
#		-vcodec libx264 -vprofile baseline -b:v 128k -b:a 32k   -s 320x180  -acodec libvo_aacenc -ar 44100 -ac 1 -f flv rtmp://localhost:1935/small/${name}_low
#		-vcodec libx264 -vprofile baseline -b:v 384k -b:a 64k   -s 640x360  -acodec libvo_aacenc -ar 44100 -ac 1 -f flv rtmp://localhost:1935/small/${name}_mid
#		-vcodec libx264 -vprofile baseline -b:v 1024k -b:a 128k -s 1280x720 -acodec libvo_aacenc -ar 44100 -ac 1 -f flv rtmp://localhost:1935/small/${name}_hi;

#hls
#	    exec /usr/local/bin/ffmpeg -re -i rtmp://localhost:1935/$app/$name 
#		-vcodec libx264 -vprofile baseline -b:v 128k -b:a 32k   -s 320x180  -acodec libvo_aacenc -ar 44100 -ac 1 -f flv rtmp://localhost:1935/hls/${name}_low
#		-vcodec libx264 -vprofile baseline -b:v 384k -b:a 64k   -s 640x360  -acodec libvo_aacenc -ar 44100 -ac 1 -f flv rtmp://localhost:1935/hls/${name}_mid
#		-vcodec libx264 -vprofile baseline -b:v 1024k -b:a 128k -s 1280x720 -acodec libvo_aacenc -ar 44100 -ac 1 -f flv rtmp://localhost:1935/hls/${name}_hi;
# 2>>/tmp/ffmpeg.log;
#hls
	    exec /usr/local/bin/ffmpeg -re -i rtmp://localhost:1935/$app/$name 
		-vcodec libx264 -vprofile baseline -acodec libvo_aacenc -ar 44100 -ac 1 -f flv rtmp://localhost:1935/hls/${name};


	    # record the published stream (record_max_size is commented out,
	    # so the whole stream is kept)
            record all;
            record_path /usr/local/nginx/html/video;
            # record_max_size 1K;

            # append current timestamp to each flv
            record_unique on;

#	    push rtmp://1.13577498.fme.ustream.tv/ustreamVideo/13577498 name=RrHUx94pJWjaEhPjnQYwUxcU2T2w8CaX;


        }

        application small {
            live on;
            # Video with reduced resolution comes here from ffmpeg
        }

        application webcam {
            live on;

            # Stream from local webcam
            exec_static ffmpeg -f video4linux2 -i /dev/video0 -c:v libx264 -an -f flv rtmp://localhost:1935/webcam/mystream;
        }

        application mypush {
            live on;

            # Every stream published here
            # is automatically pushed to 
            # these two machines
#            push rtmp1.example.com;
#            push rtmp2.example.com:1934;
        }

        application mypull {
            live on;

            # Pull all streams from remote machine
            # and play locally
#            pull rtmp://rtmp3.example.com pageUrl=www.example.com/index.html;
        }

        application mystaticpull {
            live on;

            # Static pull is started at nginx start
#            pull rtmp://rtmp4.example.com pageUrl=www.example.com/index.html name=mystream static;
        }

        # video on demand
        application vod {
            play /usr/local/nginx/html/video;
        }

        application vod2 {
            play /var/mp4s;
        }

        # Many publishers, many subscribers
        # no checks, no recording
        application videochat {

            live on;

            # The following notifications receive all
            # the session variables as well as
            # particular call arguments in an HTTP POST
            # request.

            # Make HTTP request & use HTTP retcode
            # to decide whether to allow publishing
            # from this connection or not
            on_publish http://localhost:80/publish;

            # Same with playing
            on_play http://localhost:80/play;

            # Publish/play end (repeats on disconnect)
            on_done http://localhost:80/done;

            # All the above-mentioned notifications receive
            # the standard connect() arguments as well as
            # the play/publish ones. If any arguments are sent
            # with GET-style syntax to play & publish,
            # these are also included.
            # Example URL:
            #   rtmp://localhost/myapp/mystream?a=b&c=d

            # record 10 video keyframes (no audio) every 2 minutes
            record keyframes;
            record_path /tmp/vc;
            record_max_frames 10;
            record_interval 2m;

            # Async notify about an flv recorded
            on_record_done http://localhost:80/record_done;

        }


        # HLS

        # For HLS to work, create a directory for the fragments
        # (/usr/local/nginx/html/video in this config; a tmpfs such as /tmp/app
        # is a common choice). The directory contents are served via HTTP (see
        # the http{} section of this config).
        #
        # The incoming stream must be H264/AAC. For iPhones use the baseline H264
        # profile (see the ffmpeg example).
        # This example creates an RTMP stream from a movie file, ready for HLS:
        #
        # ffmpeg -loglevel verbose -re -i movie.avi  -vcodec libx264 
        #    -vprofile baseline -acodec libmp3lame -ar 44100 -ac 1 
        #    -f flv rtmp://localhost:1935/hls/movie
        #
        # If you need to transcode a live stream, use the 'exec' feature.
        #
        application hls {
            live on;
            hls on;
            # hls_path /tmp/app;
            hls_path /usr/local/nginx/html/video;
#            hls_fragment 15s;
#	    hls_playlist_length 10m;
#	    hls_continuous on;
#	    hls_fragment 5s;
#	    hls_playlist_length 15s;
#	    hls_continuous on;

#            hls_nested on;

#            hls_variant _low BANDWIDTH=160000;
#            hls_variant _mid BANDWIDTH=448000;
#            hls_variant _hi  BANDWIDTH=1152000;

        }

    }
}

What else is needed to send a stream from VLC to nginx and get a live HLS broadcast as the output?
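
With this config nothing more is strictly required on the nginx side: any stream published into the 'hls' application ends up as .ts fragments plus a .m3u8 playlist in /usr/local/nginx/html/video, and the /hls location serves them over HTTP at http://<server>/hls/<stream>.m3u8. A rough sketch of the publishing side (the input file, the stream name 'test', and the AAC encoder name are assumptions; depending on the ffmpeg build the encoder may be called aac, libfdk_aac or libvo_aacenc):

# Publish a local file into the 'hls' application; nginx-rtmp then writes
# test.m3u8 and the .ts fragments into /usr/local/nginx/html/video.
ffmpeg -re -i movie.mp4 \
    -vcodec libx264 -vprofile baseline \
    -acodec libvo_aacenc -ar 44100 -ac 1 \
    -f flv rtmp://localhost:1935/hls/test

# Watch the result (Safari/iOS play .m3u8 natively; VLC opens it too):
vlc http://localhost/hls/test.m3u8

Publishing straight from VLC is less common; an --sout chain along these lines is reported to work with VLC builds that have the avformat-based RTMP output, but it is untested here, and ffmpeg (or OBS/FMLE) is the more reliable publisher:

vlc movie.mp4 --sout '#transcode{vcodec=h264,venc=x264{profile=baseline},acodec=mp4a,ab=64,channels=1,samplerate=44100}:std{access=rtmp,mux=ffmpeg{mux=flv},dst=rtmp://localhost:1935/hls/test}'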
Answers: 1
@Grashooper
I'm also interested in how to set up my own streaming with my own server in general.