android 使用ffmpeg推流到nginx

上一篇写了如何编译ffmpeg在android上的动态库,以及如何把动态库引入eclipse android工程,

这次写如何将android端mp4文件推流到nginx,并在网页上观看直播

本文参考了:

最简单的基于FFmpeg的移动端例子:Android 推流器

windows下流媒体nginx-rtmp-module服务器搭建

Android请求网络权限


     

1.下载nginx服务器,我也是从上面文章中找的,也可以直接下载

https://pan.baidu.com/s/1hspgWb6

   这个nginx还是可以的,启用后在浏览器访问,可以推流摄像头内容到nginx,并且观看直播状态,

  可以打开看看

2.创建android工程,我也是用的上面的例子的android推流器

   

视频文件是sd卡根目录的视频文件,

url是nginx服务器地址,可以先把nginx打开,用浏览器访问,允许打开摄像头,看url的规律

在手机端推流的同时,可以在浏览器打开推流的地址,播放视频内容

看android端代码:

AndroidManifest.xml

<?xml version="1.0" encoding="utf-8"?>
<!-- Application manifest: declares the single launcher activity and the
     INTERNET permission the native FFmpeg code needs to push the RTMP
     stream to the nginx server. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.myffnpeg"
    android:versionCode="1"
    android:versionName="1.0" >

    <uses-sdk
        android:minSdkVersion="19"
        android:targetSdkVersion="19" />

    <application
        android:allowBackup="true"
        android:icon="@drawable/ic_launcher"
        android:label="@string/app_name"
        android:theme="@style/AppTheme" >
        <!-- Single screen: input fields for source file / RTMP URL -->
        <activity
            android:name=".MainActivity"
            android:label="@string/app_name" >
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />

                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
	
	<!-- Required for the rtmp:// network connection opened in native code -->
	<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

MainActivity.java

package com.example.myffnpeg;

import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;


/**
 * Launcher activity: reads a media file name and an RTMP output URL from two
 * text fields and starts the native FFmpeg remux/push via {@link JniUtils}.
 */
public class MainActivity extends Activity {

	// Wrapper around the native FFmpeg streaming code (loads the .so files).
	JniUtils jniUtils;

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_main);

		jniUtils = new JniUtils();

		Button startButton = (Button) this.findViewById(R.id.button1);
		// editText1: file name relative to the external-storage root
		final EditText urlEdittext_input = (EditText) this.findViewById(R.id.editText1);
		// editText2: rtmp:// URL of the nginx-rtmp server
		final EditText urlEdittext_output = (EditText) this.findViewById(R.id.editText2);

		startButton.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View arg0) {
				String folderurl = Environment.getExternalStorageDirectory().getPath();

				String urltext_input = urlEdittext_input.getText().toString();
				final String inputurl = folderurl + "/" + urltext_input;

				final String outputurl = urlEdittext_output.getText().toString();

				Log.e("TestLog", inputurl);
				Log.e("TestLog", outputurl);

				// FIX: the native call blocks for the entire duration of the
				// stream and performs network I/O. Running it on the UI thread
				// freezes the app (ANR / NetworkOnMainThreadException on
				// API 11+), so push from a worker thread instead.
				new Thread(new Runnable() {
					@Override
					public void run() {
						int stream = jniUtils.stream(inputurl, outputurl); // 推流
						Log.e("TestLog", String.valueOf(stream));
					}
				}).start();
			}
		});
	}

	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the menu; this adds items to the action bar if it is present.
		getMenuInflater().inflate(R.menu.main, menu);
		return true;
	}

	@Override
	public boolean onOptionsItemSelected(MenuItem item) {
		// The action bar automatically handles clicks on the Home/Up button,
		// as long as a parent activity is specified in AndroidManifest.xml.
		int id = item.getItemId();
		if (id == R.id.action_settings) {
			return true;
		}
		return super.onOptionsItemSelected(item);
	}
}

JniUtils.java

package com.example.myffnpeg;

/**
 * JNI bridge to the native FFmpeg push code in HelloJni.c.
 *
 * The static initializer loads the prebuilt FFmpeg shared libraries in
 * dependency order, then the project's own JNI wrapper library.
 */
public class JniUtils {

	static {
		final String[] libraries = {
				"avcodec-57",
				"avfilter-6",
				"avformat-57",
				"avutil-55",
				"swresample-2",
				"swscale-4",
				"hello-jni",
		};
		for (String library : libraries) {
			System.loadLibrary(library);
		}
	}

	/**
	 * Pushes the media file at {@code inputurl} to {@code outputurl}
	 * (e.g. an rtmp:// address) via native FFmpeg code.
	 *
	 * @param inputurl  absolute path of the local media file
	 * @param outputurl streaming destination URL
	 * @return 0 on success, -1 on failure
	 */
	public native int stream(String inputurl, String outputurl); 
}
Android.mk

LOCAL_PATH := $(call my-dir)

#ffmpeg: one prebuilt-shared-library module per FFmpeg .so
include $(CLEAR_VARS)
LOCAL_MODULE:= avcodec
LOCAL_SRC_FILES:= libavcodec-57.so
LOCAL_MODULE_FILENAME:=libavcodec-57
include $(PREBUILT_SHARED_LIBRARY)  ## mark this module as prebuilt (no compilation)

include $(CLEAR_VARS)
LOCAL_MODULE:= avfilter
LOCAL_SRC_FILES:= libavfilter-6.so
LOCAL_MODULE_FILENAME:=libavfilter-6
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE:= avformat
LOCAL_SRC_FILES:= libavformat-57.so
LOCAL_MODULE_FILENAME:=libavformat-57
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE:= avutil
LOCAL_SRC_FILES:= libavutil-55.so
LOCAL_MODULE_FILENAME:=libavutil-55
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE:= swresample
LOCAL_SRC_FILES:= libswresample-2.so
LOCAL_MODULE_FILENAME:=libswresample-2
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE:= swscale
LOCAL_SRC_FILES:= libswscale-4.so
LOCAL_MODULE_FILENAME:=libswscale-4
include $(PREBUILT_SHARED_LIBRARY)
#end


#my: the project's own JNI wrapper module
include $(CLEAR_VARS)
# name of this module
LOCAL_MODULE    := hello-jni
# source files of this module
LOCAL_SRC_FILES := HelloJni.c
# header search path for the FFmpeg .so libraries
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include
# NDK-provided libraries to link against
LOCAL_LDLIBS := -llog -lz -landroid
# prebuilt FFmpeg modules this module depends on
LOCAL_SHARED_LIBRARIES := avcodec avfilter avformat avutil swresample swscale
# build this module as a shared library
include $(BUILD_SHARED_LIBRARY)
Application.mk

# build only armeabi-v7a .so files (must match the prebuilt FFmpeg libraries)
APP_ABI := armeabi-v7a 
HelloJni.c
#include <string.h>
//#include "com_example_ffmpegtest001_JniUtils.h"
#include <stdio.h>

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include "include/libavutil/avutil.h"
#include "include/libavfilter/avfilter.h"

//http://www.pps.tv/w_19rt0grxmx.html  a very good video tutorial

//Logging macro: LOGE writes to Android logcat on-device, plain stdout when
//compiled off-device (e.g. for desktop testing)
#ifdef ANDROID
#include <jni.h>
#include <android/log.h>
#define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format, ##__VA_ARGS__)
#else
#define LOGE(format, ...)  printf("(>_<) " format "\n", ##__VA_ARGS__)
#endif

/* 注释内容
JNIEXPORT jstring JNICALL Java_com_example_myffnpeg_JniUtils_stringFromJNI(
		JNIEnv* env, jobject thiz) {
	//return env->NewStringUTF("Hello from JNI !  Compiled with ABI ");
	return (*env)->NewStringUTF(env, "Hello from JNI !  Compiled with ABI ");
}

JNIEXPORT jstring JNICALL Java_com_example_myffnpeg_JniUtils_avformatinfo(
		JNIEnv* env, jobject thiz) {
	char info[40000] = { 0 };

	av_register_all();

	AVInputFormat *if_temp = av_iformat_next(NULL);
	AVOutputFormat *of_temp = av_oformat_next(NULL);
	//Input
	while (if_temp != NULL) {
		sprintf(info, "%s[In ][%10s]\n", info, if_temp->name);
		if_temp = if_temp->next;
	}
	//Output
	while (of_temp != NULL) {
		sprintf(info, "%s[Out][%10s]\n", info, of_temp->name);
		of_temp = of_temp->next;
	}
	//LOGE("%s", info);
	//return env->NewStringUTF(info);
	return (*env)->NewStringUTF(env, info);
}
JNIEXPORT jstring JNICALL Java_com_example_myffnpeg_JniUtils_avcodecinfo(
		JNIEnv* env, jobject thiz) {
	char info[40000] = { 0 };

	av_register_all();

	AVCodec *c_temp = av_codec_next(NULL);

	while (c_temp != NULL) {
		if (c_temp->decode != NULL) {
			sprintf(info, "%s[Dec]", info);
		} else {
			sprintf(info, "%s[Enc]", info);
		}
		switch (c_temp->type) {
		case AVMEDIA_TYPE_VIDEO:
			sprintf(info, "%s[Video]", info);
			break;
		case AVMEDIA_TYPE_AUDIO:
			sprintf(info, "%s[Audio]", info);
			break;
		default:
			sprintf(info, "%s[Other]", info);
			break;
		}
		sprintf(info, "%s[%10s]\n", info, c_temp->name);

		c_temp = c_temp->next;
	}
	//LOGE("%s", info);

	return (*env)->NewStringUTF(env, info);
	// return env->NewStringUTF(info);

}
JNIEXPORT jstring JNICALL Java_com_example_myffnpeg_JniUtils_avfilterinfo(
		JNIEnv* env, jobject thiz) {
	char info[40000] = { 0 };
	avfilter_register_all();
	AVFilter *f_temp = (AVFilter *) avfilter_next(NULL);
	int i = 0;
	while (f_temp != NULL) {
		sprintf(info, "%s[%10s]\n", info, f_temp->name);
		f_temp = f_temp->next;
	}
	return (*env)->NewStringUTF(env, info);
	//  return env->NewStringUTF( info);
}
JNIEXPORT jstring JNICALL Java_com_example_myffnpeg_JniUtils_configurationinfo(
		JNIEnv* env, jobject thiz) {
	char info[10000] = { 0 };
	av_register_all();

	sprintf(info, "%s\n", avcodec_configuration());

	//LOGE("%s", info);
	//return env->NewStringUTF(info);
	return (*env)->NewStringUTF(env, info);
}
注释内容*/


//-------------------------------推流------------------
//Output FFmpeg's av_log()
//FFmpeg av_log() callback: appends each formatted log message to a fixed
//file on external storage. Best-effort — silently skipped when the file
//cannot be opened (e.g. missing storage permission).
void custom_log(void *ptr, int level, const char* fmt, va_list vl){
    FILE *logfile = fopen("/storage/emulated/0/av_log.txt","a+");
    if (logfile == NULL) {
        return;
    }
    vfprintf(logfile, fmt, vl);
    fflush(logfile);
    fclose(logfile);
}
//JNI entry point for JniUtils.stream(): remuxes a local media file to an
//output URL (FLV over RTMP) without re-encoding. Video packets are paced
//against the wall clock so the server receives them at live playback rate.
//Returns 0 on success, -1 on any error.
JNIEXPORT jint JNICALL Java_com_example_myffnpeg_JniUtils_stream
  (JNIEnv *env, jobject obj, jstring input_jstr, jstring output_jstr)
{
    AVOutputFormat *ofmt = NULL;
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
    AVPacket pkt;

    int ret, i;
    char input_str[500] = {0};
    char output_str[500] = {0};

    //Copy the Java strings into bounded local buffers and release the JNI
    //references right away. (The original leaked GetStringUTFChars and used
    //an unbounded sprintf that could overflow the 500-byte buffers.)
    const char *input_cstr = (*env)->GetStringUTFChars(env, input_jstr, NULL);
    const char *output_cstr = (*env)->GetStringUTFChars(env, output_jstr, NULL);
    if (input_cstr == NULL || output_cstr == NULL) {
        if (input_cstr)
            (*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
        if (output_cstr)
            (*env)->ReleaseStringUTFChars(env, output_jstr, output_cstr);
        return -1;
    }
    snprintf(input_str, sizeof(input_str), "%s", input_cstr);
    snprintf(output_str, sizeof(output_str), "%s", output_cstr);
    (*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
    (*env)->ReleaseStringUTFChars(env, output_jstr, output_cstr);

    //Route FFmpeg's av_log() output through the file-based callback above
    av_log_set_callback(custom_log);

    av_register_all();
    //Required before opening network (rtmp://) URLs
    avformat_network_init();

    //Open the input file and probe its streams
    if ((ret = avformat_open_input(&ifmt_ctx, input_str, 0, 0)) < 0) {
        LOGE( "Could not open input file.");
        goto end;
    }
    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
        LOGE( "Failed to retrieve input stream information");
        goto end;
    }

    //Find the first video stream (used for PTS synthesis and pacing)
    int videoindex = -1;
    for (i = 0; i < ifmt_ctx->nb_streams; i++) {
        if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoindex = i;
            break;
        }
    }

    //Output context: FLV muxer for RTMP
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", output_str); //RTMP
    //avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", output_str);//UDP

    if (!ofmt_ctx) {
        LOGE( "Could not create output context\n");
        ret = AVERROR_UNKNOWN;
        goto end;
    }
    ofmt = ofmt_ctx->oformat;

    //Mirror every input stream on the output and copy codec parameters
    for (i = 0; i < ifmt_ctx->nb_streams; i++) {
        AVStream *in_stream = ifmt_ctx->streams[i];
        AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
        if (!out_stream) {
            LOGE( "Failed allocating output stream\n");
            ret = AVERROR_UNKNOWN;
            goto end;
        }
        ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
        if (ret < 0) {
            LOGE( "Failed to copy context from input to output stream codec context\n");
            goto end;
        }
        out_stream->codec->codec_tag = 0; //let the muxer choose the tag
        if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }

    //Open the output URL unless the muxer handles I/O itself
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        ret = avio_open(&ofmt_ctx->pb, output_str, AVIO_FLAG_WRITE);
        if (ret < 0) {
            LOGE( "Could not open output URL '%s'", output_str);
            goto end;
        }
    }

    //Write file header
    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0) {
        LOGE( "Error occurred when opening output URL\n");
        goto end;
    }

    int frame_index = 0;
    int64_t start_time = av_gettime();

    while (1) {
        AVStream *in_stream, *out_stream;
        //Get an AVPacket
        ret = av_read_frame(ifmt_ctx, &pkt);
        if (ret < 0)
            break;

        //FIX: synthesize PTS for inputs without timestamps (e.g. raw H.264).
        //Guarded on videoindex >= 0 so an input with no video stream cannot
        //index streams[-1] (a crash in the original code).
        if (pkt.pts == AV_NOPTS_VALUE && videoindex >= 0) {
            AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
            //Duration between 2 frames (us)
            int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
            pkt.pts = (double)(frame_index * calc_duration) / (double)(av_q2d(time_base1) * AV_TIME_BASE);
            pkt.dts = pkt.pts;
            pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1) * AV_TIME_BASE);
        }

        //Important: pace video packets against the wall clock so the remux
        //runs at live speed instead of as fast as the disk allows
        if (pkt.stream_index == videoindex) {
            AVRational time_base = ifmt_ctx->streams[videoindex]->time_base;
            AVRational time_base_q = {1, AV_TIME_BASE};
            int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
            int64_t now_time = av_gettime() - start_time;
            if (pts_time > now_time)
                av_usleep(pts_time - now_time);
        }

        in_stream  = ifmt_ctx->streams[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];

        //Rescale PTS/DTS/duration from the input to the output time base
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;

        //Print to screen
        if (pkt.stream_index == videoindex) {
            LOGE("Send %8d video frames to output URL\n", frame_index);
            frame_index++;
        }

        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
        //FIX: always release the packet, even when the write failed
        //(the original leaked it on the error path before breaking)
        av_free_packet(&pkt);
        if (ret < 0) {
            LOGE( "Error muxing packet\n");
            break;
        }
    }
    //Write file trailer
    av_write_trailer(ofmt_ctx);
end:
    avformat_close_input(&ifmt_ctx);
    /* close output */
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
        avio_close(ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    if (ret < 0 && ret != AVERROR_EOF) {
        LOGE( "Error occurred.\n");
        return -1;
    }
    return 0;
}

这样应该就可以了,也可以下载我的工程参考

效果图就不上传了,推送手机mp4到nginx服务器,在网页中观看视频内容

我的eclipse工程

https://pan.baidu.com/s/1jIrKru2

我的apk 

https://pan.baidu.com/s/1kUG46tL  

 



猜你喜欢

转载自blog.csdn.net/qq_31683775/article/details/78886093