将Android音频直播到服务器

问题描述:

我正在尝试将Android设备上的现场麦克风音频传输到Java程序。我先在两个Android设备之间发送实时音频,以确认我的方法是正确的:在接收设备上几乎没有任何延迟,可以完美地听到音频。接下来,我将相同的音频流发送到一个小型Java程序,并验证数据是否被正确发送。现在我想要做的就是对这些数据进行编码,并以某种方式在运行Java程序的服务器上播放它。我更希望用HTML5或JavaScript在网页浏览器中播放它,但也愿意使用其他方法,例如VLC。

这里为发送该直播麦克风音频

public class MainActivity extends Activity {

    private Button startButton, stopButton;

    public byte[] buffer;
    public static DatagramSocket socket;
    AudioRecord recorder;

    private int sampleRate = 44100;
    // CHANNEL_IN_MONO replaces the deprecated CHANNEL_CONFIGURATION_MONO.
    private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
    private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    // Minimum AudioRecord buffer size for this rate/format combination.
    int minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    private boolean status = true;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        startButton = (Button) findViewById(R.id.start_button);
        stopButton = (Button) findViewById(R.id.stop_button);

        startButton.setOnClickListener(startListener);
        stopButton.setOnClickListener(stopListener);

        // BUG FIX: the original did "minBufSize += 2048" here. That only pads
        // each packet with silent (zero) bytes and makes the playback broken.
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    // Stops the streaming loop and releases the microphone.
    private final OnClickListener stopListener = new OnClickListener() {

        @Override
        public void onClick(View arg0) {
            status = false;
            recorder.release();
            Log.d("VS", "Recorder released");
        }
    };

    // Starts the background streaming thread.
    private final OnClickListener startListener = new OnClickListener() {

        @Override
        public void onClick(View arg0) {
            status = true;
            startStreaming();
        }
    };

    /**
     * Captures raw PCM audio from the microphone and streams it over UDP to
     * 192.168.1.5:50005. Runs on a background thread so the UI stays responsive.
     */
    public void startStreaming() {
        Thread streamThread = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    DatagramSocket socket = new DatagramSocket();
                    Log.d("VS", "Socket Created");

                    byte[] buffer = new byte[minBufSize];
                    Log.d("VS", "Buffer created of size " + minBufSize);

                    InetAddress ipAddress = InetAddress.getByName("192.168.1.5");
                    Log.d("VS", "Address retrieved");

                    recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                            sampleRate, channelConfig, audioFormat, minBufSize);
                    Log.d("VS", "Recorder initialized");

                    recorder.startRecording();

                    while (status) {
                        // BUG FIX: the original loop sent a zero-filled
                        // 1024-byte "sendData" array and never called
                        // recorder.read(), so no microphone audio was ever
                        // transmitted. Read mic data first, then send exactly
                        // the number of bytes that were read.
                        int bytesRead = recorder.read(buffer, 0, buffer.length);
                        if (bytesRead > 0) {
                            DatagramPacket sendPacket =
                                    new DatagramPacket(buffer, bytesRead, ipAddress, 50005);
                            socket.send(sendPacket);
                        }
                    }

                    // Release the socket once streaming stops.
                    socket.close();

                } catch (UnknownHostException e) {
                    Log.e("VS", "UnknownHostException");
                } catch (IOException e) {
                    Log.e("VS", "IOException");
                    e.printStackTrace();
                }
            }
        });
        streamThread.start();
    }
}

下面是用于读取Android应用所发送数据的Java程序代码:

class Server
{
    /**
     * Listens on UDP port 50005 and prints the payload of each received packet.
     *
     * @param args unused command-line arguments
     * @throws Exception if the socket cannot be bound or a receive fails
     */
    public static void main(String args[]) throws Exception
    {
        DatagramSocket serverSocket = new DatagramSocket(50005);
        byte[] receiveData = new byte[1024];
        while (true)
        {
            DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);

            serverSocket.receive(receivePacket);

            // BUG FIX: the original used getData().toString(), which yields the
            // array's identity string ("[B@1a2b3c..."), not the payload. Decode
            // only the bytes that actually arrived (getLength()), since the
            // buffer is reused between packets.
            String sentence = new String(receivePacket.getData(), 0, receivePacket.getLength());

            System.out.println("RECEIVED: " + sentence);
        }
    }
}

我知道,在发送到Java程序之前,我应该在应用端对音频进行编码,但我不确定在使用AudioRecord时该如何编码。我不想使用NDK,因为我没有相关经验,也实在没有时间去学习它…… :)

+0

你是用什么来播放流音频的?我只需要一种可行的方法。 – Atieh 2014-12-05 16:28:45

+0

我有兴趣编写一个Android应用程序,将Android设备的实时麦克风音频流式传输到桌面应用程序。您能否提供一些有用的指向一些与您发布的内容相关的资源?这将非常有帮助!谢谢! :) – wayway 2016-02-07 04:45:51

+0

你如何做两个Android设备之间的现场音频? – pkBhati 2016-02-25 07:08:38

我的问题已经解决了。问题主要出在接收端:接收器接收音频流并将其推送到PC的扬声器。播放出来的声音起初仍然相当迟缓和破碎,但调整缓冲区大小后效果明显改善。

编辑:应使用单独的线程来读取音频,以避免卡顿。另外,最好使用16000 Hz的采样率,对语音来说已经足够。

的Android代码:

package com.example.mictest2; 

import java.io.IOException; 
import java.net.DatagramPacket; 
import java.net.DatagramSocket; 
import java.net.InetAddress; 
import java.net.UnknownHostException; 

import android.app.Activity; 
import android.media.AudioFormat; 
import android.media.AudioRecord; 
import android.media.MediaRecorder; 
import android.os.Bundle; 
import android.util.Log; 
import android.view.View; 
import android.view.View.OnClickListener; 
import android.widget.Button; 

public class Send extends Activity {
    private Button startButton, stopButton;

    public byte[] buffer;
    public static DatagramSocket socket;
    private int port = 50005;

    AudioRecord recorder;

    private int sampleRate = 16000; // 44100 for music
    // CHANNEL_IN_MONO replaces the deprecated CHANNEL_CONFIGURATION_MONO.
    private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
    private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    int minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    private boolean status = true;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        startButton = (Button) findViewById(R.id.start_button);
        stopButton = (Button) findViewById(R.id.stop_button);

        startButton.setOnClickListener(startListener);
        stopButton.setOnClickListener(stopListener);
    }

    // Stops the streaming loop and releases the microphone.
    private final OnClickListener stopListener = new OnClickListener() {

        @Override
        public void onClick(View arg0) {
            status = false;
            recorder.release();
            Log.d("VS", "Recorder released");
        }
    };

    // Starts the background streaming thread.
    private final OnClickListener startListener = new OnClickListener() {

        @Override
        public void onClick(View arg0) {
            status = true;
            startStreaming();
        }
    };

    /**
     * Captures raw PCM audio from the microphone on a background thread and
     * streams it over UDP to 192.168.1.5 on {@code port}.
     */
    public void startStreaming() {

        Thread streamThread = new Thread(new Runnable() {

            @Override
            public void run() {
                try {

                    DatagramSocket socket = new DatagramSocket();
                    Log.d("VS", "Socket Created");

                    byte[] buffer = new byte[minBufSize];

                    Log.d("VS", "Buffer created of size " + minBufSize);
                    DatagramPacket packet;

                    final InetAddress destination = InetAddress.getByName("192.168.1.5");
                    Log.d("VS", "Address retrieved");

                    // Larger internal buffer (x10) gives the reader thread
                    // headroom and avoids drop-outs.
                    recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                            sampleRate, channelConfig, audioFormat, minBufSize * 10);
                    Log.d("VS", "Recorder initialized");

                    recorder.startRecording();

                    while (status) {

                        // Read microphone data; bytesRead may be less than
                        // the buffer size.
                        int bytesRead = recorder.read(buffer, 0, buffer.length);

                        // BUG FIX: send only the bytes actually read, not the
                        // whole buffer — trailing stale/zero bytes break the
                        // audio on the receiving side.
                        if (bytesRead > 0) {
                            packet = new DatagramPacket(buffer, bytesRead, destination, port);
                            socket.send(packet);
                        }
                    }

                    // Release the socket once streaming stops.
                    socket.close();

                } catch (UnknownHostException e) {
                    Log.e("VS", "UnknownHostException");
                } catch (IOException e) {
                    e.printStackTrace();
                    Log.e("VS", "IOException");
                }
            }

        });
        streamThread.start();
    }
}

Android的XML:

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:paddingBottom="@dimen/activity_vertical_margin"
    android:paddingLeft="@dimen/activity_horizontal_margin"
    android:paddingRight="@dimen/activity_horizontal_margin"
    android:paddingTop="@dimen/activity_vertical_margin"
    tools:context=".MainActivity" >

    <TextView
        android:id="@+id/textView1"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="@string/hello_world" />

    <Button
        android:id="@+id/start_button"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_below="@+id/textView1"
        android:layout_centerHorizontal="true"
        android:layout_marginTop="130dp"
        android:text="Start" />

    <!-- BUG FIX: the stop button originally anchored to "@+id/button1",
         an id that does not exist in this layout; anchor to start_button. -->
    <Button
        android:id="@+id/stop_button"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignLeft="@+id/start_button"
        android:layout_below="@+id/start_button"
        android:layout_marginTop="64dp"
        android:text="Stop" />

</RelativeLayout>

Server代码:

package com.datagram; 

import java.io.ByteArrayInputStream; 
import java.net.DatagramPacket; 
import java.net.DatagramSocket; 

import javax.sound.sampled.AudioFormat; 
import javax.sound.sampled.AudioInputStream; 
import javax.sound.sampled.AudioSystem; 
import javax.sound.sampled.DataLine; 
import javax.sound.sampled.FloatControl; 
import javax.sound.sampled.SourceDataLine; 

class Server {

    AudioInputStream audioInputStream;
    static AudioInputStream ais;
    static AudioFormat format;
    static boolean status = true;
    static int port = 50005;
    static int sampleRate = 44100;

    // Single shared output line. Opening/starting a new line per packet (as
    // the original toSpeaker did — it even called open() twice, which throws
    // IllegalStateException) is what caused the chunky audio.
    static SourceDataLine sourceDataLine;

    /**
     * Receives raw 16-bit mono PCM over UDP on {@code port} and plays it on
     * the default speaker line.
     */
    public static void main(String args[]) throws Exception {

        DatagramSocket serverSocket = new DatagramSocket(port);

        byte[] receiveData = new byte[3584];
        // (1280 for 16000 Hz and 3584 for 44100 Hz; use
        // AudioRecord.getMinBufferSize(...) on the sender to get the right size)

        format = new AudioFormat(sampleRate, 16, 1, true, false);

        DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
        sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
        sourceDataLine.open(format);   // open and start exactly once
        sourceDataLine.start();

        while (status) {
            DatagramPacket receivePacket = new DatagramPacket(receiveData,
                    receiveData.length);

            serverSocket.receive(receivePacket);

            // BUG FIX: the original called toSpeaker(receivePacket.getData(),
            // sourceDataLine) — wrong arity and an undefined local — from an
            // anonymous thread capturing a non-final variable; it did not
            // compile. Play only the bytes actually received: the buffer is
            // reused, so trailing bytes belong to earlier packets.
            byte[] chunk = new byte[receivePacket.getLength()];
            System.arraycopy(receivePacket.getData(), 0, chunk, 0, chunk.length);
            toSpeaker(chunk);
        }

        sourceDataLine.drain();
        sourceDataLine.close();
    }

    /**
     * Writes raw PCM bytes to the already-open speaker line. write() blocks
     * until the data is queued, which naturally paces playback.
     */
    public static void toSpeaker(byte soundbytes[]) {
        try {
            sourceDataLine.write(soundbytes, 0, soundbytes.length);
        } catch (Exception e) {
            System.out.println("Not working in speakers...");
            e.printStackTrace();
        }
    }
}

我希望这有助于节省有人痛苦:)

+0

这会产生持续的抖动,并且声音不清晰。可以做什么? – kittu88 2013-12-04 12:21:53

+0

有你发送你的语音流从一个Android设备到多个Android设备通过服务器,就像组对话。我试图学习它,但无法找到任何东西。你会帮我吗? – 2014-09-17 15:09:31

+0

我可以联系你吗? – 2014-09-17 15:54:05

几个小时

提供一点小小的改进建议,让你的代码更高效。不错的尝试!

package com.datagram;

import java.io.ByteArrayInputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.FloatControl;
import javax.sound.sampled.SourceDataLine;

class Server {

    AudioInputStream audioInputStream;
    static AudioInputStream ais;
    static AudioFormat format;
    static boolean status = true;
    static int port = 50005;
    static int sampleRate = 44100;

    static DataLine.Info dataLineInfo;
    static SourceDataLine sourceDataLine;

    /**
     * Receives raw 16-bit mono PCM over UDP and plays it on a speaker line
     * that is opened once and reused for every packet.
     */
    public static void main(String args[]) throws Exception {

        DatagramSocket serverSocket = new DatagramSocket(port);

        /**
         * Formula for lag = (byte_size/sample_rate)*2
         * Byte size 9728 will produce ~ 0.45 seconds of lag. Voice slightly broken.
         * Byte size 1400 will produce ~ 0.06 seconds of lag. Voice extremely broken.
         * Byte size 4000 will produce ~ 0.18 seconds of lag. Voice slightly more broken then 9728.
         */
        byte[] receiveData = new byte[4096];

        format = new AudioFormat(sampleRate, 16, 1, true, false);
        dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
        sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
        sourceDataLine.open(format);
        sourceDataLine.start();

        FloatControl volumeControl = (FloatControl) sourceDataLine.getControl(FloatControl.Type.MASTER_GAIN);
        volumeControl.setValue(1.00f);

        DatagramPacket receivePacket = new DatagramPacket(receiveData,
                receiveData.length);
        while (status) {
            serverSocket.receive(receivePacket);
            // BUG FIX: the original built a ByteArrayInputStream/
            // AudioInputStream from a stale pre-loop snapshot of getData()
            // and never read from it, then played the entire 4096-byte
            // buffer. Play only the bytes actually received in this packet.
            byte[] chunk = new byte[receivePacket.getLength()];
            System.arraycopy(receivePacket.getData(), 0, chunk, 0, chunk.length);
            toSpeaker(chunk);
        }
        // NOTE(review): status is never set to false here, so these lines are
        // unreachable until a stop condition is added.
        sourceDataLine.drain();
        sourceDataLine.close();
    }

    /** Writes raw PCM bytes to the shared, already-open speaker line. */
    public static void toSpeaker(byte soundbytes[]) {
        try {
            sourceDataLine.write(soundbytes, 0, soundbytes.length);
        } catch (Exception e) {
            System.out.println("Not working in speakers...");
            e.printStackTrace();
        }
    }
}
+0

这真的有助于减少噪音吗? – 2015-02-13 02:50:06

声音破碎是因为你的Android代码里有下面这一行:

minBufSize += 2048; 

你只是添加空字节。此外,使用CHANNEL_IN_MONO而不是CHANNEL_CONFIGURATION_MONO