Android AudioRecord to server over UDP playback issues

I am building a simple Android app that streams live audio from the microphone to a server for playback. The resulting playback sounds strange, with large gaps in the audio. Does anyone know what I am doing wrong?

EDIT: Solved. I was assuming that every buffer read and received would be full, which was a bad assumption on my part.

Here is my activity:
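
For anyone hitting the same symptom, the heart of the fix is to stop treating every buffer as full: on the client, use the return value of AudioRecord.read() as the packet length, and on the server, play back only packet.getLength() bytes. Below is a minimal sketch of a receive-and-play loop along those lines; the class name, port, and audio format are illustrative, not taken from the code further down.

import java.net.DatagramPacket;
import java.net.DatagramSocket;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.SourceDataLine;

// Minimal sketch of a receive-and-play loop that never assumes a full buffer.
class PlaybackLoopSketch {
    public static void main(String[] args) throws Exception {
        // 44100 Hz, 16-bit, mono, signed, little-endian (illustrative values)
        AudioFormat format = new AudioFormat(44100, 16, 1, true, false);
        try (SourceDataLine line = AudioSystem.getSourceDataLine(format);
                DatagramSocket socket = new DatagramSocket(50005)) {
            line.open(format);
            line.start();

            byte[] buffer = new byte[4096];
            DatagramPacket packet = new DatagramPacket(buffer, buffer.length);
            while (true) {
                socket.receive(packet);
                // getLength() is the number of valid bytes in this datagram;
                // it can be smaller than buffer.length, so never play more than that
                line.write(packet.getData(), packet.getOffset(), packet.getLength());
                // reset the length so the next receive can use the whole buffer again
                packet.setLength(buffer.length);
            }
        }
    }
}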

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.widget.Button;

import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;

public class MainActivity extends Activity {

    private static String TAG = "AudioClient";

    // the server information
    private static final String SERVER = "xx.xx.xx.xx";
    private static final int PORT = 50005;

    // the audio recording options
    private static final int RECORDING_RATE = 44100;
    private static final int CHANNEL = AudioFormat.CHANNEL_IN_MONO;
    private static final int FORMAT = AudioFormat.ENCODING_PCM_16BIT;

    // the button the user presses to send the audio stream to the server
    private Button sendAudioButton;

    // the audio recorder
    private AudioRecord recorder;

    // the minimum buffer size needed for audio recording
    private static int BUFFER_SIZE = AudioRecord.getMinBufferSize(
            RECORDING_RATE, CHANNEL, FORMAT);

    // are we currently sending audio data
    private boolean currentlySendingAudio = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        Log.i(TAG, "Creating the Audio Client with minimum buffer of "
                + BUFFER_SIZE + " bytes");

        // set up the button: stream while it is held down
        sendAudioButton = (Button) findViewById(R.id.start_button);
        sendAudioButton.setOnTouchListener(new OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                switch (event.getAction()) {
                    case MotionEvent.ACTION_DOWN:
                        startStreamingAudio();
                        break;
                    case MotionEvent.ACTION_UP:
                        stopStreamingAudio();
                        break;
                }
                return false;
            }
        });
    }

    private void startStreamingAudio() {
        Log.i(TAG, "Starting the audio stream");
        currentlySendingAudio = true;
        startStreaming();
    }

    private void stopStreamingAudio() {
        Log.i(TAG, "Stopping the audio stream");
        currentlySendingAudio = false;
        recorder.stop();
        recorder.release();
    }

    private void startStreaming() {
        Log.i(TAG, "Starting the background thread to stream the audio data");
        Thread streamThread = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    Log.d(TAG, "Creating the datagram socket");
                    DatagramSocket socket = new DatagramSocket();

                    Log.d(TAG, "Creating the buffer of size " + BUFFER_SIZE);
                    byte[] buffer = new byte[BUFFER_SIZE];

                    Log.d(TAG, "Connecting to " + SERVER + ":" + PORT);
                    final InetAddress serverAddress = InetAddress
                            .getByName(SERVER);
                    Log.d(TAG, "Connected to " + SERVER + ":" + PORT);

                    Log.d(TAG, "Creating the reusable DatagramPacket");
                    DatagramPacket packet;

                    Log.d(TAG, "Creating the AudioRecord");
                    recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                            RECORDING_RATE, CHANNEL, FORMAT, BUFFER_SIZE * 10);

                    Log.d(TAG, "AudioRecord recording...");
                    recorder.startRecording();

                    while (currentlySendingAudio) {
                        // read the data into the buffer; read() may return
                        // fewer bytes than the buffer can hold
                        int read = recorder.read(buffer, 0, buffer.length);

                        // place only the bytes actually read into the packet
                        packet = new DatagramPacket(buffer, read,
                                serverAddress, PORT);

                        // send the packet
                        socket.send(packet);
                    }

                    Log.d(TAG, "AudioRecord finished recording");
                } catch (Exception e) {
                    Log.e(TAG, "Exception: " + e);
                }
            }
        });

        // start the thread
        streamThread.start();
    }
}

Here is my server-side code:

import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.FloatControl;
import javax.sound.sampled.SourceDataLine;

class Server {

    AudioInputStream audioInputStream;
    static AudioInputStream ais;
    static AudioFormat format;
    static boolean status = true;
    static int port = 50005;
    static int sampleRate = 11025; // note: the client above records at 44100
    static int bufferSize = 9728;

    static Long lastTime;
    static long totalBytesReceived = 0L;

    private static final int audioStreamBufferSize = bufferSize * 20;
    static byte[] audioStreamBuffer = new byte[audioStreamBufferSize];
    private static int audioStreamBufferIndex = 0;

    public static void main(String args[]) throws Exception {
        Log("Starting the AudioServer...");

        Log("Creating the datagram socket on port " + port + "...");
        DatagramSocket serverSocket = new DatagramSocket(null);
        serverSocket.setReuseAddress(true);
        serverSocket.bind(new InetSocketAddress(port));

        Log("Creating the buffer to hold the received data of size "
                + bufferSize + "...");
        byte[] receiveData = new byte[bufferSize];

        Log("Setting the audio rate to " + sampleRate + "hz...");
        format = new AudioFormat(sampleRate, 16, 1, true, false);

        Log("Ready to receive audio data");
        while (status) {
            DatagramPacket receivePacket = new DatagramPacket(receiveData,
                    receiveData.length);
            serverSocket.receive(receivePacket);

            // buffer only the bytes this datagram actually carries
            bufferAudioForPlayback(receivePacket.getData(),
                    receivePacket.getOffset(), receivePacket.getLength());
        }

        serverSocket.close();
    }

    private static void bufferAudioForPlayback(byte[] buffer, int offset,
            int length) {
        // copy only the valid bytes out of the receive buffer
        byte[] actualBytes = new byte[length];
        for (int i = 0; i < length; i++) {
            actualBytes[i] = buffer[offset + i];
        }

        for (byte sample : actualBytes) {
            audioStreamBuffer[audioStreamBufferIndex] = sample;
            audioStreamBufferIndex++;

            int percentage = (int) (((double) audioStreamBufferIndex / (double) audioStreamBuffer.length) * 100.0);
            Log("Buffer " + audioStreamBufferIndex + " / "
                    + audioStreamBuffer.length + "    " + percentage + "% full");

            if (audioStreamBufferIndex == audioStreamBuffer.length) {
                toSpeaker(audioStreamBuffer);
                audioStreamBufferIndex = 0;
                // stops the server after the first buffer has been played back
                System.exit(0);
            }
        }
    }

    private static void Log(String log) {
        System.out.println(log);
    }

    public static void toSpeaker(byte soundbytes[]) {
        try {
            DataLine.Info dataLineInfo = new DataLine.Info(
                    SourceDataLine.class, format);
            SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem
                    .getLine(dataLineInfo);
            sourceDataLine.open(format);
            sourceDataLine.start();

            // raise the gain to the maximum the line supports
            FloatControl volumeControl = (FloatControl) sourceDataLine
                    .getControl(FloatControl.Type.MASTER_GAIN);
            volumeControl.setValue(volumeControl.getMaximum());

            sourceDataLine.write(soundbytes, 0, soundbytes.length);
            sourceDataLine.drain();
            sourceDataLine.close();
        } catch (Exception e) {
            System.out.println("Error with audio playback: " + e);
            e.printStackTrace();
        }
    }
}

Finally, here is the XML layout resource for the main activity:

<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="vertical"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent"
    android:padding="20dip">

    <ImageView
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"
        android:src="@drawable/ic_launcher"
        android:scaleType="fitCenter"/>

    <TextView
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"
        android:text="@string/app_info"
        android:layout_weight="1.0"
        android:textSize="20dip"/>

    <LinearLayout
        android:orientation="horizontal"
        android:layout_width="fill_parent"
        android:layout_height="wrap_content">

        <Button
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:id="@+id/btnStart"
            android:text="@string/start_recording"
            android:layout_weight="1.0"/>

        <Button
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:id="@+id/btnStop"
            android:text="@string/stop_recording"
            android:layout_weight="1.0"/>

    </LinearLayout>

</LinearLayout>

EDIT: the audio playback suh-suh-suh-sou-ou-ou-nds, nds, ds something like that, with stuttering and repeated fragments.
