I'm writing a simple Android application that streams live microphone audio to a server for playback. The resulting playback sounds strange, with large gaps in the audio. Does anyone know what I'm doing wrong?
EDIT: Solved. It turns out I was assuming each incoming buffer was completely full, which was an incorrect assumption on my part.
Here is my Activity:
public class MainActivity extends Activity {

    private static String TAG = "AudioClient";

    // the server information
    private static final String SERVER = "xx.xx.xx.xx";
    private static final int PORT = 50005;

    // the audio recording options
    private static final int RECORDING_RATE = 44100;
    private static final int CHANNEL = AudioFormat.CHANNEL_IN_MONO;
    private static final int FORMAT = AudioFormat.ENCODING_PCM_16BIT;

    // the button the user presses to send the audio stream to the server
    private Button sendAudioButton;

    // the audio recorder
    private AudioRecord recorder;

    // the minimum buffer size needed for audio recording
    private static int BUFFER_SIZE = AudioRecord.getMinBufferSize(
            RECORDING_RATE, CHANNEL, FORMAT);

    // are we currently sending audio data
    private boolean currentlySendingAudio = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        Log.i(TAG, "Creating the Audio Client with minimum buffer of "
                + BUFFER_SIZE + " bytes");

        // set up the button
        sendAudioButton = (Button) findViewById(R.id.start_button);
        sendAudioButton.setOnTouchListener(new OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                switch (event.getAction()) {
                case MotionEvent.ACTION_DOWN:
                    startStreamingAudio();
                    break;
                case MotionEvent.ACTION_UP:
                    stopStreamingAudio();
                    break;
                }
                return false;
            }
        });
    }

    private void startStreamingAudio() {
        Log.i(TAG, "Starting the audio stream");
        currentlySendingAudio = true;
        startStreaming();
    }

    private void stopStreamingAudio() {
        Log.i(TAG, "Stopping the audio stream");
        currentlySendingAudio = false;
        recorder.release();
    }

    private void startStreaming() {
        Log.i(TAG, "Starting the background thread to stream the audio data");

        Thread streamThread = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    Log.d(TAG, "Creating the datagram socket");
                    DatagramSocket socket = new DatagramSocket();

                    Log.d(TAG, "Creating the buffer of size " + BUFFER_SIZE);
                    byte[] buffer = new byte[BUFFER_SIZE];

                    Log.d(TAG, "Connecting to " + SERVER + ":" + PORT);
                    final InetAddress serverAddress = InetAddress
                            .getByName(SERVER);
                    Log.d(TAG, "Connected to " + SERVER + ":" + PORT);

                    Log.d(TAG, "Creating the reuseable DatagramPacket");
                    DatagramPacket packet;

                    Log.d(TAG, "Creating the AudioRecord");
                    recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                            RECORDING_RATE, CHANNEL, FORMAT, BUFFER_SIZE * 10);

                    Log.d(TAG, "AudioRecord recording...");
                    recorder.startRecording();

                    while (currentlySendingAudio == true) {
                        // read the data into the buffer
                        int read = recorder.read(buffer, 0, buffer.length);

                        // place contents of buffer into the packet
                        packet = new DatagramPacket(buffer, read,
                                serverAddress, PORT);

                        // send the packet
                        socket.send(packet);
                    }

                    Log.d(TAG, "AudioRecord finished recording");

                } catch (Exception e) {
                    Log.e(TAG, "Exception: " + e);
                }
            }
        });

        // start the thread
        streamThread.start();
    }
}
Here is my server-side code:
class Server {

    AudioInputStream audioInputStream;
    static AudioInputStream ais;
    static AudioFormat format;
    static boolean status = true;
    static int port = 50005;
    static int sampleRate = 11025;
    static int bufferSize = 9728;
    static Long lastTime;
    static long totalBytesReceived = 0L;

    private static final int audioStreamBufferSize = bufferSize * 20;
    static byte[] audioStreamBuffer = new byte[audioStreamBufferSize];
    private static int audioStreamBufferIndex = 0;

    public static void main(String args[]) throws Exception {
        Log("Starting the AudioServer...");

        Log("Creating the datagram socket on port " + port + "...");
        DatagramSocket serverSocket = new DatagramSocket(null);
        serverSocket.setReuseAddress(true);
        serverSocket.bind(new InetSocketAddress(port));

        Log("Creating the buffer to hold the received data of size "
                + bufferSize + "...");
        byte[] receiveData = new byte[bufferSize];

        Log("Setting the audio rate to " + sampleRate + "hz...");
        format = new AudioFormat(sampleRate, 16, 1, true, false);

        Log("Ready to receive audio data");
        while (status == true) {
            DatagramPacket receivePacket = new DatagramPacket(receiveData,
                    receiveData.length);
            serverSocket.receive(receivePacket);
            bufferAudioForPlayback(receivePacket.getData(),
                    receivePacket.getOffset(), receivePacket.getLength());
        }

        serverSocket.close();
    }

    private static void bufferAudioForPlayback(byte[] buffer, int offset,
            int length) {
        byte[] actualBytes = new byte[length];

        for (int i = 0; i < length; i++) {
            actualBytes[i] = buffer[i];
        }

        for (byte sample : actualBytes) {
            int percentage = (int) (((double) audioStreamBufferIndex / (double) audioStreamBuffer.length) * 100.0);
            Log("buffer is " + percentage + "% full");

            audioStreamBuffer[audioStreamBufferIndex] = sample;
            audioStreamBufferIndex++;

            Log("Buffer " + audioStreamBufferIndex + " / "
                    + audioStreamBuffer.length + " " + percentage);

            if (audioStreamBufferIndex == audioStreamBuffer.length - 1) {
                toSpeaker(audioStreamBuffer);
                audioStreamBufferIndex = 0;
                System.exit(0);
            }
        }
    }

    private static void Log(String log) {
        System.out.println(log);
    }

    public static void toSpeaker(byte soundbytes[]) {
        try {
            DataLine.Info dataLineInfo = new DataLine.Info(
                    SourceDataLine.class, format);
            SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem
                    .getLine(dataLineInfo);

            sourceDataLine.open(format);

            FloatControl volumeControl = (FloatControl) sourceDataLine
                    .getControl(FloatControl.Type.MASTER_GAIN);
            volumeControl.setValue(100.0f);

            sourceDataLine.start();
            sourceDataLine.open(format);
            sourceDataLine.start();

            sourceDataLine.write(soundbytes, 0, soundbytes.length);
            sourceDataLine.drain();
            sourceDataLine.close();
        } catch (Exception e) {
            System.out.println("Error with audio playback: " + e);
            e.printStackTrace();
        }
    }
}
Finally, here is the layout XML file for the main activity:
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="vertical"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent"
    android:padding="20dip">

    <ImageView
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"
        android:src="@drawable/ic_launcher"
        android:scaleType="fitCenter"/>

    <TextView
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"
        android:text="@string/app_info"
        android:layout_weight="1.0"
        android:textSize="20dip"/>

    <LinearLayout
        android:orientation="horizontal"
        android:layout_width="fill_parent"
        android:layout_height="wrap_content">

        <Button
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:id="@+id/btnStart"
            android:text="@string/start_recording"
            android:layout_weight="1.0"/>

        <Button
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:id="@+id/btnStop"
            android:text="@string/stop_recording"
            android:layout_weight="1.0"/>
    </LinearLayout>
</LinearLayout>
EDIT: The playback audio sounds like this: suh-suh-suh-suh-o-ou-ou-ou-nds-nds-ds.
Instead of this:
// read the data into the buffer
recorder.read(buffer, 0, buffer.length);
// place contents of buffer into the packet
packet = new DatagramPacket(buffer, buffer.length, serverAddress, PORT);
don't assume that you receive a completely filled buffer from recorder. Instead, use the actual number of bytes read:
// read the data into the buffer
int read = recorder.read(buffer, 0, buffer.length);
// place contents of buffer into the packet
packet = new DatagramPacket(buffer, read, serverAddress, PORT);
Or something similar.
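As a slightly more defensive variant (just a sketch, assuming you also want to skip the negative error codes that AudioRecord.read() can return, such as ERROR_INVALID_OPERATION or ERROR_BAD_VALUE; the variable names simply mirror the question's code):
while (currentlySendingAudio) {
    // read() returns the number of bytes actually read, or a negative error code
    int read = recorder.read(buffer, 0, buffer.length);

    if (read <= 0) {
        // ERROR_INVALID_OPERATION, ERROR_BAD_VALUE, or no data yet: skip this pass
        continue;
    }

    // send only the bytes that were actually read
    DatagramPacket packet = new DatagramPacket(buffer, read, serverAddress, PORT);
    socket.send(packet);
}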
Thanks for the post Joshua.... a big help for newbies :)
volumeControl.setValue(volumeControl.getMaximum());
removes the IllegalStateException at the server, and the Android client needs the recording permission:
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
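A related sketch (my assumption here is that the goal is simply to keep the gain inside the range the control reports, not any particular value): the argument to setValue() can be clamped between getMinimum() and getMaximum() so it is never out of range.
FloatControl volumeControl = (FloatControl) sourceDataLine
        .getControl(FloatControl.Type.MASTER_GAIN);

// 6 dB is only an illustrative target gain; clamp it to the control's legal range
float desiredGainDb = 6.0f;
float gain = Math.max(volumeControl.getMinimum(),
        Math.min(volumeControl.getMaximum(), desiredGainDb));
volumeControl.setValue(gain);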