PageRenderTime 110ms CodeModel.GetById 28ms RepoModel.GetById 0ms app.codeStats 0ms

/src/edu/cmu/pandaa/desktop/LiveAudioStream.java

https://github.com/dilipgudlur/Localization
Java | 419 lines | 365 code | 46 blank | 8 comment | 53 complexity | c4f36e6caee9b6f4e229ffc12631e0d2 MD5 | raw file
  1. package edu.cmu.pandaa.desktop;
  2. import java.awt.image.ImagingOpException;
  3. import java.io.ByteArrayOutputStream;
  4. import java.io.IOException;
  5. import java.net.InetAddress;
  6. import java.util.*;
  7. import javax.sound.sampled.*;
  8. import edu.cmu.pandaa.header.RawAudioHeader;
  9. import edu.cmu.pandaa.header.RawAudioHeader.RawAudioFrame;
  10. import edu.cmu.pandaa.header.StreamHeader;
  11. import edu.cmu.pandaa.header.StreamHeader.StreamFrame;
  12. import edu.cmu.pandaa.stream.FrameStream;
  13. import edu.cmu.pandaa.stream.RawAudioFileStream;
  14. import edu.cmu.pandaa.utils.DataConversionUtil;
  15. public class LiveAudioStream implements FrameStream {
// --- capture hand-off state ---
ByteArrayOutputStream byteArrayOutputStream;  // PCM bytes produced by CaptureThread, consumed by recvFrame(); also serves as the hand-off monitor
public long startTime;
CaptureThread captureThread;  // background reader for targetDataLine
AudioCaptureState audioCaptureState = AudioCaptureState.BEFORE;
final static int syncFrames = 10;  // NOTE(review): not referenced in the visible code — confirm before removing
static final SortedMap<Mixer, TargetDataLine> lines = new TreeMap<Mixer, TargetDataLine>(new MixerComparator());
static int lineCount = 0;  // barrier counter for readySetGo(); guarded by the 'lines' monitor
final static int delayWindowMs = 10 * 1000;  // capture start is aligned to a multiple of this window (ms)
private RawAudioFileStream rawAudioOutputStream;  // current WAV output segment; null between segments
// Capture lifecycle: BEFORE -> PREFETCH (thread started, data discarded) -> RUNNING -> STOPPED.
public enum AudioCaptureState {
    BEFORE, PREFETCH, RUNNING, STOPPED;
};
private final TargetDataLine targetDataLine;
private final int audioEncoding, bitsPerSample;
private final int numChannels, samplingRate;
private final int frameTime;  // milliseconds of audio per frame
private final int captureTimeMs;  // total capture duration in ms; negative appears to mean "unbounded" (see CaptureThread.run)
private final String fileName;  // per-segment filename template, e.g. "id_%d.wav" (formatted with segment start time)
private final String id;
private int dataSize = -1;  // bytes per frame; computed in startCapturing()
private RawAudioHeader header;
private int framesDesired, framesCaptured;  // per-segment frame accounting (see recvFrame)
private long loopTime;  // start time (epoch ms) of the current segment
private final int segmentLengthMs;
private final static int DEFAULT_ENCODING = 1; // PCM
private final static int DEFAULT_CHANNELS = 1; // MONO
private final static int DEFAULT_SAMPLING_RATE = 44100;
private final static int DEFAULT_FRAME_TIME = 100; // 100ms per frame
private final static int DEFAULT_BITS_PER_SAMPLE = 16;
  45. static class MixerComparator implements Comparator<Mixer> {
  46. public int compare(Mixer a, Mixer b) {
  47. return a.getMixerInfo().getName().compareTo(b.getMixerInfo().getName());
  48. }
  49. }
  50. private LiveAudioStream(String id, int encoding, int samplingRate, int bitsPerSample, int frameTime,
  51. int captureTimeMs, int segmentLengthMs, TargetDataLine line, String fileName) {
  52. this.id = id;
  53. this.audioEncoding = encoding;
  54. this.samplingRate = samplingRate;
  55. this.bitsPerSample = bitsPerSample;
  56. this.frameTime = frameTime;
  57. this.numChannels = DEFAULT_CHANNELS;
  58. this.captureTimeMs = captureTimeMs;
  59. this.segmentLengthMs = segmentLengthMs;
  60. this.targetDataLine = line;
  61. this.fileName = fileName;
  62. }
// Convenience constructor: derives the sampling rate and sample size from the
// line's own AudioFormat, and uses the default PCM encoding and 100ms frames.
public LiveAudioStream(String id, TargetDataLine line, int captureTime, int segmentLengthMs, String fileName) {
    this(id, DEFAULT_ENCODING, (int) line.getFormat().getSampleRate(), line.getFormat().getSampleSizeInBits(),
        DEFAULT_FRAME_TIME, captureTime, segmentLengthMs, line, fileName);
}
  67. public static void findTargetDataLines(AudioFormat audioFormat) {
  68. Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();
  69. DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
  70. for (int cnt = 0; cnt < mixerInfo.length; cnt++) {
  71. boolean added = false;
  72. Mixer mixer = AudioSystem.getMixer(mixerInfo[cnt]);
  73. String name = mixer.getMixerInfo().getName();
  74. String desc = mixer.getMixerInfo().getDescription();
  75. if (desc.contains("Direct Audio Device")) {
  76. try {
  77. TargetDataLine line = (TargetDataLine) mixer.getLine(dataLineInfo);
  78. line.open(audioFormat);
  79. lines.put(mixer, line);
  80. added = true;
  81. } catch (Exception e) {
  82. // skip this entry
  83. }
  84. }
  85. System.out.println((added ? "*" : " ") + "Audio mixer " + name + ": " + desc);
  86. }
  87. }
  88. @Override
  89. public void setHeader(StreamHeader h) throws Exception {
  90. throw new RuntimeException("setHeader: Writing to Live Audio Stream is not supported");
  91. }
  92. @Override
  93. public void sendFrame(StreamFrame m) throws Exception {
  94. throw new RuntimeException("sendFrame: Writing to Live Audio Stream is not supported");
  95. }
/**
 * Returns the stream header, lazily starting audio capture on first use.
 *
 * @return the RawAudioHeader describing this capture (non-null once startCapturing() succeeds)
 * @throws Exception if capture startup fails
 */
@Override
public StreamHeader getHeader() throws Exception {
    if (header == null) {
        startCapturing();
    }
    return header;
}
/**
 * Returns the next captured audio frame, blocking until a full frame of bytes
 * is available from the capture thread, and tees every frame into the current
 * WAV segment file. Capture starts lazily (via getHeader) and the output file
 * rolls over every framesDesired frames.
 *
 * @return the next RawAudioFrame, or null once capture has stopped and the buffer is empty
 * @throws Exception on short reads or output-stream failures
 */
@Override
public StreamFrame recvFrame() throws Exception {
    // Open a fresh output segment if none is active (first call, or previous segment closed).
    if (rawAudioOutputStream == null) {
        StreamHeader header = getHeader();  // NOTE: local shadows the field of the same name
        loopTime = header.getNextFrameTime();
        String segmentFile = String.format(fileName, loopTime);
        rawAudioOutputStream = new RawAudioFileStream(segmentFile, true);
        rawAudioOutputStream.setHeader(header);
        System.out.println(System.currentTimeMillis() + " Saving captured audio to: " + segmentFile);
        framesDesired = segmentLengthMs / frameTime;
        framesCaptured = 0;
        // Warn if wall clock has drifted noticeably from the expected segment start.
        long phase = System.currentTimeMillis() - loopTime;
        if (phase > frameTime*2 || phase < 0) {
            System.out.println(System.currentTimeMillis() + " Excessive frame drift detected: " + phase);
        }
    }
    byte[] audioData;
    synchronized (byteArrayOutputStream) {
        // Block until the capture thread produces data; bail out with null once stopped.
        while (byteArrayOutputStream.size() == 0) {
            if (isRunning())
                byteArrayOutputStream.wait();
            else
                return null;
        }
        audioData = byteArrayOutputStream.toByteArray();
        byteArrayOutputStream.reset();
        if (audioData.length < dataSize) {
            throw new Exception("Bad data read length: " + audioData.length);
        }
        // Keep exactly dataSize bytes for this frame; push any excess back into the buffer
        // so the next call consumes it.
        if (audioData.length > dataSize) {
            byte[] nData = new byte[dataSize];
            System.arraycopy(audioData, 0, nData, 0, dataSize);
            byteArrayOutputStream.write(audioData, dataSize, audioData.length - dataSize);
            audioData = nData;
        }
    }
    RawAudioFrame audioFrame = header.makeFrame();
    audioFrame.audioData = DataConversionUtil.byteArrayToShortArray(audioData);
    if (rawAudioOutputStream != null) {
        rawAudioOutputStream.sendFrame(audioFrame);
        framesCaptured++;
        // Segment complete: close the file so the next call opens a new one.
        if (framesCaptured == framesDesired) {
            System.out.println(System.currentTimeMillis() + " Audio stream complete for " + id);
            rawAudioOutputStream.close();
            rawAudioOutputStream = null;
            loopTime += framesCaptured * frameTime;
        }
    }
    return audioFrame;
}
/**
 * Stops capture, waits for the capture thread to acknowledge the stop, then
 * releases the byte buffer, the audio line, and any open output segment.
 * Repeated calls only re-close already-closed resources.
 */
@Override
public void close() {
    synchronized (this) {
        stopAudioCapture();
        // Wait for the capture thread to leave RUNNING/PREFETCH; setState() calls
        // notifyAll() on 'this' to wake us.
        while (isRunning()) {
            try {
                wait();
            } catch (InterruptedException e) {
                // ignore interruption
            }
        }
    }
    if (byteArrayOutputStream != null) {
        synchronized (byteArrayOutputStream) {
            try {
                byteArrayOutputStream.close();
            } catch (IOException e) {
                // ignore closing exception
            }
        }
    }
    if (targetDataLine != null) {
        targetDataLine.close();
    }
    if (rawAudioOutputStream != null) {
        rawAudioOutputStream.close();
    }
}
  181. private void stopAudioCapture() {
  182. synchronized (byteArrayOutputStream) {
  183. setState(AudioCaptureState.STOPPED);
  184. byteArrayOutputStream.notify();
  185. }
  186. }
// Transitions the capture state and wakes anyone blocked on this object's
// monitor (waitActive() and the wait loop in close()).
private synchronized void setState(AudioCaptureState newState) {
    audioCaptureState = newState;
    notifyAll();
}
  191. private synchronized boolean isRunning() {
  192. return audioCaptureState == AudioCaptureState.RUNNING ||
  193. audioCaptureState == AudioCaptureState.PREFETCH;
  194. }
  195. private synchronized void waitActive() {
  196. try {
  197. while (audioCaptureState == AudioCaptureState.PREFETCH) {
  198. wait();
  199. }
  200. } catch (InterruptedException e) {
  201. //
  202. }
  203. }
/**
 * Builds the stream header with a window-aligned start time, allocates the
 * hand-off buffer, and launches the capture thread, blocking until it leaves
 * the PREFETCH state. Called lazily from getHeader().
 *
 * @throws Exception if start-time alignment or header construction fails
 */
private void startCapturing() throws Exception {
    long startTime = alignStartTime();  // NOTE: local shadows the public field of the same name
    String comment = "stime:" + startTime;
    header = new RawAudioHeader(id, startTime, frameTime, audioEncoding, numChannels,
        samplingRate, bitsPerSample, comment);
    System.out.println(System.currentTimeMillis() + " Starting data line " + id);
    // Bytes per frame: samples/frame * channels * 2 bytes per 16-bit sample.
    dataSize = (frameTime * numChannels * samplingRate / 1000 * 2);
    byteArrayOutputStream = new ByteArrayOutputStream();
    captureThread = new CaptureThread(targetDataLine, id);
    setState(AudioCaptureState.PREFETCH);
    captureThread.start();
    waitActive();
}
  217. class CaptureThread extends Thread {
  218. byte dataBuffer[] = new byte[dataSize];
  219. TargetDataLine targetDataLine;
  220. public CaptureThread(TargetDataLine targetDataLine, String name) {
  221. super(name);
  222. this.targetDataLine = targetDataLine;
  223. }
  224. private int readData() {
  225. return targetDataLine.read(dataBuffer, 0, dataBuffer.length);
  226. }
  227. @Override
  228. public void run() {
  229. try {
  230. long loopStartTime = header.startTime;
  231. long delay = loopStartTime - System.currentTimeMillis();
  232. System.out.println(System.currentTimeMillis() + " Delaying start for " + delay);
  233. Thread.sleep(delay);
  234. readySetGo(targetDataLine);
  235. setState(AudioCaptureState.RUNNING);
  236. while (isRunning()) {
  237. if (captureTimeMs >= 0 && System.currentTimeMillis() - loopStartTime > captureTimeMs) {
  238. break;
  239. }
  240. int cnt = readData();
  241. if (cnt < 0) {
  242. break;
  243. }
  244. synchronized (byteArrayOutputStream) {
  245. if (audioCaptureState != AudioCaptureState.PREFETCH && byteArrayOutputStream.size() < dataSize*100) {
  246. byteArrayOutputStream.write(dataBuffer, 0, cnt);
  247. byteArrayOutputStream.notifyAll();
  248. }
  249. }
  250. if (targetDataLine.available() > 0) {
  251. Thread.sleep(frameTime/2);
  252. }
  253. }
  254. } catch (Exception e) {
  255. e.printStackTrace();
  256. }
  257. stopAudioCapture();
  258. }
  259. }
  260. private static String getHostName() throws Exception {
  261. InetAddress localMachine = java.net.InetAddress.getLocalHost();
  262. return localMachine.getHostName();
  263. }
  264. public static void main(String[] args) throws Exception {
  265. String hostname = getHostName().replace('-', '+').replace('_', '+');
  266. int arg = 0;
  267. int captureTimeMs = (arg < args.length ? new Integer(args[arg++]) : 100) * 1000;
  268. int segmentLengthMs = (arg < args.length ? new Integer(args[arg++]) : 10) * 1000;
  269. System.out.println("Starting audio capture for " + captureTimeMs/1000.0 + "s in "+segmentLengthMs/1000.0+"s segments");
  270. AudioFormat audioFormat = new AudioFormat((float) DEFAULT_SAMPLING_RATE,
  271. DEFAULT_BITS_PER_SAMPLE, DEFAULT_CHANNELS, true, false);
  272. List<LiveAudioStream> streams = getLiveAudioStreams(null, captureTimeMs, segmentLengthMs);
  273. for (LiveAudioStream stream : streams) {
  274. AudioRunner runner = new AudioRunner(stream);
  275. new Thread(runner, stream.id).start();
  276. }
  277. }
  278. public static List<LiveAudioStream> getLiveAudioStreams(String path, int captureTimeMs, int segmentLengthMs)
  279. throws Exception {
  280. String hostname = getHostName().replace('-', '+').replace('_','+');
  281. if (path == null) {
  282. path = "";
  283. }
  284. AudioFormat audioFormat = new AudioFormat((float) DEFAULT_SAMPLING_RATE,
  285. DEFAULT_BITS_PER_SAMPLE, DEFAULT_CHANNELS, true, false);
  286. findTargetDataLines(audioFormat);
  287. if (lines.size() == 0) {
  288. throw new Exception("No valid data lines found");
  289. }
  290. int cnt = 1;
  291. List<LiveAudioStream> streams = new ArrayList<LiveAudioStream>();
  292. for (Mixer mixer : lines.keySet()) {
  293. TargetDataLine line = lines.get(mixer);
  294. System.out.println(mixer.getMixerInfo().getName());
  295. String id = hostname + "-" + cnt;
  296. String fileName = path + id + "_%d.wav";
  297. LiveAudioStream stream = new LiveAudioStream(id, line, captureTimeMs,segmentLengthMs, fileName);
  298. streams.add(stream);
  299. cnt++;
  300. }
  301. return streams;
  302. }
/**
 * Runnable wrapper that drives one LiveAudioStream's save loop on its own thread.
 */
static class AudioRunner implements Runnable {
    final LiveAudioStream stream;

    private AudioRunner(LiveAudioStream stream) {
        this.stream = stream;
    }

    public void run() {
        System.out.println("Starting capture loop " + stream.id);
        try {
            stream.startSaveAudio();
            // NOTE(review): startSaveAudio() already closes the stream in its finally
            // block, so this second close() is redundant — it appears harmless (close
            // only re-closes already-closed resources), but confirm before removing.
            stream.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
        System.out.println("Terminating capture loop " + stream.id);
    }
}
/**
 * Two-phase barrier, guarded by the static 'lines' monitor, that starts all
 * capture lines as close to simultaneously as possible. Phase 1: every thread
 * increments lineCount and waits; the last arrival releases the group, then
 * each thread starts (and, if needed, flushes) its line. Phase 2: each thread
 * decrements lineCount and waits until it reaches zero, so no thread proceeds
 * until every line has been started.
 *
 * @param targetDataLine the line this thread starts once all threads have arrived
 * @throws InterruptedException if interrupted while waiting at the barrier
 */
private static void readySetGo(TargetDataLine targetDataLine) throws InterruptedException {
    synchronized(lines) {
        lineCount++;
        //System.out.println("Linecount is ++ " + lineCount);
        if (lineCount == lines.size()) {
            lines.notifyAll();
        } else while (lineCount < lines.size()) {
            lines.wait();
        }
        targetDataLine.start();
        if (targetDataLine.available() > 0) {
            System.out.println("Target line already has " + targetDataLine.available());
            // this flush is necessary on some versions of the JDK that seem to start capture before start()!
            targetDataLine.flush();
        }
    }
    //System.out.println("Linecount is == "+ + lineCount);
    Thread.sleep(1); // let other threads start their lines
    synchronized(lines) {
        lineCount--;
        //System.out.println("Linecount is -- "+ + lineCount);
        while (lineCount > 0) {
            lines.wait();
        }
        lines.notifyAll();
    }
    System.out.println(System.currentTimeMillis() + " Releasing line");
}
  347. private long alignStartTime() throws Exception {
  348. if ((delayWindowMs % frameTime) != 0) {
  349. throw new IllegalArgumentException("delayWindowMs(" + delayWindowMs +
  350. ") must be a mod of frameTime("+frameTime+")");
  351. }
  352. long loopTime = System.currentTimeMillis();
  353. int delay = delayWindowMs - (int) (loopTime % delayWindowMs);
  354. if (delay < frameTime*2)
  355. delay += delayWindowMs;
  356. loopTime += delay;
  357. System.out.println("Aiming to start at " + loopTime);
  358. return loopTime;
  359. }
  360. private void startSaveAudio()
  361. throws Exception {
  362. String segmentName = id;
  363. try {
  364. StreamFrame frame;
  365. do {
  366. frame = recvFrame();
  367. } while (frame != null);
  368. } finally {
  369. System.out.println("Audio stream " + segmentName + " complete at " + System.currentTimeMillis());
  370. close();
  371. }
  372. }
  373. }