use of org.eclipse.smarthome.core.audio.AudioFormat in project smarthome by eclipse.
the class SonosAudioSink method process.
@Override
public void process(AudioStream audioStream) throws UnsupportedAudioFormatException, UnsupportedAudioStreamException {
    if (audioStream == null) {
        // A null audioStream is interpreted as a request to end any currently playing stream.
        logger.trace("Stop currently playing stream.");
        handler.stopPlaying(OnOffType.ON);
    } else if (audioStream instanceof URLAudioStream) {
        // It is an external URL, so the speaker can access and play it itself.
        URLAudioStream urlAudioStream = (URLAudioStream) audioStream;
        handler.playURI(new StringType(urlAudioStream.getURL()));
        try {
            audioStream.close();
        } catch (IOException e) {
            // ignore
        }
    } else if (audioStream instanceof FixedLengthAudioStream) {
        // The speaker fetches the stream over HTTP, possibly with more than one request, so it has to stay
        // available on the AudioServlet for a while; a one-time serving won't work.
        if (callbackUrl != null) {
            String relativeUrl = audioHTTPServer.serve((FixedLengthAudioStream) audioStream, 10).toString();
            String url = callbackUrl + relativeUrl;
            AudioFormat format = audioStream.getFormat();
            if (!ThingHandlerHelper.isHandlerInitialized(handler)) {
                logger.warn("Sonos speaker '{}' is not initialized - status is {}", handler.getThing().getUID(),
                        handler.getThing().getStatus());
            } else if (AudioFormat.WAV.isCompatible(format)) {
                handler.playNotificationSoundURI(
                        new StringType(url + AudioStreamUtils.EXTENSION_SEPARATOR + FileAudioStream.WAV_EXTENSION));
            } else if (AudioFormat.MP3.isCompatible(format)) {
                handler.playNotificationSoundURI(
                        new StringType(url + AudioStreamUtils.EXTENSION_SEPARATOR + FileAudioStream.MP3_EXTENSION));
            } else {
                throw new UnsupportedAudioFormatException("Sonos only supports MP3 or WAV.", format);
            }
        } else {
            logger.warn("We do not have any callback url, so Sonos cannot play the audio stream!");
        }
    } else {
        // Instead of throwing an exception, we could try to wrap it into a FixedLengthAudioStream ourselves,
        // but this might be dangerous as we have no clue how much data to expect from the stream.
        IOUtils.closeQuietly(audioStream);
        throw new UnsupportedAudioStreamException("Sonos can only handle FixedLengthAudioStreams and URLAudioStreams.",
                audioStream.getClass());
    }
}
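For context, AudioFormat.WAV and AudioFormat.MP3 are predefined constants that only pin down container and codec; fields left null act as wildcards in isCompatible(). A minimal sketch of how the branch above would pick the file extension, using made-up concrete format values (16-bit, 44.1 kHz signed PCM) purely for illustration:

import org.eclipse.smarthome.core.audio.AudioFormat;

public class SonosFormatCheckSketch {
    public static void main(String[] args) {
        // Hypothetical concrete WAV format, as an audio source might report it; the numbers are illustrative only.
        AudioFormat streamFormat = new AudioFormat(AudioFormat.CONTAINER_WAVE, AudioFormat.CODEC_PCM_SIGNED,
                Boolean.FALSE, 16, 705600, 44100L);
        if (AudioFormat.WAV.isCompatible(streamFormat)) {
            // This branch is taken: AudioFormat.WAV only requires the WAVE container and PCM_SIGNED codec.
            System.out.println("serve with .wav extension");
        } else if (AudioFormat.MP3.isCompatible(streamFormat)) {
            System.out.println("serve with .mp3 extension");
        } else {
            System.out.println("unsupported format");
        }
    }
}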
use of org.eclipse.smarthome.core.audio.AudioFormat in project smarthome by eclipse.
the class VoiceManagerImpl method say.
@Override
public void say(String text, String voiceId, String sinkId, PercentType volume) {
    Objects.requireNonNull(text, "Text cannot be said as it is null.");
    try {
        TTSService tts = null;
        Voice voice = null;
        String selectedVoiceId = voiceId;
        if (selectedVoiceId == null) {
            // use the configured default, if set
            selectedVoiceId = defaultVoice;
        }
        if (selectedVoiceId == null) {
            tts = getTTS();
            if (tts != null) {
                voice = getPreferredVoice(tts.getAvailableVoices());
            }
        } else if (selectedVoiceId.contains(":")) {
            // it is a fully qualified unique id
            String[] segments = selectedVoiceId.split(":");
            tts = getTTS(segments[0]);
            if (tts != null) {
                voice = getVoice(tts.getAvailableVoices(), segments[1]);
            }
        } else {
            // voiceId is not fully qualified
            tts = getTTS();
            if (tts != null) {
                voice = getVoice(tts.getAvailableVoices(), selectedVoiceId);
            }
        }
        if (tts == null) {
            throw new TTSException("No TTS service can be found for voice " + selectedVoiceId);
        }
        if (voice == null) {
            throw new TTSException("Unable to find a voice for language " + localeProvider.getLocale().getLanguage());
        }
        Set<AudioFormat> audioFormats = tts.getSupportedFormats();
        AudioSink sink = audioManager.getSink(sinkId);
        if (sink != null) {
            AudioFormat audioFormat = getBestMatch(audioFormats, sink.getSupportedFormats());
            if (audioFormat != null) {
                AudioStream audioStream = tts.synthesize(text, voice, audioFormat);
                if (sink.getSupportedStreams().stream().anyMatch(clazz -> clazz.isInstance(audioStream))) {
                    // get current volume
                    PercentType oldVolume = audioManager.getVolume(sinkId);
                    // set notification sound volume
                    if (volume != null) {
                        audioManager.setVolume(volume, sinkId);
                    }
                    try {
                        sink.process(audioStream);
                    } catch (UnsupportedAudioFormatException | UnsupportedAudioStreamException e) {
                        logger.warn("Error saying '{}': {}", text, e.getMessage(), e);
                    } finally {
                        if (volume != null) {
                            // restore volume only if it was set before
                            audioManager.setVolume(oldVolume, sinkId);
                        }
                    }
                } else {
                    logger.warn("Failed playing audio stream '{}' as audio sink doesn't support it.", audioStream);
                }
            } else {
                logger.warn("No compatible audio format found for TTS '{}' and sink '{}'", tts.getId(), sink.getId());
            }
        }
    } catch (TTSException e) {
        logger.warn("Error saying '{}': {}", text, e.getMessage(), e);
    }
}
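A short usage sketch of this method, assuming a VoiceManager reference obtained via OSGi injection; the voice and sink ids below are placeholders and depend on the TTS and audio sink services actually installed:

// "macintalk:Alex" and "sonos:PLAY1:kitchen" are hypothetical ids; real ones come from the
// registered TTS service and audio sink.
voiceManager.say("The washing machine has finished.", "macintalk:Alex", "sonos:PLAY1:kitchen", new PercentType(40));

// With a null voiceId the configured default voice is used (see the top of the method above),
// and a null volume leaves the sink's current volume untouched.
voiceManager.say("Good morning!", null, "sonos:PLAY1:kitchen", null);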
use of org.eclipse.smarthome.core.audio.AudioFormat in project smarthome by eclipse.
the class VoiceManagerImpl method getPreferredFormat.
public static AudioFormat getPreferredFormat(Set<AudioFormat> audioFormats) {
    // Return the first concrete AudioFormat found
    for (AudioFormat currentAudioFormat : audioFormats) {
        // Check if currentAudioFormat is abstract
        if (null == currentAudioFormat.getCodec()) {
            continue;
        }
        if (null == currentAudioFormat.getContainer()) {
            continue;
        }
        if (null == currentAudioFormat.isBigEndian()) {
            continue;
        }
        if (null == currentAudioFormat.getBitDepth()) {
            continue;
        }
        if (null == currentAudioFormat.getBitRate()) {
            continue;
        }
        if (null == currentAudioFormat.getFrequency()) {
            continue;
        }
        // Prefer the WAVE container
        if (!currentAudioFormat.getContainer().equals(AudioFormat.CONTAINER_WAVE)) {
            continue;
        }
        // As currentAudioFormat is concrete, use it
        return currentAudioFormat;
    }
    // There's no concrete AudioFormat, so we must create one
    for (AudioFormat currentAudioFormat : audioFormats) {
        // Define the AudioFormat to return
        AudioFormat format = currentAudioFormat;
        // Not all codecs and containers can be supported
        if (null == format.getCodec()) {
            continue;
        }
        if (null == format.getContainer()) {
            continue;
        }
        // Prefer the WAVE container
        if (!format.getContainer().equals(AudioFormat.CONTAINER_WAVE)) {
            continue;
        }
        // If required, set BigEndian, BitDepth, BitRate, and Frequency to default values
        if (null == format.isBigEndian()) {
            format = new AudioFormat(format.getContainer(), format.getCodec(), Boolean.TRUE, format.getBitDepth(),
                    format.getBitRate(), format.getFrequency());
        }
        if (null == format.getBitDepth() || null == format.getBitRate() || null == format.getFrequency()) {
            // Define default values
            int defaultBitDepth = 16;
            long defaultFrequency = 44100;
            // Obtain current values
            Integer bitRate = format.getBitRate();
            Long frequency = format.getFrequency();
            Integer bitDepth = format.getBitDepth();
            // These values must be interdependent (bitRate = bitDepth * frequency)
            if (null == bitRate) {
                if (null == bitDepth) {
                    bitDepth = Integer.valueOf(defaultBitDepth);
                }
                if (null == frequency) {
                    frequency = Long.valueOf(defaultFrequency);
                }
                bitRate = Integer.valueOf(bitDepth.intValue() * frequency.intValue());
            } else if (null == bitDepth) {
                if (null == frequency) {
                    frequency = Long.valueOf(defaultFrequency);
                }
                bitDepth = Integer.valueOf(bitRate.intValue() / frequency.intValue());
            } else if (null == frequency) {
                frequency = Long.valueOf(bitRate.longValue() / bitDepth.longValue());
            }
            format = new AudioFormat(format.getContainer(), format.getCodec(), format.isBigEndian(), bitDepth, bitRate,
                    frequency);
        }
        // Return the preferred AudioFormat
        return format;
    }
    // Return null to indicate failure
    return null;
}
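To illustrate the fallback path, here is a minimal sketch that feeds the method a single "abstract" WAVE/PCM format; the class wrapper is made up for the example, and the expected values follow directly from the defaults and the bitRate = bitDepth * frequency relation used above:

import java.util.Collections;
import java.util.Set;
import org.eclipse.smarthome.core.audio.AudioFormat;

public class PreferredFormatSketch {
    public static void main(String[] args) {
        // Abstract format: container and codec are set, everything else is left null.
        AudioFormat abstractWav = new AudioFormat(AudioFormat.CONTAINER_WAVE, AudioFormat.CODEC_PCM_SIGNED,
                null, null, null, null);
        Set<AudioFormat> formats = Collections.singleton(abstractWav);
        AudioFormat preferred = VoiceManagerImpl.getPreferredFormat(formats);
        // For this input the second loop fills in the gaps: big endian, 16-bit depth,
        // 44100 Hz frequency, and a bit rate of 16 * 44100 = 705600.
        System.out.println(preferred.getBitRate());
    }
}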
use of org.eclipse.smarthome.core.audio.AudioFormat in project smarthome by eclipse.
the class TTSServiceMacOSTest method synthesizeTest.
/**
 * Test TTSServiceMacOS.synthesize(String, Voice, AudioFormat)
 */
@Test
public void synthesizeTest() {
    Assume.assumeTrue("Mac OS X".equals(System.getProperty("os.name")));
    MacTTSService ttsServiceMacOS = new MacTTSService();
    Set<Voice> voices = ttsServiceMacOS.getAvailableVoices();
    Set<AudioFormat> audioFormats = ttsServiceMacOS.getSupportedFormats();
    try (AudioStream audioStream = ttsServiceMacOS.synthesize("Hello", voices.iterator().next(),
            audioFormats.iterator().next())) {
        Assert.assertNotNull("The test synthesizeTest() created null AudioSource", audioStream);
        Assert.assertNotNull("The test synthesizeTest() created an AudioSource w/o AudioFormat",
                audioStream.getFormat());
        Assert.assertNotNull("The test synthesizeTest() created an AudioSource w/o InputStream", audioStream);
        Assert.assertTrue("The test synthesizeTest() returned an AudioSource with no data",
                (-1 != audioStream.read(new byte[2])));
    } catch (TTSException e) {
        Assert.fail("synthesizeTest() failed with TTSException: " + e.getMessage());
    } catch (IOException e) {
        Assert.fail("synthesizeTest() failed with IOException: " + e.getMessage());
    }
}