[chromecast] Support for more audio streams through the audio servlet (#15180)

* [chromecast] Support for more audio streams through the HTTP audio servlet

Separates the audio sink into its own class.
Improves volume change/restoration and temporary file handling by using the new core capabilities.

---------

Signed-off-by: Gwendal Roulleau <gwendal.roulleau@gmail.com>
Gwendal Roulleau 2023-07-06 21:05:21 +02:00 committed by GitHub
parent dee79190a9
commit 06abd44a7d
4 changed files with 93 additions and 78 deletions
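
The gist of the change, condensed into a short sketch for orientation (illustrative only, distilled from the ChromecastAudioSink diff below; the local names served and mimeType are placeholders for what the real code does inline): every stream is now handed to the shared HTTP audio servlet, and playback completion is reported back to the new AudioSinkAsync base class so that the core can restore the previous volume and dispose of any temporary data.

// Condensed from ChromecastAudioSink#processAsynchronously (see the first diff below)
try {
    // Serve the stream through the core audio servlet: 10 s timeout, multiple reads allowed.
    StreamServed served = audioHTTPServer.serve(audioStream, 10, true);
    // Tell AudioSinkAsync when the servlet has fully delivered the stream, so the core
    // can restore the previous volume and clean up temporary data.
    served.playEnd().thenRun(() -> playbackFinished(audioStream));
    handler.playURL("Notification", callbackUrl + served.url(), mimeType);
} catch (IOException e) {
    logger.warn("Chromecast binding was not able to handle the audio stream", e);
    tryClose(audioStream);
}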

View File

@@ -12,15 +12,22 @@
*/
package org.openhab.binding.chromecast.internal;
import java.io.IOException;
import java.io.InputStream;
import java.util.Locale;
import java.util.Set;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.binding.chromecast.internal.handler.ChromecastHandler;
import org.openhab.core.audio.AudioFormat;
import org.openhab.core.audio.AudioHTTPServer;
import org.openhab.core.audio.AudioSinkAsync;
import org.openhab.core.audio.AudioStream;
import org.openhab.core.audio.FixedLengthAudioStream;
import org.openhab.core.audio.StreamServed;
import org.openhab.core.audio.URLAudioStream;
import org.openhab.core.audio.UnsupportedAudioFormatException;
import org.openhab.core.library.types.OnOffType;
import org.openhab.core.library.types.PercentType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -30,52 +37,103 @@ import org.slf4j.LoggerFactory;
* @author Jason Holmes - Initial contribution
*/
@NonNullByDefault
public class ChromecastAudioSink {
public class ChromecastAudioSink extends AudioSinkAsync {
private final Logger logger = LoggerFactory.getLogger(ChromecastAudioSink.class);
private static final Set<AudioFormat> SUPPORTED_FORMATS = Set.of(AudioFormat.MP3, AudioFormat.WAV);
private static final Set<Class<? extends AudioStream>> SUPPORTED_STREAMS = Set.of(AudioStream.class);
private static final String MIME_TYPE_AUDIO_WAV = "audio/wav";
private static final String MIME_TYPE_AUDIO_MPEG = "audio/mpeg";
private final ChromecastCommander commander;
private final ChromecastHandler handler;
private final AudioHTTPServer audioHTTPServer;
private final @Nullable String callbackUrl;
public ChromecastAudioSink(ChromecastCommander commander, AudioHTTPServer audioHTTPServer,
public ChromecastAudioSink(ChromecastHandler handler, AudioHTTPServer audioHTTPServer,
@Nullable String callbackUrl) {
this.commander = commander;
this.handler = handler;
this.audioHTTPServer = audioHTTPServer;
this.callbackUrl = callbackUrl;
}
public void process(@Nullable AudioStream audioStream) throws UnsupportedAudioFormatException {
@Override
public String getId() {
return handler.getThing().getUID().toString();
}
@Override
public @Nullable String getLabel(@Nullable Locale locale) {
return handler.getThing().getLabel();
}
@Override
public void processAsynchronously(@Nullable AudioStream audioStream) throws UnsupportedAudioFormatException {
if (audioStream == null) {
// in case the audioStream is null, this should be interpreted as a request to end any currently playing
// stream.
logger.trace("Stop currently playing stream.");
commander.handleCloseApp(OnOffType.ON);
handler.stop();
} else {
final String url;
if (audioStream instanceof URLAudioStream) {
// it is an external URL, the speaker can access it itself and play it.
URLAudioStream urlAudioStream = (URLAudioStream) audioStream;
url = urlAudioStream.getURL();
tryClose(audioStream);
} else {
if (callbackUrl != null) {
// we serve it on our own HTTP server
String relativeUrl;
if (audioStream instanceof FixedLengthAudioStream) {
relativeUrl = audioHTTPServer.serve((FixedLengthAudioStream) audioStream, 10);
} else {
relativeUrl = audioHTTPServer.serve(audioStream);
try {
StreamServed streamServed = audioHTTPServer.serve(audioStream, 10, true);
relativeUrl = streamServed.url();
// we have to run the delayed task when the server has completely played the stream
streamServed.playEnd().thenRun(() -> this.playbackFinished(audioStream));
} catch (IOException e) {
logger.warn("Chromecast binding was not able to handle the audio stream (cache on disk failed)",
e);
tryClose(audioStream);
return;
}
url = callbackUrl + relativeUrl;
} else {
logger.warn("We do not have any callback url, so Chromecast cannot play the audio stream!");
tryClose(audioStream);
return;
}
}
commander.playMedia("Notification", url,
handler.playURL("Notification", url,
AudioFormat.MP3.isCompatible(audioStream.getFormat()) ? MIME_TYPE_AUDIO_MPEG : MIME_TYPE_AUDIO_WAV);
}
}
private void tryClose(@Nullable InputStream is) {
if (is != null) {
try {
is.close();
} catch (IOException ignored) {
}
}
}
@Override
public Set<AudioFormat> getSupportedFormats() {
return SUPPORTED_FORMATS;
}
@Override
public Set<Class<? extends AudioStream>> getSupportedStreams() {
return SUPPORTED_STREAMS;
}
@Override
public PercentType getVolume() throws IOException {
return handler.getVolume();
}
@Override
public void setVolume(PercentType percentType) throws IOException {
handler.setVolume(percentType);
}
}

View File

@@ -50,7 +50,7 @@ public class ChromecastActions implements ThingActions {
logger.warn("Handler is null, cannot play.");
return false;
} else {
return handler.playURL(url, null);
return handler.playURL(null, url, null);
}
}
@@ -68,7 +68,7 @@ public class ChromecastActions implements ThingActions {
logger.warn("Handler is null, cannot tweet.");
return false;
} else {
return handler.playURL(url, mediaType);
return handler.playURL(null, url, mediaType);
}
}

View File

@@ -20,6 +20,7 @@ import java.util.concurrent.ConcurrentHashMap;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.binding.chromecast.internal.ChromecastAudioSink;
import org.openhab.binding.chromecast.internal.handler.ChromecastHandler;
import org.openhab.core.audio.AudioHTTPServer;
import org.openhab.core.audio.AudioSink;
@@ -77,11 +78,12 @@ public class ChromecastHandlerFactory extends BaseThingHandlerFactory {
@Override
protected @Nullable ThingHandler createHandler(Thing thing) {
ChromecastHandler handler = new ChromecastHandler(thing, audioHTTPServer, createCallbackUrl());
ChromecastHandler handler = new ChromecastHandler(thing);
ChromecastAudioSink audioSink = new ChromecastAudioSink(handler, audioHTTPServer, createCallbackUrl());
@SuppressWarnings("unchecked")
ServiceRegistration<AudioSink> reg = (ServiceRegistration<AudioSink>) bundleContext
.registerService(AudioSink.class.getName(), handler, null);
.registerService(AudioSink.class.getName(), audioSink, null);
audioSinkRegistrations.put(thing.getUID().toString(), reg);
return handler;

View File

@@ -16,24 +16,17 @@ import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.openhab.binding.chromecast.internal.ChromecastAudioSink;
import org.openhab.binding.chromecast.internal.ChromecastCommander;
import org.openhab.binding.chromecast.internal.ChromecastEventReceiver;
import org.openhab.binding.chromecast.internal.ChromecastScheduler;
import org.openhab.binding.chromecast.internal.ChromecastStatusUpdater;
import org.openhab.binding.chromecast.internal.action.ChromecastActions;
import org.openhab.binding.chromecast.internal.config.ChromecastConfig;
import org.openhab.core.audio.AudioFormat;
import org.openhab.core.audio.AudioHTTPServer;
import org.openhab.core.audio.AudioSink;
import org.openhab.core.audio.AudioStream;
import org.openhab.core.audio.UnsupportedAudioFormatException;
import org.openhab.core.audio.UnsupportedAudioStreamException;
import org.openhab.core.library.types.OnOffType;
import org.openhab.core.library.types.PercentType;
import org.openhab.core.thing.ChannelUID;
import org.openhab.core.thing.Thing;
@@ -58,15 +51,9 @@ import su.litvak.chromecast.api.v2.ChromeCast;
* @author Scott Hanson - Added Actions.
*/
@NonNullByDefault
public class ChromecastHandler extends BaseThingHandler implements AudioSink {
public class ChromecastHandler extends BaseThingHandler {
private final Logger logger = LoggerFactory.getLogger(ChromecastHandler.class);
private static final Set<AudioFormat> SUPPORTED_FORMATS = Set.of(AudioFormat.MP3, AudioFormat.WAV);
private static final Set<Class<? extends AudioStream>> SUPPORTED_STREAMS = Set.of(AudioStream.class);
private final AudioHTTPServer audioHTTPServer;
private final @Nullable String callbackUrl;
/**
* The actual implementation. A new one is created each time #initialize is called.
*/
@@ -76,13 +63,9 @@ public class ChromecastHandler extends BaseThingHandler implements AudioSink {
* Constructor.
*
* @param thing the thing the coordinator should be created for
* @param audioHTTPServer server for hosting audio streams
* @param callbackUrl url to be used to tell the Chromecast which host to call for audio urls
*/
public ChromecastHandler(final Thing thing, AudioHTTPServer audioHTTPServer, @Nullable String callbackUrl) {
public ChromecastHandler(final Thing thing) {
super(thing);
this.audioHTTPServer = audioHTTPServer;
this.callbackUrl = callbackUrl;
}
@Override
@@ -107,8 +90,7 @@ public class ChromecastHandler extends BaseThingHandler implements AudioSink {
if (localCoordinator == null) {
ChromeCast chromecast = new ChromeCast(ipAddress, config.port);
localCoordinator = new Coordinator(this, thing, chromecast, config.refreshRate, audioHTTPServer,
callbackUrl);
localCoordinator = new Coordinator(this, thing, chromecast, config.refreshRate);
coordinator = localCoordinator;
scheduler.submit(() -> {
@@ -164,38 +146,6 @@ public class ChromecastHandler extends BaseThingHandler implements AudioSink {
return super.isLinked(channelUID);
}
@Override
public String getId() {
return thing.getUID().toString();
}
@Override
public @Nullable String getLabel(@Nullable Locale locale) {
return thing.getLabel();
}
@Override
public Set<AudioFormat> getSupportedFormats() {
return SUPPORTED_FORMATS;
}
@Override
public Set<Class<? extends AudioStream>> getSupportedStreams() {
return SUPPORTED_STREAMS;
}
@Override
public void process(@Nullable AudioStream audioStream)
throws UnsupportedAudioFormatException, UnsupportedAudioStreamException {
Coordinator localCoordinator = coordinator;
if (localCoordinator != null) {
localCoordinator.audioSink.process(audioStream);
} else {
logger.debug("Cannot process audioStream. No coordinator has been initialized.");
}
}
@Override
public PercentType getVolume() throws IOException {
Coordinator localCoordinator = coordinator;
if (localCoordinator != null) {
@@ -205,7 +155,6 @@ public class ChromecastHandler extends BaseThingHandler implements AudioSink {
}
}
@Override
public void setVolume(PercentType percentType) throws IOException {
Coordinator localCoordinator = coordinator;
if (localCoordinator != null) {
@@ -215,15 +164,24 @@ public class ChromecastHandler extends BaseThingHandler implements AudioSink {
}
}
public void stop() {
Coordinator localCoordinator = coordinator;
if (localCoordinator != null) {
localCoordinator.commander.handleCloseApp(OnOffType.ON);
} else {
logger.debug("Cannot stop. No coordinator has been initialized.");
}
}
@Override
public Collection<Class<? extends ThingHandlerService>> getServices() {
return List.of(ChromecastActions.class);
}
public boolean playURL(String url, @Nullable String mediaType) {
public boolean playURL(@Nullable String title, String url, @Nullable String mediaType) {
Coordinator localCoordinator = coordinator;
if (localCoordinator != null) {
localCoordinator.commander.playMedia(null, url, mediaType);
localCoordinator.commander.playMedia(title, url, mediaType);
return true;
}
return false;
@@ -235,7 +193,6 @@ public class ChromecastHandler extends BaseThingHandler implements AudioSink {
private static final long CONNECT_DELAY = 10;
private final ChromeCast chromeCast;
private final ChromecastAudioSink audioSink;
private final ChromecastCommander commander;
private final ChromecastEventReceiver eventReceiver;
private final ChromecastStatusUpdater statusUpdater;
@@ -254,8 +211,7 @@ public class ChromecastHandler extends BaseThingHandler implements AudioSink {
private ConnectionState connectionState = ConnectionState.UNKNOWN;
private Coordinator(ChromecastHandler handler, Thing thing, ChromeCast chromeCast, long refreshRate,
AudioHTTPServer audioHttpServer, @Nullable String callbackURL) {
private Coordinator(ChromecastHandler handler, Thing thing, ChromeCast chromeCast, long refreshRate) {
this.chromeCast = chromeCast;
this.scheduler = new ChromecastScheduler(handler.scheduler, CONNECT_DELAY, this::connect, refreshRate,
@@ -264,7 +220,6 @@ public class ChromecastHandler extends BaseThingHandler implements AudioSink {
this.commander = new ChromecastCommander(chromeCast, scheduler, statusUpdater);
this.eventReceiver = new ChromecastEventReceiver(scheduler, statusUpdater);
this.audioSink = new ChromecastAudioSink(commander, audioHttpServer, callbackURL);
}
void initialize() {