Use of com.github.bordertech.wcomponents.WAudio in project wcomponents by BorderTech: class WAudioRenderer_Test, method testRendererCorrectlyConfigured.
@Test
public void testRendererCorrectlyConfigured() {
WAudio audio = new WAudio();
Assert.assertTrue("Incorrect renderer supplied", getWebXmlRenderer(audio) instanceof WAudioRenderer);
}
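The test only verifies that the framework supplies a WAudioRenderer for a WAudio instance; getWebXmlRenderer is presumably a helper from the renderer test base class. As a rough illustration of what a convention-based lookup could look like (the package name and reflective lookup below are assumptions for illustration, not the actual wcomponents registry mechanism):
// Illustration only: resolve a renderer by the naming convention
// "<component simple name> + Renderer". The real wcomponents registry
// may resolve renderers differently.
static Object lookupRendererByConvention(final Object component) throws ReflectiveOperationException {
    String rendererClassName = "com.github.bordertech.wcomponents.render.webxml."
            + component.getClass().getSimpleName() + "Renderer";
    return Class.forName(rendererClassName).getDeclaredConstructor().newInstance();
}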
Use of com.github.bordertech.wcomponents.WAudio in project wcomponents by BorderTech: class WAudioRenderer_Test, method testDoPaint.
@Test
public void testDoPaint() throws IOException, SAXException, XpathException {
MockAudio mockAudio = new MockAudio();
mockAudio.setMimeType("audio/basic");
// Test with no audio tracks - should not render
WAudio audio = new WAudio();
assertSchemaMatch(audio);
assertXpathNotExists("//ui:audio", audio);
// Test with minimal options
audio = new WAudio(mockAudio);
// so that the URIs are consistent
audio.setCacheKey("x{}<>");
setActiveContext(createUIContext());
assertSchemaMatch(audio);
assertXpathExists("//ui:audio", audio);
assertXpathEvaluatesTo(audio.getId(), "//ui:audio/@id", audio);
assertXpathEvaluatesTo("none", "//ui:audio/@preload", audio);
assertXpathNotExists("//ui:audio/@alt", audio);
assertXpathNotExists("//ui:audio/@autoplay", audio);
assertXpathNotExists("//ui:audio/@loop", audio);
assertXpathNotExists("//ui:audio/@muted", audio);
assertXpathNotExists("//ui:audio/@controls", audio);
assertXpathNotExists("//ui:audio/@hidden", audio);
assertXpathNotExists("//ui:audio/@disabled", audio);
assertXpathNotExists("//ui:audio/@tooltip", audio);
assertXpathNotExists("//ui:audio/@duration", audio);
assertXpathEvaluatesTo("1", "count(//ui:audio/ui:src)", audio);
assertXpathUrlEvaluatesTo(audio.getAudioUrls()[0], "//ui:audio/ui:src/@uri", audio);
assertXpathEvaluatesTo(audio.getAudio()[0].getMimeType(), "//ui:audio/ui:src/@type", audio);
// Test other options, resetting them after each test
audio.setAltText("altText");
assertSchemaMatch(audio);
assertXpathEvaluatesTo("altText", "//ui:audio/@alt", audio);
audio.reset();
audio.setPreload(WAudio.Preload.META_DATA);
assertSchemaMatch(audio);
assertXpathEvaluatesTo("metadata", "//ui:audio/@preload", audio);
audio.reset();
audio.setAutoplay(true);
assertSchemaMatch(audio);
assertXpathEvaluatesTo("true", "//ui:audio/@autoplay", audio);
audio.reset();
audio.setLoop(true);
assertSchemaMatch(audio);
assertXpathEvaluatesTo("true", "//ui:audio/@loop", audio);
audio.reset();
audio.setControls(WAudio.Controls.NONE);
assertSchemaMatch(audio);
assertXpathEvaluatesTo("none", "//ui:audio/@controls", audio);
audio.reset();
audio.setControls(WAudio.Controls.ALL);
assertSchemaMatch(audio);
assertXpathEvaluatesTo("all", "//ui:audio/@controls", audio);
audio.reset();
audio.setControls(WAudio.Controls.PLAY_PAUSE);
assertSchemaMatch(audio);
assertXpathEvaluatesTo("play", "//ui:audio/@controls", audio);
audio.reset();
audio.setControls(WAudio.Controls.DEFAULT);
assertSchemaMatch(audio);
assertXpathEvaluatesTo("default", "//ui:audio/@controls", audio);
audio.reset();
setFlag(audio, ComponentModel.HIDE_FLAG, true);
assertSchemaMatch(audio);
assertXpathEvaluatesTo("true", "//ui:audio/@hidden", audio);
audio.reset();
audio.setDisabled(true);
assertSchemaMatch(audio);
assertXpathEvaluatesTo("true", "//ui:audio/@disabled", audio);
audio.reset();
audio.setToolTip("toolTip");
assertSchemaMatch(audio);
assertXpathEvaluatesTo("toolTip", "//ui:audio/@toolTip", audio);
audio.reset();
mockAudio.setDuration(123);
assertSchemaMatch(audio);
assertXpathEvaluatesTo("123", "//ui:audio/@duration", audio);
}
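The options exercised by the test map one-to-one onto the public WAudio API. A minimal usage sketch built only from calls that appear in the test above (MockAudio is the test helper used there; an application would normally supply its own Audio implementation backed by a real resource):
// Sketch assembled from the API calls shown in the test above.
// MockAudio is a test helper; application code would use a real Audio implementation.
public static WAudio createExampleAudio() {
    MockAudio clip = new MockAudio();
    clip.setMimeType("audio/basic");
    clip.setDuration(123);

    WAudio audio = new WAudio(clip);
    audio.setCacheKey("example-audio");            // keeps the content URLs consistent/cacheable
    audio.setAltText("Example audio clip");        // rendered as the alt attribute
    audio.setPreload(WAudio.Preload.META_DATA);    // rendered as preload="metadata"
    audio.setControls(WAudio.Controls.PLAY_PAUSE); // rendered as controls="play"
    audio.setLoop(true);                           // rendered as loop="true"
    return audio;
}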
Use of com.github.bordertech.wcomponents.WAudio in project wcomponents by BorderTech: class WAudioRenderer, method doRender.
/**
* Paints the given WAudio.
*
* @param component the WAudio to paint.
* @param renderContext the RenderContext to paint to.
*/
@Override
public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
WAudio audioComponent = (WAudio) component;
XmlStringBuilder xml = renderContext.getWriter();
Audio[] audio = audioComponent.getAudio();
if (audio == null || audio.length == 0) {
return;
}
WAudio.Controls controls = audioComponent.getControls();
int duration = audio[0].getDuration();
// Check for alternative text
String alternativeText = audioComponent.getAltText();
if (alternativeText == null) {
LOG.warn("Audio should have a description.");
} else {
alternativeText = I18nUtilities.format(null, alternativeText);
}
xml.appendTagOpen("ui:audio");
xml.appendAttribute("id", component.getId());
xml.appendOptionalAttribute("class", component.getHtmlClass());
xml.appendOptionalAttribute("track", component.isTracking(), "true");
xml.appendOptionalAttribute("alt", alternativeText);
xml.appendOptionalAttribute("autoplay", audioComponent.isAutoplay(), "true");
xml.appendOptionalAttribute("mediagroup", audioComponent.getMediaGroup());
xml.appendOptionalAttribute("loop", audioComponent.isLoop(), "true");
xml.appendOptionalAttribute("hidden", audioComponent.isHidden(), "true");
xml.appendOptionalAttribute("disabled", audioComponent.isDisabled(), "true");
xml.appendOptionalAttribute("toolTip", audioComponent.getToolTip());
xml.appendOptionalAttribute("duration", duration > 0, duration);
switch(audioComponent.getPreload()) {
case NONE:
xml.appendAttribute("preload", "none");
break;
case META_DATA:
xml.appendAttribute("preload", "metadata");
break;
case AUTO:
default:
break;
}
if (controls != null && !WAudio.Controls.NATIVE.equals(controls)) {
switch(controls) {
case NONE:
xml.appendAttribute("controls", "none");
break;
case ALL:
xml.appendAttribute("controls", "all");
break;
case PLAY_PAUSE:
xml.appendAttribute("controls", "play");
break;
case DEFAULT:
xml.appendAttribute("controls", "default");
break;
default:
LOG.error("Unknown control type: " + controls);
}
}
xml.appendClose();
String[] urls = audioComponent.getAudioUrls();
for (int i = 0; i < urls.length; i++) {
xml.appendTagOpen("ui:src");
xml.appendUrlAttribute("uri", urls[i]);
xml.appendOptionalAttribute("type", audio[i].getMimeType());
xml.appendEnd();
}
xml.appendEndTag("ui:audio");
}
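doRender relies on the appendOptionalAttribute overloads of XmlStringBuilder: an attribute is written only when its value is non-null, or when an explicit condition such as duration > 0 holds, which is why testDoPaint expects most attributes to be absent for a default WAudio. A simplified sketch of that pattern (not the actual XmlStringBuilder implementation, only the behaviour the renderer depends on):
// Simplified illustration of the optional-attribute pattern used by doRender.
// Not the real XmlStringBuilder.
final class OptionalAttributeSketch {
    private final StringBuilder out = new StringBuilder();

    // Write the attribute only when a value is present.
    void appendOptionalAttribute(final String name, final Object value) {
        if (value != null) {
            out.append(' ').append(name).append("=\"").append(value).append('"');
        }
    }

    // Write the attribute only when the condition holds, e.g. duration > 0.
    void appendOptionalAttribute(final String name, final boolean condition, final Object value) {
        if (condition) {
            appendOptionalAttribute(name, value);
        }
    }

    @Override
    public String toString() {
        return out.toString();
    }
}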
Use of com.github.bordertech.wcomponents.WAudio in project wcomponents by BorderTech: class StepCountUtil, method isCachedContentRequest.
/**
* Check if the request is for cached content.
*
* @param request the request being processed
* @return true if the request is for cached content, otherwise false
*/
public static boolean isCachedContentRequest(final Request request) {
// Get target id on request
String targetId = request.getParameter(Environment.TARGET_ID);
if (targetId == null) {
return false;
}
// Get target
ComponentWithContext targetWithContext = WebUtilities.getComponentById(targetId, true);
if (targetWithContext == null) {
return false;
}
// Check for caching key
WComponent target = targetWithContext.getComponent();
UIContextHolder.pushContext(targetWithContext.getContext());
try {
// TODO Look at implementing CacheableTarget interface
String key = null;
if (target instanceof WContent) {
key = ((WContent) target).getCacheKey();
} else if (target instanceof WImage) {
key = ((WImage) target).getCacheKey();
} else if (target instanceof WVideo) {
key = ((WVideo) target).getCacheKey();
} else if (target instanceof WAudio) {
key = ((WAudio) target).getCacheKey();
}
return !Util.empty(key);
} finally {
UIContextHolder.popContext();
}
}
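A request that targets cached content (a WContent, WImage, WVideo or WAudio with a non-empty cache key) can therefore be identified before any further processing. A hedged sketch of a caller (the surrounding step-handling code is an assumption for illustration; the real wcomponents interceptors may be structured differently):
// Hypothetical caller, for illustration only.
boolean requiresStepCheck(final Request request) {
    // Cached content requests are safe to serve without a step check.
    return !StepCountUtil.isCachedContentRequest(request);
}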