Use of org.apache.commons.codec.binary.Base64.encodeBase64String in project wso2-synapse by wso2.
In the class SynapseConfigUtils, method getURLConnection.
/**
* Returns a URLConnection for the given URL. If the URL is an https one, the URLConnection is an
* HttpsURLConnection configured with the KeyStores given in the synapse.properties file
*
* @param url URL
* @return URLConnection for given URL
*/
public static URLConnection getURLConnection(URL url) {
try {
if (url == null) {
if (log.isDebugEnabled()) {
log.debug("Provided URL is null");
}
return null;
}
URLConnection connection;
if (url.getProtocol().equalsIgnoreCase("http") || url.getProtocol().equalsIgnoreCase("https")) {
Properties synapseProperties = SynapsePropertiesLoader.loadSynapseProperties();
String proxyHost = synapseProperties.getProperty(SynapseConstants.SYNPASE_HTTP_PROXY_HOST);
String proxyPort = synapseProperties.getProperty(SynapseConstants.SYNPASE_HTTP_PROXY_PORT);
// get the list of excluded hosts for proxy
List<String> excludedHosts = getExcludedHostsForProxy(synapseProperties);
if (proxyHost != null && proxyPort != null && !excludedHosts.contains(proxyHost)) {
SocketAddress sockaddr = new InetSocketAddress(proxyHost, Integer.parseInt(proxyPort));
Proxy proxy = new Proxy(Proxy.Type.HTTP, sockaddr);
if (url.getProtocol().equalsIgnoreCase("https")) {
connection = getHttpsURLConnection(url, synapseProperties, proxy);
} else {
connection = url.openConnection(proxy);
}
} else {
if (url.getProtocol().equalsIgnoreCase("https")) {
connection = getHttpsURLConnection(url, synapseProperties, null);
} else {
connection = url.openConnection();
}
}
// check whether proxy authentication is required
String userName = synapseProperties.getProperty(SynapseConstants.SYNPASE_HTTP_PROXY_USER);
String password = synapseProperties.getProperty(SynapseConstants.SYNPASE_HTTP_PROXY_PASSWORD);
if (userName != null && password != null) {
String header = userName + ":" + password;
byte[] encodedHeaderBytes = new Base64().encode(header.getBytes());
String encodedHeader = new String(encodedHeaderBytes);
connection.setRequestProperty("Proxy-Authorization", "Basic " + encodedHeader);
}
} else {
connection = url.openConnection();
}
connection.setReadTimeout(getReadTimeout());
connection.setConnectTimeout(getConnectTimeout());
// when HTTP(S) is used, ask for the connection to be closed after the response
connection.setRequestProperty("Connection", "close");
return connection;
} catch (IOException e) {
handleException("Error reading at URI ' " + url + " ' ", e);
}
return null;
}
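The proxy-authentication branch above encodes the credentials with the instance API (new Base64().encode(...) followed by new String(...)). The static helper Base64.encodeBase64String does both steps in one call and (since commons-codec 1.5) produces unchunked output. A minimal standalone sketch of just that header-building step; the user name, password and target URL are placeholders, not values read from synapse.properties:
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import org.apache.commons.codec.binary.Base64;

public class ProxyAuthHeaderSketch {
    public static void main(String[] args) throws Exception {
        String userName = "proxy-user";     // hypothetical credentials
        String password = "proxy-secret";
        String header = userName + ":" + password;
        // One call instead of encode(byte[]) + new String(byte[])
        String encodedHeader = Base64.encodeBase64String(header.getBytes(StandardCharsets.UTF_8));
        URLConnection connection = new URL("http://example.org/").openConnection();
        connection.setRequestProperty("Proxy-Authorization", "Basic " + encodedHeader);
        System.out.println(connection.getRequestProperty("Proxy-Authorization"));
    }
}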
Use of org.apache.commons.codec.binary.Base64.encodeBase64String in project wso2-synapse by wso2.
In the class SynapseConfigUtils, method getOMElementFromURL.
/**
* Return an OMElement from a URL source
*
* @param urlStr a URL string
* @param synapseHome synapse home parameter to be used
* @return an OMElement of the resource
* @throws IOException for invalid URL's or IO errors
*/
public static OMNode getOMElementFromURL(String urlStr, String synapseHome) throws IOException {
URL url = getURLFromPath(urlStr, synapseHome);
if (url == null) {
return null;
}
URLConnection connection = null;
// If the URL contains HTTP basic authentication credentials.
if (url.getUserInfo() != null) {
String protocol = url.getProtocol();
if ("http".equalsIgnoreCase(protocol) || "https".equalsIgnoreCase(protocol)) {
// Create new url excluding user info
URL newUrl = new URL(protocol, url.getHost(), url.getPort(), url.getFile());
connection = getURLConnection(newUrl);
String encoding = StringUtils.removeEnd(new String(new Base64().encode(URLDecoder.decode(url.getUserInfo(), "UTF8").getBytes())), "\r\n");
connection.setRequestProperty("Authorization", "Basic " + encoding);
} else {
handleException("Unsuported protocol [" + protocol + "]. Supports only http and https with " + "basic authentication");
}
} else {
connection = getURLConnection(url);
}
if (connection == null) {
if (log.isDebugEnabled()) {
log.debug("Cannot create a URLConnection for given URL : " + urlStr);
}
return null;
}
InputStream inStream = connection.getInputStream();
try {
StAXOMBuilder builder = new StAXOMBuilder(inStream);
OMElement doc = builder.getDocumentElement();
doc.build();
return doc;
} catch (Exception e) {
if (log.isDebugEnabled()) {
log.info("Content at URL : " + url + " is non XML..");
}
return readNonXML(url);
} finally {
try {
inStream.close();
} catch (IOException e) {
log.warn("Error while closing the input stream to: " + url, e);
}
}
}
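Here the Authorization header is built from url.getUserInfo(), wrapped in StringUtils.removeEnd(..., "\r\n") to guard against a trailing chunking line terminator. Base64.encodeBase64String returns an unchunked string (commons-codec 1.5 and later), so that guard becomes unnecessary. A hedged sketch of the same credential handling against a placeholder URL:
import java.net.URL;
import java.net.URLConnection;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import org.apache.commons.codec.binary.Base64;

public class BasicAuthFromUserInfoSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical URL carrying basic-auth credentials in its authority part
        URL url = new URL("https://user:pa%24s@example.org/registry/resource.xml");
        // Re-create the URL without the user info, as getOMElementFromURL does
        URL newUrl = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getFile());
        URLConnection connection = newUrl.openConnection();
        // Undo percent-encoding, then Base64-encode "user:password" in a single call
        String userInfo = URLDecoder.decode(url.getUserInfo(), "UTF-8");
        String encoding = Base64.encodeBase64String(userInfo.getBytes(StandardCharsets.UTF_8));
        connection.setRequestProperty("Authorization", "Basic " + encoding);
    }
}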
Use of org.apache.commons.codec.binary.Base64.encodeBase64String in project sldeditor by robward-scisys.
In the class EncryptedPropertiesApache, method decrypt.
/* (non-Javadoc)
* @see com.sldeditor.common.property.EncryptedPropertiesInterface#decrypt(java.lang.String)
*/
@Override
public synchronized String decrypt(String str) {
Charset fileEncoding = PrefManager.getInstance().getPrefData().getFileEncoding();
byte[] dec;
try {
dec = new Base64().decode(str.getBytes(fileEncoding));
byte[] utf8 = decrypter.doFinal(dec);
return new String(utf8, fileEncoding);
} catch (IllegalBlockSizeException | BadPaddingException e) {
ConsoleManager.getInstance().exception(this, e);
}
return str;
}
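The decode call used above also has a static counterpart, Base64.decodeBase64(String), which takes the Base64 text directly; since Base64 output is plain ASCII, the file-encoding charset only matters for the decrypted payload, not for the encoded string itself. A small standalone sketch, independent of the EncryptedPropertiesApache fields:
import java.nio.charset.StandardCharsets;
import org.apache.commons.codec.binary.Base64;

public class DecodeSketch {
    public static void main(String[] args) {
        String encoded = "aGVsbG8gd29ybGQ=";                         // Base64 for "hello world"
        byte[] raw = Base64.decodeBase64(encoded);                    // String overload, no getBytes needed
        System.out.println(new String(raw, StandardCharsets.UTF_8)); // prints "hello world"
    }
}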
Use of org.apache.commons.codec.binary.Base64.encodeBase64String in project sldeditor by robward-scisys.
In the class EncryptedPropertiesApache, method encrypt.
/* (non-Javadoc)
* @see com.sldeditor.common.property.EncryptedPropertiesInterface#encrypt(java.lang.String)
*/
@Override
public synchronized String encrypt(String str) {
byte[] utf8;
try {
Charset fileEncoding = PrefManager.getInstance().getPrefData().getFileEncoding();
utf8 = str.getBytes(fileEncoding);
byte[] enc = encrypter.doFinal(utf8);
return new Base64().encodeToString(enc);
} catch (IllegalBlockSizeException | BadPaddingException e) {
ConsoleManager.getInstance().exception(this, e);
}
return "";
}
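encrypt and decrypt together form a Base64-over-cipher round trip: plaintext to Cipher.doFinal to Base64 text, and back. The sketch below reproduces that pattern with a throwaway AES key generated on the spot (the real class initializes its encrypter/decrypter fields elsewhere), using the static Base64.encodeBase64String/decodeBase64 pair in place of the instance methods:
import java.nio.charset.StandardCharsets;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import org.apache.commons.codec.binary.Base64;

public class Base64CipherRoundTrip {
    public static void main(String[] args) throws Exception {
        SecretKey key = KeyGenerator.getInstance("AES").generateKey();   // demo-only key

        Cipher encrypter = Cipher.getInstance("AES");
        encrypter.init(Cipher.ENCRYPT_MODE, key);
        Cipher decrypter = Cipher.getInstance("AES");
        decrypter.init(Cipher.DECRYPT_MODE, key);

        String plain = "secret property value";
        // encrypt: plaintext -> cipher bytes -> Base64 text
        String stored = Base64.encodeBase64String(encrypter.doFinal(plain.getBytes(StandardCharsets.UTF_8)));
        // decrypt: Base64 text -> cipher bytes -> plaintext
        String restored = new String(decrypter.doFinal(Base64.decodeBase64(stored)), StandardCharsets.UTF_8);

        System.out.println(stored);      // Base64 ciphertext
        System.out.println(restored);    // "secret property value"
    }
}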
Use of org.apache.commons.codec.binary.Base64.encodeBase64String in project UniversalMediaServer by UniversalMediaServer.
In the class LibMediaInfoParser, method parse.
/**
* Parse media via MediaInfo.
*/
public static synchronized void parse(DLNAMediaInfo media, InputFile inputFile, int type, RendererConfiguration renderer) {
File file = inputFile.getFile();
if (!media.isMediaparsed() && file != null && MI.isValid() && MI.Open(file.getAbsolutePath()) > 0) {
StreamType general = StreamType.General;
StreamType video = StreamType.Video;
StreamType audio = StreamType.Audio;
StreamType image = StreamType.Image;
StreamType text = StreamType.Text;
DLNAMediaAudio currentAudioTrack = new DLNAMediaAudio();
DLNAMediaSubtitle currentSubTrack;
media.setSize(file.length());
String value;
// set General
getFormat(general, media, currentAudioTrack, MI.Get(general, 0, "Format"), file);
getFormat(general, media, currentAudioTrack, MI.Get(general, 0, "CodecID").trim(), file);
media.setDuration(getDuration(MI.Get(general, 0, "Duration/String1")));
media.setBitrate(getBitrate(MI.Get(general, 0, "OverallBitRate")));
media.setStereoscopy(MI.Get(general, 0, "StereoscopicLayout"));
value = MI.Get(general, 0, "Cover_Data");
if (!value.isEmpty()) {
try {
media.setThumb(DLNAThumbnail.toThumbnail(new Base64().decode(value.getBytes(StandardCharsets.US_ASCII)), 640, 480, ScaleType.MAX, ImageFormat.SOURCE, false));
} catch (EOFException e) {
LOGGER.debug("Error reading \"{}\" thumbnail from MediaInfo: Unexpected end of stream, probably corrupt or read error.", file.getName());
} catch (UnknownFormatException e) {
LOGGER.debug("Could not read \"{}\" thumbnail from MediaInfo: {}", file.getName(), e.getMessage());
} catch (IOException e) {
LOGGER.error("Error reading \"{}\" thumbnail from MediaInfo: {}", file.getName(), e.getMessage());
LOGGER.trace("", e);
}
}
value = MI.Get(general, 0, "Title");
if (!value.isEmpty()) {
media.setFileTitleFromMetadata(value);
}
// set Video
media.setVideoTrackCount(MI.Count_Get(video));
if (media.getVideoTrackCount() > 0) {
for (int i = 0; i < media.getVideoTrackCount(); i++) {
// check for DXSA and DXSB subtitles (subs in video format)
if (MI.Get(video, i, "Title").startsWith("Subtitle")) {
currentSubTrack = new DLNAMediaSubtitle();
// First attempt to detect subtitle track format
currentSubTrack.setType(SubtitleType.valueOfLibMediaInfoCodec(MI.Get(video, i, "Format")));
// Second attempt to detect subtitle track format (CodecID usually is more accurate)
currentSubTrack.setType(SubtitleType.valueOfLibMediaInfoCodec(MI.Get(video, i, "CodecID")));
currentSubTrack.setId(media.getSubtitleTracksList().size());
addSub(currentSubTrack, media);
} else {
getFormat(video, media, currentAudioTrack, MI.Get(video, i, "Format"), file);
getFormat(video, media, currentAudioTrack, MI.Get(video, i, "Format_Version"), file);
getFormat(video, media, currentAudioTrack, MI.Get(video, i, "CodecID"), file);
media.setWidth(getPixelValue(MI.Get(video, i, "Width")));
media.setHeight(getPixelValue(MI.Get(video, i, "Height")));
media.setMatrixCoefficients(MI.Get(video, i, "matrix_coefficients"));
if (!media.is3d()) {
media.setStereoscopy(MI.Get(video, i, "MultiView_Layout"));
}
media.setAspectRatioContainer(MI.Get(video, i, "DisplayAspectRatio/String"));
media.setAspectRatioVideoTrack(MI.Get(video, i, "DisplayAspectRatio_Original/String"));
media.setFrameRate(getFPSValue(MI.Get(video, i, "FrameRate")));
media.setFrameRateOriginal(MI.Get(video, i, "FrameRate_Original"));
media.setFrameRateMode(getFrameRateModeValue(MI.Get(video, i, "FrameRate_Mode")));
media.setFrameRateModeRaw(MI.Get(video, i, "FrameRate_Mode"));
media.setReferenceFrameCount(getReferenceFrameCount(MI.Get(video, i, "Format_Settings_RefFrames/String")));
media.setVideoTrackTitleFromMetadata(MI.Get(video, i, "Title"));
value = MI.Get(video, i, "Format_Settings_QPel");
if (!value.isEmpty()) {
media.putExtra(FormatConfiguration.MI_QPEL, value);
}
value = MI.Get(video, i, "Format_Settings_GMC");
if (!value.isEmpty()) {
media.putExtra(FormatConfiguration.MI_GMC, value);
}
value = MI.Get(video, i, "Format_Settings_GOP");
if (!value.isEmpty()) {
media.putExtra(FormatConfiguration.MI_GOP, value);
}
media.setMuxingMode(MI.Get(video, i, "MuxingMode"));
if (!media.isEncrypted()) {
media.setEncrypted("encrypted".equals(MI.Get(video, i, "Encryption")));
}
value = MI.Get(video, i, "BitDepth");
if (!value.isEmpty()) {
try {
media.setVideoBitDepth(Integer.parseInt(value));
} catch (NumberFormatException nfe) {
LOGGER.debug("Could not parse bits per sample \"" + value + "\"");
}
}
value = MI.Get(video, i, "Format_Profile");
if (!value.isEmpty() && media.getCodecV() != null && media.getCodecV().equals(FormatConfiguration.H264)) {
media.setAvcLevel(getAvcLevel(value));
}
}
}
}
// set Audio
int audioTracks = MI.Count_Get(audio);
if (audioTracks > 0) {
for (int i = 0; i < audioTracks; i++) {
currentAudioTrack = new DLNAMediaAudio();
getFormat(audio, media, currentAudioTrack, MI.Get(audio, i, "Format"), file);
getFormat(audio, media, currentAudioTrack, MI.Get(audio, i, "Format_Version"), file);
getFormat(audio, media, currentAudioTrack, MI.Get(audio, i, "Format_Profile"), file);
getFormat(audio, media, currentAudioTrack, MI.Get(audio, i, "CodecID"), file);
value = MI.Get(audio, i, "CodecID_Description");
if (isNotBlank(value) && value.startsWith("Windows Media Audio 10")) {
currentAudioTrack.setCodecA(FormatConfiguration.WMA10);
}
currentAudioTrack.setLang(getLang(MI.Get(audio, i, "Language/String")));
currentAudioTrack.setAudioTrackTitleFromMetadata((MI.Get(audio, i, "Title")).trim());
currentAudioTrack.getAudioProperties().setNumberOfChannels(MI.Get(audio, i, "Channel(s)"));
currentAudioTrack.setSampleFrequency(getSampleFrequency(MI.Get(audio, i, "SamplingRate")));
currentAudioTrack.setBitRate(getBitrate(MI.Get(audio, i, "BitRate")));
currentAudioTrack.setSongname(MI.Get(general, 0, "Track"));
if (renderer.isPrependTrackNumbers() && currentAudioTrack.getTrack() > 0 && currentAudioTrack.getSongname() != null && currentAudioTrack.getSongname().length() > 0) {
currentAudioTrack.setSongname(currentAudioTrack.getTrack() + ": " + currentAudioTrack.getSongname());
}
currentAudioTrack.setAlbum(MI.Get(general, 0, "Album"));
currentAudioTrack.setArtist(MI.Get(general, 0, "Performer"));
currentAudioTrack.setGenre(MI.Get(general, 0, "Genre"));
// Try to parse the year from the stored date
String recordedDate = MI.Get(general, 0, "Recorded_Date");
Matcher matcher = yearPattern.matcher(recordedDate);
if (matcher.matches()) {
try {
currentAudioTrack.setYear(Integer.parseInt(matcher.group(1)));
} catch (NumberFormatException nfe) {
LOGGER.debug("Could not parse year from recorded date \"" + recordedDate + "\"");
}
}
// Special check for OGM: MediaInfo reports specific Audio/Subs IDs (0xn) while mencoder does not
value = MI.Get(audio, i, "ID/String");
if (!value.isEmpty()) {
if (value.contains("(0x") && !FormatConfiguration.OGG.equals(media.getContainer())) {
currentAudioTrack.setId(getSpecificID(value));
} else {
currentAudioTrack.setId(media.getAudioTracksList().size());
}
}
value = MI.Get(general, i, "Track/Position");
if (!value.isEmpty()) {
try {
currentAudioTrack.setTrack(Integer.parseInt(value));
} catch (NumberFormatException nfe) {
LOGGER.debug("Could not parse track \"" + value + "\"");
}
}
value = MI.Get(audio, i, "BitDepth");
if (!value.isEmpty()) {
try {
currentAudioTrack.setBitsperSample(Integer.parseInt(value));
} catch (NumberFormatException nfe) {
LOGGER.debug("Could not parse bits per sample \"" + value + "\"");
}
}
addAudio(currentAudioTrack, media);
}
}
// set Image
media.setImageCount(MI.Count_Get(image));
if (media.getImageCount() > 0 || type == Format.IMAGE) {
boolean parseByMediainfo = false;
// For images use our own parser instead of MediaInfo which doesn't provide enough information
try {
ImagesUtil.parseImage(file, media);
// ImagesUtil may succeed where MediaInfo does not report an image stream, so MI.Count_Get(image) might return 0 even if there is an image.
if (media.getImageCount() == 0) {
media.setImageCount(1);
}
} catch (IOException e) {
if (media.getImageCount() > 0) {
LOGGER.debug("Error parsing image ({}), switching to MediaInfo: {}", file.getAbsolutePath(), e.getMessage());
LOGGER.trace("", e);
parseByMediainfo = true;
} else {
LOGGER.warn("Image parsing for \"{}\" failed both with MediaInfo and internally: {}", file.getAbsolutePath(), e.getMessage());
LOGGER.trace("", e);
media.setImageCount(1);
}
}
if (parseByMediainfo) {
getFormat(image, media, currentAudioTrack, MI.Get(image, 0, "Format"), file);
media.setWidth(getPixelValue(MI.Get(image, 0, "Width")));
media.setHeight(getPixelValue(MI.Get(image, 0, "Height")));
}
}
// set Subs in text format
int subTracks = MI.Count_Get(text);
if (subTracks > 0) {
for (int i = 0; i < subTracks; i++) {
currentSubTrack = new DLNAMediaSubtitle();
currentSubTrack.setType(SubtitleType.valueOfLibMediaInfoCodec(MI.Get(text, i, "Format")));
currentSubTrack.setType(SubtitleType.valueOfLibMediaInfoCodec(MI.Get(text, i, "CodecID")));
currentSubTrack.setLang(getLang(MI.Get(text, i, "Language/String")));
currentSubTrack.setSubtitlesTrackTitleFromMetadata((MI.Get(text, i, "Title")).trim());
// Special check for OGM: MediaInfo reports specific Audio/Subs IDs (0xn) while mencoder does not
value = MI.Get(text, i, "ID/String");
if (!value.isEmpty()) {
if (value.contains("(0x") && !FormatConfiguration.OGG.equals(media.getContainer())) {
currentSubTrack.setId(getSpecificID(value));
} else {
currentSubTrack.setId(media.getSubtitleTracksList().size());
}
}
addSub(currentSubTrack, media);
}
}
/*
* Some container formats (like MP4/M4A) can represent both audio
* and video media. UMS initially recognized this as video, but this
* is corrected here if the content is only audio.
*/
if (media.isAudioOrVideoContainer() && media.isAudio()) {
media.setContainer(media.getAudioVariantFormatConfigurationString());
}
/*
* Recognize 3D layout from the filename.
*
* First we check for our custom naming convention, for which the filename
* either has to start with "3DSBSLF" or "3DSBSRF" for side-by-side layout
* or "3DOULF" or "3DOURF" for over-under layout.
* For anaglyph 3D video, one of the following combinations can be used:
* 3DARCG anaglyph_red_cyan_gray
* 3DARCH anaglyph_red_cyan_half_color
* 3DARCC anaglyph_red_cyan_color
* 3DARCD anaglyph_red_cyan_dubois
* 3DAGMG anaglyph_green_magenta_gray
* 3DAGMH anaglyph_green_magenta_half_color
* 3DAGMC anaglyph_green_magenta_color
* 3DAGMD anaglyph_green_magenta_dubois
* 3DAYBG anaglyph_yellow_blue_gray
* 3DAYBH anaglyph_yellow_blue_half_color
* 3DAYBC anaglyph_yellow_blue_color
* 3DAYBD anaglyph_yellow_blue_dubois
*
* Next we check for common naming conventions.
*/
if (!media.is3d()) {
String upperCaseFileName = file.getName().toUpperCase();
if (upperCaseFileName.startsWith("3DSBS")) {
LOGGER.debug("3D format SBS detected for " + file.getName());
media.setStereoscopy(file.getName().substring(2, 7));
} else if (upperCaseFileName.startsWith("3DOU")) {
LOGGER.debug("3D format OU detected for " + file.getName());
media.setStereoscopy(file.getName().substring(2, 6));
} else if (upperCaseFileName.startsWith("3DA")) {
LOGGER.debug("3D format Anaglyph detected for " + file.getName());
media.setStereoscopy(file.getName().substring(2, 6));
} else if (upperCaseFileName.matches(".*[\\s\\.](H-|H|HALF-|HALF.)SBS[\\s\\.].*")) {
LOGGER.debug("3D format HSBS detected for " + file.getName());
media.setStereoscopy("half side by side (left eye first)");
} else if (upperCaseFileName.matches(".*[\\s\\.](H-|H|HALF-|HALF.)(OU|TB)[\\s\\.].*")) {
LOGGER.debug("3D format HOU detected for " + file.getName());
media.setStereoscopy("half top-bottom (left eye first)");
} else if (upperCaseFileName.matches(".*[\\s\\.]SBS[\\s\\.].*")) {
if (media.getWidth() > 1920) {
LOGGER.debug("3D format SBS detected for " + file.getName());
media.setStereoscopy("side by side (left eye first)");
} else {
LOGGER.debug("3D format HSBS detected based on width for " + file.getName());
media.setStereoscopy("half side by side (left eye first)");
}
} else if (upperCaseFileName.matches(".*[\\s\\.](OU|TB)[\\s\\.].*")) {
if (media.getHeight() > 1080) {
LOGGER.debug("3D format OU detected for " + file.getName());
media.setStereoscopy("top-bottom (left eye first)");
} else {
LOGGER.debug("3D format HOU detected based on height for " + file.getName());
media.setStereoscopy("half top-bottom (left eye first)");
}
}
}
media.postParse(type, inputFile);
MI.Close();
if (media.getContainer() == null) {
media.setContainer(DLNAMediaLang.UND);
}
if (media.getCodecV() == null) {
media.setCodecV(DLNAMediaLang.UND);
}
media.setMediaparsed(true);
}
}
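The cover-art branch near the top of parse decodes MediaInfo's Base64 Cover_Data field with new Base64().decode(value.getBytes(...)); the static Base64.decodeBase64(String) (and encodeBase64String for the opposite direction) does the same without instantiating the codec or picking a charset. A hedged sketch of only that decoding step, with the MediaInfo lookup and the DLNAThumbnail.toThumbnail call replaced by placeholders:
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.commons.codec.binary.Base64;

public class CoverDataSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical stand-in for MI.Get(general, 0, "Cover_Data"): Base64 text of the embedded cover art
        String coverData = "iVBORw0KGgo=";                   // truncated placeholder, not a real image
        byte[] imageBytes = Base64.decodeBase64(coverData);  // same bytes the parser hands to DLNAThumbnail.toThumbnail
        Files.write(Paths.get("cover.bin"), imageBytes);     // e.g. dump to disk instead of building a thumbnail
    }
}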