Use of java.net.MalformedURLException in project hadoop by Apache.
The class AmIpFilter, method init().
@Override
public void init(FilterConfig conf) throws ServletException {
  // Maintain for backwards compatibility
  if (conf.getInitParameter(PROXY_HOST) != null && conf.getInitParameter(PROXY_URI_BASE) != null) {
    proxyHosts = new String[] { conf.getInitParameter(PROXY_HOST) };
    proxyUriBases = new HashMap<>(1);
    proxyUriBases.put("dummy", conf.getInitParameter(PROXY_URI_BASE));
  } else {
    proxyHosts = conf.getInitParameter(PROXY_HOSTS).split(PROXY_HOSTS_DELIMITER);
    String[] proxyUriBasesArr = conf.getInitParameter(PROXY_URI_BASES).split(PROXY_URI_BASES_DELIMITER);
    proxyUriBases = new HashMap<>(proxyUriBasesArr.length);
    for (String proxyUriBase : proxyUriBasesArr) {
      try {
        URL url = new URL(proxyUriBase);
        proxyUriBases.put(url.getHost() + ":" + url.getPort(), proxyUriBase);
      } catch (MalformedURLException e) {
        LOG.warn("{} does not appear to be a valid URL", proxyUriBase, e);
      }
    }
  }
}
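The pattern above keys each proxy URI base by the host:port extracted from it, so a request can later be matched against the right proxy entry. The following is a minimal, standalone sketch of that keying step; the class name and the sample URI bases are hypothetical, and malformed entries are simply skipped with a warning, the same way the filter does.

import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;

public class ProxyUriBaseDemo {
  public static void main(String[] args) {
    // Hypothetical proxy URI bases; the last one is intentionally invalid
    String[] proxyUriBases = {
        "http://rm1.example.com:8088/proxy",
        "http://rm2.example.com:8088/proxy",
        "not a url"
    };
    Map<String, String> byHostPort = new HashMap<>(proxyUriBases.length);
    for (String base : proxyUriBases) {
      try {
        URL url = new URL(base);
        // Key each base by "host:port", as the filter does
        byHostPort.put(url.getHost() + ":" + url.getPort(), base);
      } catch (MalformedURLException e) {
        System.err.println(base + " does not appear to be a valid URL: " + e);
      }
    }
    System.out.println(byHostPort);
  }
}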
Use of java.net.MalformedURLException in project hbase by Apache.
The class DynamicClassLoader, method loadNewJars().
private synchronized void loadNewJars() {
  // Refresh local jar file lists
  if (localDir != null) {
    for (File file : localDir.listFiles()) {
      String fileName = file.getName();
      if (jarModifiedTime.containsKey(fileName)) {
        continue;
      }
      if (file.isFile() && fileName.endsWith(".jar")) {
        jarModifiedTime.put(fileName, Long.valueOf(file.lastModified()));
        try {
          URL url = file.toURI().toURL();
          addURL(url);
        } catch (MalformedURLException mue) {
          // This should not happen, just log it
          LOG.warn("Failed to load new jar " + fileName, mue);
        }
      }
    }
  }
  // Check remote files
  FileStatus[] statuses = null;
  if (remoteDir != null) {
    try {
      statuses = remoteDirFs.listStatus(remoteDir);
    } catch (IOException ioe) {
      LOG.warn("Failed to check remote dir status " + remoteDir, ioe);
    }
  }
  if (statuses == null || statuses.length == 0) {
    // No remote files at all
    return;
  }
  for (FileStatus status : statuses) {
    // No recursive lookup
    if (status.isDirectory()) {
      continue;
    }
    Path path = status.getPath();
    String fileName = path.getName();
    if (!fileName.endsWith(".jar")) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Ignored non-jar file " + fileName);
      }
      // Ignore non-jar files
      continue;
    }
    Long cachedLastModificationTime = jarModifiedTime.get(fileName);
    if (cachedLastModificationTime != null) {
      long lastModified = status.getModificationTime();
      if (lastModified < cachedLastModificationTime.longValue()) {
        // The remote jar is not newer than the cached copy; skip it.
        // Touch the remote jar to update its last modified time and force a reload.
        continue;
      }
    }
    try {
      // Copy it to local
      File dst = new File(localDir, fileName);
      remoteDirFs.copyToLocalFile(path, new Path(dst.getPath()));
      jarModifiedTime.put(fileName, Long.valueOf(dst.lastModified()));
      URL url = dst.toURI().toURL();
      addURL(url);
    } catch (IOException ioe) {
      LOG.warn("Failed to load new jar " + fileName, ioe);
    }
  }
}
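For comparison, here is a compact sketch of the local-directory half of this logic built on a plain URLClassLoader: it tracks jar modification times, appends newly seen jars, and logs the (practically unreachable) MalformedURLException from File.toURI().toURL(). Class and method names are illustrative only, not HBase API.

import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.HashMap;
import java.util.Map;

public class LocalJarScanner {

  // URLClassLoader.addURL is protected; widen it so new jars can be appended at runtime
  static final class MutableClassLoader extends URLClassLoader {
    MutableClassLoader(ClassLoader parent) {
      super(new URL[0], parent);
    }

    @Override
    public void addURL(URL url) {
      super.addURL(url);
    }
  }

  private final Map<String, Long> jarModifiedTime = new HashMap<>();
  private final MutableClassLoader loader =
      new MutableClassLoader(LocalJarScanner.class.getClassLoader());

  public synchronized void loadNewJars(File localDir) {
    File[] files = localDir.listFiles();
    if (files == null) {
      return;
    }
    for (File file : files) {
      String fileName = file.getName();
      if (!file.isFile() || !fileName.endsWith(".jar") || jarModifiedTime.containsKey(fileName)) {
        continue;
      }
      jarModifiedTime.put(fileName, file.lastModified());
      try {
        // File.toURI().toURL() always yields a well-formed file: URL,
        // so this catch block should be unreachable in practice
        loader.addURL(file.toURI().toURL());
      } catch (MalformedURLException e) {
        System.err.println("Failed to load new jar " + fileName + ": " + e);
      }
    }
  }
}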
Use of java.net.MalformedURLException in project hive by Apache.
The class GenericUDTFParseUrlTuple, method process().
@Override
public void process(Object[] o) throws HiveException {
  if (o[0] == null) {
    forward(nullCols);
    return;
  }
  // get the path names for the 1st row only
  if (!pathParsed) {
    for (int i = 0; i < numCols; ++i) {
      paths[i] = ((StringObjectInspector) inputOIs[i + 1]).getPrimitiveJavaObject(o[i + 1]);
      if (paths[i] == null) {
        partnames[i] = PARTNAME.NULLNAME;
      } else if (paths[i].equals("HOST")) {
        partnames[i] = PARTNAME.HOST;
      } else if (paths[i].equals("PATH")) {
        partnames[i] = PARTNAME.PATH;
      } else if (paths[i].equals("QUERY")) {
        partnames[i] = PARTNAME.QUERY;
      } else if (paths[i].equals("REF")) {
        partnames[i] = PARTNAME.REF;
      } else if (paths[i].equals("PROTOCOL")) {
        partnames[i] = PARTNAME.PROTOCOL;
      } else if (paths[i].equals("FILE")) {
        partnames[i] = PARTNAME.FILE;
      } else if (paths[i].equals("AUTHORITY")) {
        partnames[i] = PARTNAME.AUTHORITY;
      } else if (paths[i].equals("USERINFO")) {
        partnames[i] = PARTNAME.USERINFO;
      } else if (paths[i].startsWith("QUERY:")) {
        partnames[i] = PARTNAME.QUERY_WITH_KEY;
        // update paths[i], e.g., from "QUERY:id" to "id"
        paths[i] = paths[i].substring(6);
      } else {
        partnames[i] = PARTNAME.NULLNAME;
      }
    }
    pathParsed = true;
  }
  String urlStr = ((StringObjectInspector) inputOIs[0]).getPrimitiveJavaObject(o[0]);
  if (urlStr == null) {
    forward(nullCols);
    return;
  }
  try {
    String ret = null;
    url = new URL(urlStr);
    for (int i = 0; i < numCols; ++i) {
      ret = evaluate(url, i);
      if (ret == null) {
        retCols[i] = null;
      } else {
        if (retCols[i] == null) {
          // use the object pool rather than creating a new object
          retCols[i] = cols[i];
        }
        retCols[i].set(ret);
      }
    }
    forward(retCols);
    return;
  } catch (MalformedURLException e) {
    // parsing error, invalid url string
    if (!seenErrors) {
      LOG.error("The input is not a valid url string: " + urlStr + ". Skipping such error messages in the future.");
      seenErrors = true;
    }
    forward(nullCols);
    return;
  }
}
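In plain Java, the same part extraction reduces to java.net.URL accessors plus a MalformedURLException fallback. The sketch below (hypothetical class name and sample URL) prints the parts corresponding to the PARTNAME constants above; in the UDTF a parse failure instead forwards a row of NULL columns.

import java.net.MalformedURLException;
import java.net.URL;

public class UrlPartsDemo {
  public static void main(String[] args) {
    String urlStr = "http://example.com:8080/docs/index.html?id=42#top";
    try {
      URL url = new URL(urlStr);
      System.out.println("HOST      = " + url.getHost());
      System.out.println("PATH      = " + url.getPath());
      System.out.println("QUERY     = " + url.getQuery());
      System.out.println("REF       = " + url.getRef());
      System.out.println("PROTOCOL  = " + url.getProtocol());
      System.out.println("FILE      = " + url.getFile());
      System.out.println("AUTHORITY = " + url.getAuthority());
      System.out.println("USERINFO  = " + url.getUserInfo());
    } catch (MalformedURLException e) {
      // parse_url_tuple would emit NULL columns here; we just report the bad input
      System.err.println("The input is not a valid url string: " + urlStr);
    }
  }
}

In HiveQL the UDTF itself is typically invoked through a lateral view, e.g. SELECT t.* FROM src LATERAL VIEW parse_url_tuple(fullurl, 'HOST', 'PATH', 'QUERY:id') t AS host, path, query_id;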
Use of java.net.MalformedURLException in project hive by Apache.
The class TaskLogProcessor, method getStackTraces().
/**
 * Processes the provided task logs to extract stack traces.
 * @return A list of lists of strings where each list of strings represents a stack trace
 */
public List<List<String>> getStackTraces() {
  List<List<String>> stackTraces = new ArrayList<List<String>>();
  for (String urlString : taskLogUrls) {
    // Open the log file, and read the lines, parse out stack traces
    URL taskAttemptLogUrl;
    try {
      taskAttemptLogUrl = new URL(urlString);
    } catch (MalformedURLException e) {
      throw new RuntimeException("Bad task log url", e);
    }
    BufferedReader in;
    try {
      in = new BufferedReader(new InputStreamReader(taskAttemptLogUrl.openStream()));
      String lastLine = null;
      boolean lastLineMatched = false;
      List<String> stackTrace = null;
      // Patterns that match the middle/end of stack traces
      Pattern stackTracePattern = Pattern.compile("^\tat .*", Pattern.CASE_INSENSITIVE);
      Pattern endStackTracePattern = Pattern.compile("^\t... [0-9]+ more.*", Pattern.CASE_INSENSITIVE);
      String inputLine;
      while (true) {
        inputLine = in.readLine();
        if (inputLine == null) {
          // EOF:
          if (stackTrace != null) {
            stackTraces.add(stackTrace);
            stackTrace = null;
          }
          break;
        }
        inputLine = HtmlQuoting.unquoteHtmlChars(inputLine);
        if (stackTracePattern.matcher(inputLine).matches() || endStackTracePattern.matcher(inputLine).matches()) {
          if (stackTrace == null) {
            // This is the first time we have realized we are in a stack trace. In this case,
            // the previous line was the error message, add that to the stack trace as well
            stackTrace = new ArrayList<String>();
            stackTrace.add(lastLine);
          } else if (!lastLineMatched) {
            // The last line didn't match a pattern, it is probably an error message, part of
            // a string of stack traces related to the same error message so add it to the stack
            // trace
            stackTrace.add(lastLine);
          }
          stackTrace.add(inputLine);
          lastLineMatched = true;
        } else {
          if (!lastLineMatched && stackTrace != null) {
            // If the last line didn't match the patterns either, the stack trace is definitely
            // over
            stackTraces.add(stackTrace);
            stackTrace = null;
          }
          lastLineMatched = false;
        }
        lastLine = inputLine;
      }
      in.close();
    } catch (IOException e) {
      throw new RuntimeException("Error while reading from task log url", e);
    }
  }
  return stackTraces;
}
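A condensed sketch of the fetch-and-scan step follows: it opens the task-log URL with try-with-resources, wraps a bad URL in a RuntimeException exactly as above, but only counts frame-like lines rather than grouping whole traces. The class name is hypothetical, and the end-of-trace pattern escapes its dots, which the original leaves as regex wildcards.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;

public class TaskLogFetcher {

  // Count lines in a task log that look like stack-trace frames or "... N more" tails
  public static int countStackFrameLines(String urlString) {
    URL logUrl;
    try {
      logUrl = new URL(urlString);
    } catch (MalformedURLException e) {
      throw new RuntimeException("Bad task log url", e);
    }
    int frames = 0;
    try (BufferedReader in = new BufferedReader(new InputStreamReader(logUrl.openStream()))) {
      String line;
      while ((line = in.readLine()) != null) {
        if (line.matches("^\tat .*") || line.matches("^\t\\.\\.\\. [0-9]+ more.*")) {
          frames++;
        }
      }
    } catch (IOException e) {
      throw new RuntimeException("Error while reading from task log url", e);
    }
    return frames;
  }
}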
Use of java.net.MalformedURLException in project hive by Apache.
The class DruidStorageHandler, method commitCreateTable().
@Override
public void commitCreateTable(Table table) throws MetaException {
  if (MetaStoreUtils.isExternalTable(table)) {
    return;
  }
  Lifecycle lifecycle = new Lifecycle();
  LOG.info(String.format("Committing table [%s] to the druid metastore", table.getDbName()));
  final Path tableDir = getSegmentDescriptorDir();
  try {
    List<DataSegment> segmentList = DruidStorageHandlerUtils.getPublishedSegments(tableDir, getConf());
    LOG.info(String.format("Found [%d] segments under path [%s]", segmentList.size(), tableDir));
    druidSqlMetadataStorageUpdaterJobHandler.publishSegments(druidMetadataStorageTablesConfig.getSegmentsTable(), segmentList, DruidStorageHandlerUtils.JSON_MAPPER);
    final String coordinatorAddress = HiveConf.getVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_COORDINATOR_DEFAULT_ADDRESS);
    int maxTries = HiveConf.getIntVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_MAX_TRIES);
    final String dataSourceName = table.getParameters().get(Constants.DRUID_DATA_SOURCE);
    LOG.info(String.format("checking load status from coordinator [%s]", coordinatorAddress));
    // check if the coordinator is up
    httpClient = makeHttpClient(lifecycle);
    try {
      lifecycle.start();
    } catch (Exception e) {
      Throwables.propagate(e);
    }
    String coordinatorResponse = null;
    try {
      coordinatorResponse = RetryUtils.retry(new Callable<String>() {

        @Override
        public String call() throws Exception {
          return DruidStorageHandlerUtils.getURL(httpClient, new URL(String.format("http://%s/status", coordinatorAddress)));
        }
      }, new Predicate<Throwable>() {

        @Override
        public boolean apply(@Nullable Throwable input) {
          return input instanceof IOException;
        }
      }, maxTries);
    } catch (Exception e) {
      console.printInfo("Will skip waiting for data loading");
      return;
    }
    if (Strings.isNullOrEmpty(coordinatorResponse)) {
      console.printInfo("Will skip waiting for data loading");
      return;
    }
    console.printInfo(String.format("Waiting for the loading of [%s] segments", segmentList.size()));
    long passiveWaitTimeMs = HiveConf.getLongVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_PASSIVE_WAIT_TIME);
    ImmutableSet<URL> setOfUrls = FluentIterable.from(segmentList).transform(new Function<DataSegment, URL>() {

      @Override
      public URL apply(DataSegment dataSegment) {
        try {
          // Need to make sure that we are using UTC since most of the druid cluster use UTC by default
          return new URL(String.format("http://%s/druid/coordinator/v1/datasources/%s/segments/%s", coordinatorAddress, dataSourceName, DataSegment.makeDataSegmentIdentifier(dataSegment.getDataSource(), new DateTime(dataSegment.getInterval().getStartMillis(), DateTimeZone.UTC), new DateTime(dataSegment.getInterval().getEndMillis(), DateTimeZone.UTC), dataSegment.getVersion(), dataSegment.getShardSpec())));
        } catch (MalformedURLException e) {
          Throwables.propagate(e);
        }
        return null;
      }
    }).toSet();
    int numRetries = 0;
    while (numRetries++ < maxTries && !setOfUrls.isEmpty()) {
      setOfUrls = ImmutableSet.copyOf(Sets.filter(setOfUrls, new Predicate<URL>() {

        @Override
        public boolean apply(URL input) {
          try {
            String result = DruidStorageHandlerUtils.getURL(httpClient, input);
            LOG.debug(String.format("Checking segment [%s] response is [%s]", input, result));
            return Strings.isNullOrEmpty(result);
          } catch (IOException e) {
            LOG.error(String.format("Error while checking URL [%s]", input), e);
            return true;
          }
        }
      }));
      try {
        if (!setOfUrls.isEmpty()) {
          Thread.sleep(passiveWaitTimeMs);
        }
      } catch (InterruptedException e) {
        Thread.interrupted();
        Throwables.propagate(e);
      }
    }
    if (!setOfUrls.isEmpty()) {
      // We are not throwing an exception since it might be a transient issue that is blocking loading
      console.printError(String.format("Wait time exhausted and we have [%s] out of [%s] segments not loaded yet", setOfUrls.size(), segmentList.size()));
    }
  } catch (IOException e) {
    LOG.error("Exception while commit", e);
    Throwables.propagate(e);
  } finally {
    cleanWorkingDir();
    lifecycle.stop();
  }
}
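Both URL constructions in this method can only throw MalformedURLException if the configured coordinator address itself is malformed, which is why the code simply propagates it. Below is a minimal sketch of building those two endpoints; the coordinator address, datasource, and segment identifier are hypothetical placeholders rather than values produced by DataSegment.makeDataSegmentIdentifier.

import java.net.MalformedURLException;
import java.net.URL;

public class CoordinatorUrlDemo {
  public static void main(String[] args) {
    String coordinatorAddress = "localhost:8081";   // hypothetical coordinator host:port
    String dataSourceName = "wikipedia";            // hypothetical datasource name
    String segmentId = "wikipedia_2016-01-01T00:00:00.000Z_2016-01-02T00:00:00.000Z_v1"; // hypothetical id
    try {
      // Coordinator liveness check endpoint
      URL statusUrl = new URL(String.format("http://%s/status", coordinatorAddress));
      // Per-segment load-status endpoint
      URL segmentUrl = new URL(String.format(
          "http://%s/druid/coordinator/v1/datasources/%s/segments/%s",
          coordinatorAddress, dataSourceName, segmentId));
      System.out.println("status url  : " + statusUrl);
      System.out.println("segment url : " + segmentUrl);
    } catch (MalformedURLException e) {
      // Only reachable if the coordinator address itself is malformed
      throw new RuntimeException(e);
    }
  }
}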