Use of org.elasticsearch.common.SuppressForbidden in project elasticsearch by elastic.
The class BootstrapForTesting, method getPluginPermissions.
/**
* we don't know which codesources belong to which plugin, so just remove the permission from key codebases
* like core, test-framework, etc. this way tests fail if accesscontroller blocks are missing.
*/
@SuppressForbidden(reason = "accesses fully qualified URLs to configure security")
static Map<String, Policy> getPluginPermissions() throws Exception {
    List<URL> pluginPolicies = Collections.list(BootstrapForTesting.class.getClassLoader().getResources(PluginInfo.ES_PLUGIN_POLICY));
    if (pluginPolicies.isEmpty()) {
        return Collections.emptyMap();
    }
    // compute classpath minus obvious places, all other jars will get the permission.
    Set<URL> codebases = new HashSet<>(Arrays.asList(parseClassPathWithSymlinks()));
    Set<URL> excluded = new HashSet<>(Arrays.asList(
        // es core
        Bootstrap.class.getProtectionDomain().getCodeSource().getLocation(),
        // es test framework
        BootstrapForTesting.class.getProtectionDomain().getCodeSource().getLocation(),
        // lucene test framework
        LuceneTestCase.class.getProtectionDomain().getCodeSource().getLocation(),
        // randomized runner
        RandomizedRunner.class.getProtectionDomain().getCodeSource().getLocation(),
        // junit library
        Assert.class.getProtectionDomain().getCodeSource().getLocation()));
    codebases.removeAll(excluded);
    // parse each policy file, with codebase substitution from the classpath
    final List<Policy> policies = new ArrayList<>();
    for (URL policyFile : pluginPolicies) {
        policies.add(Security.readPolicy(policyFile, codebases.toArray(new URL[codebases.size()])));
    }
    // consult each policy file for those codebases
    Map<String, Policy> map = new HashMap<>();
    for (URL url : codebases) {
        map.put(url.getFile(), new Policy() {
            @Override
            public boolean implies(ProtectionDomain domain, Permission permission) {
                // implements union
                for (Policy p : policies) {
                    if (p.implies(domain, permission)) {
                        return true;
                    }
                }
                return false;
            }
        });
    }
    return Collections.unmodifiableMap(map);
}
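The anonymous Policy above implements a union over the parsed plugin policies: a permission is granted if any of them grants it. As a standalone illustration of that idea only, the same logic can be written as a named class; UnionPolicy and its delegates field below are illustrative names, not part of the Elasticsearch code base.

import java.security.Permission;
import java.security.Policy;
import java.security.ProtectionDomain;
import java.util.List;

// Minimal sketch of the "union policy" idea used above: grant a permission if
// any delegate policy grants it. Names are illustrative, not Elasticsearch code.
class UnionPolicy extends Policy {
    private final List<Policy> delegates;

    UnionPolicy(List<Policy> delegates) {
        this.delegates = delegates;
    }

    @Override
    public boolean implies(ProtectionDomain domain, Permission permission) {
        for (Policy p : delegates) {
            if (p.implies(domain, permission)) {
                return true;
            }
        }
        return false;
    }
}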
Use of org.elasticsearch.common.SuppressForbidden in project elasticsearch by elastic.
The class InstallPluginCommand, method downloadZipAndChecksum.
/** Downloads a zip from the url, as well as a SHA1 checksum, and checks the checksum. */
@SuppressForbidden(reason = "We use openStream to download plugins")
private Path downloadZipAndChecksum(Terminal terminal, String urlString, Path tmpDir) throws Exception {
    Path zip = downloadZip(terminal, urlString, tmpDir);
    pathsToDeleteOnShutdown.add(zip);
    URL checksumUrl = new URL(urlString + ".sha1");
    final String expectedChecksum;
    try (InputStream in = checksumUrl.openStream()) {
        BufferedReader checksumReader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
        expectedChecksum = checksumReader.readLine();
        if (checksumReader.readLine() != null) {
            throw new UserException(ExitCodes.IO_ERROR, "Invalid checksum file at " + checksumUrl);
        }
    }
    byte[] zipbytes = Files.readAllBytes(zip);
    String gotChecksum = MessageDigests.toHexString(MessageDigests.sha1().digest(zipbytes));
    if (expectedChecksum.equals(gotChecksum) == false) {
        throw new UserException(ExitCodes.IO_ERROR, "SHA1 mismatch, expected " + expectedChecksum + " but got " + gotChecksum);
    }
    return zip;
}
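MessageDigests is an Elasticsearch helper; for readers without it on hand, the same SHA-1 comparison can be sketched with only the JDK's MessageDigest. The method name verifySha1 is a hypothetical helper, not part of InstallPluginCommand.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Stand-alone sketch of the checksum check above using only JDK classes.
static void verifySha1(Path zip, String expectedChecksum) throws IOException, NoSuchAlgorithmException {
    byte[] zipBytes = Files.readAllBytes(zip);
    byte[] digest = MessageDigest.getInstance("SHA-1").digest(zipBytes);
    // hex-encode the digest so it can be compared to the contents of the .sha1 file
    StringBuilder hex = new StringBuilder();
    for (byte b : digest) {
        hex.append(String.format("%02x", b));
    }
    if (expectedChecksum.equals(hex.toString()) == false) {
        throw new IllegalStateException("SHA1 mismatch, expected " + expectedChecksum + " but got " + hex);
    }
}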
Use of org.elasticsearch.common.SuppressForbidden in project elasticsearch by elastic.
The class InstallPluginCommand, method downloadZip.
/** Downloads a zip from the url, into a temp file under the given temp dir. */
@SuppressForbidden(reason = "We use getInputStream to download plugins")
private Path downloadZip(Terminal terminal, String urlString, Path tmpDir) throws IOException {
    terminal.println(VERBOSE, "Retrieving zip from " + urlString);
    URL url = new URL(urlString);
    Path zip = Files.createTempFile(tmpDir, null, ".zip");
    URLConnection urlConnection = url.openConnection();
    urlConnection.addRequestProperty("User-Agent", "elasticsearch-plugin-installer");
    int contentLength = urlConnection.getContentLength();
    try (InputStream in = new TerminalProgressInputStream(urlConnection.getInputStream(), contentLength, terminal)) {
        // must overwrite since creating the temp file above actually created the file
        Files.copy(in, zip, StandardCopyOption.REPLACE_EXISTING);
    }
    return zip;
}
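Stripped of the terminal progress reporting, the download step reduces to the sketch below; downloadToTempFile and the User-Agent string are illustrative, not the installer's actual names.

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

// Reduced sketch of the download above without the progress wrapper.
static Path downloadToTempFile(String urlString, Path tmpDir) throws IOException {
    URL url = new URL(urlString);
    Path target = Files.createTempFile(tmpDir, null, ".zip");
    URLConnection connection = url.openConnection();
    connection.addRequestProperty("User-Agent", "example-downloader");
    try (InputStream in = connection.getInputStream()) {
        // REPLACE_EXISTING because createTempFile already created an empty file at the target path
        Files.copy(in, target, StandardCopyOption.REPLACE_EXISTING);
    }
    return target;
}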
Use of org.elasticsearch.common.SuppressForbidden in project elasticsearch by elastic.
The class LocalCheckpointTracker, method updateCheckpoint.
/**
* Moves the checkpoint to the last consecutively processed sequence number. This method assumes that the sequence number following the
* current checkpoint is processed.
*/
@SuppressForbidden(reason = "Object#notifyAll")
private void updateCheckpoint() {
    assert Thread.holdsLock(this);
    assert checkpoint < firstProcessedSeqNo + bitArraysSize - 1 :
        "checkpoint should be below the end of the first bit set (o.w. current bit set is completed and shouldn't be there)";
    assert getBitSetForSeqNo(checkpoint + 1) == processedSeqNo.getFirst() :
        "checkpoint + 1 doesn't point to the first bit set (o.w. current bit set is completed and shouldn't be there)";
    assert getBitSetForSeqNo(checkpoint + 1).get(seqNoToBitSetOffset(checkpoint + 1)) :
        "updateCheckpoint is called but the bit following the checkpoint is not set";
    try {
        // keep it simple for now, get the checkpoint one by one; in the future we can optimize and read words
        FixedBitSet current = processedSeqNo.getFirst();
        do {
            checkpoint++;
            // on the last bit of the current bit set, we can clean it.
            if (checkpoint == firstProcessedSeqNo + bitArraysSize - 1) {
                processedSeqNo.removeFirst();
                firstProcessedSeqNo += bitArraysSize;
                assert checkpoint - firstProcessedSeqNo < bitArraysSize;
                current = processedSeqNo.peekFirst();
            }
        } while (current != null && current.get(seqNoToBitSetOffset(checkpoint + 1)));
    } finally {
        // notifies waiters in waitForOpsToComplete
        this.notifyAll();
    }
}
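The notifyAll() in the finally block wakes threads blocked in waitForOpsToComplete. The waiting side of that handshake typically looks like the sketch below, re-checking its condition in a loop after every wakeup; this is an illustrative shape of the pattern, not the exact Elasticsearch implementation.

// Illustrative counterpart to the notifyAll() above. Waiters loop on the
// condition because notifyAll() wakes every waiter, whether or not the
// checkpoint it cares about has been reached. Names are illustrative.
class CheckpointWaiterSketch {
    private long checkpoint = -1;

    synchronized void advanceTo(long newCheckpoint) {
        checkpoint = newCheckpoint;
        notifyAll(); // same role as the notifyAll() in updateCheckpoint
    }

    synchronized void waitForCheckpointAtLeast(long seqNo) throws InterruptedException {
        while (checkpoint < seqNo) {
            wait(); // releases the monitor until another thread advances the checkpoint
        }
    }
}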
Use of org.elasticsearch.common.SuppressForbidden in project elasticsearch by elastic.
The class RemoteClusterConnectionTests, method testSlowNodeCanBeCanceled.
@SuppressForbidden(reason = "calls getLocalHost here but it's fine in this case")
public void testSlowNodeCanBeCanceled() throws IOException, InterruptedException {
    try (ServerSocket socket = new MockServerSocket()) {
        socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1);
        socket.setReuseAddress(true);
        DiscoveryNode seedNode = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(), socket.getLocalPort()),
            emptyMap(), emptySet(), Version.CURRENT);
        CountDownLatch acceptedLatch = new CountDownLatch(1);
        CountDownLatch closeRemote = new CountDownLatch(1);
        Thread t = new Thread() {
            @Override
            public void run() {
                try (Socket accept = socket.accept()) {
                    acceptedLatch.countDown();
                    closeRemote.await();
                } catch (IOException e) {
                    // that's fine we might close
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        };
        t.start();
        try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
            service.start();
            service.acceptIncomingRequests();
            CountDownLatch listenerCalled = new CountDownLatch(1);
            AtomicReference<Exception> exceptionReference = new AtomicReference<>();
            try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
                Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) {
                ActionListener<Void> listener = ActionListener.wrap(x -> {
                    listenerCalled.countDown();
                    fail("expected exception");
                }, x -> {
                    exceptionReference.set(x);
                    listenerCalled.countDown();
                });
                connection.updateSeedNodes(Arrays.asList(seedNode), listener);
                acceptedLatch.await();
                // now close it, this should trigger an interrupt on the socket and we can move on
                connection.close();
                assertTrue(connection.assertNoRunningConnections());
            }
            closeRemote.countDown();
            listenerCalled.await();
            assertNotNull(exceptionReference.get());
            expectThrows(CancellableThreads.ExecutionCancelledException.class, () -> {
                throw exceptionReference.get();
            });
        }
    }
}
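The accept thread in this test is a small reusable pattern: one latch signals that the connection has been accepted, another holds the connection open until the test releases it. A self-contained sketch of just that pattern follows; singleAcceptThread and its parameter names are illustrative, not part of the test class.

import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.concurrent.CountDownLatch;

// Illustrative helper mirroring the accept thread above.
static Thread singleAcceptThread(ServerSocket socket, CountDownLatch accepted, CountDownLatch release) {
    Thread t = new Thread(() -> {
        try (Socket accept = socket.accept()) {
            accepted.countDown(); // connection established, the test may proceed
            release.await();      // hold the connection open until the test releases it
        } catch (IOException e) {
            // closing the server socket from the test unblocks accept() here
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    });
    t.start();
    return t;
}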