Use of org.apache.solr.common.util.SuppressForbidden in the lucene-solr project by Apache.
From the class AbstractDataImportHandlerTestCase, method createFile:
@SuppressForbidden(reason = "Needs currentTimeMillis to set modified time for a file")
public static File createFile(File tmpdir, String name, byte[] content, boolean changeModifiedTime) throws IOException {
  File file = new File(tmpdir.getAbsolutePath() + File.separator + name);
  file.deleteOnExit();
  // try-with-resources closes the stream even if the write throws
  try (FileOutputStream f = new FileOutputStream(file)) {
    f.write(content);
  }
  if (changeModifiedTime) {
    // backdate the file by one hour (3600000 ms)
    file.setLastModified(System.currentTimeMillis() - 3600000);
  }
  return file;
}
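A usage sketch: when changeModifiedTime is true, the helper backdates the file by an hour, which lets a test simulate a configuration file that predates the last import. The directory, file name, and payload below are illustrative, not taken from the original test (assumes java.io.File and java.nio.charset.StandardCharsets are imported):

// Hypothetical caller; the temp directory, file name, and XML payload are made up.
File tmpDir = new File(System.getProperty("java.io.tmpdir"), "dih-test");
tmpDir.mkdirs();
byte[] xml = "<dataConfig/>".getBytes(StandardCharsets.UTF_8);
// true => modified time is set one hour in the past
File conf = AbstractDataImportHandlerTestCase.createFile(tmpDir, "data-config.xml", xml, true);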
Use of org.apache.solr.common.util.SuppressForbidden in the lucene-solr project by Apache.
From the class PKIAuthenticationPlugin, method doAuthenticate:
@SuppressForbidden(reason = "Needs currentTimeMillis to compare against time in header")
@Override
public boolean doAuthenticate(ServletRequest request, ServletResponse response, FilterChain filterChain) throws Exception {
  String requestURI = ((HttpServletRequest) request).getRequestURI();
  if (requestURI.endsWith(PATH)) {
    filterChain.doFilter(request, response);
    return true;
  }
  long receivedTime = System.currentTimeMillis();
  String header = ((HttpServletRequest) request).getHeader(HEADER);
  if (header == null) {
    // this should never happen
    log.error("No SolrAuth header present");
    filterChain.doFilter(request, response);
    return true;
  }
  List<String> authInfo = StrUtils.splitWS(header, false);
  if (authInfo.size() < 2) {
    log.error("Invalid SolrAuth header {}", header);
    filterChain.doFilter(request, response);
    return true;
  }
  String nodeName = authInfo.get(0);
  String cipher = authInfo.get(1);
  PKIHeaderData decipher = decipherHeader(nodeName, cipher);
  if (decipher == null) {
    log.error("Could not decipher header {}. No principal set", header);
    filterChain.doFilter(request, response);
    return true;
  }
  // reject headers older than the validity window to limit replay
  if ((receivedTime - decipher.timestamp) > MAX_VALIDITY) {
    log.error("Invalid key request timestamp: {}, received timestamp: {}, TTL: {}", decipher.timestamp, receivedTime, MAX_VALIDITY);
    filterChain.doFilter(request, response);
    return true;
  }
  // "$" denotes a request originating from a Solr node itself (super user)
  final Principal principal = "$".equals(decipher.userName) ? SU : new BasicUserPrincipal(decipher.userName);
  filterChain.doFilter(getWrapper((HttpServletRequest) request, principal), response);
  return true;
}
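The core of the authentication decision above is a freshness window on the deciphered timestamp. A minimal sketch of that check in isolation, assuming MAX_VALIDITY is the window in milliseconds (its actual value is defined elsewhere in PKIAuthenticationPlugin); the helper name is illustrative, not from the plugin:

// Illustrative helper: true when a header timestamp is within the window.
static boolean withinValidityWindow(long headerTimestampMillis, long receivedTimeMillis, long maxValidityMillis) {
  // A stale (or replayed) header fails this check; doAuthenticate then
  // passes the request down the filter chain without attaching a principal.
  return (receivedTimeMillis - headerTimestampMillis) <= maxValidityMillis;
}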
Use of org.apache.solr.common.util.SuppressForbidden in the lucene-solr project by Apache.
From the class PKIAuthenticationPlugin, method setHeader:
@SuppressForbidden(reason = "Needs currentTimeMillis to set current time in header")
void setHeader(HttpRequest httpRequest) {
  SolrRequestInfo reqInfo = getRequestInfo();
  String usr;
  if (reqInfo != null) {
    Principal principal = reqInfo.getReq().getUserPrincipal();
    if (principal == null) {
      // no principal on the request, so we don't need to set a header
      return;
    } else {
      usr = principal.getName();
    }
  } else {
    if (!isSolrThread()) {
      // not a Solr-originated thread, so no need to add any header
      return;
    }
    // this request originated from Solr itself;
    // "$" is the special name denoting the node itself as the user
    usr = "$";
  }
  // payload is "<user> <currentTimeMillis>", encrypted with the node's key
  String s = usr + " " + System.currentTimeMillis();
  byte[] payload = s.getBytes(UTF_8);
  byte[] payloadCipher = keyPair.encrypt(ByteBuffer.wrap(payload));
  String base64Cipher = Base64.byteArrayToBase64(payloadCipher);
  httpRequest.setHeader(HEADER, myNodeName + " " + base64Cipher);
}
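setHeader frames the payload as "<user> <millis>" before encryption, and doAuthenticate above reverses this after deciphering. A minimal sketch of just the plaintext framing, with the crypto step omitted (the real code encrypts with the node's key pair and Base64-encodes the result; these helper names are illustrative):

// Illustrative only: build and parse the plaintext payload format.
static String framePayload(String user, long nowMillis) {
  return user + " " + nowMillis;
}

static long parseTimestamp(String payload) {
  // split on the last space so a user name containing spaces still parses
  int lastSpace = payload.lastIndexOf(' ');
  return Long.parseLong(payload.substring(lastSpace + 1));
}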
Use of org.apache.solr.common.util.SuppressForbidden in the lucene-solr project by Apache.
From the class IndexFetcher, method logReplicationTimeAndConfFiles:
/**
 * Helper method to record the last replication's details so that we can show them on the statistics page across
 * restarts.
 *
 * @throws IOException on IO error
 */
@SuppressForbidden(reason = "Need currentTimeMillis for debugging/stats")
private void logReplicationTimeAndConfFiles(Collection<Map<String, Object>> modifiedConfFiles, boolean successfulInstall) throws IOException {
  List<String> confFiles = new ArrayList<>();
  if (modifiedConfFiles != null && !modifiedConfFiles.isEmpty()) {
    for (Map<String, Object> map1 : modifiedConfFiles) {
      confFiles.add((String) map1.get(NAME));
    }
  }
  Properties props = replicationHandler.loadReplicationProperties();
  long replicationTime = System.currentTimeMillis();
  long replicationTimeTaken = getReplicationTimeElapsed();
  Directory dir = null;
  try {
    dir = solrCore.getDirectoryFactory().get(solrCore.getDataDir(), DirContext.META_DATA, solrCore.getSolrConfig().indexConfig.lockType);
    int indexCount = 1, confFilesCount = 1;
    if (props.containsKey(TIMES_INDEX_REPLICATED)) {
      indexCount = Integer.parseInt(props.getProperty(TIMES_INDEX_REPLICATED)) + 1;
    }
    StringBuilder sb = readToStringBuilder(replicationTime, props.getProperty(INDEX_REPLICATED_AT_LIST));
    props.setProperty(INDEX_REPLICATED_AT_LIST, sb.toString());
    props.setProperty(INDEX_REPLICATED_AT, String.valueOf(replicationTime));
    props.setProperty(PREVIOUS_CYCLE_TIME_TAKEN, String.valueOf(replicationTimeTaken));
    props.setProperty(TIMES_INDEX_REPLICATED, String.valueOf(indexCount));
    if (modifiedConfFiles != null && !modifiedConfFiles.isEmpty()) {
      props.setProperty(CONF_FILES_REPLICATED, confFiles.toString());
      props.setProperty(CONF_FILES_REPLICATED_AT, String.valueOf(replicationTime));
      if (props.containsKey(TIMES_CONFIG_REPLICATED)) {
        confFilesCount = Integer.parseInt(props.getProperty(TIMES_CONFIG_REPLICATED)) + 1;
      }
      props.setProperty(TIMES_CONFIG_REPLICATED, String.valueOf(confFilesCount));
    }
    props.setProperty(LAST_CYCLE_BYTES_DOWNLOADED, String.valueOf(getTotalBytesDownloaded()));
    if (!successfulInstall) {
      int numFailures = 1;
      if (props.containsKey(TIMES_FAILED)) {
        numFailures = Integer.parseInt(props.getProperty(TIMES_FAILED)) + 1;
      }
      props.setProperty(TIMES_FAILED, String.valueOf(numFailures));
      props.setProperty(REPLICATION_FAILED_AT, String.valueOf(replicationTime));
      sb = readToStringBuilder(replicationTime, props.getProperty(REPLICATION_FAILED_AT_LIST));
      props.setProperty(REPLICATION_FAILED_AT_LIST, sb.toString());
    }
    // write to a temp file first, sync it, then rename over the live file,
    // so a crash never leaves a partially written properties file
    String tmpFileName = REPLICATION_PROPERTIES + "." + System.nanoTime();
    final IndexOutput out = dir.createOutput(tmpFileName, DirectoryFactory.IOCONTEXT_NO_CACHE);
    Writer outFile = new OutputStreamWriter(new PropertiesOutputStream(out), StandardCharsets.UTF_8);
    try {
      props.store(outFile, "Replication details");
      dir.sync(Collections.singleton(tmpFileName));
    } finally {
      IOUtils.closeQuietly(outFile);
    }
    solrCore.getDirectoryFactory().renameWithOverwrite(dir, tmpFileName, REPLICATION_PROPERTIES);
  } catch (Exception e) {
    LOG.warn("Exception while updating statistics", e);
  } finally {
    if (dir != null) {
      solrCore.getDirectoryFactory().release(dir);
    }
  }
}
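readToStringBuilder (defined elsewhere in IndexFetcher) maintains the comma-separated timestamp history behind INDEX_REPLICATED_AT_LIST and REPLICATION_FAILED_AT_LIST. A plausible sketch of that behavior, under the assumption that the newest timestamp is prepended and the list is trimmed to a fixed maximum so the properties file stays bounded; the helper name and the cap are illustrative guesses, not confirmed from the source:

// Illustrative reconstruction: prepend the newest timestamp to a CSV history
// and keep at most `max` entries (assumes java.util.Arrays is imported).
static StringBuilder appendToCsvHistory(long newestMillis, String existingCsv, int max) {
  List<String> entries = new ArrayList<>();
  entries.add(String.valueOf(newestMillis));
  if (existingCsv != null && !existingCsv.isEmpty()) {
    entries.addAll(Arrays.asList(existingCsv.split(",")));
  }
  StringBuilder sb = new StringBuilder();
  for (int i = 0; i < entries.size() && i < max; i++) {
    if (i > 0) sb.append(',');
    sb.append(entries.get(i));
  }
  return sb;
}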
Use of org.apache.solr.common.util.SuppressForbidden in the lucene-solr project by Apache.
From the class TestZKPropertiesWriter, method testZKPropertiesWriter:
@SuppressForbidden(reason = "Needs currentTimeMillis to construct date stamps")
@Test
public void testZKPropertiesWriter() throws Exception {
  // test using ZooKeeper
  assertTrue("Not using ZooKeeper", h.getCoreContainer().isZooKeeperAware());
  // on a really slow/busy machine, wait to make sure we have a leader before starting
  h.getCoreContainer().getZkController().getZkStateReader().getLeaderUrl("collection1", "shard1", 30000);
  assertQ("test query on empty index", request("qlkciyopsbgzyvkylsjhchghjrdf"), "//result[@numFound='0']");
  SimpleDateFormat errMsgFormat = new SimpleDateFormat(dateFormat, Locale.ROOT);
  delQ("*:*");
  commit();
  SimpleDateFormat df = new SimpleDateFormat(dateFormat, Locale.ROOT);
  Date oneSecondAgo = new Date(System.currentTimeMillis() - 1000);
  Map<String, String> init = new HashMap<>();
  init.put("dateFormat", dateFormat);
  ZKPropertiesWriter spw = new ZKPropertiesWriter();
  spw.init(new DataImporter(h.getCore(), "dataimport"), init);
  Map<String, Object> props = new HashMap<>();
  props.put("SomeDates.last_index_time", oneSecondAgo);
  props.put("last_index_time", oneSecondAgo);
  spw.persist(props);
  List<Map<String, Object>> rows = new ArrayList<>();
  rows.add(createMap("id", "1", "year_s", "2013"));
  MockDataSource.setIterator("select " + df.format(oneSecondAgo) + " from dummy", rows.iterator());
  h.query("/dataimport", lrf.makeRequest("command", "full-import", "dataConfig", generateConfig(), "clean", "true", "commit", "true", "synchronous", "true", "indent", "true"));
  props = spw.readIndexerProperties();
  Date entityDate = df.parse((String) props.get("SomeDates.last_index_time"));
  Date docDate = df.parse((String) props.get("last_index_time"));
  Assert.assertTrue("This date: " + errMsgFormat.format(oneSecondAgo) + " should be prior to the document date: " + errMsgFormat.format(docDate), docDate.getTime() - oneSecondAgo.getTime() > 0);
  Assert.assertTrue("This date: " + errMsgFormat.format(oneSecondAgo) + " should be prior to the entity date: " + errMsgFormat.format(entityDate), entityDate.getTime() - oneSecondAgo.getTime() > 0);
  assertQ(request("*:*"), "//*[@numFound='1']", "//doc/str[@name=\"year_s\"]=\"2013\"");
}
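All five snippets follow the same pattern: System.currentTimeMillis() is banned by the forbidden-apis check, and SuppressForbidden documents each deliberate exception with a reason string. For context, a minimal sketch of what such an annotation looks like; the retention policy and targets shown are assumptions, not copied from the Solr source:

// Sketch of a forbidden-apis suppression annotation. The real one lives in
// org.apache.solr.common.util; its exact @Retention/@Target values may differ.
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@Retention(RetentionPolicy.CLASS) // must survive into class files so a bytecode-level checker can see it
@Target({ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE})
public @interface SuppressForbidden {
  // every suppression must state why the forbidden call is justified
  String reason();
}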