Use of org.apache.stanbol.entityhub.servicesapi.mapping.FieldMapper in the Apache Stanbol project: class CacheUtils, method loadBaseMappings.
/**
 * Loads the base mappings from the parsed Yard.
 * @param yard The yard holding the base configuration (MUST NOT be <code>null</code>)
 * @param nsPrefixService if present, '{prefix}:{localname}' configurations are
 * supported when parsing the stored field mappings; may be <code>null</code>
 * @return The baseMappings, or <code>null</code> if the Yard does not contain
 * a base configuration
 * @throws YardException on any Error while getting the Representation holding
 * the Configuration from the Yard.
 * @throws CacheInitialisationException if the configuration is found but not
 * valid.
 * @throws IllegalArgumentException if <code>null</code> is parsed as {@link Yard}
 */
public static FieldMapper loadBaseMappings(Yard yard, NamespacePrefixService nsPrefixService) throws YardException, CacheInitialisationException {
    if (yard == null) {
        throw new IllegalArgumentException("The parsed Yard MUST NOT be NULL!");
    }
    Representation baseConfig = yard.getRepresentation(Cache.BASE_CONFIGURATION_URI);
    if (baseConfig == null) {
        // no base configuration stored in the Yard -> nothing to load
        return null;
    }
    FieldMapper mapper = readFieldConfig(yard, baseConfig, nsPrefixService);
    if (mapper == null) {
        // configuration present but not parseable -> fail cache initialisation
        String msg = "Invalid Base Configuration: Unable to parse FieldMappings from Field " + Cache.FIELD_MAPPING_CONFIG_FIELD;
        log.error(msg);
        if (log.isErrorEnabled()) {
            // building the Representation dump is expensive -> only if enabled
            log.error(ModelUtils.getRepresentationInfo(baseConfig));
        }
        throw new CacheInitialisationException(msg);
    }
    return mapper;
}
Use of org.apache.stanbol.entityhub.servicesapi.mapping.FieldMapper in the Apache Stanbol project: class CacheImpl, method setBaseMappings.
@Override
public void setBaseMappings(FieldMapper fieldMapper) throws YardException {
    // silently ignore the call while the cache is not available
    if (!isAvailable()) {
        return;
    }
    // remember the previous mapper so the in-memory state can be rolled
    // back if persisting the new configuration fails
    final FieldMapper previous = this.baseMapper;
    this.baseMapper = fieldMapper;
    try {
        CacheUtils.storeBaseMappingsConfiguration(yard, this.baseMapper);
    } catch (YardException e) {
        // storage failed -> restore the previous state before rethrowing
        this.baseMapper = previous;
        throw e;
    }
}
Use of org.apache.stanbol.entityhub.servicesapi.mapping.FieldMapper in the Apache Stanbol project: class TrackingDereferencerBase, method dereference.
/**
 * Dereferences the parsed entity URI against the tracked Entityhub service and
 * copies the (possibly filtered) data of the entity to the parsed graph.
 */
@Override
public final boolean dereference(IRI uri, Graph graph, Lock writeLock, DereferenceContext dc) throws DereferenceException {
    T service = getService();
    if (service == null) {
        // NOTE: use the tracked service class directly; calling getClass() on
        // the Class object would report "Class" instead of the service type.
        // Also add the missing space before "service".
        throw new DereferenceException(uri, serviceClass.getSimpleName() + " service is currently not available");
    }
    EntityhubDereferenceContext derefContext = (EntityhubDereferenceContext) dc;
    Representation rep;
    try {
        rep = getRepresentation(service, uri.getUnicodeString(), derefContext.isOfflineMode());
    } catch (EntityhubException e) {
        throw new DereferenceException(uri, e);
    }
    if (rep == null) {
        // entity not found -> nothing dereferenced
        return false;
    }
    // we need the languages as strings
    final Set<String> langs = derefContext.getLanguages();
    final FieldMapper fieldMapper = derefContext.getFieldMapper();
    final Program<Object> ldpathProgram = derefContext.getProgram();
    if (fieldMapper == null && ldpathProgram == null && (langs == null || langs.isEmpty())) {
        // no filters configured -> copy the full representation
        copyAll(uri, rep, graph, writeLock);
    } else {
        // we need to apply some filters while dereferencing
        if (fieldMapper != null || (langs != null && !langs.isEmpty())) {
            // this considers specified fields and included languages
            copyMapped(uri, rep, fieldMapper, langs, graph, writeLock);
        }
        if (ldpathProgram != null) {
            // this executes LDPath statements
            copyLdPath(uri, getRdfBackend(service), ldpathProgram, langs, graph, writeLock);
        }
    }
    return true;
}
Use of org.apache.stanbol.entityhub.servicesapi.mapping.FieldMapper in the Apache Stanbol project: class SolrYardIndexingDestination, method finalise.
/**
 * Finalises the indexing destination: persists the field-mapping
 * configuration, optimises the SolrCore, builds the configured FST models,
 * closes the SolrYard and - if an indexing configuration is present - writes
 * the distribution artifacts (index archive, OSGI configurations, bundle).
 */
@SuppressWarnings("unchecked")
@Override
public void finalise() {
    // write the indexing configuration as base mappings of the SolrYard
    if (indexFieldConfiguration != null) {
        FieldMapper mapper = FieldMappingUtils.createDefaultFieldMapper(indexFieldConfiguration);
        try {
            CacheUtils.storeBaseMappingsConfiguration(solrYard, mapper);
        } catch (YardException e) {
            // best-effort: log and continue finalisation
            log.error("Unable to store FieldMapperConfiguration to the Store!", e);
        }
    }
    log.info(" ... optimize SolrCore");
    try {
        solrYard.optimize();
    } catch (YardException e) {
        // best-effort: an un-optimised archive is still usable
        log.error("Unable to optimize SolrIndex after indexing! IndexArchive will not be optimized ...", e);
    }
    // build the FST models
    if (fstConfigs != null) {
        // (1) FST config initialisation
        log.info(" ... init FST configuration(s)");
        IndexSchema schema = core.getLatestSchema();
        File fstDir = new File(new File(core.getDataDir()), "fst");
        if (!fstDir.isDirectory()) {
            try {
                FileUtils.forceMkdir(fstDir);
            } catch (IOException e) {
                // preserve the cause (was dropped) and fix the missing space in the message
                throw new IllegalStateException("Unable to create Directory " + fstDir.getAbsolutePath() + " for storing the FST models " + "of SolrCore " + core.getName(), e);
            }
        }
        RefCounted<SolrIndexSearcher> searcherRef = core.getSearcher();
        try {
            for (FstConfig fstConfig : fstConfigs) {
                // set the FST directory
                fstConfig.setFstDirectory(fstDir);
                log.info("> FST config {}", fstConfig);
                fstConfig.buildConfig(schema, searcherRef.get().getAtomicReader());
                for (CorpusCreationInfo corpus : fstConfig.getCorpusCreationInfos()) {
                    log.info(" - {}", corpus);
                }
            }
        } finally {
            // always release the searcher reference
            searcherRef.decref();
        }
        // (2) submit one creation task per corpus
        List<Future<?>> fstCreationTasks = new ArrayList<Future<?>>();
        ExecutorService es = Executors.newFixedThreadPool(fstThreads);
        log.info(" ... build FST models ");
        for (FstConfig config : fstConfigs) {
            for (final CorpusCreationInfo corpus : config.getCorpusCreationInfos()) {
                fstCreationTasks.add(es.submit(new CorpusCreationTask(core, corpus)));
            }
        }
        // (3) wait for the completion of the tasks; tasks remaining in the
        // list afterwards are those that failed or were interrupted
        Iterator<Future<?>> taskIt = fstCreationTasks.iterator();
        while (taskIt.hasNext()) {
            Future<?> task = taskIt.next();
            try {
                // wait until ready
                task.get();
                taskIt.remove();
            } catch (ExecutionException e) {
                log.error("Exception while building FST models for SolrCore " + core.getName(), e);
            } catch (InterruptedException e) {
                log.error("Interrupted while building FST models for SolrCore " + core.getName(), e);
                // restore the interrupt status
                Thread.currentThread().interrupt();
            }
        }
        if (!fstCreationTasks.isEmpty()) {
            log.warn("Unable to build {} FST models for SolrCore {}", fstCreationTasks.size(), core.getName());
        } else {
            log.info("All FST models for SolrCore {} build successfully!", core.getName());
        }
    }
    // no FST models to build
    // all Solr specific stuff is now ready
    log.info(" ... close SolrCore");
    solrYard.close();
    // if a indexing config is present we need to create the distribution files
    if (indexingConfig != null) {
        // first check if the distribution folder needs to be created and is valid
        File distFolder = indexingConfig.getDistributionFolder();
        if (!distFolder.exists()) {
            if (!distFolder.mkdirs()) {
                throw new IllegalStateException("Unable to create distribution folder " + distFolder.getAbsolutePath());
            }
        } else if (!distFolder.isDirectory()) {
            // fixed missing whitespace in the message
            throw new IllegalStateException("Distribution folder " + distFolder.getAbsolutePath() + " is not a Directory!");
        }
        // zip the index and copy it over to distribution
        log.info(" ... build Solr index archive");
        if (solrArchive != null) {
            try {
                writeSolrIndexArchive(indexingConfig);
            } catch (IOException e) {
                log.error("Error while creating Solr Archive " + solrArchive.getAbsolutePath() + "! The archive will not be created!", e);
                log.error("As a Workaround you can manually create the Solr Archive " + "by creating a ZIP archive with the contents of the Folder " + solrIndexLocation + "!");
            }
        }
        if (solrArchiveRef != null) {
            try {
                writeSolrIndexReference(indexingConfig);
            } catch (IOException e) {
                log.error("Error while creating Solr Archive Reference " + solrArchiveRef.getAbsolutePath() + "! The file will not be created!", e);
            }
        }
        // finally create the Osgi Configuration
        try {
            OsgiConfigurationUtil.writeSiteConfiguration(indexingConfig);
        } catch (IOException e) {
            log.error("Unable to write OSGI configuration file for the referenced site", e);
        }
        try {
            OsgiConfigurationUtil.writeCacheConfiguration(indexingConfig);
        } catch (IOException e) {
            log.error("Unable to write OSGI configuration file for the Cache", e);
        }
        // create the SolrYard configuration
        try {
            writeSolrYardConfiguration(indexingConfig);
        } catch (IOException e) {
            log.error("Unable to write OSGI configuration file for the SolrYard", e);
        }
        // create the bundle
        OsgiConfigurationUtil.createBundle(indexingConfig);
    }
}
Use of org.apache.stanbol.entityhub.servicesapi.mapping.FieldMapper in the Apache Stanbol project: class EntityhubDereferenceContext, method initFieldMappings.
/**
 * Initialises the {@link FieldMapper} for this dereference context. Context
 * specific field configurations (parsed from the EnhancementProperties) take
 * precedence over the field mappings of the engine configuration. If
 * languages are configured a language filter mapping is appended.
 */
protected void initFieldMappings(List<String> fields) throws DereferenceConfigurationException {
    TrackingDereferencerBase<?> dereferencer = getEntityhubDereferencer();
    FieldMapper mapper;
    if (fields == null || fields.isEmpty()) {
        // no context specific fields -> fall back to the engine configuration
        FieldMapper engineMapper = dereferencer.getFieldMapper();
        mapper = engineMapper != null ? engineMapper.clone() : null;
    } else {
        log.debug("parse FieldMappings from EnhancementProperties");
        List<FieldMapping> parsed = new ArrayList<FieldMapping>(fields.size());
        for (String fieldConfig : fields) {
            FieldMapping parsedMapping = FieldMappingUtils.parseFieldMapping(fieldConfig, dereferencer.getNsPrefixService());
            if (parsedMapping != null) {
                log.debug(" - add FieldMapping {}", parsedMapping);
                parsed.add(parsedMapping);
            } else if (fieldConfig != null && !fieldConfig.isEmpty()) {
                log.warn(" - unable to parse FieldMapping '{}'", fieldConfig);
            }
        }
        if (parsed.isEmpty()) {
            // no valid mapping parsed
            log.debug(" > no valid mapping parsed ... will dereference all fields");
            mapper = null;
        } else {
            log.debug(" > apply {} valid mappings", parsed.size());
            mapper = new DefaultFieldMapperImpl(ValueConverterFactory.getDefaultInstance());
            for (FieldMapping parsedMapping : parsed) {
                mapper.addMapping(parsedMapping);
            }
        }
    }
    // TODO: consider merging the context mappings with those of the engine
    // configuration (dereferencer.getFieldMapper()). Not sure if desirable.
    // If a fieldMapper is present and languages are set we add a language
    // filter to the fieldMapper. If the fieldMapper is null languages are
    // filtered separately.
    Collection<String> languages = getLanguages();
    if (languages != null && !languages.isEmpty()) {
        if (mapper == null) {
            // create a fieldMapper just for filtering languages
            mapper = new DefaultFieldMapperImpl(ValueConverterFactory.getDefaultInstance());
        }
        mapper.addMapping(new FieldMapping(new TextConstraint((String) null, languages.toArray(new String[languages.size()]))));
    }
    // set the field
    this.fieldMapper = mapper;
}
Aggregations