Use of org.apache.gora.util.GoraException in the Apache Gora project — class LuceneStore, method convertDocFieldToAvroUnion.
/**
 * Resolves a two-branch Avro union field from a Lucene document.
 * Exactly one branch of the union must be NULL; the other branch's schema
 * is used to convert the stored document field back to its Avro value.
 *
 * @param fieldSchema the UNION schema with exactly two branches
 * @param field the Avro field being read
 * @param sf the Lucene stored-field name
 * @param doc the Lucene document holding the value
 * @return the converted Avro value
 * @throws IOException if the union is not a (NULL, value) pair of distinct
 *         types, or conversion fails
 */
private Object convertDocFieldToAvroUnion(final Schema fieldSchema, final Schema.Field field, final String sf, final Document doc) throws IOException {
    final Schema firstBranch = fieldSchema.getTypes().get(0);
    final Schema secondBranch = fieldSchema.getTypes().get(1);
    final boolean firstIsNull = firstBranch.getType().equals(Schema.Type.NULL);
    final boolean secondIsNull = secondBranch.getType().equals(Schema.Type.NULL);
    // Valid only when exactly one branch is NULL (both-null unions have equal
    // types; neither-null unions are unsupported) — same check as before,
    // expressed as an XOR.
    if (firstIsNull == secondIsNull) {
        throw new GoraException("LuceneStore only supports Union of two types field.");
    }
    final Schema valueSchema = firstIsNull ? secondBranch : firstBranch;
    return convertToIndexableFieldToAvroField(doc, field, valueSchema, sf);
}
Use of org.apache.gora.util.GoraException in the Apache Gora project — class MongoStore, method execute.
/**
 * Runs the given Gora query against the backing MongoDB collection and
 * returns the matching rows wrapped in a {@code MongoDBResult}.
 *
 * @param query the Gora query to execute
 * @return a result cursor over the matching documents
 * @throws GoraException wrapping any failure while building or executing
 *         the MongoDB query
 */
@Override
public Result<K, T> execute(final Query<K, T> query) throws GoraException {
    try {
        final String[] requestedFields = getFieldsToQuery(query.getFields());
        // Translate the Gora query into a MongoDB selector and projection.
        Bson selector = MongoDBQuery.toDBQuery(query);
        final Bson projection = MongoDBQuery.toProjection(requestedFields, mapping);
        if (query.getFilter() != null) {
            final Optional<Bson> remoteFilter = filterUtil.setFilter(query.getFilter(), this);
            if (remoteFilter.isPresent()) {
                selector = and(selector, remoteFilter.get());
            } else {
                // The filter was fully pushed down to MongoDB, so no
                // client-side (local) filtering is needed.
                query.setLocalFilterEnabled(false);
            }
        }
        final CountOptions countOptions = new CountOptions();
        // Run the find on the collection, honoring any limit for both the
        // cursor and the matching count.
        final FindIterable<Document> results = mongoClientColl.find(selector).projection(projection);
        if (query.getLimit() > 0) {
            results.limit((int) query.getLimit());
            countOptions.limit((int) query.getLimit());
        }
        results.batchSize(100);
        results.noCursorTimeout(true);
        final long size = mongoClientColl.countDocuments(selector, countOptions);
        return new MongoDBResult<>(this, query, results.cursor(), size);
    } catch (Exception e) {
        throw new GoraException(e);
    }
}
Use of org.apache.gora.util.GoraException in the Apache Gora project — class MongoStore, method initialize.
/**
 * Initialize the data store: load the connection parameters from the
 * properties, read the mapping (either inline from the
 * {@code XML_MAPPING_DEFINITION} property or from the mapping file on the
 * classpath) and open the MongoDB client connection.
 *
 * @param keyClass the key class
 * @param pPersistentClass the persistent bean class
 * @param properties the datastore configuration properties
 * @throws GoraException if the mapping cannot be located or parsed, or the
 *         connection cannot be established
 */
public void initialize(final Class<K> keyClass, final Class<T> pPersistentClass, final Properties properties) throws GoraException {
    try {
        LOG.debug("Initializing MongoDB store");
        MongoStoreParameters parameters = MongoStoreParameters.load(properties, getConf());
        super.initialize(keyClass, pPersistentClass, properties);
        filterUtil = new MongoFilterUtil<>(getConf());
        // Load the mapping
        MongoMappingBuilder<K, T> builder = new MongoMappingBuilder<>(this);
        LOG.debug("Initializing Mongo store with mapping {}.", new Object[] { parameters.getMappingFile() });
        InputStream mappingInputStream;
        // If there is a mapping definition in the Properties, use it.
        if (properties.containsKey(XML_MAPPING_DEFINITION)) {
            if (LOG.isTraceEnabled())
                LOG.trace(XML_MAPPING_DEFINITION + " = " + properties.getProperty(XML_MAPPING_DEFINITION));
            mappingInputStream = IOUtils.toInputStream(properties.getProperty(XML_MAPPING_DEFINITION), (Charset) null);
        } else // Otherwise use the mapping file from parameters.
        {
            String mappingFile = parameters.getMappingFile();
            mappingInputStream = getClass().getResourceAsStream(mappingFile);
            if (mappingInputStream == null) {
                // getResourceAsStream returns null when the resource is
                // missing; fail with a clear message instead of letting the
                // parser throw an opaque NullPointerException.
                throw new GoraException("MongoDB mapping file '" + mappingFile + "' was not found on the classpath.");
            }
        }
        builder.fromInputStream(mappingInputStream);
        mapping = builder.build();
        // Prepare MongoDB connection
        mongoClientDB = getDB(parameters);
        mongoClientColl = mongoClientDB.getCollection(mapping.getCollectionName());
        LOG.info("Initialized Mongo store for database {} of {}.", new Object[] { parameters.getDbname(), parameters.getServers() });
    } catch (GoraException e) {
        // Already a GoraException (including the missing-mapping case above):
        // rethrow untouched so the message is preserved.
        throw e;
    } catch (IOException e) {
        LOG.error("Error while initializing MongoDB store", e);
        throw new GoraException(e);
    }
}
Use of org.apache.gora.util.GoraException in the Apache Gora project — class MongoStore, method deleteByQuery.
/**
 * Deletes every document matching the given query.
 *
 * @param query the Gora query selecting the rows to delete
 * @return the number of documents removed
 * @throws GoraException wrapping any failure during the delete
 */
@Override
public long deleteByQuery(final Query<K, T> query) throws GoraException {
    try {
        // Translate the Gora query into a MongoDB selector and delete in bulk.
        final Bson selector = MongoDBQuery.toDBQuery(query);
        final DeleteResult outcome = mongoClientColl.deleteMany(selector);
        return outcome.getDeletedCount();
    } catch (Exception e) {
        throw new GoraException(e);
    }
}
Use of org.apache.gora.util.GoraException in the Apache Gora project — class AvroSerializer, method put.
/**
 * Persists a {@code PersistentBase} bean into Cassandra via a CQL INSERT.
 * Only dirty fields (plus the inlined defined partition key field) are
 * written; beans that are not dirty, or that do not extend
 * {@code PersistentBase}, are skipped with a log message. Frozen UDT
 * columns are converted field-by-field into a Cassandra {@code UDTValue}.
 *
 * @param key the row key; decomposed into key columns/values by
 *        {@code AvroCassandraUtils.processKeys}
 * @param persistent the persistent bean to store
 * @throws GoraException wrapping any failure during conversion or execution
 */
@Override
public void put(Object key, Persistent persistent) throws GoraException {
    try {
        if (persistent instanceof PersistentBase) {
            if (persistent.isDirty()) {
                PersistentBase persistentBase = (PersistentBase) persistent;
                // Parallel lists of column names and values for the INSERT,
                // seeded with the key columns derived from `key`.
                ArrayList<String> fields = new ArrayList<>();
                ArrayList<Object> values = new ArrayList<>();
                AvroCassandraUtils.processKeys(mapping, key, fields, values);
                for (Schema.Field f : persistentBase.getSchema().getFields()) {
                    String fieldName = f.name();
                    Field field = mapping.getFieldFromFieldName(fieldName);
                    if (field == null) {
                        // Avro field has no column in the mapping: skip it.
                        LOG.debug("Ignoring {} adding field, {} field can't find in {} mapping", new Object[] { fieldName, fieldName, persistentClass });
                        continue;
                    }
                    // Write the field when it is dirty, or when it is the
                    // inlined defined partition key (which must always be
                    // present in the INSERT regardless of dirtiness).
                    if (persistent.isDirty(f.pos()) || mapping.getInlinedDefinedPartitionKey().equals(mapping.getFieldFromFieldName(fieldName))) {
                        Object value = persistentBase.get(f.pos());
                        String fieldType = field.getType();
                        if (fieldType.contains("frozen")) {
                            // Frozen UDT column: extract the inner type name
                            // from "frozen<...>" and look up its UserType in
                            // the keyspace metadata.
                            fieldType = fieldType.substring(fieldType.indexOf("<") + 1, fieldType.indexOf(">"));
                            UserType userType = client.getSession().getCluster().getMetadata().getKeyspace(mapping.getKeySpace().getName()).getUserType(fieldType);
                            UDTValue udtValue = userType.newValue();
                            Schema udtSchema = f.schema();
                            if (udtSchema.getType().equals(Schema.Type.UNION)) {
                                // For a union (e.g. ["null", record]) use the
                                // RECORD branch as the UDT's record schema.
                                for (Schema schema : udtSchema.getTypes()) {
                                    if (schema.getType().equals(Schema.Type.RECORD)) {
                                        udtSchema = schema;
                                        break;
                                    }
                                }
                            }
                            // Copy each Avro record field into the UDTValue,
                            // dispatching on the field's Avro type.
                            PersistentBase udtObjectBase = (PersistentBase) value;
                            for (Schema.Field udtField : udtSchema.getFields()) {
                                Object udtFieldValue = AvroCassandraUtils.getFieldValueFromAvroBean(udtField.schema(), udtField.schema().getType(), udtObjectBase.get(udtField.name()), field);
                                if (udtField.schema().getType().equals(Schema.Type.MAP)) {
                                    udtValue.setMap(udtField.name(), (Map) udtFieldValue);
                                } else if (udtField.schema().getType().equals(Schema.Type.ARRAY)) {
                                    udtValue.setList(udtField.name(), (List) udtFieldValue);
                                } else {
                                    // NOTE(review): udtFieldValue.getClass()
                                    // would NPE on a null UDT member value —
                                    // confirm nulls cannot reach this branch.
                                    udtValue.set(udtField.name(), udtFieldValue, (Class) udtFieldValue.getClass());
                                }
                            }
                            value = udtValue;
                        } else {
                            // Plain column: convert the Avro value to its
                            // Cassandra-compatible form.
                            value = AvroCassandraUtils.getFieldValueFromAvroBean(f.schema(), f.schema().getType(), value, field);
                        }
                        values.add(value);
                        fields.add(fieldName);
                    }
                }
                // Build and execute the INSERT with the collected columns,
                // applying the configured write consistency level if any.
                String cqlQuery = CassandraQueryFactory.getInsertDataQuery(mapping, fields);
                SimpleStatement statement = new SimpleStatement(cqlQuery, values.toArray());
                if (writeConsistencyLevel != null) {
                    statement.setConsistencyLevel(ConsistencyLevel.valueOf(writeConsistencyLevel));
                }
                client.getSession().execute(statement);
            } else {
                LOG.info("Ignored putting persistent bean {} in the store as it is neither " + "new, neither dirty.", new Object[] { persistent });
            }
        } else {
            LOG.error("{} Persistent bean isn't extended by {} .", new Object[] { this.persistentClass, PersistentBase.class });
        }
    } catch (Exception e) {
        throw new GoraException(e);
    }
}
Aggregations