use of com.palantir.atlasdb.table.description.TableDefinition in project atlasdb by palantir.
the class CheckAndSetSchema method generateSchema.
private static Schema generateSchema() {
    Schema schema = new Schema(
            CheckAndSetSchema.class.getSimpleName(),
            CheckAndSetSchema.class.getPackage().getName() + ".generated",
            Namespace.DEFAULT_NAMESPACE,
            OptionalType.JAVA8);
    schema.addTableDefinition(CAS_TABLE, new TableDefinition() {
        {
            allSafeForLoggingByDefault();
            rowName();
            rowComponent("id", ValueType.FIXED_LONG);
            columns();
            column(VALUES_COLUMN, "v", ValueType.FIXED_LONG);
        }
    });
    return schema;
}
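The same anonymous-subclass DSL scales to compound row keys and multiple named columns. Below is a hedged sketch of a hypothetical schema; the table, column, and package names are made up, but every DSL call already appears in the snippets on this page.

// Hypothetical example only: a table keyed on (userId, eventId) with two named columns.
private static Schema generateUserEventSchema() {
    Schema schema = new Schema(
            "UserEvent",
            "com.example.generated", // made-up package for illustration
            Namespace.DEFAULT_NAMESPACE,
            OptionalType.JAVA8);
    schema.addTableDefinition("userEvents", new TableDefinition() {
        {
            allSafeForLoggingByDefault();
            rowName();
            rowComponent("userId", ValueType.FIXED_LONG);
            rowComponent("eventId", ValueType.VAR_LONG);
            columns();
            column("payload", "p", ValueType.BLOB);
            column("createdAt", "c", ValueType.VAR_LONG);
        }
    });
    schema.validate();
    return schema;
}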
use of com.palantir.atlasdb.table.description.TableDefinition in project atlasdb by palantir.
the class TableFactoryRenderer method getNullSharedTriggers.
private TypeSpec getNullSharedTriggers(TypeName sharedTriggersInterfaceType) {
    TypeSpec.Builder nullSharedTriggersClassBuilder = TypeSpec.classBuilder("NullSharedTriggers")
            .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT, Modifier.STATIC)
            .addSuperinterface(sharedTriggersInterfaceType);
    for (Entry<String, TableDefinition> entry : definitions.entrySet()) {
        String name = entry.getKey();
        TableDefinition tableDefinition = entry.getValue();
        String tableName = getTableName(name);
        ClassName tableType = ClassName.get(packageName, tableName);
        TypeName rowType = tableType.nestedClass(name + "Row");
        TypeName columnType = tableType.nestedClass(name + "ColumnValue");
        if (!tableDefinition.toTableMetadata().getColumns().hasDynamicColumns()) {
            columnType = ParameterizedTypeName.get(
                    ClassName.get(packageName, tableName + "." + name + "NamedColumnValue"),
                    WildcardTypeName.subtypeOf(Object.class));
        }
        MethodSpec putMethod = MethodSpec.methodBuilder("put" + name)
                .addAnnotation(Override.class)
                .addModifiers(Modifier.PUBLIC)
                .addParameter(
                        ParameterizedTypeName.get(
                                ClassName.get(Multimap.class),
                                rowType,
                                WildcardTypeName.subtypeOf(columnType)),
                        "newRows")
                .addComment("do nothing")
                .build();
        nullSharedTriggersClassBuilder.addMethod(putMethod);
    }
    return nullSharedTriggersClassBuilder.build();
}
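For orientation, the JavaPoet builder above emits one no-op put method per table. Roughly the shape of the generated class for a single hypothetical table "Foo" with named (non-dynamic) columns; the type names below are illustrative, derived from the name + "Row" and name + "NamedColumnValue" conventions in the snippet, not copied from real generated output.

// Rough shape of the generated output for a hypothetical table "Foo" (illustrative only).
public abstract static class NullSharedTriggers implements SharedTriggers {

    @Override
    public void putFoo(Multimap<FooTable.FooRow, ? extends FooTable.FooNamedColumnValue<?>> newRows) {
        // do nothing
    }
}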
use of com.palantir.atlasdb.table.description.TableDefinition in project atlasdb by palantir.
the class TableFactoryRenderer method of.
public static TableFactoryRenderer of(
        String schemaName,
        String packageName,
        Namespace defaultNamespace,
        Map<String, TableDefinition> definitions) {
    SortedMap<String, TableDefinition> sortedDefinitions = Maps.newTreeMap();
    for (Entry<String, TableDefinition> entry : definitions.entrySet()) {
        sortedDefinitions.put(Renderers.getClassTableName(entry.getKey(), entry.getValue()), entry.getValue());
    }
    ClassName tableFactoryType = ClassName.get(packageName, schemaName + "TableFactory");
    ClassName sharedTriggersType = tableFactoryType.nestedClass("SharedTriggers");
    return new TableFactoryRenderer(
            schemaName,
            packageName,
            defaultNamespace.getName(),
            sortedDefinitions,
            tableFactoryType,
            sharedTriggersType);
}
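Copying the definitions into a TreeMap keyed by Renderers.getClassTableName makes the generated factory deterministic regardless of the input map's iteration order. To make the two ClassName values concrete, here is a small sketch for a hypothetical schema named "CheckAndSet" in a made-up package; both names are illustrative, and the ClassName.get and nestedClass calls are the same JavaPoet APIs used above.

// Illustrative only: resolves the factory and nested triggers types for a schema named "CheckAndSet"
// in the hypothetical package "com.example.generated".
ClassName tableFactoryType = ClassName.get("com.example.generated", "CheckAndSet" + "TableFactory");
// -> com.example.generated.CheckAndSetTableFactory
ClassName sharedTriggersType = tableFactoryType.nestedClass("SharedTriggers");
// -> com.example.generated.CheckAndSetTableFactory.SharedTriggers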
use of com.palantir.atlasdb.table.description.TableDefinition in project atlasdb by palantir.
the class BlobSchema method generateSchema.
private static Schema generateSchema() {
    Schema schema = new Schema(
            BlobSchema.class.getSimpleName(),
            BlobSchema.class.getPackage().getName() + ".generated",
            BLOB_NAMESPACE);
    schema.addStreamStoreDefinition(new StreamStoreDefinitionBuilder("data", "Data", ValueType.VAR_LONG)
            .hashRowComponents()
            .tableNameLogSafety(TableMetadataPersistence.LogSafety.SAFE)
            .build());
    schema.addStreamStoreDefinition(
            new StreamStoreDefinitionBuilder("hotspottyData", "HotspottyData", ValueType.VAR_SIGNED_LONG).build());
    schema.addTableDefinition("auditedData", new TableDefinition() {
        {
            allSafeForLoggingByDefault();
            rowName();
            rowComponent("id", ValueType.FIXED_LONG);
            columns();
            column("data", "d", ValueType.BLOB);
        }
    });
    schema.addCleanupTask("auditedData", () -> (tx, cells) -> {
        log.info("Deleted data items: [{}]", UnsafeArg.of("cells", cells));
        return false;
    });
    schema.validate();
    return schema;
}
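The cleanup task registered above is a supplier of a callback that receives the transaction and the cells being cleaned up and returns a boolean, which this example simply fixes to false. A minimal sketch of registering a similar logging-only task on the hypothetical "userEvents" table from the earlier sketch, reusing only the calls visible in this snippet; the table name and log message are illustrative.

// Hypothetical cleanup task mirroring the shape used above; "userEvents" is a made-up table name.
schema.addCleanupTask("userEvents", () -> (tx, cells) -> {
    log.info("Cleaning up user events: [{}]", UnsafeArg.of("cells", cells));
    return false;
});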
use of com.palantir.atlasdb.table.description.TableDefinition in project atlasdb by palantir.
the class StreamTestSchema method generateSchema.
private static Schema generateSchema() {
    Schema schema = new Schema(
            "StreamTest",
            StreamTest.class.getPackage().getName() + ".generated",
            Namespace.DEFAULT_NAMESPACE,
            OptionalType.JAVA8);
    // stores a mapping from key to streamId
    schema.addTableDefinition("lookup", new TableDefinition() {
        {
            javaTableName("KeyValue");
            rowName();
            rowComponent("key", ValueType.STRING);
            columns();
            column("streamId", "s", ValueType.VAR_LONG);
        }
    });
    // test defaults
    schema.addStreamStoreDefinition(
            new StreamStoreDefinitionBuilder("stream_test", "stream_test", ValueType.VAR_LONG).build());
    // test defaults with hashRowComponents
    schema.addStreamStoreDefinition(
            new StreamStoreDefinitionBuilder("test_hash_components", "test_hash_components", ValueType.VAR_LONG)
                    .hashRowComponents()
                    .build());
    // test all the things!
    schema.addStreamStoreDefinition(
            new StreamStoreDefinitionBuilder("stream_test_with_hash", "stream_test_with_hash", ValueType.VAR_LONG)
                    .inMemoryThreshold(4000)
                    .compressBlocksInDb()
                    .compressStreamInClient()
                    .hashFirstRowComponent()
                    .isAppendHeavyAndReadLight()
                    .build());
    schema.addStreamStoreDefinition(
            new StreamStoreDefinitionBuilder("stream_test_max_mem", "stream_test_max_mem", ValueType.VAR_LONG)
                    .inMemoryThreshold(StreamStoreDefinition.MAX_IN_MEMORY_THRESHOLD)
                    .build());
    return schema;
}
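The generateSchema() methods shown on this page only declare schemas; the tables still have to be created in the backing key-value service before use. A hedged sketch of that step, assuming AtlasDB's Schemas.createTablesAndIndexes helper is available (verify the exact package and signature against the AtlasDB version in use; keyValueService stands in for an already-constructed KeyValueService).

// Assumption: Schemas.createTablesAndIndexes(Schema, KeyValueService) exists in this AtlasDB version.
// keyValueService is a placeholder for an already-constructed KeyValueService.
Schema streamTestSchema = generateSchema();
Schemas.createTablesAndIndexes(streamTestSchema, keyValueService);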