Example 96 with Entity

use of com.google.datastore.v1.Entity in project DataflowTemplates by GoogleCloudPlatform.

the class BigQueryConvertersTest method testAvroToEntityAllFieldTypes.

/**
 * Tests that {@link BigQueryConverters.AvroToEntity} creates an Entity with all field types.
 */
@Test
public void testAvroToEntityAllFieldTypes() throws Exception {
    // Create test data
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName(idField).setType("STRING"));
    fields.add(new TableFieldSchema().setName(shortStringField).setType("STRING"));
    fields.add(new TableFieldSchema().setName(longStringField).setType("STRING"));
    fields.add(new TableFieldSchema().setName(integerField).setType("INTEGER"));
    fields.add(new TableFieldSchema().setName(int64Field).setType("INT64"));
    fields.add(new TableFieldSchema().setName(floatField).setType("FLOAT"));
    fields.add(new TableFieldSchema().setName(float64Field).setType("FLOAT64"));
    fields.add(new TableFieldSchema().setName(booleanField).setType("BOOLEAN"));
    fields.add(new TableFieldSchema().setName(boolField).setType("BOOL"));
    fields.add(new TableFieldSchema().setName(validTimestampField).setType("TIMESTAMP"));
    fields.add(new TableFieldSchema().setName(dateField).setType("DATE"));
    fields.add(new TableFieldSchema().setName(timeField).setType("TIME"));
    fields.add(new TableFieldSchema().setName(dateTimeField).setType("DATETIME"));
    fields.add(new TableFieldSchema().setName(nullField).setType("STRING"));
    TableSchema bqSchema = new TableSchema().setFields(fields);
    Schema avroSchema =
        new Schema.Parser()
            .parse(
                String.format(
                    avroSchemaTemplate,
                    new StringBuilder()
                        .append(String.format(avroFieldTemplate, idField, "string", idFieldDesc))
                        .append(",")
                        .append(generateShortStringField())
                        .append(",")
                        .append(generateLongStringField())
                        .append(",")
                        .append(String.format(avroFieldTemplate, integerField, "int", integerFieldDesc))
                        .append(",")
                        .append(String.format(avroFieldTemplate, int64Field, "int", int64FieldDesc))
                        .append(",")
                        .append(String.format(avroFieldTemplate, floatField, "float", floatFieldDesc))
                        .append(",")
                        .append(String.format(avroFieldTemplate, float64Field, "float", float64FieldDesc))
                        .append(",")
                        .append(String.format(avroFieldTemplate, booleanField, "boolean", booleanFieldDesc))
                        .append(",")
                        .append(String.format(avroFieldTemplate, boolField, "boolean", boolFieldDesc))
                        .append(",")
                        .append(String.format(avroFieldTemplate, validTimestampField, "long", validTimestampFieldDesc))
                        .append(",")
                        .append(String.format(avroFieldTemplate, dateField, "string", dateFieldDesc))
                        .append(",")
                        .append(String.format(avroFieldTemplate, timeField, "string", timeFieldDesc))
                        .append(",")
                        .append(String.format(avroFieldTemplate, dateTimeField, "string", dateTimeFieldDesc))
                        .append(",")
                        .append(String.format(avroFieldTemplate, nullField, "null", nullFieldDesc))
                        .toString()));
    GenericRecordBuilder builder = new GenericRecordBuilder(avroSchema);
    builder.set(idField, idFieldValueStr);
    builder.set(shortStringField, shortStringFieldValue);
    builder.set(longStringField, longStringFieldValue);
    builder.set(integerField, integerFieldValue);
    builder.set(int64Field, int64FieldValue);
    builder.set(floatField, floatFieldValue);
    builder.set(float64Field, float64FieldValue);
    builder.set(booleanField, booleanFieldValue);
    builder.set(boolField, boolFieldValue);
    builder.set(validTimestampField, validTimestampFieldValueMicros);
    builder.set(dateField, dateFieldValue);
    builder.set(timeField, timeFieldValue);
    builder.set(dateTimeField, dateTimeFieldValue);
    builder.set(nullField, null);
    Record record = builder.build();
    SchemaAndRecord inputBqData = new SchemaAndRecord(record, bqSchema);
    // Run the test
    Entity outputEntity = converter.apply(inputBqData);
    Map<String, Value> properties = outputEntity.getPropertiesMap();
    // Assess results
    assertTrue(outputEntity.hasKey());
    assertEquals(idFieldValueStr, outputEntity.getKey().getPath(0).getName());
    validateMetadata(outputEntity);
    assertTrue(outputEntity.containsProperties(shortStringField));
    assertEquals(shortStringFieldValue, properties.get(shortStringField).getStringValue());
    assertFalse(properties.get(shortStringField).getExcludeFromIndexes());
    assertTrue(outputEntity.containsProperties(longStringField));
    assertEquals(longStringFieldValue, properties.get(longStringField).getStringValue());
    assertTrue(properties.get(longStringField).getExcludeFromIndexes());
    assertTrue(outputEntity.containsProperties(integerField));
    assertEquals(integerFieldValue, properties.get(integerField).getIntegerValue());
    assertTrue(outputEntity.containsProperties(int64Field));
    assertEquals(int64FieldValue, properties.get(int64Field).getIntegerValue());
    assertTrue(outputEntity.containsProperties(floatField));
    assertEquals(floatFieldValue, properties.get(floatField).getDoubleValue(), 0.001);
    assertTrue(outputEntity.containsProperties(float64Field));
    assertEquals(float64FieldValue, properties.get(float64Field).getDoubleValue(), 0.001);
    assertTrue(outputEntity.containsProperties(booleanField));
    assertEquals(booleanFieldValue, properties.get(booleanField).getBooleanValue());
    assertTrue(outputEntity.containsProperties(boolField));
    assertEquals(boolFieldValue, properties.get(boolField).getBooleanValue());
    assertTrue(outputEntity.containsProperties(validTimestampField));
    assertEquals(Timestamps.fromMillis(validTimestampFieldValueMillis), properties.get(validTimestampField).getTimestampValue());
    assertTrue(outputEntity.containsProperties(dateField));
    assertEquals(dateFieldValue, properties.get(dateField).getStringValue());
    assertTrue(outputEntity.containsProperties(timeField));
    assertEquals(timeFieldValue, properties.get(timeField).getStringValue());
    assertTrue(outputEntity.containsProperties(dateTimeField));
    assertEquals(dateTimeFieldValue, properties.get(dateTimeField).getStringValue());
    assertTrue(outputEntity.containsProperties(nullField));
    assertEquals(NullValue.NULL_VALUE, properties.get(nullField).getNullValue());
}
Also used : Entity(com.google.datastore.v1.Entity) AvroToEntity(com.google.cloud.teleport.templates.common.BigQueryConverters.AvroToEntity) TableSchema(com.google.api.services.bigquery.model.TableSchema) Schema(org.apache.avro.Schema) TableFieldSchema(com.google.api.services.bigquery.model.TableFieldSchema) ArrayList(java.util.ArrayList) SchemaAndRecord(org.apache.beam.sdk.io.gcp.bigquery.SchemaAndRecord) CoreMatchers.notNullValue(org.hamcrest.CoreMatchers.notNullValue) NullValue(com.google.protobuf.NullValue) Value(com.google.datastore.v1.Value) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) Record(org.apache.avro.generic.GenericData.Record) Test(org.junit.Test)
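
The tests in this listing apply a shared `converter` fixture that is not included in the excerpt. Below is a minimal sketch of how such a fixture could be assembled, assuming the builder methods visible in Example 98 (setEntityKind, setUniqueNameColumn) plus a setNamespace setter, and assuming the setters accept ValueProvider<String> arguments; the values are placeholders, not the project's actual test constants.

import com.google.cloud.teleport.templates.common.BigQueryConverters.AvroToEntity;
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider;

// Placeholder fixture values; the real test class defines its own constants.
private final ValueProvider<String> entityKind = StaticValueProvider.of("TestEntity");
private final ValueProvider<String> uniqueNameColumn = StaticValueProvider.of("id");
private final ValueProvider<String> namespace = StaticValueProvider.of("bq-to-datastore-test");

// Assumed construction of the converter that the tests invoke via converter.apply(inputBqData).
private final AvroToEntity converter =
    AvroToEntity.newBuilder()
        .setEntityKind(entityKind)
        .setUniqueNameColumn(uniqueNameColumn)
        .setNamespace(namespace)
        .build();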

Example 97 with Entity

use of com.google.datastore.v1.Entity in project DataflowTemplates by GoogleCloudPlatform.

the class BigQueryConvertersTest method testAvroToEntityNoIdColumn.

/**
 * Tests that {@link BigQueryConverters.AvroToEntity} creates an Entity without a key when the
 * unique name column is missing.
 */
@Test
public void testAvroToEntityNoIdColumn() throws Exception {
    // Create test data
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName(shortStringField).setType("STRING"));
    TableSchema bqSchema = new TableSchema().setFields(fields);
    Record record = generateSingleFieldAvroRecord(shortStringField, "string", shortStringFieldDesc, shortStringFieldValue);
    SchemaAndRecord inputBqData = new SchemaAndRecord(record, bqSchema);
    // Run the test
    Entity outputEntity = converter.apply(inputBqData);
    assertFalse(outputEntity.hasKey());
}
Also used : Entity(com.google.datastore.v1.Entity) AvroToEntity(com.google.cloud.teleport.templates.common.BigQueryConverters.AvroToEntity) TableSchema(com.google.api.services.bigquery.model.TableSchema) ArrayList(java.util.ArrayList) Record(org.apache.avro.generic.GenericData.Record) SchemaAndRecord(org.apache.beam.sdk.io.gcp.bigquery.SchemaAndRecord) TableFieldSchema(com.google.api.services.bigquery.model.TableFieldSchema) Test(org.junit.Test)
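
Example 97 relies on a `generateSingleFieldAvroRecord` helper that is not shown in this excerpt. A plausible sketch of that helper, assuming it reuses the same `avroSchemaTemplate` and `avroFieldTemplate` format strings as Example 96 (the project's actual helper may differ):

// Hypothetical helper: builds a single-field Avro Record from a field name, Avro type,
// doc string, and value, using the schema and field templates seen in Example 96.
private Record generateSingleFieldAvroRecord(String name, String type, String description, Object value) {
    Schema avroSchema =
        new Schema.Parser()
            .parse(String.format(avroSchemaTemplate, String.format(avroFieldTemplate, name, type, description)));
    GenericRecordBuilder builder = new GenericRecordBuilder(avroSchema);
    builder.set(name, value);
    return builder.build();
}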

Example 98 with Entity

use of com.google.datastore.v1.Entity in project DataflowTemplates by GoogleCloudPlatform.

the class BigQueryConvertersTest method testAvroToEntityDefaultNamespace.

/**
 * Tests that {@link BigQueryConverters.AvroToEntity} creates an Entity with a default namespace
 * when the namespace is not specified.
 */
@Test
public void testAvroToEntityDefaultNamespace() throws Exception {
    // Create test data
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName(idField).setType("STRING"));
    fields.add(new TableFieldSchema().setName(shortStringField).setType("STRING"));
    TableSchema bqSchema = new TableSchema().setFields(fields);
    Schema avroSchema =
        new Schema.Parser()
            .parse(
                String.format(
                    avroSchemaTemplate,
                    new StringBuilder()
                        .append(String.format(avroFieldTemplate, idField, "int", idFieldDesc))
                        .append(",")
                        .append(generateShortStringField())
                        .toString()));
    GenericRecordBuilder builder = new GenericRecordBuilder(avroSchema);
    builder.set(idField, 1);
    builder.set(shortStringField, shortStringFieldValue);
    Record record = builder.build();
    SchemaAndRecord inputBqData = new SchemaAndRecord(record, bqSchema);
    // Run the test
    AvroToEntity noNamespaceConverter = AvroToEntity.newBuilder().setEntityKind(entityKind).setUniqueNameColumn(uniqueNameColumn).build();
    Entity outputEntity = noNamespaceConverter.apply(inputBqData);
    // Assess results
    assertTrue(outputEntity.hasKey());
    assertEquals("", outputEntity.getKey().getPartitionId().getNamespaceId());
}
Also used : Entity(com.google.datastore.v1.Entity) AvroToEntity(com.google.cloud.teleport.templates.common.BigQueryConverters.AvroToEntity) TableSchema(com.google.api.services.bigquery.model.TableSchema) Schema(org.apache.avro.Schema) TableFieldSchema(com.google.api.services.bigquery.model.TableFieldSchema) ArrayList(java.util.ArrayList) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) Record(org.apache.avro.generic.GenericData.Record) SchemaAndRecord(org.apache.beam.sdk.io.gcp.bigquery.SchemaAndRecord) Test(org.junit.Test)
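
For context on the namespace assertion above: the key produced by the conversion is a com.google.datastore.v1.Key proto, and the namespace lives on its PartitionId. The snippet below is an illustration only (not code from the template) of how such a key looks when a namespace is set; when it is not, getNamespaceId() returns the proto default of an empty string, which is exactly what this test asserts.

import com.google.datastore.v1.Key;
import com.google.datastore.v1.PartitionId;

// Illustration only: a key for kind "TestEntity" named "some-id" in namespace "my-namespace".
Key namespacedKey =
    Key.newBuilder()
        .setPartitionId(PartitionId.newBuilder().setNamespaceId("my-namespace"))
        .addPath(Key.PathElement.newBuilder().setKind("TestEntity").setName("some-id"))
        .build();

// With no namespace set on the PartitionId, namespacedKey.getPartitionId().getNamespaceId()
// would be "", matching the assertEquals("", ...) check in testAvroToEntityDefaultNamespace.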

Example 99 with Entity

use of com.google.datastore.v1.Entity in project DataflowTemplates by GoogleCloudPlatform.

the class BigQueryConvertersTest method testAvroToEntityNullIdColumn.

/**
 * Tests that {@link BigQueryConverters.AvroToEntity} creates an Entity without a key when the
 * unique name column is null.
 */
@Test
public void testAvroToEntityNullIdColumn() throws Exception {
    // Create test data
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName(idField).setType("STRING"));
    fields.add(new TableFieldSchema().setName(shortStringField).setType("STRING"));
    TableSchema bqSchema = new TableSchema().setFields(fields);
    Schema avroSchema =
        new Schema.Parser()
            .parse(
                String.format(
                    avroSchemaTemplate,
                    new StringBuilder()
                        .append(String.format(avroFieldTemplate, idField, "null", idFieldDesc))
                        .append(",")
                        .append(generateShortStringField())
                        .toString()));
    GenericRecordBuilder builder = new GenericRecordBuilder(avroSchema);
    builder.set(idField, null);
    builder.set(shortStringField, shortStringFieldValue);
    Record record = builder.build();
    SchemaAndRecord inputBqData = new SchemaAndRecord(record, bqSchema);
    // Run the test
    Entity outputEntity = converter.apply(inputBqData);
    assertFalse(outputEntity.hasKey());
}
Also used : Entity(com.google.datastore.v1.Entity) AvroToEntity(com.google.cloud.teleport.templates.common.BigQueryConverters.AvroToEntity) TableSchema(com.google.api.services.bigquery.model.TableSchema) Schema(org.apache.avro.Schema) TableFieldSchema(com.google.api.services.bigquery.model.TableFieldSchema) ArrayList(java.util.ArrayList) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) Record(org.apache.avro.generic.GenericData.Record) SchemaAndRecord(org.apache.beam.sdk.io.gcp.bigquery.SchemaAndRecord) Test(org.junit.Test)

Example 100 with Entity

use of com.google.datastore.v1.Entity in project DataflowTemplates by GoogleCloudPlatform.

the class BigQueryToDatastore method main.

/**
 * Runs a pipeline which reads data from BigQuery and writes it to Datastore.
 *
 * @param args arguments to the pipeline
 */
public static void main(String[] args) {
    BigQueryToDatastoreOptions options = PipelineOptionsFactory.fromArgs(args).withValidation().as(BigQueryToDatastoreOptions.class);
    Pipeline pipeline = Pipeline.create(options);
    // Read from BigQuery and convert the data to Datastore Entity format, with two possible
    // outcomes (success or failure) depending on whether a valid Entity key can be built from the BQ data
    TupleTag<Entity> successTag = new TupleTag<Entity>() {
    };
    TupleTag<String> failureTag = new TupleTag<String>("failures") {
    };
    PCollectionTuple entities =
        pipeline.apply(
            BigQueryToEntity.newBuilder()
                .setQuery(options.getReadQuery())
                .setUniqueNameColumn(options.getReadIdColumn())
                .setEntityKind(options.getDatastoreWriteEntityKind())
                .setNamespace(options.getDatastoreWriteNamespace())
                .setSuccessTag(successTag)
                .setFailureTag(failureTag)
                .build());
    // Write to GCS any data that could not be converted to valid Datastore entities
    entities.apply(LogErrors.newBuilder().setErrorWritePath(options.getInvalidOutputPath()).setErrorTag(failureTag).build());
    // Write valid entities to Datastore
    TupleTag<String> errorTag = new TupleTag<String>("errors") {
    };
    entities
        .get(successTag)
        .apply(
            WriteEntities.newBuilder()
                .setProjectId(options.getDatastoreWriteProjectId())
                .setHintNumWorkers(options.getDatastoreHintNumWorkers())
                .setErrorTag(errorTag)
                .build())
        .apply(
            LogErrors.newBuilder()
                .setErrorWritePath(options.getErrorWritePath())
                .setErrorTag(errorTag)
                .build());
    pipeline.run();
}
Also used : BigQueryToEntity(com.google.cloud.teleport.templates.common.BigQueryConverters.BigQueryToEntity) Entity(com.google.datastore.v1.Entity) TupleTag(org.apache.beam.sdk.values.TupleTag) PCollectionTuple(org.apache.beam.sdk.values.PCollectionTuple) Pipeline(org.apache.beam.sdk.Pipeline)
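
The success/failure TupleTag wiring above relies on Beam's multi-output ParDo pattern. The sketch below is not the template's actual BigQueryToEntity implementation; it is a generic illustration of that pattern, where `rows` is assumed to be a PCollection<SchemaAndRecord> and `convert` is a hypothetical conversion helper.

// Generic multi-output pattern: each element is routed to exactly one of the two tags.
PCollectionTuple outputs =
    rows.apply(
        "ConvertToEntity",
        ParDo.of(
                new DoFn<SchemaAndRecord, Entity>() {
                  @ProcessElement
                  public void processElement(ProcessContext c) {
                    try {
                      // convert(...) is a hypothetical helper standing in for the real conversion.
                      Entity entity = convert(c.element());
                      // Main output, collected under successTag.
                      c.output(entity);
                    } catch (Exception e) {
                      // Side output, collected under failureTag for later logging to GCS.
                      c.output(failureTag, c.element().toString());
                    }
                  }
                })
            .withOutputTags(successTag, TupleTagList.of(failureTag)));

// outputs.get(successTag) then feeds WriteEntities and outputs.get(failureTag) feeds LogErrors,
// mirroring the wiring in main above.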

Aggregations

LivingEntity (org.bukkit.entity.LivingEntity): 95
SkinnableEntity (net.citizensnpcs.npc.skin.SkinnableEntity): 88
net.minecraft.world.entity (net.minecraft.world.entity): 32
org.bukkit.entity (org.bukkit.entity): 32
Entity (com.google.datastore.v1.Entity): 31
Location (org.bukkit.Location): 30
ArrayList (java.util.ArrayList): 21
Test (org.junit.Test): 21
Entity (net.minecraft.server.v1_12_R1.Entity): 20
Entity (net.minecraft.server.v1_16_R3.Entity): 19
NPCHolder (net.citizensnpcs.npc.ai.NPCHolder): 18
Entity (net.minecraft.server.v1_8_R3.Entity): 17
Entity (net.minecraft.server.v1_11_R1.Entity): 16
Mob (net.minecraft.world.entity.Mob): 16
CraftEntity (org.bukkit.craftbukkit.v1_16_R3.entity.CraftEntity): 16
org.bukkit.craftbukkit.v1_17_R1.entity (org.bukkit.craftbukkit.v1_17_R1.entity): 16
Player (org.bukkit.entity.Player): 15
PathEntity (net.minecraft.server.v1_16_R3.PathEntity): 13
Entity (net.minecraft.server.v1_15_R1.Entity): 12
Entity (net.minecraft.server.v1_13_R2.Entity): 11