Use of com.google.cloud.automl.v1beta1.Row in project molgenis-emx2 by molgenis.
The class Emx1Entity, method toRow:
public Row toRow() {
  Row r = new Row();
  r.set("name", getName());
  r.set("extends", getExtends());
  r.set("label", getLabel());
  r.set("package", getPackageName());
  return r;
}
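A minimal, self-contained sketch of the same pattern, using a plain Map instead of the project's Row class; the EntityRowSketch class and the sample values are illustrative, not taken from molgenis-emx2:

import java.util.LinkedHashMap;
import java.util.Map;

class EntityRowSketch {
  // Copy entity properties into a generic key-value "row", as toRow does.
  static Map<String, Object> toRow(String name, String extendsName, String label, String pkg) {
    Map<String, Object> r = new LinkedHashMap<>();
    r.put("name", name);
    r.put("extends", extendsName);
    r.put("label", label);
    r.put("package", pkg);
    return r;
  }

  public static void main(String[] args) {
    System.out.println(toRow("Patient", null, "Patient", "base"));
  }
}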
Use of com.google.cloud.automl.v1beta1.Row in project molgenis-emx2 by molgenis.
The class Emx2Members, method inputRoles:
public static int inputRoles(TableStore store, Schema schema) {
  int count = 0;
  if (store.containsTable(ROLES_TABLE)) {
    for (Row row : store.readTable(ROLES_TABLE)) {
      count++;
      schema.addMember(row.getString(USER), row.getString(ROLE));
    }
  }
  return count;
}
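A hedged, standalone sketch of the same import loop, counting rows while registering user/role pairs; the RoleRow record and the sample data are assumptions made for illustration, not molgenis-emx2 code:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class RoleImportSketch {
  // Illustrative stand-in for the project's Row: a user/role record.
  record RoleRow(String user, String role) {}

  // Mirrors the structure of inputRoles: iterate, register each member, count.
  static int inputRoles(List<RoleRow> rolesTable, Map<String, String> schemaMembers) {
    int count = 0;
    for (RoleRow row : rolesTable) {
      count++;
      schemaMembers.put(row.user(), row.role());
    }
    return count;
  }

  public static void main(String[] args) {
    Map<String, String> members = new LinkedHashMap<>();
    int n = inputRoles(List.of(new RoleRow("alice", "Editor"), new RoleRow("bob", "Viewer")), members);
    System.out.println(n + " members imported: " + members);
  }
}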
Use of com.google.cloud.automl.v1beta1.Row in project molgenis-emx2 by molgenis.
The class Mapper, method map:
public static Row[] map(Object... beans) throws InvocationTargetException, IllegalAccessException {
  ArrayList<Row> rows = new ArrayList<>();
  for (Object b : beans) {
    Class<?> c = b.getClass();
    Method[] methods = c.getDeclaredMethods();
    Map<String, Object> values = new LinkedHashMap<>();
    for (Method m : methods) {
      if (m.getName().startsWith("get") && m.getParameterCount() == 0) {
        values.put(m.getName().substring(3), m.invoke(b));
      }
    }
    rows.add(new Row(values));
  }
  return rows.toArray(new Row[rows.size()]);
}
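A self-contained sketch of the reflection technique Mapper.map relies on: collecting the values of zero-argument getX() methods into a map keyed by property name. The Person bean is made up for the example, and getDeclaredMethods gives no ordering guarantee, so the key order may vary:

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.LinkedHashMap;
import java.util.Map;

class BeanToMapSketch {
  // Illustrative bean; the class and its getters are assumptions for this sketch.
  static class Person {
    public String getName() { return "Ada"; }
    public int getAge() { return 36; }
  }

  // Same reflection pattern as Mapper.map: invoke every parameterless getter.
  static Map<String, Object> toValues(Object bean) throws InvocationTargetException, IllegalAccessException {
    Map<String, Object> values = new LinkedHashMap<>();
    for (Method m : bean.getClass().getDeclaredMethods()) {
      if (m.getName().startsWith("get") && m.getParameterCount() == 0) {
        values.put(m.getName().substring(3), m.invoke(bean));
      }
    }
    return values;
  }

  public static void main(String[] args) throws Exception {
    System.out.println(toValues(new Person())); // e.g. {Name=Ada, Age=36}
  }
}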
Use of com.google.cloud.automl.v1beta1.Row in project jaxdb by jaxdb.
The class SqlJaxSBLoader, method sqlx2sql:
static void sqlx2sql(final DBVendor vendor, final $Database database, final File sqlOutputFile) throws IOException {
  sqlOutputFile.getParentFile().mkdirs();
  final ArrayList<Row> rows = new ArrayList<>();
  final RowIterator iterator = new RowIterator(database);
  final Compiler compiler = Compiler.getCompiler(vendor);
  final TableToColumnToIncrement tableToColumnToIncrement = new TableToColumnToIncrement();
  while (iterator.hasNext()) {
    loadRow(rows, iterator.next(), vendor.getDialect(), compiler, tableToColumnToIncrement);
  }
  rows.sort(null);
  final Iterator<Row> rowIterator = rows.iterator();
  try (final OutputStreamWriter out = new FileWriter(sqlOutputFile)) {
    for (int i = 0; rowIterator.hasNext(); ++i) {
      if (i > 0)
        out.write('\n');
      out.append(rowIterator.next().toString()).append(';');
    }
    if (tableToColumnToIncrement.size() > 0) {
      for (final Map.Entry<String, Map<String, Integer>> entry : tableToColumnToIncrement.entrySet()) {
        for (final Map.Entry<String, Integer> columnToIncrement : entry.getValue().entrySet()) {
          compiler.sequenceReset(null, out, entry.getKey(), columnToIncrement.getKey(), columnToIncrement.getValue() + 1);
        }
      }
    }
  } catch (final SQLException e) {
    throw new RuntimeException(e);
  }
}
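A hedged sketch of the output step of sqlx2sql in isolation: sort the statements, then write each one terminated by ';' with a newline between statements but not after the last. The SqlDumpSketch class and the sample INSERT strings are assumptions, not jaxdb code:

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class SqlDumpSketch {
  static void writeStatements(List<String> statements, File outFile) throws IOException {
    // Sort for a deterministic dump, as sqlx2sql does with rows.sort(null).
    List<String> sorted = new ArrayList<>(statements);
    Collections.sort(sorted);
    try (Writer out = new FileWriter(outFile)) {
      for (int i = 0; i < sorted.size(); ++i) {
        if (i > 0)
          out.write('\n');
        out.append(sorted.get(i)).append(';');
      }
    }
  }

  public static void main(String[] args) throws IOException {
    writeStatements(List.of("INSERT INTO t (id) VALUES (2)", "INSERT INTO t (id) VALUES (1)"), new File("out.sql"));
  }
}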
Use of com.google.cloud.automl.v1beta1.Row in project DataflowTemplates by GoogleCloudPlatform.
The class BigtableToAvroTest, method applyBigtableToAvroFn:
@Test
public void applyBigtableToAvroFn() throws Exception {
  Row bigtableRow1 = createBigtableRow("row1");
  bigtableRow1 = upsertBigtableCell(bigtableRow1, "family1", "column1", 1, "value1");
  bigtableRow1 = upsertBigtableCell(bigtableRow1, "family1", "column1", 2, "value2");
  bigtableRow1 = upsertBigtableCell(bigtableRow1, "family1", "column2", 1, "value3");
  bigtableRow1 = upsertBigtableCell(bigtableRow1, "family2", "column1", 1, "value4");
  Row bigtableRow2 = createBigtableRow("row2");
  bigtableRow2 = upsertBigtableCell(bigtableRow2, "family2", "column2", 1, "value2");
  final List<Row> bigtableRows = ImmutableList.of(bigtableRow1, bigtableRow2);
  BigtableRow avroRow1 = createAvroRow("row1");
  addAvroCell(avroRow1, "family1", "column1", 1, "value1");
  // Expect a new cell due to a different timestamp of "2".
  addAvroCell(avroRow1, "family1", "column1", 2, "value2");
  // Expect a new cell due to a different column of "column2".
  addAvroCell(avroRow1, "family1", "column2", 1, "value3");
  // Expect a new cell due to a different family of "family2".
  addAvroCell(avroRow1, "family2", "column1", 1, "value4");
  BigtableRow avroRow2 = createAvroRow("row2");
  addAvroCell(avroRow2, "family2", "column2", 1, "value2");
  final List<BigtableRow> expectedAvroRows = ImmutableList.of(avroRow1, avroRow2);
  PCollection<BigtableRow> avroRows =
      pipeline
          .apply("Create", Create.of(bigtableRows))
          .apply("Transform to Avro", MapElements.via(new BigtableToAvroFn()));
  PAssert.that(avroRows).containsInAnyOrder(expectedAvroRows);
  pipeline.run();
}
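A self-contained sketch of the Apache Beam test pattern this test follows: build an input PCollection with Create, map it with MapElements, assert the expected elements with PAssert, and run the TestPipeline. The MapElementsPatternTest class and its uppercasing transform are illustrative, not from DataflowTemplates:

import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.transforms.SimpleFunction;
import org.apache.beam.sdk.values.PCollection;
import org.junit.Rule;
import org.junit.Test;

public class MapElementsPatternTest {
  @Rule public final transient TestPipeline pipeline = TestPipeline.create();

  @Test
  public void mapsInputToExpectedOutput() {
    // Create -> MapElements -> PAssert, the same shape as applyBigtableToAvroFn.
    PCollection<String> output =
        pipeline
            .apply("Create", Create.of("row1", "row2"))
            .apply("Transform", MapElements.via(new SimpleFunction<String, String>() {
              @Override
              public String apply(String key) {
                return key.toUpperCase();
              }
            }));
    PAssert.that(output).containsInAnyOrder("ROW1", "ROW2");
    pipeline.run();
  }
}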