Use of org.apache.cassandra.io.sstable.SSTableWriter in project eiger by wlloyd.
From the class SSTableImport, method importSorted:
public static int importSorted(String jsonFile, ColumnFamily columnFamily, String ssTablePath, IPartitioner<?> partitioner) throws IOException {
    // already imported keys count
    int importedKeys = 0;
    long start = System.currentTimeMillis();
    JsonParser parser = getParser(jsonFile);
    if (keyCountToImport == null) {
        keyCountToImport = 0;
        System.out.println("Counting keys to import, please wait... (NOTE: to skip this use -n <num_keys>)");
        // START_OBJECT
        parser.nextToken();
        while (parser.nextToken() != null) {
            parser.nextToken();
            parser.skipChildren();
            if (parser.getCurrentName() == null)
                continue;
            keyCountToImport++;
        }
    }
    System.out.printf("Importing %s keys...%n", keyCountToImport);
    // renewing parser
    parser = getParser(jsonFile);
    SSTableWriter writer = new SSTableWriter(ssTablePath, keyCountToImport);
    int lineNumber = 1;
    DecoratedKey prevStoredKey = null;
    while (parser.nextToken() != null) {
        String key = parser.getCurrentName();
        if (key != null) {
            String tokenName = parser.nextToken().name();
            if (tokenName.equals("START_ARRAY")) {
                if (columnFamily.getType() == ColumnFamilyType.Super) {
                    throw new RuntimeException("Can't write Standard columns to the Super Column Family.");
                }
                List<?> columns = parser.readValueAs(new TypeReference<List<?>>() {});
                addToStandardCF(columns, columnFamily);
            } else if (tokenName.equals("START_OBJECT")) {
                if (columnFamily.getType() == ColumnFamilyType.Standard) {
                    throw new RuntimeException("Can't write Super columns to the Standard Column Family.");
                }
                Map<?, ?> columns = parser.readValueAs(new TypeReference<Map<?, ?>>() {});
                addToSuperCF(columns, columnFamily);
            } else {
                throw new UnsupportedOperationException("Only Array or Hash allowed as row content.");
            }
            DecoratedKey currentKey = partitioner.decorateKey(hexToBytes(key));
            if (prevStoredKey != null && prevStoredKey.compareTo(currentKey) != -1) {
                System.err.printf("Line %d: Key %s is greater than previous, collection is not sorted properly. Aborting import. You might need to delete SSTables manually.%n", lineNumber, key);
                return -1;
            }
            // saving decorated key
            writer.append(currentKey, columnFamily);
            columnFamily.clear();
            prevStoredKey = currentKey;
            importedKeys++;
            lineNumber++;
            long current = System.currentTimeMillis();
            if (current - start >= 5000) { // 5 secs.
                System.out.printf("Currently imported %d keys.%n", importedKeys);
                start = current;
            }
            if (keyCountToImport == importedKeys)
                break;
        }
    }
    writer.closeAndOpenReader();
    return importedKeys;
}
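A rough invocation sketch of the method above, not taken from eiger: the file paths, the keyspace and column family names (borrowed from the tests below), and the choice of RandomPartitioner are all assumptions, and it presumes the keyspace schema is already loaded so that ColumnFamily.create can resolve it. The keys in the JSON dump must already be in partitioner order, otherwise importSorted aborts as shown above.

// Sketch only: paths and partitioner are assumptions, not part of SSTableImport.
ColumnFamily cf = ColumnFamily.create("Keyspace1", "Standard1");
int imported = SSTableImport.importSorted(
        "/tmp/Standard1.json",                    // hypothetical pre-sorted JSON dump
        cf,
        "/tmp/Keyspace1-Standard1-hc-1-Data.db",  // hypothetical target SSTable path
        new RandomPartitioner());                 // assumed partitioner; must match how the dump was sorted
System.out.printf("Imported %d keys%n", imported);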
Use of org.apache.cassandra.io.sstable.SSTableWriter in project eiger by wlloyd.
From the class SSTableExportTest, method testEnumeratekeys:
@Test
public void testEnumeratekeys() throws IOException {
    File tempSS = tempSSTableFile("Keyspace1", "Standard1");
    ColumnFamily cfamily = ColumnFamily.create("Keyspace1", "Standard1");
    SSTableWriter writer = new SSTableWriter(tempSS.getPath(), 2);
    // Add rowA
    cfamily.addColumn(new QueryPath("Standard1", null, ByteBufferUtil.bytes("colA")), ByteBufferUtil.bytes("valA"), System.currentTimeMillis());
    writer.append(Util.dk("rowA"), cfamily);
    cfamily.clear();
    // Add rowB
    cfamily.addColumn(new QueryPath("Standard1", null, ByteBufferUtil.bytes("colB")), ByteBufferUtil.bytes("valB"), System.currentTimeMillis());
    writer.append(Util.dk("rowB"), cfamily);
    cfamily.clear();
    writer.closeAndOpenReader();
    // Enumerate and verify
    File temp = File.createTempFile("Standard1", ".txt");
    SSTableExport.enumeratekeys(writer.getFilename(), new PrintStream(temp.getPath()));
    FileReader file = new FileReader(temp);
    char[] buf = new char[(int) temp.length()];
    file.read(buf);
    String output = new String(buf);
    String sep = System.getProperty("line.separator");
    assert output.equals(asHex("rowA") + sep + asHex("rowB") + sep) : output;
}
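The same call can be pointed at an existing data file and at stdout to get a quick listing of row keys (one hex key per line, as the assertion above shows). A minimal sketch; the SSTable path is only a placeholder, and the call still declares IOException as in the test:

// Sketch: dump the row keys of an existing SSTable to stdout (path is an assumption).
SSTableExport.enumeratekeys("/var/lib/cassandra/data/Keyspace1/Standard1-hc-1-Data.db", System.out);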
Use of org.apache.cassandra.io.sstable.SSTableWriter in project eiger by wlloyd.
From the class SSTableExportTest, method testEscapingDoubleQuotes:
@Test
public void testEscapingDoubleQuotes() throws IOException {
    File tempSS = tempSSTableFile("Keyspace1", "ValuesWithQuotes");
    ColumnFamily cfamily = ColumnFamily.create("Keyspace1", "ValuesWithQuotes");
    SSTableWriter writer = new SSTableWriter(tempSS.getPath(), 2);
    // Add rowA
    cfamily.addColumn(null, new Column(ByteBufferUtil.bytes("data"), UTF8Type.instance.fromString("{\"foo\":\"bar\"}")));
    writer.append(Util.dk("rowA"), cfamily);
    cfamily.clear();
    SSTableReader reader = writer.closeAndOpenReader();
    // Export to JSON and verify
    File tempJson = File.createTempFile("ValuesWithQuotes", ".json");
    SSTableExport.export(reader, new PrintStream(tempJson.getPath()), new String[0]);
    JSONObject json = (JSONObject) JSONValue.parse(new FileReader(tempJson));
    JSONArray rowA = (JSONArray) json.get(asHex("rowA"));
    JSONArray data = (JSONArray) rowA.get(0);
    assert hexToBytes((String) data.get(0)).equals(ByteBufferUtil.bytes("data"));
    assert data.get(1).equals("{\"foo\":\"bar\"}");
}
Use of org.apache.cassandra.io.sstable.SSTableWriter in project eiger by wlloyd.
From the class SSTableExportTest, method testExportCounterCf:
@Test
public void testExportCounterCf() throws IOException {
    File tempSS = tempSSTableFile("Keyspace1", "Counter1");
    ColumnFamily cfamily = ColumnFamily.create("Keyspace1", "Counter1");
    SSTableWriter writer = new SSTableWriter(tempSS.getPath(), 2);
    // Add rowA
    cfamily.addColumn(null, new CounterColumn(ByteBufferUtil.bytes("colA"), 42, System.currentTimeMillis()));
    writer.append(Util.dk("rowA"), cfamily);
    cfamily.clear();
    SSTableReader reader = writer.closeAndOpenReader();
    // Export to JSON and verify
    File tempJson = File.createTempFile("Counter1", ".json");
    SSTableExport.export(reader, new PrintStream(tempJson.getPath()), new String[0]);
    JSONObject json = (JSONObject) JSONValue.parse(new FileReader(tempJson));
    JSONArray rowA = (JSONArray) json.get(asHex("rowA"));
    JSONArray colA = (JSONArray) rowA.get(0);
    assert hexToBytes((String) colA.get(0)).equals(ByteBufferUtil.bytes("colA"));
    assert ((String) colA.get(3)).equals("c");
    assert (Long) colA.get(4) == Long.MIN_VALUE;
}
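Judging from these assertions, an exported column is a JSON array with the hex name at index 0 and the value at index 1 (presumably a timestamp at index 2), and counter columns carry two extra fields: a "c" marker at index 3 and what appears to be the timestamp of last delete at index 4 (Long.MIN_VALUE here, i.e. never deleted). A small sketch that tells the two apart, reusing the json-simple calls and the asHex helper used in the tests above; the export file path is hypothetical:

// Sketch: distinguish counter columns from regular ones in an exported row (path is an assumption).
JSONObject json = (JSONObject) JSONValue.parse(new FileReader("/tmp/Counter1.json"));
JSONArray row = (JSONArray) json.get(asHex("rowA"));
for (Object exported : row) {
    JSONArray col = (JSONArray) exported;
    boolean isCounter = col.size() > 3 && "c".equals(col.get(3));
    System.out.printf("name=%s counter=%b%n", col.get(0), isCounter);
}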
Use of org.apache.cassandra.io.sstable.SSTableWriter in project eiger by wlloyd.
From the class SSTableImport, method importUnsorted:
private static int importUnsorted(JsonParser parser, ColumnFamily columnFamily, String ssTablePath, IPartitioner<?> partitioner) throws IOException {
    int importedKeys = 0;
    long start = System.currentTimeMillis();
    Map<?, ?> data = parser.readValueAs(new TypeReference<Map<?, ?>>() {});
    keyCountToImport = (keyCountToImport == null) ? data.size() : keyCountToImport;
    SSTableWriter writer = new SSTableWriter(ssTablePath, keyCountToImport);
    System.out.printf("Importing %s keys...%n", keyCountToImport);
    // sort by dk representation, but hold onto the hex version
    SortedMap<DecoratedKey, String> decoratedKeys = new TreeMap<DecoratedKey, String>();
    for (Object keyObject : data.keySet()) {
        String key = (String) keyObject;
        decoratedKeys.put(partitioner.decorateKey(hexToBytes(key)), key);
    }
    for (Map.Entry<DecoratedKey, String> rowKey : decoratedKeys.entrySet()) {
        if (columnFamily.getType() == ColumnFamilyType.Super) {
            addToSuperCF((Map<?, ?>) data.get(rowKey.getValue()), columnFamily);
        } else {
            addToStandardCF((List<?>) data.get(rowKey.getValue()), columnFamily);
        }
        writer.append(rowKey.getKey(), columnFamily);
        columnFamily.clear();
        importedKeys++;
        long current = System.currentTimeMillis();
        if (current - start >= 5000) { // 5 secs.
            System.out.printf("Currently imported %d keys.%n", importedKeys);
            start = current;
        }
        if (keyCountToImport == importedKeys)
            break;
    }
    writer.closeAndOpenReader();
    return importedKeys;
}
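importUnsorted buffers the whole JSON document in memory and re-sorts the row keys by their decorated form in a TreeMap, while importSorted streams rows but aborts on the first out-of-order key. As a rough pre-check for deciding which path a dump needs, one could reuse the same ordering test that importSorted applies; this helper is hypothetical and not part of SSTableImport:

// Hypothetical helper, not in eiger: true if hex row keys are already in partitioner order.
static boolean keysAreSorted(List<String> hexKeys, IPartitioner<?> partitioner) {
    DecoratedKey prev = null;
    for (String hex : hexKeys) {
        DecoratedKey current = partitioner.decorateKey(ByteBufferUtil.hexToBytes(hex));
        if (prev != null && prev.compareTo(current) != -1) // same ordering test importSorted uses
            return false;
        prev = current;
    }
    return true;
}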