Search in sources :

Example 76 with ByteString

use of com.google.protobuf.ByteString in project java-docs-samples by GoogleCloudPlatform.

The class SynthesizeFile, method synthesizeSsmlFile.

// [END tts_synthesize_text_file]
// [START tts_synthesize_ssml_file]
/**
 * Demonstrates using the Text to Speech client to synthesize a text file or ssml file.
 *
 * @param ssmlFile the ssml document to be synthesized. (e.g., hello.ssml)
 * @throws Exception on TextToSpeechClient Errors.
 */
public static void synthesizeSsmlFile(String ssmlFile) throws Exception {
    // Instantiates a client
    try (TextToSpeechClient textToSpeechClient = TextToSpeechClient.create()) {
        // Read the file's contents. Decode explicitly as UTF-8 instead of the
        // platform default charset, which varies by OS/JVM configuration and
        // would corrupt non-ASCII SSML content on some platforms.
        String contents = new String(
                Files.readAllBytes(Paths.get(ssmlFile)),
                java.nio.charset.StandardCharsets.UTF_8);
        // Set the ssml input to be synthesized
        SynthesisInput input = SynthesisInput.newBuilder().setSsml(contents).build();
        // Build the voice request: languageCode = "en-US", voice gender = FEMALE.
        VoiceSelectionParams voice = VoiceSelectionParams.newBuilder()
                .setLanguageCode("en-US")
                .setSsmlGender(SsmlVoiceGender.FEMALE)
                .build();
        // Select the type of audio file you want returned: MP3 audio.
        AudioConfig audioConfig = AudioConfig.newBuilder()
                .setAudioEncoding(AudioEncoding.MP3)
                .build();
        // Perform the text-to-speech request
        SynthesizeSpeechResponse response = textToSpeechClient.synthesizeSpeech(input, voice, audioConfig);
        // Get the audio contents from the response
        ByteString audioContents = response.getAudioContent();
        // Write the response to the output file.
        try (OutputStream out = new FileOutputStream("output.mp3")) {
            out.write(audioContents.toByteArray());
            System.out.println("Audio content written to file \"output.mp3\"");
        }
    }
}
Also used : ByteString(com.google.protobuf.ByteString) SynthesizeSpeechResponse(com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechResponse) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) FileOutputStream(java.io.FileOutputStream) AudioConfig(com.google.cloud.texttospeech.v1beta1.AudioConfig) ByteString(com.google.protobuf.ByteString) SynthesisInput(com.google.cloud.texttospeech.v1beta1.SynthesisInput) TextToSpeechClient(com.google.cloud.texttospeech.v1beta1.TextToSpeechClient) VoiceSelectionParams(com.google.cloud.texttospeech.v1beta1.VoiceSelectionParams)

Example 77 with ByteString

use of com.google.protobuf.ByteString in project atlasdb by palantir.

The class DataStreamStore, method deleteStreams.

/**
 * Deletes the metadata rows, value blocks, and hash-index entries for the given streams.
 * This should only be used from the cleanup tasks.
 *
 * @param t transaction in which all reads and deletes are performed
 * @param streamIds ids of the streams to remove; no-op when empty
 */
void deleteStreams(Transaction t, final Set<Long> streamIds) {
    if (streamIds.isEmpty()) {
        return;
    }
    Set<DataStreamMetadataTable.DataStreamMetadataRow> smRows = Sets.newHashSet();
    Multimap<DataStreamHashAidxTable.DataStreamHashAidxRow, DataStreamHashAidxTable.DataStreamHashAidxColumn> shToDelete = HashMultimap.create();
    for (Long streamId : streamIds) {
        smRows.add(DataStreamMetadataTable.DataStreamMetadataRow.of(streamId));
    }
    DataStreamMetadataTable table = tables.getDataStreamMetadataTable(t);
    Map<DataStreamMetadataTable.DataStreamMetadataRow, StreamMetadata> metadatas = table.getMetadatas(smRows);
    Set<DataStreamValueTable.DataStreamValueRow> streamValueToDelete = Sets.newHashSet();
    for (Entry<DataStreamMetadataTable.DataStreamMetadataRow, StreamMetadata> e : metadatas.entrySet()) {
        Long streamId = e.getKey().getId();
        long blocks = getNumberOfBlocksFromMetadata(e.getValue());
        // Every value block of the stream is deleted individually.
        for (long i = 0; i < blocks; i++) {
            streamValueToDelete.add(DataStreamValueTable.DataStreamValueRow.of(streamId, i));
        }
        ByteString streamHash = e.getValue().getHash();
        Sha256Hash hash = Sha256Hash.EMPTY;
        // Use a value check instead of reference equality against
        // ByteString.EMPTY: any empty hash (not only the shared EMPTY
        // singleton) means the stream has no stored hash.
        if (!streamHash.isEmpty()) {
            hash = new Sha256Hash(streamHash.toByteArray());
        } else {
            log.error("Empty hash for stream {}", streamId);
        }
        DataStreamHashAidxTable.DataStreamHashAidxRow hashRow = DataStreamHashAidxTable.DataStreamHashAidxRow.of(hash);
        DataStreamHashAidxTable.DataStreamHashAidxColumn column = DataStreamHashAidxTable.DataStreamHashAidxColumn.of(streamId);
        shToDelete.put(hashRow, column);
    }
    tables.getDataStreamHashAidxTable(t).delete(shToDelete);
    tables.getDataStreamValueTable(t).delete(streamValueToDelete);
    table.delete(smRows);
}
Also used : ByteString(com.google.protobuf.ByteString) Sha256Hash(com.palantir.util.crypto.Sha256Hash) StreamMetadata(com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata)

Example 78 with ByteString

use of com.google.protobuf.ByteString in project atlasdb by palantir.

The class StreamStoreRenderer, method renderStreamStore.

public String renderStreamStore() {
    final String StreamStore = name + "StreamStore";
    final String StreamValueTable = name + "StreamValueTable";
    final String StreamValueRow = StreamValueTable + "." + name + "StreamValueRow";
    final String StreamMetadataTable = name + "StreamMetadataTable";
    final String StreamMetadataRow = StreamMetadataTable + "." + name + "StreamMetadataRow";
    final String StreamHashAidxTable = name + "StreamHashAidxTable";
    final String StreamHashAidxRow = StreamHashAidxTable + "." + name + "StreamHashAidxRow";
    final String StreamHashAidxColumn = StreamHashAidxTable + "." + name + "StreamHashAidxColumn";
    final String StreamHashAidxColumnValue = StreamHashAidxTable + "." + name + "StreamHashAidxColumnValue";
    final String StreamIdxTable = name + "StreamIdxTable";
    final String StreamIdxRow = StreamIdxTable + "." + name + "StreamIdxRow";
    final String StreamIdxColumn = StreamIdxTable + "." + name + "StreamIdxColumn";
    final String StreamIdxColumnValue = StreamIdxTable + "." + name + "StreamIdxColumnValue";
    final String TableFactory = schemaName + "TableFactory";
    final String StreamId = streamIdType.getJavaObjectClassName();
    return new Renderer() {

        // Renders the complete generated StreamStore source file: package
        // declaration, imports, class annotations, then each member in a
        // fixed order. Every line(...) call appends one line of generated code.
        @Override
        protected void run() {
            ImportRenderer importRenderer = new ImportRenderer(this, Arrays.asList(IMPORTS));
            line("package ", packageName, ";");
            line();
            importRenderer.renderImports();
            line("@Generated(\"", StreamStoreRenderer.class.getName(), "\")");
            line("@SuppressWarnings(\"all\")");
            line("public final class ", StreamStore, " extends AbstractPersistentStreamStore", " {");
            {
                // The bare block below mirrors the nesting of the generated
                // class so this renderer reads like its own output.
                fields();
                line();
                constructors();
                line();
                getInMemoryThreshold();
                line();
                storeBlock();
                line();
                touchMetadataWhileStoringForConflicts();
                line();
                putMetadataAndHashIndexTask();
                line();
                getNumberOfBlocksFromMetadata();
                line();
                createTempFile();
                line();
                loadSingleBlockToOutputStream();
                line();
                getBlock();
                line();
                // Compression wrappers are emitted only when the schema
                // requested client-side compression.
                if (clientSideCompression) {
                    storeBlocksAndGetFinalMetadata();
                    line();
                    loadStreamWithCompression();
                    line();
                    loadSingleStreamWithCompression();
                    line();
                    loadStreamsWithCompression();
                    line();
                    tryWriteStreamToFile();
                    line();
                    makeStreamUsingTransaction();
                    line();
                }
                getMetadata();
                line();
                lookupStreamIdsByHash();
                line();
                getHashIndexRowsForHashes();
                line();
                getMetadataRowsForIds();
                line();
                putHashIndexTask();
                line();
                deleteStreams();
                line();
                markStreamsAsUsedInternal();
                line();
                unmarkStreamsAsUsed();
                line();
                touchMetadataWhileMarkingUsedForConflicts();
                line();
                importRenderer.renderImportJavaDoc();
                line("static final int dummy = 0;");
            }
            line("}");
        }

        // Emits the generated class's constants (block size, in-memory
        // threshold, temp-file naming) plus its logger and table-factory fields.
        private void fields() {
            line("public static final int BLOCK_SIZE_IN_BYTES = 1000000; // 1MB. DO NOT CHANGE THIS WITHOUT AN UPGRADE TASK");
            line("public static final int IN_MEMORY_THRESHOLD = ", String.valueOf(inMemoryThreshold), "; // streams under this size are kept in memory when loaded");
            line("public static final String STREAM_FILE_PREFIX = \"", name, "_stream_\";");
            line("public static final String STREAM_FILE_SUFFIX = \".tmp\";");
            line();
            line("private static final Logger log = LoggerFactory.getLogger(", StreamStore, ".class);");
            line();
            line("private final ", TableFactory, " tables;");
        }

        // Emits the generated class's private constructors and its static
        // of(...) factory methods (including a package-private test-only one).
        private void constructors() {
            line("private ", StreamStore, "(TransactionManager txManager, ", TableFactory, " tables) {");
            {
                line("this(txManager, tables, () -> StreamStorePersistenceConfiguration.DEFAULT_CONFIG);");
            }
            line("}");
            line();
            line("private ", StreamStore, "(TransactionManager txManager, ", TableFactory, " tables, ", "Supplier<StreamStorePersistenceConfiguration> persistenceConfiguration) {");
            {
                line("super(txManager, persistenceConfiguration);");
                line("this.tables = tables;");
            }
            line("}");
            line();
            line("public static ", StreamStore, " of(TransactionManager txManager, ", TableFactory, " tables) {");
            {
                line("return new ", StreamStore, "(txManager, tables);");
            }
            line("}");
            line();
            line("public static ", StreamStore, " of(TransactionManager txManager, ", TableFactory, " tables, ", " Supplier<StreamStorePersistenceConfiguration> persistenceConfiguration) {");
            {
                line("return new ", StreamStore, "(txManager, tables, persistenceConfiguration);");
            }
            line("}");
            line();
            line("/**");
            line(" * This should only be used by test code or as a performance optimization.");
            line(" */");
            line("static ", StreamStore, " of(", TableFactory, " tables) {");
            {
                line("return new ", StreamStore, "(null, tables);");
            }
            line("}");
        }

        // Emits storeBlock(...): validates the block size, touches the
        // metadata row for conflict detection, and writes the block value.
        private void storeBlock() {
            line("@Override");
            line("protected void storeBlock(Transaction t, long id, long blockNumber, final byte[] block) {");
            {
                line("Preconditions.checkArgument(block.length <= BLOCK_SIZE_IN_BYTES, \"Block to store in DB must be less than BLOCK_SIZE_IN_BYTES\");");
                line("final ", StreamValueRow, " row = ", StreamValueRow, ".of(id, blockNumber);");
                line("try {");
                {
                    line("// Do a touch operation on this table to ensure we get a conflict if someone cleans it up.");
                    line("touchMetadataWhileStoringForConflicts(t, row.getId(), row.getBlockId());");
                    line("tables.get", StreamValueTable, "(t).putValue(row, block);");
                }
                line("} catch (RuntimeException e) {");
                {
                    line("log.error(\"Error storing block {} for stream id {}\", row.getBlockId(), row.getId(), e);");
                    line("throw e;");
                }
                line("}");
            }
            line("}");
        }

        // Emits the helper that re-writes the stream's metadata row while
        // storing, so concurrent cleanup produces a transaction conflict.
        private void touchMetadataWhileStoringForConflicts() {
            line("private void touchMetadataWhileStoringForConflicts(Transaction t, ", StreamId, " id, long blockNumber) {");
            {
                line(StreamMetadataTable, " metaTable = tables.get", StreamMetadataTable, "(t);");
                line(StreamMetadataRow, " row = ", StreamMetadataRow, ".of(id);");
                line("StreamMetadata metadata = metaTable.getMetadatas(ImmutableSet.of(row)).values().iterator().next();");
                line("Preconditions.checkState(metadata.getStatus() == Status.STORING, \"This stream is being cleaned up while storing blocks: %s\", id);");
                line("Builder builder = StreamMetadata.newBuilder(metadata);");
                line("builder.setLength(blockNumber * BLOCK_SIZE_IN_BYTES + 1);");
                line("metaTable.putMetadata(row, builder.build());");
            }
            line("}");
        }

        // Emits putMetadataAndHashIndexTask(...): splits incoming metadata
        // into stored vs. storing, validates state transitions, hash-indexes
        // the stored ones, then writes all metadata in one put.
        private void putMetadataAndHashIndexTask() {
            line("@Override");
            line("protected void putMetadataAndHashIndexTask(Transaction t, Map<", StreamId, ", StreamMetadata> streamIdsToMetadata) {");
            {
                line(StreamMetadataTable, " mdTable = tables.get", StreamMetadataTable, "(t);");
                line("Map<", StreamId, ", StreamMetadata> prevMetadatas = getMetadata(t, streamIdsToMetadata.keySet());");
                line();
                line("Map<", StreamMetadataRow, ", StreamMetadata> rowsToStoredMetadata = Maps.newHashMap();");
                line("Map<", StreamMetadataRow, ", StreamMetadata> rowsToUnstoredMetadata = Maps.newHashMap();");
                line("for (Entry<", StreamId, ", StreamMetadata> e : streamIdsToMetadata.entrySet()) {");
                {
                    line("long streamId = e.getKey();");
                    line("StreamMetadata metadata = e.getValue();");
                    line("StreamMetadata prevMetadata = prevMetadatas.get(streamId);");
                    line("if (metadata.getStatus() == Status.STORED) {");
                    {
                        line("if (prevMetadata == null || prevMetadata.getStatus() != Status.STORING) {");
                        {
                            line("// This can happen if we cleanup old streams.");
                            line("throw new TransactionFailedRetriableException(\"Cannot mark a stream as stored that isn't currently storing: \" + prevMetadata);");
                        }
                        line("}");
                        line("rowsToStoredMetadata.put(", StreamMetadataRow, ".of(streamId), metadata);");
                    }
                    line("} else if (metadata.getStatus() == Status.STORING) {");
                    {
                        line("// This will prevent two users trying to store the same id.");
                        line("if (prevMetadata != null) {");
                        {
                            line("throw new TransactionFailedRetriableException(\"Cannot reuse the same stream id: \" + streamId);");
                        }
                        line("}");
                        line("rowsToUnstoredMetadata.put(", StreamMetadataRow, ".of(streamId), metadata);");
                    }
                    line("}");
                }
                line("}");
                line("putHashIndexTask(t, rowsToStoredMetadata);");
                line();
                line("Map<", StreamMetadataRow, ", StreamMetadata> rowsToMetadata = Maps.newHashMap();");
                line("rowsToMetadata.putAll(rowsToStoredMetadata);");
                line("rowsToMetadata.putAll(rowsToUnstoredMetadata);");
                line("mdTable.putMetadata(rowsToMetadata);");
            }
            line("}");
        }

        // Emits the helper computing the block count of a stream via
        // ceiling division of its length by BLOCK_SIZE_IN_BYTES.
        private void getNumberOfBlocksFromMetadata() {
            line("private long getNumberOfBlocksFromMetadata(StreamMetadata metadata) {");
            {
                line("return (metadata.getLength() + BLOCK_SIZE_IN_BYTES - 1) / BLOCK_SIZE_IN_BYTES;");
            }
            line("}");
        }

        // Emits the trivial override returning the IN_MEMORY_THRESHOLD constant.
        private void getInMemoryThreshold() {
            line("@Override");
            line("protected long getInMemoryThreshold() {");
            {
                line("return IN_MEMORY_THRESHOLD;");
            }
            line("}");
        }

        // Emits createTempFile(id): makes a prefixed temp file that is
        // deleted on JVM exit.
        private void createTempFile() {
            line("@Override");
            line("protected File createTempFile(", StreamId, " id) throws IOException {");
            {
                line("File file = TempFileUtils.createTempFile(STREAM_FILE_PREFIX + id, STREAM_FILE_SUFFIX);");
                line("file.deleteOnExit();");
                line("return file;");
            }
            line("}");
        }

        // Emits loadSingleBlockToOutputStream(...): reads one block and
        // writes it to the stream, rewrapping IOExceptions as unchecked.
        private void loadSingleBlockToOutputStream() {
            line("@Override");
            line("protected void loadSingleBlockToOutputStream(Transaction t, ", StreamId, " streamId, long blockId, OutputStream os) {");
            {
                line(StreamValueRow, " row = ", StreamValueRow, ".of(streamId, blockId);");
                line("try {");
                {
                    line("os.write(getBlock(t, row));");
                }
                line("} catch (RuntimeException e) {");
                {
                    line("log.error(\"Error storing block {} for stream id {}\", row.getBlockId(), row.getId(), e);");
                    line("throw e;");
                }
                line("} catch (IOException e) {");
                {
                    line("log.error(\"Error writing block {} to file when getting stream id {}\", row.getBlockId(), row.getId(), e);");
                    line("throw Throwables.rewrapAndThrowUncheckedException(\"Error writing blocks to file when creating stream.\", e);");
                }
                line("}");
            }
            line("}");
        }

        // Emits the single-row value lookup helper getBlock(t, row).
        private void getBlock() {
            line("private byte[] getBlock(Transaction t, ", StreamValueRow, " row) {");
            {
                line(StreamValueTable, " valueTable = tables.get", StreamValueTable, "(t);");
                line("return valueTable.getValues(ImmutableSet.of(row)).get(row);");
            }
            line("}");
        }

        // Emits getMetadata(t, streamIds): batch-fetches metadata rows and
        // re-keys the result map by stream id.
        private void getMetadata() {
            line("@Override");
            line("protected Map<", StreamId, ", StreamMetadata> getMetadata(Transaction t, Set<", StreamId, "> streamIds) {");
            {
                line("if (streamIds.isEmpty()) {");
                {
                    line("return ImmutableMap.of();");
                }
                line("}");
                line(StreamMetadataTable, " table = tables.get", StreamMetadataTable, "(t);");
                line("Map<", StreamMetadataRow, ", StreamMetadata> metadatas = table.getMetadatas(getMetadataRowsForIds(streamIds));");
                line("Map<", StreamId, ", StreamMetadata> ret = Maps.newHashMap();");
                line("for (Map.Entry<", StreamMetadataRow, ", StreamMetadata> e : metadatas.entrySet()) {");
                {
                    line("ret.put(e.getKey().getId(), e.getValue());");
                }
                line("}");
                line("return ret;");
            }
            line("}");
        }

        // Emits lookupStreamIdsByHash(...): resolves hashes to stream ids via
        // the hash-aidx table, then keeps only streams whose metadata says
        // they are fully STORED.
        private void lookupStreamIdsByHash() {
            line("@Override");
            line("public Map<Sha256Hash, ", StreamId, "> lookupStreamIdsByHash(Transaction t, final Set<Sha256Hash> hashes) {");
            {
                line("if (hashes.isEmpty()) {");
                {
                    line("return ImmutableMap.of();");
                }
                line("}");
                line(StreamHashAidxTable, " idx = tables.get", StreamHashAidxTable, "(t);");
                line("Set<", StreamHashAidxRow, "> rows = getHashIndexRowsForHashes(hashes);");
                line();
                line("Multimap<", StreamHashAidxRow, ", ", StreamHashAidxColumnValue, "> m = idx.getRowsMultimap(rows);");
                line("Map<", StreamId, ", Sha256Hash> hashForStreams = Maps.newHashMap();");
                line("for (", StreamHashAidxRow, " r : m.keySet()) {");
                {
                    line("for (", StreamHashAidxColumnValue, " v : m.get(r)) {");
                    {
                        line(StreamId, " streamId = v.getColumnName().getStreamId();");
                        line("Sha256Hash hash = r.getHash();");
                        line("if (hashForStreams.containsKey(streamId)) {");
                        {
                            line("AssertUtils.assertAndLog(log, hashForStreams.get(streamId).equals(hash), \"(BUG) Stream ID has 2 different hashes: \" + streamId);");
                        }
                        line("}");
                        line("hashForStreams.put(streamId, hash);");
                    }
                    line("}");
                }
                line("}");
                line("Map<", StreamId, ", StreamMetadata> metadata = getMetadata(t, hashForStreams.keySet());");
                line();
                line("Map<Sha256Hash, ", StreamId, "> ret = Maps.newHashMap();");
                line("for (Map.Entry<", StreamId, ", StreamMetadata> e : metadata.entrySet()) {");
                {
                    line("if (e.getValue().getStatus() != Status.STORED) {");
                    {
                        line("continue;");
                    }
                    line("}");
                    line("Sha256Hash hash = hashForStreams.get(e.getKey());");
                    line("ret.put(hash, e.getKey());");
                }
                line("}");
                line();
                line("return ret;");
            }
            line("}");
        }

        // Emits the helper converting a set of hashes into hash-aidx rows.
        private void getHashIndexRowsForHashes() {
            line("private Set<", StreamHashAidxRow, "> getHashIndexRowsForHashes(final Set<Sha256Hash> hashes) {");
            {
                line("Set<", StreamHashAidxRow, "> rows = Sets.newHashSet();");
                line("for (Sha256Hash h : hashes) {");
                {
                    line("rows.add(", StreamHashAidxRow, ".of(h));");
                }
                line("}");
                line("return rows;");
            }
            line("}");
        }

        // Emits the helper converting a collection of stream ids into
        // metadata-table rows.
        private void getMetadataRowsForIds() {
            line("private Set<", StreamMetadataRow, "> getMetadataRowsForIds(final Iterable<", StreamId, "> ids) {");
            {
                line("Set<", StreamMetadataRow, "> rows = Sets.newHashSet();");
                line("for (", StreamId, " id : ids) {");
                {
                    line("rows.add(", StreamMetadataRow, ".of(id));");
                }
                line("}");
                line("return rows;");
            }
            line("}");
        }

        // Emits putHashIndexTask(...): indexes each successfully-stored
        // stream by its content hash in the hash-aidx table.
        private void putHashIndexTask() {
            line("private void putHashIndexTask(Transaction t, Map<", StreamMetadataRow, ", StreamMetadata> rowsToMetadata) {");
            {
                line("Multimap<", StreamHashAidxRow, ", ", StreamHashAidxColumnValue, "> indexMap = HashMultimap.create();");
                line("for (Entry<", StreamMetadataRow, ", StreamMetadata> e : rowsToMetadata.entrySet()) {");
                {
                    line(StreamMetadataRow, " row = e.getKey();");
                    line("StreamMetadata metadata = e.getValue();");
                    line("Preconditions.checkArgument(");
                    line("        metadata.getStatus() == Status.STORED,");
                    line("        \"Should only index successfully stored streams.\");");
                    line();
                    line("Sha256Hash hash = Sha256Hash.EMPTY;");
                    // NOTE(review): the emitted check compares against ByteString.EMPTY
                    // by reference; a value check (metadata.getHash().isEmpty()) would be
                    // more robust — confirm before changing generated output, since
                    // regenerated stores must stay consistent with existing ones.
                    line("if (metadata.getHash() != com.google.protobuf.ByteString.EMPTY) {");
                    {
                        line("hash = new Sha256Hash(metadata.getHash().toByteArray());");
                    }
                    line("}");
                    line(StreamHashAidxRow, " hashRow = ", StreamHashAidxRow, ".of(hash);");
                    line(StreamHashAidxColumn, " column = ", StreamHashAidxColumn, ".of(row.getId());");
                    line(StreamHashAidxColumnValue, " columnValue = ", StreamHashAidxColumnValue, ".of(column, 0L);");
                    line("indexMap.put(hashRow, columnValue);");
                }
                line("}");
                line(StreamHashAidxTable, " hiTable = tables.get", StreamHashAidxTable, "(t);");
                line("hiTable.put(indexMap);");
            }
            line("}");
        }

        // Emits deleteStreams(...): removes metadata rows, all value blocks,
        // and hash-index entries for the given stream ids. The generated code
        // matches the hand-expanded variant used by the cleanup tasks.
        private void deleteStreams() {
            line("/**");
            line(" * This should only be used from the cleanup tasks.");
            line(" */");
            line("void deleteStreams(Transaction t, final Set<", StreamId, "> streamIds) {");
            {
                line("if (streamIds.isEmpty()) {");
                {
                    line("return;");
                }
                line("}");
                line("Set<", StreamMetadataRow, "> smRows = Sets.newHashSet();");
                line("Multimap<", StreamHashAidxRow, ", ", StreamHashAidxColumn, "> shToDelete = HashMultimap.create();");
                line("for (", StreamId, " streamId : streamIds) {");
                {
                    line("smRows.add(", StreamMetadataRow, ".of(streamId));");
                }
                line("}");
                line(StreamMetadataTable, " table = tables.get", StreamMetadataTable, "(t);");
                line("Map<", StreamMetadataRow, ", StreamMetadata> metadatas = table.getMetadatas(smRows);");
                line("Set<", StreamValueRow, "> streamValueToDelete = Sets.newHashSet();");
                line("for (Entry<", StreamMetadataRow, ", StreamMetadata> e : metadatas.entrySet()) {");
                {
                    line(StreamId, " streamId = e.getKey().getId();");
                    line("long blocks = getNumberOfBlocksFromMetadata(e.getValue());");
                    line("for (long i = 0; i < blocks; i++) {");
                    {
                        line("streamValueToDelete.add(", StreamValueRow, ".of(streamId, i));");
                    }
                    line("}");
                    line("ByteString streamHash = e.getValue().getHash();");
                    line("Sha256Hash hash = Sha256Hash.EMPTY;");
                    // NOTE(review): the emitted code uses reference equality against
                    // ByteString.EMPTY; streamHash.isEmpty() would be a more robust
                    // emptiness test — confirm before changing the template.
                    line("if (streamHash != com.google.protobuf.ByteString.EMPTY) {");
                    {
                        line("hash = new Sha256Hash(streamHash.toByteArray());");
                    }
                    line("} else {");
                    {
                        line("log.error(\"Empty hash for stream {}\", streamId);");
                    }
                    line("}");
                    line(StreamHashAidxRow, " hashRow = ", StreamHashAidxRow, ".of(hash);");
                    line(StreamHashAidxColumn, " column = ", StreamHashAidxColumn, ".of(streamId);");
                    line("shToDelete.put(hashRow, column);");
                }
                line("}");
                line("tables.get", StreamHashAidxTable, "(t).delete(shToDelete);");
                line("tables.get", StreamValueTable, "(t).delete(streamValueToDelete);");
                line("table.delete(smRows);");
            }
            line("}");
        }

        // Emits touchMetadataWhileMarkingUsedForConflicts(...): rewrites each
        // stream's metadata (to force a write conflict with cleanup) and
        // fails loudly if any requested stream has no metadata row.
        private void touchMetadataWhileMarkingUsedForConflicts() {
            line("@Override");
            line("protected void touchMetadataWhileMarkingUsedForConflicts(Transaction t, Iterable<", StreamId, "> ids) {");
            {
                line(StreamMetadataTable, " metaTable = tables.get", StreamMetadataTable, "(t);");
                line("Set<", StreamMetadataRow, "> rows = Sets.newHashSet();");
                line("for (", StreamId, " id : ids) {");
                {
                    line("rows.add(", StreamMetadataRow, ".of(id));");
                }
                line("}");
                line("Map<", StreamMetadataRow, ", StreamMetadata> metadatas = metaTable.getMetadatas(rows);");
                line("for (Map.Entry<", StreamMetadataRow, ", StreamMetadata> e : metadatas.entrySet()) {");
                {
                    line("StreamMetadata metadata = e.getValue();");
                    line("Preconditions.checkState(metadata.getStatus() == Status.STORED,");
                    line("\"Stream: %s has status: %s\", e.getKey().getId(), metadata.getStatus());");
                    line("metaTable.putMetadata(e.getKey(), metadata);");
                }
                line("}");
                line("SetView<", StreamMetadataRow, "> missingRows = Sets.difference(rows, metadatas.keySet());");
                line("if (!missingRows.isEmpty()) {");
                {
                    line("throw new IllegalStateException(\"Missing metadata rows for:\" + missingRows");
                    line("+ \" rows: \" + rows + \" metadata: \" + metadatas + \" txn timestamp: \" + t.getTimestamp());");
                }
                line("}");
            }
            line("}");
        }

        // Emits markStreamsAsUsedInternal(...): records each (stream id,
        // reference) pair in the index table.
        private void markStreamsAsUsedInternal() {
            line("@Override");
            line("protected void markStreamsAsUsedInternal(Transaction t, final Map<", StreamId, ", byte[]> streamIdsToReference) {");
            {
                line("if (streamIdsToReference.isEmpty()) {");
                {
                    line("return;");
                }
                line("}");
                line(StreamIdxTable, " index = tables.get", StreamIdxTable, "(t);");
                line("Multimap<", StreamIdxRow, ", ", StreamIdxColumnValue, "> rowsToValues = HashMultimap.create();");
                line("for (Map.Entry<", StreamId, ", byte[]> entry : streamIdsToReference.entrySet()) {");
                {
                    line(StreamId, " streamId = entry.getKey();");
                    line("byte[] reference = entry.getValue();");
                    line(StreamIdxColumn, " col = ", StreamIdxColumn, ".of(reference);");
                    line(StreamIdxColumnValue, " value = ", StreamIdxColumnValue, ".of(col, 0L);");
                    line("rowsToValues.put(", StreamIdxRow, ".of(streamId), value);");
                }
                line("}");
                line("index.put(rowsToValues);");
            }
            line("}");
        }

        // Emits unmarkStreamsAsUsed(...): deletes each (stream id, reference)
        // pair from the index table, the inverse of markStreamsAsUsedInternal.
        private void unmarkStreamsAsUsed() {
            line("@Override");
            line("public void unmarkStreamsAsUsed(Transaction t, final Map<", StreamId, ", byte[]> streamIdsToReference) {");
            {
                line("if (streamIdsToReference.isEmpty()) {");
                {
                    line("return;");
                }
                line("}");
                line(StreamIdxTable, " index = tables.get", StreamIdxTable, "(t);");
                line("Multimap<", StreamIdxRow, ", ", StreamIdxColumn, "> toDelete = ArrayListMultimap.create(streamIdsToReference.size(), 1);");
                line("for (Map.Entry<", StreamId, ", byte[]> entry : streamIdsToReference.entrySet()) {");
                {
                    line(StreamId, " streamId = entry.getKey();");
                    line("byte[] reference = entry.getValue();");
                    line(StreamIdxColumn, " col = ", StreamIdxColumn, ".of(reference);");
                    line("toDelete.put(", StreamIdxRow, ".of(streamId), col);");
                }
                line("}");
                line("index.delete(toDelete);");
            }
            line("}");
        }

        // Emits the compression-enabled storeBlocksAndGetFinalMetadata(...):
        // hashes the raw bytes (pre-compression) while streaming through an
        // LZ4 compressor, then stamps the hash into the returned metadata.
        private void storeBlocksAndGetFinalMetadata() {
            line("@Override");
            line("protected StreamMetadata storeBlocksAndGetFinalMetadata(Transaction t, long id, InputStream stream) {");
            {
                line("//Hash the data before compressing it");
                line("MessageDigest digest = Sha256Hash.getMessageDigest();");
                line("try (InputStream hashingStream = new DigestInputStream(stream, digest);");
                line("        InputStream compressingStream = new LZ4CompressingInputStream(hashingStream)) {");
                {
                    line("StreamMetadata metadata = storeBlocksAndGetHashlessMetadata(t, id, compressingStream);");
                    line("return StreamMetadata.newBuilder(metadata)");
                    line("        .setHash(ByteString.copyFrom(digest.digest()))");
                    line("        .build();");
                }
                line("} catch (IOException e) {");
                {
                    line("throw new RuntimeException(e);");
                }
                line("}");
            }
            line("}");
        }

        /**
         * Emits the generated {@code loadStream} override, which wraps the superclass
         * stream in an LZ4 decompressing {@code LZ4BlockInputStream}.
         */
        private void loadStreamWithCompression() {
            line("@Override");
            line("public InputStream loadStream(Transaction t, final ", StreamId, " id) {");
            {
                line("return new LZ4BlockInputStream(super.loadStream(t, id));");
            }
            line("}");
        }

        /**
         * Emits the generated {@code loadSingleStream} override: the optional stream
         * from the superclass, if present, is wrapped in an LZ4 decompressing stream.
         */
        private void loadSingleStreamWithCompression() {
            line("@Override");
            line("public Optional<InputStream> loadSingleStream(Transaction t, final ", StreamId, " id) {");
            {
                line("Optional<InputStream> inputStream = super.loadSingleStream(t, id);");
                line("return inputStream.map(LZ4BlockInputStream::new);");
            }
            line("}");
        }

        /**
         * Emits the generated {@code loadStreams} override, which wraps every loaded
         * stream in an LZ4 decompressing stream via {@code Maps.transformValues}.
         * NOTE(review): Guava's transformValues returns a lazy view, so the wrappers
         * are presumably created on each access — confirm callers expect a view.
         */
        private void loadStreamsWithCompression() {
            line("@Override");
            line("public Map<", StreamId, ", InputStream> loadStreams(Transaction t, Set<", StreamId, "> ids) {");
            {
                line("Map<", StreamId, ", InputStream> compressedStreams = super.loadStreams(t, ids);");
                line("return Maps.transformValues(compressedStreams, stream -> {");
                {
                    line("return new LZ4BlockInputStream(stream);");
                }
                line("});");
            }
            line("}");
        }

        /**
         * Emits the generated {@code tryWriteStreamToFile} override: it opens a
         * transaction-backed block stream, decompresses it with LZ4, and copies the
         * result into the supplied {@code FileOutputStream}.
         * NOTE(review): the emitted resource list ends in ";)" — legal Java (trailing
         * semicolon is permitted in try-with-resources) — and it closes the
         * caller-supplied fos when the try exits; confirm callers expect that.
         */
        private void tryWriteStreamToFile() {
            line("@Override");
            line("protected void tryWriteStreamToFile(Transaction transaction, ", StreamId, " id, StreamMetadata metadata, FileOutputStream fos) throws IOException {");
            {
                line("try (InputStream blockStream = makeStreamUsingTransaction(transaction, id, metadata);");
                line("        InputStream decompressingStream = new LZ4BlockInputStream(blockStream);");
                line("        OutputStream fileStream = fos;) {");
                {
                    line("ByteStreams.copy(decompressingStream, fileStream);");
                }
                line("}");
            }
            line("}");
        }

        /**
         * Emits a private helper on the generated store that builds a paging
         * {@code BlockConsumingInputStream}: blocks are fetched one at a time from
         * the given transaction so the whole stream need not be memory-resident.
         */
        private void makeStreamUsingTransaction() {
            line("private InputStream makeStreamUsingTransaction(Transaction parent, ", StreamId, " id, StreamMetadata metadata) {");
            {
                line("BiConsumer<Long, OutputStream> singleBlockLoader = (index, destination) ->");
                line("        loadSingleBlockToOutputStream(parent, id, index, destination);");
                line();
                line("BlockGetter pageRefresher = new BlockLoader(singleBlockLoader, BLOCK_SIZE_IN_BYTES);");
                line("long totalBlocks = getNumberOfBlocksFromMetadata(metadata);");
                line("int blocksInMemory = getNumberOfBlocksThatFitInMemory();");
                line();
                line("try {");
                {
                    line("return BlockConsumingInputStream.create(pageRefresher, totalBlocks, blocksInMemory);");
                }
                // "catch (" with a space: keeps the emitted source consistent with the
                // catch block emitted by storeBlocksAndGetFinalMetadata above.
                line("} catch (IOException e) {");
                {
                    line("throw Throwables.throwUncheckedException(e);");
                }
                line("}");
            }
            line("}");
        }
    }.render();
}
Also used : ByteString(com.google.protobuf.ByteString)

Example 79 with ByteString

use of com.google.protobuf.ByteString in project atlasdb by palantir.

Source: the printUnknownFields method of the ForkedJsonFormat class.

/**
 * Writes the given {@link UnknownFieldSet} to {@code generator} as JSON-style text:
 * each unknown field number becomes a quoted key mapped to an array containing all
 * of its varint, fixed32, fixed64, length-delimited, and group values.
 *
 * <p>Varints print as unsigned decimal; fixed32/fixed64 print as zero-padded hex;
 * length-delimited values print as escaped, quoted byte strings; groups recurse.
 *
 * @param unknownFields the unknown fields to render
 * @param generator the output sink
 * @throws IOException if the generator fails to write
 */
protected static void printUnknownFields(UnknownFieldSet unknownFields, JsonGenerator generator) throws IOException {
    boolean firstField = true;
    for (Map.Entry<Integer, UnknownFieldSet.Field> entry : unknownFields.asMap().entrySet()) {
        UnknownFieldSet.Field field = entry.getValue();
        if (!firstField) {
            generator.print(", ");
        }
        firstField = false;
        generator.print("\"");
        generator.print(entry.getKey().toString());
        generator.print("\"");
        generator.print(": [");
        // All value kinds for this field number share one array, so the separator
        // state carries across the five loops below.
        boolean firstValue = true;
        for (long value : field.getVarintList()) {
            firstValue = printValueSeparator(generator, firstValue);
            generator.print(unsignedToString(value));
        }
        for (int value : field.getFixed32List()) {
            firstValue = printValueSeparator(generator, firstValue);
            generator.print(String.format((Locale) null, "0x%08x", value));
        }
        for (long value : field.getFixed64List()) {
            firstValue = printValueSeparator(generator, firstValue);
            generator.print(String.format((Locale) null, "0x%016x", value));
        }
        for (ByteString value : field.getLengthDelimitedList()) {
            firstValue = printValueSeparator(generator, firstValue);
            generator.print("\"");
            generator.print(escapeBytes(value));
            generator.print("\"");
        }
        for (UnknownFieldSet value : field.getGroupList()) {
            firstValue = printValueSeparator(generator, firstValue);
            generator.print("{");
            printUnknownFields(value, generator);
            generator.print("}");
        }
        generator.print("]");
    }
}

/**
 * Prints {@code ", "} unless this is the first value of the array; always returns
 * {@code false} so callers can reassign their first-value flag in one line.
 */
private static boolean printValueSeparator(JsonGenerator generator, boolean firstValue) throws IOException {
    if (!firstValue) {
        generator.print(", ");
    }
    return false;
}
Also used : BigInteger(java.math.BigInteger) Locale(java.util.Locale) ByteString(com.google.protobuf.ByteString) Map(java.util.Map) UnknownFieldSet(com.google.protobuf.UnknownFieldSet)

Example 80 with ByteString

use of com.google.protobuf.ByteString in project atlasdb by palantir.

Source: the handleObject method of the ForkedJsonFormat class.

/**
 * Parses one message-valued field from the tokenizer and returns the built message.
 *
 * <p>For a known field (or extension) the value is a {@code {...}} object whose
 * members are merged field-by-field; for an unknown field the value is a quoted
 * byte string that is parsed back into the sub-message wholesale.
 *
 * @param tokenizer source of tokens
 * @param extensionRegistry registry used when merging nested fields
 * @param builder builder for the enclosing message (supplies sub-builders)
 * @param field descriptor of the field being parsed
 * @param extension extension info, or null for a regular field
 * @param unknown whether the value was serialized as an unknown (byte-string) field
 * @throws ParseException if the byte string does not parse or the object is unterminated
 */
private static Object handleObject(Tokenizer tokenizer, ExtensionRegistry extensionRegistry, Message.Builder builder, FieldDescriptor field, ExtensionRegistry.ExtensionInfo extension, boolean unknown) throws ParseException {
    Message.Builder subBuilder;
    if (extension == null) {
        subBuilder = builder.newBuilderForField(field);
    } else {
        subBuilder = extension.defaultInstance.newBuilderForType();
    }
    if (unknown) {
        ByteString data = tokenizer.consumeByteString();
        try {
            subBuilder.mergeFrom(data);
            return subBuilder.build();
        } catch (InvalidProtocolBufferException e) {
            // NOTE(review): the original cause e is dropped here; consider attaching
            // it via initCause if ParseException supports it — confirm before changing.
            throw tokenizer.parseException("Failed to build " + field.getFullName() + " from " + data);
        }
    }
    tokenizer.consume("{");
    String endToken = "}";
    while (!tokenizer.tryConsume(endToken)) {
        if (tokenizer.atEnd()) {
            throw tokenizer.parseException("Expected \"" + endToken + "\".");
        }
        mergeField(tokenizer, extensionRegistry, subBuilder);
        // Consume an optional separating comma; parsing is lenient and does not
        // require commas between members. (Replaces a former no-op if/continue.)
        tokenizer.tryConsume(",");
    }
    return subBuilder.build();
}
Also used : Message(com.google.protobuf.Message) ByteString(com.google.protobuf.ByteString) InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException)

Aggregations

ByteString (com.google.protobuf.ByteString)406 Test (org.junit.Test)143 ArrayList (java.util.ArrayList)65 ByteString (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString)63 HashMap (java.util.HashMap)41 InvalidProtocolBufferException (com.google.protobuf.InvalidProtocolBufferException)40 IOException (java.io.IOException)37 List (java.util.List)33 Map (java.util.Map)33 ServerRequest (com.pokegoapi.main.ServerRequest)17 ExecutionException (java.util.concurrent.ExecutionException)16 AnnotateImageRequest (com.google.cloud.vision.v1.AnnotateImageRequest)14 AnnotateImageResponse (com.google.cloud.vision.v1.AnnotateImageResponse)14 BatchAnnotateImagesResponse (com.google.cloud.vision.v1.BatchAnnotateImagesResponse)14 Feature (com.google.cloud.vision.v1.Feature)14 Image (com.google.cloud.vision.v1.Image)14 ImageAnnotatorClient (com.google.cloud.vision.v1.ImageAnnotatorClient)14 FileInputStream (java.io.FileInputStream)13 ByteBuffer (java.nio.ByteBuffer)13 WebImage (com.google.cloud.vision.v1.WebDetection.WebImage)12