Search in sources :

Example 1 with StreamController

use of com.google.api.gax.rpc.StreamController in project java-speech by googleapis.

Source: class InfiniteStreamRecognize, method infiniteStreamingRecognize.

/**
 * Performs infinite streaming speech recognition.
 *
 * <p>The Speech API caps the duration of a single streaming request, so this method restarts the
 * stream just before {@code STREAMING_LIMIT} is reached and re-sends the unfinalized tail of the
 * previous request's audio so no speech is lost across the restart.
 *
 * @param languageCode BCP-47 language code of the spoken audio, e.g. "en-US"
 * @throws Exception if the speech client or the microphone line cannot be created
 */
public static void infiniteStreamingRecognize(String languageCode) throws Exception {
    // Microphone input buffering: continuously reads raw PCM chunks from the mic
    // and hands them to the request loop through sharedQueue.
    class MicBuffer implements Runnable {

        @Override
        public void run() {
            System.out.println(YELLOW);
            System.out.println("Start speaking...Press Ctrl-C to stop");
            targetDataLine.start();
            byte[] data = new byte[BYTES_PER_BUFFER];
            while (targetDataLine.isOpen()) {
                try {
                    int numBytesRead = targetDataLine.read(data, 0, data.length);
                    if ((numBytesRead <= 0) && (targetDataLine.isOpen())) {
                        continue;
                    }
                    // clone() gives the queue its own snapshot; 'data' is reused next pass.
                    sharedQueue.put(data.clone());
                } catch (InterruptedException e) {
                    // FIX: restore the interrupt status so the thread's owner can observe it.
                    Thread.currentThread().interrupt();
                    System.out.println("Microphone input buffering interrupted : " + e.getMessage());
                }
            }
        }
    }
    // Creating microphone input buffer thread
    MicBuffer micrunnable = new MicBuffer();
    Thread micThread = new Thread(micrunnable);
    ResponseObserver<StreamingRecognizeResponse> responseObserver = null;
    try (SpeechClient client = SpeechClient.create()) {
        ClientStream<StreamingRecognizeRequest> clientStream;
        responseObserver = new ResponseObserver<StreamingRecognizeResponse>() {

            ArrayList<StreamingRecognizeResponse> responses = new ArrayList<>();

            public void onStart(StreamController controller) {
                // Keep the controller so the request loop can cancel the stream on restart.
                referenceToStreamController = controller;
            }

            public void onResponse(StreamingRecognizeResponse response) {
                responses.add(response);
                // FIX: guard against responses with no results/alternatives instead of
                // throwing IndexOutOfBoundsException inside the stream callback.
                if (response.getResultsList().isEmpty()) {
                    return;
                }
                StreamingRecognitionResult result = response.getResultsList().get(0);
                Duration resultEndTime = result.getResultEndTime();
                resultEndTimeInMS = (int) ((resultEndTime.getSeconds() * 1000) + (resultEndTime.getNanos() / 1000000));
                // Correct the timestamp for audio that was re-sent across stream restarts.
                double correctedTime = resultEndTimeInMS - bridgingOffset + (STREAMING_LIMIT * restartCounter);
                if (result.getAlternativesList().isEmpty()) {
                    return;
                }
                SpeechRecognitionAlternative alternative = result.getAlternativesList().get(0);
                if (result.getIsFinal()) {
                    System.out.print(GREEN);
                    System.out.print("\033[2K\r");
                    System.out.printf("%s: %s [confidence: %.2f]\n", convertMillisToDate(correctedTime), alternative.getTranscript(), alternative.getConfidence());
                    isFinalEndTime = resultEndTimeInMS;
                    lastTranscriptWasFinal = true;
                } else {
                    System.out.print(RED);
                    System.out.print("\033[2K\r");
                    System.out.printf("%s: %s", convertMillisToDate(correctedTime), alternative.getTranscript());
                    lastTranscriptWasFinal = false;
                }
            }

            public void onComplete() {
            }

            public void onError(Throwable t) {
                // FIX: surface stream errors instead of silently swallowing them.
                System.out.println("Streaming recognize error: " + t);
            }
        };
        clientStream = client.streamingRecognizeCallable().splitCall(responseObserver);
        RecognitionConfig recognitionConfig = RecognitionConfig.newBuilder().setEncoding(RecognitionConfig.AudioEncoding.LINEAR16).setLanguageCode(languageCode).setSampleRateHertz(16000).build();
        StreamingRecognitionConfig streamingRecognitionConfig = StreamingRecognitionConfig.newBuilder().setConfig(recognitionConfig).setInterimResults(true).build();
        // The first request on a stream carries only the configuration, no audio.
        StreamingRecognizeRequest request = StreamingRecognizeRequest.newBuilder().setStreamingConfig(streamingRecognitionConfig).build();
        clientStream.send(request);
        try {
            // SampleRate:16000Hz, SampleSizeInBits: 16, Number of channels: 1, Signed: true,
            // bigEndian: false
            AudioFormat audioFormat = new AudioFormat(16000, 16, 1, true, false);
            // Set the system information to read from the microphone audio
            DataLine.Info targetInfo = new Info(TargetDataLine.class, audioFormat);
            if (!AudioSystem.isLineSupported(targetInfo)) {
                System.out.println("Microphone not supported");
                System.exit(0);
            }
            // Target data line captures the audio stream the microphone produces.
            targetDataLine = (TargetDataLine) AudioSystem.getLine(targetInfo);
            targetDataLine.open(audioFormat);
            micThread.start();
            long startTime = System.currentTimeMillis();
            while (true) {
                long estimatedTime = System.currentTimeMillis() - startTime;
                if (estimatedTime >= STREAMING_LIMIT) {
                    // Streaming limit reached: close the current stream and open a fresh one.
                    clientStream.closeSend();
                    // remove Observer
                    referenceToStreamController.cancel();
                    if (resultEndTimeInMS > 0) {
                        finalRequestEndTime = isFinalEndTime;
                    }
                    resultEndTimeInMS = 0;
                    // FIX: dropped the dead store 'lastAudioInput = null;' that was
                    // immediately overwritten by the next assignment.
                    // Keep the previous request's audio so its unfinalized tail can be re-sent.
                    lastAudioInput = audioInput;
                    audioInput = new ArrayList<ByteString>();
                    restartCounter++;
                    if (!lastTranscriptWasFinal) {
                        System.out.print('\n');
                    }
                    newStream = true;
                    clientStream = client.streamingRecognizeCallable().splitCall(responseObserver);
                    request = StreamingRecognizeRequest.newBuilder().setStreamingConfig(streamingRecognitionConfig).build();
                    System.out.println(YELLOW);
                    System.out.printf("%d: RESTARTING REQUEST\n", restartCounter * STREAMING_LIMIT);
                    startTime = System.currentTimeMillis();
                } else {
                    if ((newStream) && (lastAudioInput.size() > 0)) {
                        // if this is the first audio from a new request
                        // calculate amount of unfinalized audio from last request
                        // resend the audio to the speech client before incoming audio
                        // FIX: cast to double — the original int/int division truncated the
                        // per-chunk duration and skewed how much audio gets re-sent.
                        double chunkTime = ((double) STREAMING_LIMIT) / lastAudioInput.size();
                        // ms length of each chunk in previous request audio arrayList
                        if (chunkTime != 0) {
                            if (bridgingOffset < 0) {
                                // bridging Offset accounts for time of resent audio
                                // calculated from last request
                                bridgingOffset = 0;
                            }
                            if (bridgingOffset > finalRequestEndTime) {
                                bridgingOffset = finalRequestEndTime;
                            }
                            int chunksFromMs = (int) Math.floor((finalRequestEndTime - bridgingOffset) / chunkTime);
                            // chunks from MS is number of chunks to resend
                            bridgingOffset = (int) Math.floor((lastAudioInput.size() - chunksFromMs) * chunkTime);
                            // set bridging offset for next request
                            for (int i = chunksFromMs; i < lastAudioInput.size(); i++) {
                                request = StreamingRecognizeRequest.newBuilder().setAudioContent(lastAudioInput.get(i)).build();
                                clientStream.send(request);
                            }
                        }
                        newStream = false;
                    }
                    // Block until the mic thread delivers the next chunk, then forward it.
                    tempByteString = ByteString.copyFrom(sharedQueue.take());
                    request = StreamingRecognizeRequest.newBuilder().setAudioContent(tempByteString).build();
                    audioInput.add(tempByteString);
                }
                clientStream.send(request);
            }
        } catch (Exception e) {
            System.out.println(e);
        }
    }
}
Also used : ByteString(com.google.protobuf.ByteString) ArrayList(java.util.ArrayList) StreamingRecognitionConfig(com.google.cloud.speech.v1p1beta1.StreamingRecognitionConfig) RecognitionConfig(com.google.cloud.speech.v1p1beta1.RecognitionConfig) SpeechClient(com.google.cloud.speech.v1p1beta1.SpeechClient) StreamingRecognizeResponse(com.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse) AudioFormat(javax.sound.sampled.AudioFormat) StreamingRecognitionConfig(com.google.cloud.speech.v1p1beta1.StreamingRecognitionConfig) TargetDataLine(javax.sound.sampled.TargetDataLine) DataLine(javax.sound.sampled.DataLine) Duration(com.google.protobuf.Duration) Info(javax.sound.sampled.DataLine.Info) Info(javax.sound.sampled.DataLine.Info) StreamController(com.google.api.gax.rpc.StreamController) SpeechRecognitionAlternative(com.google.cloud.speech.v1p1beta1.SpeechRecognitionAlternative) StreamingRecognitionResult(com.google.cloud.speech.v1p1beta1.StreamingRecognitionResult) StreamingRecognizeRequest(com.google.cloud.speech.v1p1beta1.StreamingRecognizeRequest)

Example 2 with StreamController

use of com.google.api.gax.rpc.StreamController in project java-speech by googleapis.

Source: class Recognize, method streamingMicRecognize.

// [END speech_stream_recognize_punctuation]
// [START speech_transcribe_streaming_mic]
/**
 * Performs microphone streaming speech recognition with a duration of 1 minute.
 *
 * <p>Buffers interim responses in the observer and prints all transcripts when the stream
 * completes.
 *
 * @throws Exception if the speech client or the microphone line cannot be created
 */
public static void streamingMicRecognize() throws Exception {
    ResponseObserver<StreamingRecognizeResponse> responseObserver = null;
    try (SpeechClient client = SpeechClient.create()) {
        responseObserver = new ResponseObserver<StreamingRecognizeResponse>() {

            ArrayList<StreamingRecognizeResponse> responses = new ArrayList<>();

            public void onStart(StreamController controller) {
            }

            public void onResponse(StreamingRecognizeResponse response) {
                // Buffer responses; transcripts are printed once the stream completes.
                responses.add(response);
            }

            public void onComplete() {
                for (StreamingRecognizeResponse response : responses) {
                    StreamingRecognitionResult result = response.getResultsList().get(0);
                    SpeechRecognitionAlternative alternative = result.getAlternativesList().get(0);
                    System.out.printf("Transcript : %s\n", alternative.getTranscript());
                }
            }

            public void onError(Throwable t) {
                System.out.println(t);
            }
        };
        ClientStream<StreamingRecognizeRequest> clientStream = client.streamingRecognizeCallable().splitCall(responseObserver);
        RecognitionConfig recognitionConfig = RecognitionConfig.newBuilder().setEncoding(RecognitionConfig.AudioEncoding.LINEAR16).setLanguageCode("en-US").setSampleRateHertz(16000).build();
        StreamingRecognitionConfig streamingRecognitionConfig = StreamingRecognitionConfig.newBuilder().setConfig(recognitionConfig).build();
        // The first request on the stream carries only the configuration, no audio.
        StreamingRecognizeRequest request = StreamingRecognizeRequest.newBuilder().setStreamingConfig(streamingRecognitionConfig).build();
        clientStream.send(request);
        // SampleRate:16000Hz, SampleSizeInBits: 16, Number of channels: 1, Signed: true,
        // bigEndian: false
        AudioFormat audioFormat = new AudioFormat(16000, 16, 1, true, false);
        // Set the system information to read from the microphone audio stream
        DataLine.Info targetInfo = new Info(TargetDataLine.class, audioFormat);
        if (!AudioSystem.isLineSupported(targetInfo)) {
            System.out.println("Microphone not supported");
            System.exit(0);
        }
        // Target data line captures the audio stream the microphone produces.
        TargetDataLine targetDataLine = (TargetDataLine) AudioSystem.getLine(targetInfo);
        targetDataLine.open(audioFormat);
        targetDataLine.start();
        System.out.println("Start speaking");
        long startTime = System.currentTimeMillis();
        // Audio Input Stream
        AudioInputStream audio = new AudioInputStream(targetDataLine);
        while (true) {
            long estimatedTime = System.currentTimeMillis() - startTime;
            byte[] data = new byte[6400];
            // FIX: honor the number of bytes actually read; read() may fill only part of
            // the buffer, or return -1 at end of stream — the original ignored it and
            // always sent the full (possibly stale) 6400-byte buffer.
            int bytesRead = audio.read(data);
            if (estimatedTime > 60000) {
                // 60 seconds
                System.out.println("Stop speaking.");
                targetDataLine.stop();
                targetDataLine.close();
                break;
            }
            if (bytesRead <= 0) {
                continue;
            }
            request = StreamingRecognizeRequest.newBuilder().setAudioContent(ByteString.copyFrom(data, 0, bytesRead)).build();
            clientStream.send(request);
        }
    } catch (Exception e) {
        System.out.println(e);
    }
    // FIX: responseObserver stays null if SpeechClient.create() threw; guard the NPE
    // that the unconditional onComplete() call used to cause.
    if (responseObserver != null) {
        responseObserver.onComplete();
    }
}
Also used : StreamingRecognitionConfig(com.google.cloud.speech.v1.StreamingRecognitionConfig) TargetDataLine(javax.sound.sampled.TargetDataLine) DataLine(javax.sound.sampled.DataLine) ArrayList(java.util.ArrayList) WordInfo(com.google.cloud.speech.v1.WordInfo) Info(javax.sound.sampled.DataLine.Info) Info(javax.sound.sampled.DataLine.Info) IOException(java.io.IOException) TargetDataLine(javax.sound.sampled.TargetDataLine) StreamController(com.google.api.gax.rpc.StreamController) AudioInputStream(javax.sound.sampled.AudioInputStream) SpeechRecognitionAlternative(com.google.cloud.speech.v1.SpeechRecognitionAlternative) StreamingRecognitionResult(com.google.cloud.speech.v1.StreamingRecognitionResult) StreamingRecognizeRequest(com.google.cloud.speech.v1.StreamingRecognizeRequest) RecognitionConfig(com.google.cloud.speech.v1.RecognitionConfig) StreamingRecognitionConfig(com.google.cloud.speech.v1.StreamingRecognitionConfig) SpeechClient(com.google.cloud.speech.v1.SpeechClient) StreamingRecognizeResponse(com.google.cloud.speech.v1.StreamingRecognizeResponse) AudioFormat(javax.sound.sampled.AudioFormat)

Example 3 with StreamController

use of com.google.api.gax.rpc.StreamController in project java-bigquerystorage by googleapis.

Source: class ReadRowsAttemptCallable, method onCancel.

/**
 * Called when the outer {@link ResponseObserver} wants to prematurely cancel the stream.
 *
 * @see StreamController#cancel()
 */
private void onCancel() {
    StreamController controllerToCancel;
    synchronized (lock) {
        // A cancellation cause already recorded means cancel() was handled before; bail out.
        if (cancellationCause != null) {
            return;
        }
        // NOTE: BasicRetryingFuture replaces j.u.c.CancellationExceptions with its own,
        // which would not carry the current stack trace, so a special wrapper has to be
        // used here.
        cancellationCause =
            new ServerStreamingAttemptException(
                new CancellationException("User cancelled stream"),
                resumptionStrategy.canResume(),
                seenSuccessSinceLastError);
        controllerToCancel = innerController;
    }
    // Invoke the controller outside the lock so we never call foreign code while holding it.
    if (controllerToCancel != null) {
        controllerToCancel.cancel();
    }
}
Also used : StreamController(com.google.api.gax.rpc.StreamController) CancellationException(java.util.concurrent.CancellationException) ServerStreamingAttemptException(com.google.api.gax.retrying.ServerStreamingAttemptException)

Example 4 with StreamController

use of com.google.api.gax.rpc.StreamController in project java-bigquerystorage by googleapis.

Source: class ReadRowsAttemptCallable (v1beta2), method call.

/**
 * Sends the actual RPC. The request being sent will first be transformed by the {@link
 * StreamResumptionStrategy}.
 *
 * <p>This method expects to be called by one thread at a time. Furthermore, it expects that the
 * current RPC finished before the next time it's called.
 */
@Override
public Void call() {
    Preconditions.checkState(isStarted, "Must be started first");
    // First attempt sends the caller's original request; retries ask the resumption
    // strategy for a request that continues where the previous attempt left off.
    ReadRowsRequest request = (++numAttempts == 1) ? initialRequest : resumptionStrategy.getResumeRequest(initialRequest);
    // Should never happen. onAttemptError will check if ResumptionStrategy can create a resume
    // request,
    // which the RetryingFuture/StreamResumptionStrategy should respect.
    Preconditions.checkState(request != null, "ResumptionStrategy returned a null request.");
    // Fresh future per attempt; completed by the observer callbacks below.
    innerAttemptFuture = SettableApiFuture.create();
    seenSuccessSinceLastError = false;
    ApiCallContext attemptContext = context;
    // A zero RPC timeout means no per-attempt bound; otherwise limit how long this
    // attempt may wait between stream messages.
    if (!outerRetryingFuture.getAttemptSettings().getRpcTimeout().isZero()) {
        attemptContext = attemptContext.withStreamWaitTimeout(outerRetryingFuture.getAttemptSettings().getRpcTimeout());
    }
    attemptContext.getTracer().attemptStarted(outerRetryingFuture.getAttemptSettings().getOverallAttemptCount());
    // Bridge stream lifecycle events back into this callable's onAttempt* handlers.
    innerCallable.call(request, new StateCheckingResponseObserver<ReadRowsResponse>() {

        @Override
        public void onStartImpl(StreamController controller) {
            onAttemptStart(controller);
        }

        @Override
        public void onResponseImpl(ReadRowsResponse response) {
            onAttemptResponse(response);
        }

        @Override
        public void onErrorImpl(Throwable t) {
            onAttemptError(t);
        }

        @Override
        public void onCompleteImpl() {
            onAttemptComplete();
        }
    }, attemptContext);
    // Hand the attempt future to the outer retrying future so it can schedule retries.
    outerRetryingFuture.setAttemptFuture(innerAttemptFuture);
    return null;
}
Also used : StreamController(com.google.api.gax.rpc.StreamController) ReadRowsResponse(com.google.cloud.bigquery.storage.v1beta2.ReadRowsResponse) ReadRowsRequest(com.google.cloud.bigquery.storage.v1beta2.ReadRowsRequest) ApiCallContext(com.google.api.gax.rpc.ApiCallContext)

Example 5 with StreamController

use of com.google.api.gax.rpc.StreamController in project java-bigquerystorage by googleapis.

Source: class ReadRowsAttemptCallable (v1), method call.

/**
 * Sends the actual RPC. The request being sent will first be transformed by the {@link
 * StreamResumptionStrategy}.
 *
 * <p>This method expects to be called by one thread at a time. Furthermore, it expects that the
 * current RPC finished before the next time it's called.
 */
@Override
public Void call() {
    Preconditions.checkState(isStarted, "Must be started first");
    // First attempt sends the caller's original request; retries ask the resumption
    // strategy for a request that continues where the previous attempt left off.
    ReadRowsRequest request = (++numAttempts == 1) ? initialRequest : resumptionStrategy.getResumeRequest(initialRequest);
    // Should never happen. onAttemptError will check if ResumptionStrategy can create a resume
    // request,
    // which the RetryingFuture/StreamResumptionStrategy should respect.
    Preconditions.checkState(request != null, "ResumptionStrategy returned a null request.");
    // Fresh future per attempt; completed by the observer callbacks below.
    innerAttemptFuture = SettableApiFuture.create();
    seenSuccessSinceLastError = false;
    ApiCallContext attemptContext = context;
    // A zero RPC timeout means no per-attempt bound; otherwise limit how long this
    // attempt may wait between stream messages.
    if (!outerRetryingFuture.getAttemptSettings().getRpcTimeout().isZero()) {
        attemptContext = attemptContext.withStreamWaitTimeout(outerRetryingFuture.getAttemptSettings().getRpcTimeout());
    }
    attemptContext.getTracer().attemptStarted(outerRetryingFuture.getAttemptSettings().getOverallAttemptCount());
    // Bridge stream lifecycle events back into this callable's onAttempt* handlers.
    innerCallable.call(request, new StateCheckingResponseObserver<ReadRowsResponse>() {

        @Override
        public void onStartImpl(StreamController controller) {
            onAttemptStart(controller);
        }

        @Override
        public void onResponseImpl(ReadRowsResponse response) {
            onAttemptResponse(response);
        }

        @Override
        public void onErrorImpl(Throwable t) {
            onAttemptError(t);
        }

        @Override
        public void onCompleteImpl() {
            onAttemptComplete();
        }
    }, attemptContext);
    // Hand the attempt future to the outer retrying future so it can schedule retries.
    outerRetryingFuture.setAttemptFuture(innerAttemptFuture);
    return null;
}
Also used : StreamController(com.google.api.gax.rpc.StreamController) ReadRowsResponse(com.google.cloud.bigquery.storage.v1.ReadRowsResponse) ReadRowsRequest(com.google.cloud.bigquery.storage.v1.ReadRowsRequest) ApiCallContext(com.google.api.gax.rpc.ApiCallContext)

Aggregations

StreamController (com.google.api.gax.rpc.StreamController)21 Duration (org.threeten.bp.Duration)4 ServerStreamingAttemptException (com.google.api.gax.retrying.ServerStreamingAttemptException)3 ApiCallContext (com.google.api.gax.rpc.ApiCallContext)3 CancellationException (java.util.concurrent.CancellationException)3 Test (org.junit.Test)3 GrpcCallContext (com.google.api.gax.grpc.GrpcCallContext)2 Duration (com.google.protobuf.Duration)2 StatusRuntimeException (io.grpc.StatusRuntimeException)2 ArrayList (java.util.ArrayList)2 AudioFormat (javax.sound.sampled.AudioFormat)2 DataLine (javax.sound.sampled.DataLine)2 Info (javax.sound.sampled.DataLine.Info)2 TargetDataLine (javax.sound.sampled.TargetDataLine)2 AbortedException (com.google.api.gax.rpc.AbortedException)1 ApiStreamObserver (com.google.api.gax.rpc.ApiStreamObserver)1 BidiStreamObserver (com.google.api.gax.rpc.BidiStreamObserver)1 ClientStream (com.google.api.gax.rpc.ClientStream)1 ResponseObserver (com.google.api.gax.rpc.ResponseObserver)1 ServerStream (com.google.api.gax.rpc.ServerStream)1