Use of android.speech.tts.TextToSpeech in project gdk-compass-sample by googleglass.
The class CompassService, method onCreate.
@Override
public void onCreate() {
    super.onCreate();

    // Even though the text-to-speech engine is only used in response to a menu action, we
    // initialize it when the application starts so that we avoid delays that could occur
    // if we waited until it was needed to start it up.
    mSpeech = new TextToSpeech(this, new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(int status) {
            // Do nothing.
        }
    });

    SensorManager sensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
    LocationManager locationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE);

    mOrientationManager = new OrientationManager(sensorManager, locationManager);
    mLandmarks = new Landmarks(this);
}
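Because the engine is created up front, the speak call issued later from the menu handler pays no initialization cost. A minimal sketch of how such a pre-initialized engine is typically driven and released; the helper method and the onDestroy body below are illustrative assumptions, not code from the sample:

// Hypothetical helper: speaks a heading string when the menu item is chosen.
// TextToSpeech#speak(String, int, HashMap) is the pre-API-21 signature.
private void speakHeading(String headingText) {
    mSpeech.speak(headingText, TextToSpeech.QUEUE_FLUSH, null);
}

@Override
public void onDestroy() {
    // Release the engine when the service stops to free the TTS service binding.
    mSpeech.shutdown();
    super.onDestroy();
}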
Use of android.speech.tts.TextToSpeech in project Utterance by benbahrenburg.
The class SpeechProxy, method onInit.
@Override
public void onInit(int status) {
    try {
        if (_tts == null) {
            _tts = new TextToSpeech(TiApplication.getInstance().getApplicationContext(), this);
        }
        if (status == TextToSpeech.LANG_MISSING_DATA || status == TextToSpeech.LANG_NOT_SUPPORTED) {
            Log.e(_logName, "This Language is not supported");
            if (hasListeners("completed")) {
                HashMap<String, Object> event = new HashMap<String, Object>();
                event.put("success", false);
                event.put("message", "This Language is not supported");
                event.put("text", _text);
                event.put("voice", _voice);
                fireEvent("completed", event);
            }
        }
        if (status == TextToSpeech.SUCCESS) {
            //Log.d(_logName, "Adding OnUtteranceCompletedListener");
            _tts.setOnUtteranceCompletedListener(this);
            _voice = _tts.getLanguage().toString();
        }
    } catch (Exception error) {
        if (hasListeners("completed")) {
            HashMap<String, Object> event = new HashMap<String, Object>();
            event.put("success", false);
            event.put("message", "General Err: " + error.getMessage());
            event.put("text", _text);
            event.put("voice", _voice);
            fireEvent("completed", event);
        }
        Log.e(UtteranceModule.MODULE_FULL_NAME, error.getMessage());
        error.printStackTrace();
    }
}
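The proxy registers itself as the utterance-completed listener, so the success path presumably fires the same "completed" event once speech finishes. A hedged sketch of what that callback could look like, assuming the same event keys as the error paths above; this is not necessarily the module's actual implementation:

// Hypothetical success callback, mirroring the event shape used in onInit.
@Override
public void onUtteranceCompleted(String utteranceId) {
    if (hasListeners("completed")) {
        HashMap<String, Object> event = new HashMap<String, Object>();
        event.put("success", true);
        event.put("text", _text);
        event.put("voice", _voice);
        fireEvent("completed", event);
    }
}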
Use of android.speech.tts.TextToSpeech in project platform_frameworks_base by android.
The class ScoAudioTest, method onCreate.
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.scoaudiotest);

    mScoStateTxt = (TextView) findViewById(R.id.scoStateTxt);
    mVdStateTxt = (TextView) findViewById(R.id.vdStateTxt);

    IntentFilter intentFilter = new IntentFilter(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
    intentFilter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_CHANGED);
    intentFilter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
    registerReceiver(mReceiver, intentFilter);

    mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    mAudioManager2 = (AudioManager) getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
    mHandler = new Handler();

    mMediaControllers[0] = new SimplePlayerController(this, R.id.playPause1, R.id.stop1,
            R.raw.sine440_mo_16b_16k, AudioManager.STREAM_BLUETOOTH_SCO);
    TextView name = (TextView) findViewById(R.id.playPause1Text);
    name.setText("VOICE_CALL stream");

    mScoButton = (ToggleButton) findViewById(R.id.ForceScoButton);
    mScoButton.setOnCheckedChangeListener(mForceScoChanged);
    mForceScoOn = false;
    mScoButton.setChecked(mForceScoOn);

    mVoiceDialerButton = (ToggleButton) findViewById(R.id.VoiceDialerButton);
    mVoiceDialerButton.setOnCheckedChangeListener(mVoiceDialerChanged);
    mVoiceDialerOn = false;
    mVoiceDialerButton.setChecked(mVoiceDialerOn);

    mMediaControllers[1] = new SimpleRecordController(this, R.id.recStop1, 0, "Sco_record_");

    mTtsInited = false;
    mTts = new TextToSpeech(this, new TtsInitListener());
    mTtsParams = new HashMap<String, String>();
    mTtsParams.put(TextToSpeech.Engine.KEY_PARAM_STREAM,
            String.valueOf(AudioManager.STREAM_BLUETOOTH_SCO));
    mTtsParams.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, UTTERANCE);

    mSpeakText = (EditText) findViewById(R.id.speakTextEdit);
    mSpeakText.setOnKeyListener(mSpeakKeyListener);
    mSpeakText.setText("sco audio test sentence");

    mTtsToFileButton = (ToggleButton) findViewById(R.id.TtsToFileButton);
    mTtsToFileButton.setOnCheckedChangeListener(mTtsToFileChanged);
    mTtsToFile = true;
    mTtsToFileButton.setChecked(mTtsToFile);

    mModeSpinner = (Spinner) findViewById(R.id.modeSpinner);
    ArrayAdapter<String> adapter = new ArrayAdapter<String>(this,
            android.R.layout.simple_spinner_item, mModeStrings);
    adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
    mModeSpinner.setAdapter(adapter);
    mModeSpinner.setOnItemSelectedListener(mModeChanged);
    mCurrentMode = mAudioManager.getMode();
    mModeSpinner.setSelection(mCurrentMode);

    mBluetoothHeadsetDevice = null;
    BluetoothAdapter btAdapter = BluetoothAdapter.getDefaultAdapter();
    if (btAdapter != null) {
        btAdapter.getProfileProxy(this, mBluetoothProfileServiceListener, BluetoothProfile.HEADSET);
    }

    sVoiceCommandIntent = new Intent(Intent.ACTION_VOICE_COMMAND);
    sVoiceCommandIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
}
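With KEY_PARAM_STREAM pointing at STREAM_BLUETOOTH_SCO and KEY_PARAM_UTTERANCE_ID set, a later speak() call routes synthesis to the SCO stream and reports completion under that utterance ID. A sketch of such a call; the guard shown here is assumed and is not part of the test's onCreate:

// Assumed trigger: speak the edit-text contents over the SCO stream once init succeeded.
if (mTtsInited) {
    // The HashMap-based speak() overload honors the KEY_PARAM_STREAM and
    // KEY_PARAM_UTTERANCE_ID values set up in onCreate().
    mTts.speak(mSpeakText.getText().toString(), TextToSpeech.QUEUE_FLUSH, mTtsParams);
}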
Use of android.speech.tts.TextToSpeech in project Anki-Android by Ramblurr.
The class ReadText, method initializeTts.
public static void initializeTts(Context context) {
    mReviewer = context;
    mTTSInitDone = false;
    Log.i(AnkiDroidApp.TAG, "initializeTts");
    final TTSInitListener listener = new TTSInitListener();
    mTts = new TextToSpeech(context, listener);
}
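mTTSInitDone is cleared before the engine is constructed, so the listener is evidently expected to flip it once initialization completes. A minimal sketch of what such a TTSInitListener might do, assuming it only records the result; this is not necessarily AnkiDroid's actual listener:

// Hypothetical listener: records whether the engine came up successfully.
private static class TTSInitListener implements TextToSpeech.OnInitListener {
    @Override
    public void onInit(int status) {
        mTTSInitDone = (status == TextToSpeech.SUCCESS);
        if (status != TextToSpeech.SUCCESS) {
            Log.e(AnkiDroidApp.TAG, "TextToSpeech initialization failed");
        }
    }
}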
Use of android.speech.tts.TextToSpeech in project XobotOS by xamarin.
The class WebView, method addAccessibilityApisToJavaScript.
/**
 * Adds accessibility APIs to JavaScript.
 *
 * Note: This method is responsible for performing the necessary
 * checks to determine whether the accessibility APIs should be exposed.
 */
private void addAccessibilityApisToJavaScript() {
    if (AccessibilityManager.getInstance(mContext).isEnabled()
            && getSettings().getJavaScriptEnabled()) {
        // Exposing the TTS for now ...
        mTextToSpeech = new TextToSpeech(getContext(), null);
        addJavascriptInterface(mTextToSpeech, ALIAS_ACCESSIBILITY_JS_INTERFACE);
    }
}
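Handing the TextToSpeech object straight to addJavascriptInterface works in this platform code, but on API 17 and later only methods annotated with @JavascriptInterface are reachable from page scripts, so an application would normally expose a narrow wrapper instead. A hedged sketch of such a wrapper; the class and method names are illustrative, not from XobotOS:

// Hypothetical bridge: exposes only a speak() entry point to page JavaScript.
// Requires android.webkit.JavascriptInterface.
private static class AccessibilityTtsBridge {
    private final TextToSpeech mTts;

    AccessibilityTtsBridge(TextToSpeech tts) {
        mTts = tts;
    }

    @JavascriptInterface
    public void speak(String text) {
        // Page scripts invoke this through the name registered with addJavascriptInterface().
        mTts.speak(text, TextToSpeech.QUEUE_FLUSH, null);
    }
}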