下面列出了 android.os.Message#getData() 的实例代码；也可以点击链接到 GitHub 查看源代码，或在右侧发表评论。
/**
 * Replies to the caller with the most recent hit-test data via {@code msg}.
 *
 * @see android.webkit.WebView#requestFocusNodeHref()
 */
public void requestFocusNodeHref(Message msg) {
    if (msg == null || mNativeAwContents == 0) {
        return;
    }
    nativeUpdateLastHitTestData(mNativeAwContents);

    // Legacy-WebView compatibility: |url| carries the absolute (full) URL,
    // not only the href attribute. HitTestData cleanup is tracked in
    // http://crbug.com/290992.
    Bundle reply = msg.getData();
    reply.putString("url", mPossiblyStaleHitTestData.hitTestResultExtraData);
    reply.putString("title", mPossiblyStaleHitTestData.anchorText);
    reply.putString("src", mPossiblyStaleHitTestData.imgSrc);

    msg.setData(reply);
    msg.sendToTarget();
}
/**
 * Creates the plugin bound to the given registrar, preferring the activity
 * context when available, and installs a main-looper handler that forwards
 * download progress or errors to the progress sink.
 *
 * @param registrar Flutter plugin registrar supplying the Android context.
 */
private OtaUpdatePlugin(Registrar registrar) {
    this.registrar = registrar;
    // Prefer the activity context; fall back to the application context.
    context = (registrar.activity() != null) ? registrar.activity() : registrar.context();
    handler = new Handler(context.getMainLooper()) {
        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            if (progressSink == null) {
                return; // nobody listening; drop the update
            }
            Bundle data = msg.getData();
            if (data.containsKey(ERROR)) {
                reportError(OtaStatus.DOWNLOAD_ERROR, data.getString(ERROR));
                return;
            }
            long bytesDownloaded = data.getLong(BYTES_DOWNLOADED);
            long bytesTotal = data.getLong(BYTES_TOTAL);
            // Guard against ArithmeticException when the total size is
            // unknown or zero; report 0% progress in that case.
            long percent = bytesTotal > 0 ? (bytesDownloaded * 100) / bytesTotal : 0;
            progressSink.success(
                    Arrays.asList("" + OtaStatus.DOWNLOADING.ordinal(), "" + percent));
        }
    };
}
// Packages a timed-text update (plain-text or bitmap subtitle) into a
// MEDIA_TIMED_TEXT message and posts it to the event handler.
private void updateSub(int subType, byte[] bytes, String encoding, int width, int height) {
    if (mEventHandler == null) {
        return;
    }
    Message m = mEventHandler.obtainMessage(MEDIA_TIMED_TEXT, width, height);
    Bundle payload = m.getData();
    switch (subType) {
        case SUBTITLE_TEXT: {
            payload.putInt(MEDIA_SUBTITLE_TYPE, SUBTITLE_TEXT);
            String text;
            if (encoding == null) {
                text = new String(bytes);
            } else {
                try {
                    text = new String(bytes, encoding.trim());
                } catch (UnsupportedEncodingException e) {
                    Log.e("updateSub", e);
                    // Fall back to the platform default charset.
                    text = new String(bytes);
                }
            }
            payload.putString(MEDIA_SUBTITLE_STRING, text);
            break;
        }
        case SUBTITLE_BITMAP: {
            payload.putInt(MEDIA_SUBTITLE_TYPE, SUBTITLE_BITMAP);
            payload.putByteArray(MEDIA_SUBTITLE_BYTES, bytes);
            break;
        }
        default:
            // Unknown subtitle type: message is still sent with an empty payload,
            // matching the historical behavior.
            break;
    }
    mEventHandler.sendMessage(m);
}
@Override
public void handleMessage(Message message) {
    // Drives the scan state machine for the hosting fragment.
    final int what = message.what;
    if (what == R.id.auto_focus) {
        // Re-arm single-shot autofocus while previewing; this is the closest
        // available approximation of continuous AF.
        if (state == State.PREVIEW) {
            CameraManager.get().requestAutoFocus(this, R.id.auto_focus);
        }
    } else if (what == R.id.restart_preview) {
        Log.d(TAG, "Got restart preview message");
        restartPreviewAndDecode();
    } else if (what == R.id.decode_succeeded) {
        Log.d(TAG, "Got decode succeeded message");
        state = State.SUCCESS;
        Bundle bundle = message.getData();
        Bitmap barcode = null;
        if (bundle != null) {
            barcode = (Bitmap) bundle.getParcelable(DecodeThread.BARCODE_BITMAP);
        }
        // Deliver the decode result (and optional thumbnail) to the fragment.
        fragment.handleDecode((Result) message.obj, barcode);
    } else if (what == R.id.decode_failed) {
        // Decode continuously: a failed frame immediately requests the next one.
        state = State.PREVIEW;
        CameraManager.get().requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
    } else if (what == R.id.return_scan_result) {
        Log.d(TAG, "Got return scan result message");
        fragment.getActivity().setResult(Activity.RESULT_OK, (Intent) message.obj);
        fragment.getActivity().finish();
    } else if (what == R.id.launch_product_query) {
        Log.d(TAG, "Got product query message");
        Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse((String) message.obj));
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
        fragment.getActivity().startActivity(intent);
    }
}
@Override
public void handleMessage(Message message) {
    // Scan state machine for the hosting activity.
    switch (message.what) {
        case R.id.auto_focus: {
            // Re-request single-shot autofocus while previewing
            // (pseudo-continuous AF).
            if (state == State.PREVIEW) {
                CameraManager.get().requestAutoFocus(this, R.id.auto_focus);
            }
            break;
        }
        case R.id.restart_preview: {
            Log.d(TAG, "Got restart preview message");
            restartPreviewAndDecode();
            break;
        }
        case R.id.decode_succeeded: {
            Log.d(TAG, "Got decode succeeded message");
            state = State.SUCCESS;
            Bundle bundle = message.getData();
            Bitmap barcode = bundle == null
                    ? null
                    : (Bitmap) bundle.getParcelable(DecodeThread.BARCODE_BITMAP);
            activity.handleDecode((Result) message.obj, barcode);
            break;
        }
        case R.id.decode_failed: {
            // Continuous decoding: on failure, immediately request another frame.
            state = State.PREVIEW;
            CameraManager.get().requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
            break;
        }
        case R.id.return_scan_result: {
            Log.d(TAG, "Got return scan result message");
            activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
            activity.finish();
            break;
        }
        case R.id.launch_product_query: {
            Log.d(TAG, "Got product query message");
            Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse((String) message.obj));
            intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
            activity.startActivity(intent);
            break;
        }
    }
}
// Forwards a caching status update (type, info value, cached segment list) to
// the event handler as a MEDIA_CACHING_UPDATE message.
private void updateCacheStatus(int type, int info, long[] segments) {
    if (mEventHandler == null) {
        return;
    }
    Message msg = mEventHandler.obtainMessage(MEDIA_CACHING_UPDATE);
    Bundle payload = msg.getData();
    payload.putInt(MEDIA_CACHING_TYPE, type);
    payload.putInt(MEDIA_CACHING_INFO, info);
    payload.putLongArray(MEDIA_CACHING_SEGMENTS, segments);
    mEventHandler.sendMessage(msg);
}
/**
 * Extracts the path stored under {@code MessageHandler.KEY_PATH} from the
 * message's data bundle.
 *
 * @param message the message to inspect; may be {@code null}
 * @return the stored path, or {@code null} when the message, its data bundle,
 *         or the key is absent
 */
public static String getPathFromMessage(Message message) {
    if (message != null) {
        Bundle data = message.getData();
        if (data != null) {
            return data.getString(MessageHandler.KEY_PATH);
        }
    }
    return null;
}
@Override
public void handleMessage(@NonNull Message message) {
    // Relays transformation lifecycle events to the listener. The job id is
    // mandatory; the per-track statistics ride along in message.obj.
    Object payload = message.obj;
    List<TrackTransformationInfo> stats =
            payload == null ? null : (List<TrackTransformationInfo>) payload;
    Bundle data = message.getData();
    String jobId = data.getString(KEY_JOB_ID);
    if (jobId == null) {
        throw new IllegalArgumentException("Handler message doesn't contain an id!");
    }
    switch (message.what) {
        case EVENT_STARTED:
            listener.onStarted(jobId);
            break;
        case EVENT_COMPLETED:
            listener.onCompleted(jobId, stats);
            break;
        case EVENT_CANCELLED:
            listener.onCancelled(jobId, stats);
            break;
        case EVENT_ERROR:
            listener.onError(jobId, (Throwable) data.getSerializable(KEY_THROWABLE), stats);
            break;
        case EVENT_PROGRESS:
            listener.onProgress(jobId, data.getFloat(KEY_PROGRESS));
            break;
        default:
            Log.e(TAG, "Unknown event received: " + message.what);
    }
}
@Override
public void handleMessage(Message message) {
    // Scan state machine: autofocus loop, preview restart, decode delivery,
    // result return, and external product-query launch.
    switch (message.what) {
        case R.id.auto_focus: {
            // Keep re-arming single-shot autofocus while previewing; closest
            // available approximation of continuous AF.
            if (state == State.PREVIEW) {
                CameraManager.get().requestAutoFocus(this, R.id.auto_focus);
            }
            break;
        }
        case R.id.restart_preview: {
            Log.d(TAG, "Got restart preview message");
            restartPreviewAndDecode();
            break;
        }
        case R.id.decode_succeeded: {
            Log.d(TAG, "Got decode succeeded message");
            state = State.SUCCESS;
            Bundle bundle = message.getData();
            Bitmap barcode = bundle == null
                    ? null
                    : (Bitmap) bundle.getParcelable(DecodeThread.BARCODE_BITMAP);
            activity.handleDecode((Result) message.obj, barcode);
            break;
        }
        case R.id.decode_failed: {
            // We're decoding as fast as possible; one failure starts the next frame.
            state = State.PREVIEW;
            CameraManager.get().requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
            break;
        }
        case R.id.return_scan_result: {
            Log.d(TAG, "Got return scan result message");
            activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
            activity.finish();
            break;
        }
        case R.id.launch_product_query: {
            Log.d(TAG, "Got product query message");
            Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse((String) message.obj));
            intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
            activity.startActivity(intent);
            break;
        }
    }
}
@Override
public void handleMessage(Message message) {
    // Decode-loop state machine: restart preview, deliver a successful decode
    // (with optional thumbnail), or immediately retry on failure.
    switch (message.what) {
        case ID.restart_preview:
            restartPreviewAndDecode();
            break;
        case ID.decode_succeeded:
            state = State.SUCCESS;
            Bundle bundle = message.getData();
            Bitmap barcode = null;
            float scaleFactor = 1.0f;
            if (bundle != null) {
                byte[] compressedBitmap = bundle.getByteArray(DecodeThread.BARCODE_BITMAP);
                if (compressedBitmap != null) {
                    Bitmap decoded = BitmapFactory.decodeByteArray(compressedBitmap, 0, compressedBitmap.length, null);
                    // decodeByteArray() returns null when the bytes cannot be
                    // decoded; guard before copy() to avoid an NPE.
                    if (decoded != null) {
                        barcode = decoded.copy(Bitmap.Config.ARGB_8888, true); // mutable copy
                    }
                }
                scaleFactor = bundle.getFloat(DecodeThread.BARCODE_SCALED_FACTOR);
            }
            if (listener != null)
                listener.onResult((Result) message.obj, barcode, scaleFactor);
            break;
        case ID.decode_failed:
            // We're decoding as fast as possible, so when one decode fails, start another.
            state = State.PREVIEW;
            cameraManager.requestPreviewFrame(decodeThread.getHandler(), ID.decode);
            break;
    }
}
@Override
public void handleMessage(Message msg) {
    super.handleMessage(msg);
    // Translates player-control messages into service actions. The data
    // bundle carries the track (for WHAT_PLAY) or seek position (WHAT_SEEK_TO).
    Bundle extras = msg.getData();
    switch (msg.what) {
        case WHAT_PLAY:
            playTrack((SoundCloudTrack) extras.getParcelable(BUNDLE_KEY_SOUND_CLOUD_TRACK));
            break;
        case WHAT_PAUSE_PLAYER:
            pause();
            break;
        case WHAT_RESUME_PLAYER:
            resume();
            break;
        case WHAT_STOP_PLAYER:
            stopPlayer();
            break;
        case WHAT_NEXT_TRACK:
            nextTrack();
            break;
        case WHAT_PREVIOUS_TRACK:
            previousTrack();
            break;
        case WHAT_SEEK_TO:
            seekToPosition(extras.getInt(BUNDLE_KEY_SOUND_CLOUD_TRACK_POSITION));
            break;
        case WHAT_CLEAR_PLAYER:
            // Tearing down the player ends the service itself.
            stopSelf();
            break;
        default:
            break;
    }
}
@Override
public void handleMessage(Message msg) {
    // The activity is held via a weak reference and may already be gone by
    // the time this message is delivered; drop the update in that case.
    UpdateCheckActivity activity = mActivity.get();
    if (activity == null) {
        return;
    }
    String result = msg.getData().getString(RESCHK_KEY);
    activity.handleUpdateMessage(result);
}
// Adds a "nothing in this category" placeholder label to the icon sheet named
// by the message's "category" extra, if that sheet exists.
private void showNoIconsRecv(Message msg) {
    String category = msg.getData().getString("category");
    GridLayout iconSheet = mIconSheets.get(category);
    if (iconSheet == null) {
        return;
    }
    TextView placeholder = new TextView(this);
    placeholder.setText(R.string.nothing_in_cat);
    placeholder.setTextColor(mStyle.getTextColor());
    placeholder.setTextSize(mStyle.getLauncherFontSize());
    placeholder.setPadding(2, 40, 2, 2);
    placeholder.setMaxLines(3);
    iconSheet.addView(placeholder);
}
@Override
public void handleMessage(Message msg) {
    // Unpacks a download request. Fixed fields are read and then removed so
    // that any entries remaining in the bundle can be read as HTTP headers.
    Bundle data = msg.getData();
    String source = data.getString("source");
    data.remove("source");
    String target = data.getString("target");
    data.remove("target");
    int connectTimeout = data.getInt("connectTimeout");
    data.remove("connectTimeout");
    int readTimeout = data.getInt("readTimeout");
    data.remove("readTimeout");
    int progressInterval = data.getInt("progressInterval");
    data.remove("progressInterval");
    boolean append = data.getBoolean("append", false);
    data.remove("append");
    HashMap<String, List<String>> headers = null;
    if (data.size() > 0) {
        DeviceLog.debug("There are headers left in data, reading them");
        headers = new HashMap<>();
        for (String k : data.keySet()) {
            // getStringArray() returns null when the entry is missing or not a
            // String[]; skip such entries instead of NPE-ing in Arrays.asList().
            String[] values = data.getStringArray(k);
            if (values != null) {
                headers.put(k, Arrays.asList(values));
            }
        }
    }
    File targetFile = new File(target);
    // append mode requires an existing file; a fresh download requires none.
    if ((append && !targetFile.exists()) || (!append && targetFile.exists())) {
        _active = false;
        WebViewApp.getCurrentApp().sendEvent(WebViewEventCategory.CACHE, CacheEvent.DOWNLOAD_ERROR, CacheError.FILE_STATE_WRONG, source, target, append, targetFile.exists());
        return;
    }
    switch (msg.what) {
        case CacheThread.MSG_DOWNLOAD:
            downloadFile(source, target, connectTimeout, readTimeout, progressInterval, headers, append);
            break;
        default:
            break;
    }
}
@Override
public void handleMessage(Message msg) {
    // Dispatches events posted by the underlying media player to the
    // registered Java-side listener callbacks.
    if (mMediaPlayer == null) {
        // Player already released; late messages are dropped.
        // //Log.i("MiuiVideo: get message after player released, msg type: " + msg.what);
        return;
    }
    switch (msg.what) {
        case MEDIA_PREPARED:
            if (mOnPreparedListener != null)
                mOnPreparedListener.onPrepared(mMediaPlayer);
            return;
        case MEDIA_PLAYBACK_COMPLETE:
            if (mOnCompletionListener != null)
                mOnCompletionListener.onCompletion(mMediaPlayer);
            // Playback is over; release the wake hold.
            stayAwake(false);
            return;
        case MEDIA_BUFFERING_UPDATE:
            onBufferingUpdate(msg);
            return;
        case MEDIA_SEEK_COMPLETE:
            // Re-assert the wake hold only if playback resumed after the seek.
            if (isPlaying())
                stayAwake(true);
            if (mOnSeekCompleteListener != null)
                mOnSeekCompleteListener.onSeekComplete(mMediaPlayer);
            return;
        case MEDIA_SET_VIDEO_SIZE:
            // arg1/arg2 carry the new width/height.
            if (mOnVideoSizeChangedListener != null)
                mOnVideoSizeChangedListener.onVideoSizeChanged(mMediaPlayer, msg.arg1, msg.arg2);
            return;
        case MEDIA_ERROR:
            Log.e("Error (%d, %d)", msg.arg1, msg.arg2);
            boolean error_was_handled = false;
            if (mOnErrorListener != null)
                error_was_handled = mOnErrorListener.onError(mMediaPlayer, msg.arg1, msg.arg2);
            // If the error listener did not consume the error, completion is
            // still reported (mirrors android.media.MediaPlayer behavior).
            if (mOnCompletionListener != null && !error_was_handled)
                mOnCompletionListener.onCompletion(mMediaPlayer);
            stayAwake(false);
            return;
        case MEDIA_INFO:
            Log.i("Info (%d, %d)", msg.arg1, msg.arg2);
            if (mOnInfoListener != null)
                mOnInfoListener.onInfo(mMediaPlayer, msg.arg1, msg.arg2);
            return;
        case MEDIA_CACHE:
            // No action for plain cache events.
            return;
        case MEDIA_TIMED_TEXT:
            // Subtitle payload arrives in the message's data bundle
            // (text string or bitmap bytes, keyed by MEDIA_SUBTITLE_TYPE).
            mData = msg.getData();
            if (mData.getInt(MEDIA_SUBTITLE_TYPE) == SUBTITLE_TEXT) {
                Log.i("Subtitle : %s", mData.getString(MEDIA_SUBTITLE_STRING));
                if (mOnTimedTextListener != null)
                    mOnTimedTextListener.onTimedText(mData.getString(MEDIA_SUBTITLE_STRING));
            } else if (mData.getInt(MEDIA_SUBTITLE_TYPE) == SUBTITLE_BITMAP) {
                Log.i("Subtitle : bitmap");
                if (mOnTimedTextListener != null)
                    mOnTimedTextListener.onTimedTextUpdate(mData.getByteArray(MEDIA_SUBTITLE_BYTES), msg.arg1, msg.arg2);
            }
            return;
        case MEDIA_CACHING_UPDATE:
            // Fan out caching notifications by the sub-type stored in the bundle.
            if (mOnCachingUpdateListener != null) {
                int cacheType = msg.getData().getInt(MEDIA_CACHING_TYPE);
                if (cacheType == CACHE_TYPE_NOT_AVAILABLE) {
                    mOnCachingUpdateListener.onCachingNotAvailable(mMediaPlayer, msg.getData().getInt(MEDIA_CACHING_INFO));
                } else if (cacheType == CACHE_TYPE_UPDATE) {
                    mOnCachingUpdateListener.onCachingUpdate(mMediaPlayer, msg.getData().getLongArray(MEDIA_CACHING_SEGMENTS));
                } else if (cacheType == CACHE_TYPE_SPEED) {
                    mOnCachingUpdateListener.onCachingSpeed(mMediaPlayer, msg.getData().getInt(MEDIA_CACHING_INFO));
                } else if (cacheType == CACHE_TYPE_START) {
                    mOnCachingUpdateListener.onCachingStart(mMediaPlayer);
                } else if (cacheType == CACHE_TYPE_COMPLETE) {
                    mOnCachingUpdateListener.onCachingComplete(mMediaPlayer);
                }
            }
            return;
        case MEDIA_NOP:
            return;
        case MEDIA_HW_ERROR:
            // Hardware render failure: notify the failure listener.
            if (mOnHWRenderFailedListener != null)
                mOnHWRenderFailedListener.onFailed();
            return;
        default:
            Log.e("Unknown message type " + msg.what);
            return;
    }
}
@Override
public void handleMessage(Message message) {
    // Scan state machine: restart preview, deliver decode results (with
    // optional thumbnail), retry on failure, return the scan result, or
    // launch an external product query in a browser.
    if (message.what == R.id.restart_preview) {
        restartPreviewAndDecode();
    } else if (message.what == R.id.decode_succeeded) {
        state = State.SUCCESS;
        Bundle bundle = message.getData();
        Bitmap barcode = null;
        float scaleFactor = 1.0f;
        if (bundle != null) {
            byte[] compressedBitmap = bundle.getByteArray(DecodeThread.BARCODE_BITMAP);
            if (compressedBitmap != null) {
                Bitmap decoded = BitmapFactory.decodeByteArray(compressedBitmap, 0, compressedBitmap.length, null);
                // decodeByteArray() returns null when the bytes cannot be
                // decoded; guard before copy() to avoid an NPE.
                if (decoded != null) {
                    barcode = decoded.copy(Bitmap.Config.ARGB_8888, true); // mutable copy
                }
            }
            scaleFactor = bundle.getFloat(DecodeThread.BARCODE_SCALED_FACTOR);
        }
        activity.handleDecode((Result) message.obj, barcode, scaleFactor);
    } else if (message.what == R.id.decode_failed) {
        // We're decoding as fast as possible, so when one decode fails, start another.
        state = State.PREVIEW;
        cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
    } else if (message.what == R.id.return_scan_result) {
        activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
        activity.finish();
    } else if (message.what == R.id.launch_product_query) {
        String url = (String) message.obj;
        Intent intent = new Intent(Intent.ACTION_VIEW);
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
        intent.setData(Uri.parse(url));
        ResolveInfo resolveInfo =
                activity.getPackageManager().resolveActivity(intent, PackageManager.MATCH_DEFAULT_ONLY);
        String browserPackageName = null;
        if (resolveInfo != null && resolveInfo.activityInfo != null) {
            browserPackageName = resolveInfo.activityInfo.packageName;
            Log.d(TAG, "Using browser in package " + browserPackageName);
        }
        // Needed for default Android browser / Chrome only apparently
        if ("com.android.browser".equals(browserPackageName) || "com.android.chrome".equals(browserPackageName)) {
            intent.setPackage(browserPackageName);
            intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            intent.putExtra(Browser.EXTRA_APPLICATION_ID, browserPackageName);
        }
        try {
            activity.startActivity(intent);
        } catch (ActivityNotFoundException ignored) {
            Log.w(TAG, "Can't find anything to handle VIEW of URI " + url);
        }
    }
}
@Override
public boolean handleMessage(Message msg) {
    // Dispatches queued BLE operations. Common arguments are unpacked up
    // front from the data bundle; the callback rides along in msg.obj.
    Bundle data = msg.getData();
    String mac = data.getString(EXTRA_MAC);
    UUID serviceUuid = (UUID) data.getSerializable(EXTRA_SERVICE_UUID);
    UUID charUuid = (UUID) data.getSerializable(EXTRA_CHARACTER_UUID);
    UUID descriptorUuid = (UUID) data.getSerializable(EXTRA_DESCRIPTOR_UUID);
    byte[] payload = data.getByteArray(EXTRA_BYTE_VALUE);
    BleGeneralResponse response = (BleGeneralResponse) msg.obj;
    switch (msg.what) {
        case CODE_CONNECT: {
            BleConnectOptions options = data.getParcelable(EXTRA_OPTIONS);
            BleConnectManager.connect(mac, options, response);
            break;
        }
        case CODE_DISCONNECT: {
            BleConnectManager.disconnect(mac);
            break;
        }
        case CODE_READ: {
            BleConnectManager.read(mac, serviceUuid, charUuid, response);
            break;
        }
        case CODE_WRITE: {
            BleConnectManager.write(mac, serviceUuid, charUuid, payload, response);
            break;
        }
        case CODE_WRITE_NORSP: {
            BleConnectManager.writeNoRsp(mac, serviceUuid, charUuid, payload, response);
            break;
        }
        case CODE_READ_DESCRIPTOR: {
            BleConnectManager.readDescriptor(mac, serviceUuid, charUuid, descriptorUuid, response);
            break;
        }
        case CODE_WRITE_DESCRIPTOR: {
            BleConnectManager.writeDescriptor(mac, serviceUuid, charUuid, descriptorUuid, payload, response);
            break;
        }
        case CODE_NOTIFY: {
            BleConnectManager.notify(mac, serviceUuid, charUuid, response);
            break;
        }
        case CODE_UNNOTIFY: {
            BleConnectManager.unnotify(mac, serviceUuid, charUuid, response);
            break;
        }
        case CODE_READ_RSSI: {
            BleConnectManager.readRssi(mac, response);
            break;
        }
        case CODE_SEARCH: {
            SearchRequest request = data.getParcelable(EXTRA_REQUEST);
            BluetoothSearchManager.search(request, response);
            break;
        }
        case CODE_STOP_SESARCH: {
            BluetoothSearchManager.stopSearch();
            break;
        }
        case CODE_INDICATE: {
            BleConnectManager.indicate(mac, serviceUuid, charUuid, response);
            break;
        }
        case CODE_REQUEST_MTU: {
            int mtu = data.getInt(EXTRA_MTU);
            BleConnectManager.requestMtu(mac, mtu, response);
            break;
        }
        case CODE_CLEAR_REQUEST: {
            int clearType = data.getInt(EXTRA_TYPE, 0);
            BleConnectManager.clearRequest(mac, clearType);
            break;
        }
        case CODE_REFRESH_CACHE: {
            BleConnectManager.refreshCache(mac);
            break;
        }
    }
    // Message is always considered handled.
    return true;
}
@Override
public void handleMessage(Message message) {
    // Scan state machine for the capture activity (bitmap delivery disabled).
    final int what = message.what;
    if (what == R.id.auto_focus) {
        // Re-arm single-shot autofocus while previewing; closest available
        // approximation of continuous AF.
        if (state == State.PREVIEW) {
            CameraManager.get().requestAutoFocus(this, R.id.auto_focus);
        }
    } else if (what == R.id.restart_preview) {
        Log.d(TAG, "Got restart preview message");
        restartPreviewAndDecode();
    } else if (what == R.id.decode_succeeded) {
        Log.d(TAG, "Got decode succeeded message");
        state = State.SUCCESS;
        // Bundle is fetched for parity with the original flow, but the bitmap
        // extraction is intentionally disabled in this variant.
        Bundle bundle = message.getData();
        activity.handleDecode((Result) message.obj);
    } else if (what == R.id.decode_failed) {
        // Continuous decoding: a failed frame immediately requests the next one.
        state = State.PREVIEW;
        CameraManager.get().requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
    } else if (what == R.id.return_scan_result) {
        Log.d(TAG, "Got return scan result message");
        activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
        activity.finish();
    } else if (what == R.id.launch_product_query) {
        Log.d(TAG, "Got product query message");
        Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse((String) message.obj));
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
        activity.startActivity(intent);
    }
}
@Override
public void handleMessage(Message message) {
    // Scan state machine: restart preview, deliver decode results (with
    // optional thumbnail), retry on failure, return the scan result, or
    // launch an external product query in a browser.
    if (message.what == R.id.restart_preview) {
        restartPreviewAndDecode();
    } else if (message.what == R.id.decode_succeeded) {
        state = State.SUCCESS;
        Bundle bundle = message.getData();
        Bitmap barcode = null;
        float scaleFactor = 1.0f;
        if (bundle != null) {
            byte[] compressedBitmap = bundle.getByteArray(DecodeThread.BARCODE_BITMAP);
            if (compressedBitmap != null) {
                Bitmap decoded = BitmapFactory.decodeByteArray(compressedBitmap, 0, compressedBitmap.length, null);
                // decodeByteArray() returns null when the bytes cannot be
                // decoded; guard before copy() to avoid an NPE.
                if (decoded != null) {
                    barcode = decoded.copy(Bitmap.Config.ARGB_8888, true); // mutable copy
                }
            }
            scaleFactor = bundle.getFloat(DecodeThread.BARCODE_SCALED_FACTOR);
        }
        activity.handleDecode((Result) message.obj, barcode, scaleFactor);
    } else if (message.what == R.id.decode_failed) {
        // We're decoding as fast as possible, so when one decode fails, start another.
        state = State.PREVIEW;
        cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
    } else if (message.what == R.id.return_scan_result) {
        activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
        activity.finish();
    } else if (message.what == R.id.launch_product_query) {
        String url = (String) message.obj;
        Intent intent = new Intent(Intent.ACTION_VIEW);
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
        intent.setData(Uri.parse(url));
        ResolveInfo resolveInfo =
                activity.getPackageManager().resolveActivity(intent, PackageManager.MATCH_DEFAULT_ONLY);
        String browserPackageName = null;
        if (resolveInfo != null && resolveInfo.activityInfo != null) {
            browserPackageName = resolveInfo.activityInfo.packageName;
            Log.d(TAG, "Using browser in package " + browserPackageName);
        }
        // Needed for default Android browser / Chrome only apparently
        if ("com.android.browser".equals(browserPackageName) || "com.android.chrome".equals(browserPackageName)) {
            intent.setPackage(browserPackageName);
            intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            intent.putExtra(Browser.EXTRA_APPLICATION_ID, browserPackageName);
        }
        try {
            activity.startActivity(intent);
        } catch (ActivityNotFoundException ignored) {
            Log.w(TAG, "Can't find anything to handle VIEW of URI " + url);
        }
    }
}
@Override
public void handleMessage(Message message) {
    // Scan state machine: autofocus loop, preview restart, decode delivery,
    // result return, and external product-query launch.
    final int what = message.what;
    if (what == R.id.auto_focus) {
        // Re-request single-shot autofocus while previewing
        // (pseudo-continuous AF).
        if (state == State.PREVIEW) {
            CameraManager.get().requestAutoFocus(this, R.id.auto_focus);
        }
    } else if (what == R.id.restart_preview) {
        Log.d(TAG, "Got restart preview message");
        restartPreviewAndDecode();
    } else if (what == R.id.decode_succeeded) {
        Log.d(TAG, "Got decode succeeded message");
        state = State.SUCCESS;
        Bundle bundle = message.getData();
        Bitmap barcode = null;
        if (bundle != null) {
            barcode = (Bitmap) bundle.getParcelable(DecodeThread.BARCODE_BITMAP);
        }
        activity.handleDecode((Result) message.obj, barcode);
    } else if (what == R.id.decode_failed) {
        // Continuous decoding: one failed frame immediately requests the next.
        state = State.PREVIEW;
        CameraManager.get().requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
    } else if (what == R.id.return_scan_result) {
        Log.d(TAG, "Got return scan result message");
        activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
        activity.finish();
    } else if (what == R.id.launch_product_query) {
        Log.d(TAG, "Got product query message");
        Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse((String) message.obj));
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
        activity.startActivity(intent);
    }
}