The examples below show how to use the com.google.zxing.ReaderException API class in code; each example comes from an open-source project, and you can follow the link to view the full source on GitHub.
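Most of these examples share one pattern: ReaderException (and its subclasses NotFoundException, ChecksumException and FormatException) is thrown when a frame or image contains no decodable barcode, and callers either swallow it and retry with the next frame, or map it to a null result. A minimal sketch of that pattern, using only core com.google.zxing classes; the helper name tryDecode is made up for illustration:
// Returns the decode result, or null when nothing could be decoded.
public static Result tryDecode(MultiFormatReader reader, BinaryBitmap bitmap) {
    try {
        return reader.decodeWithState(bitmap);
    } catch (ReaderException e) {
        // Not an error in a scanning loop: this frame simply contained no readable barcode.
        return null;
    } finally {
        reader.reset();
    }
}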
public DetectorResult[] detectMulti(Map<DecodeHintType,?> hints) throws NotFoundException {
BitMatrix image = getImage();
ResultPointCallback resultPointCallback =
hints == null ? null : (ResultPointCallback) hints.get(DecodeHintType.NEED_RESULT_POINT_CALLBACK);
MultiFinderPatternFinder finder = new MultiFinderPatternFinder(image, resultPointCallback);
FinderPatternInfo[] infos = finder.findMulti(hints);
if (infos.length == 0) {
throw NotFoundException.getNotFoundInstance();
}
List<DetectorResult> result = new ArrayList<>();
for (FinderPatternInfo info : infos) {
try {
result.add(processFinderPatternInfo(info));
} catch (ReaderException e) {
// ignore
}
}
if (result.isEmpty()) {
return EMPTY_DETECTOR_RESULTS;
} else {
return result.toArray(new DetectorResult[result.size()]);
}
}
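detectMulti is the low-level detector hook; application code usually goes through QRCodeMultiReader (or GenericMultipleBarcodeReader) instead. A hedged sketch of decoding several QR codes from one image, with a placeholder file name:
BufferedImage image = ImageIO.read(new File("multiple-codes.png")); // placeholder path
BinaryBitmap bitmap = new BinaryBitmap(
        new HybridBinarizer(new BufferedImageLuminanceSource(image)));
QRCodeMultiReader multiReader = new QRCodeMultiReader();
try {
    for (Result result : multiReader.decodeMultiple(bitmap)) {
        System.out.println(result.getText());
    }
} catch (NotFoundException e) {
    // The image contained no QR code at all.
}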
/**
 * Read a QR code and print the information it carries.
 */
public static void readQrCode(InputStream inputStream) throws IOException {
// Read the image from the input stream
BufferedImage image = ImageIO.read(inputStream);
if (image == null) {
throw new IOException("The input stream does not contain a readable image");
}
// Wrap the image in a binary bitmap source
LuminanceSource source = new BufferedImageLuminanceSource(image);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
QRCodeReader reader = new QRCodeReader();
try {
Result result = reader.decode(bitmap);
System.out.println(result.getText());
} catch (ReaderException e) {
// No QR code was found or its contents could not be decoded
e.printStackTrace();
}
}
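A small usage sketch for readQrCode above; the file name is a placeholder and IOException handling is left to the caller:
public static void main(String[] args) throws IOException {
    try (InputStream in = new FileInputStream("qrcode.png")) { // placeholder path
        readQrCode(in);
    }
}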
/**
* Decode barcode from YUV pixels array
*
* @param pixels YUV image data
* @param width Image width
* @param height Image height
* @param rotation Degrees to rotate image before decoding (only 0, 90, 180 or 270 are allowed)
* @param reverseHorizontal Reverse image horizontally before decoding
* @param hints Decoder hints
* @return Decode result, if barcode was decoded successfully, {@code null} otherwise
* @see DecodeHintType
*/
@Nullable
@SuppressWarnings("SuspiciousNameCombination")
public static Result decodeYuv(@NonNull final byte[] pixels, final int width, final int height,
@Rotation final int rotation, final boolean reverseHorizontal,
@Nullable final Map<DecodeHintType, ?> hints) {
Objects.requireNonNull(pixels);
final byte[] rotatedPixels = Utils.rotateYuv(pixels, width, height, rotation);
final int rotatedWidth;
final int rotatedHeight;
if (rotation == ROTATION_90 || rotation == ROTATION_270) {
rotatedWidth = height;
rotatedHeight = width;
} else {
rotatedWidth = width;
rotatedHeight = height;
}
final MultiFormatReader reader = createReader(hints);
try {
return Utils.decodeLuminanceSource(reader,
new PlanarYUVLuminanceSource(rotatedPixels, rotatedWidth, rotatedHeight, 0, 0,
rotatedWidth, rotatedHeight, reverseHorizontal));
} catch (final ReaderException e) {
return null;
}
}
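decodeYuv is convenient from an Android camera preview callback, where the frame arrives as NV21 bytes. A sketch under that assumption: data, previewWidth and previewHeight come from the callback, ROTATION_90 is the rotation constant referenced in the method above, and handleResult is a hypothetical handler.
Map<DecodeHintType, Object> hints = new EnumMap<>(DecodeHintType.class);
hints.put(DecodeHintType.POSSIBLE_FORMATS, EnumSet.of(BarcodeFormat.QR_CODE));
hints.put(DecodeHintType.TRY_HARDER, Boolean.TRUE);
Result result = decodeYuv(data, previewWidth, previewHeight, ROTATION_90, false, hints);
if (result != null) {
    handleResult(result.getText()); // a null result just means "no barcode in this frame"
}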
@Nullable
@SuppressWarnings("SuspiciousNameCombination")
public Result decode(@NonNull final MultiFormatReader reader) throws ReaderException {
int imageWidth = mImageSize.getX();
int imageHeight = mImageSize.getY();
final int orientation = mOrientation;
final byte[] image = Utils.rotateYuv(mImage, imageWidth, imageHeight, orientation);
if (orientation == 90 || orientation == 270) {
final int width = imageWidth;
imageWidth = imageHeight;
imageHeight = width;
}
final Rect frameRect =
Utils.getImageFrameRect(imageWidth, imageHeight, mViewFrameRect, mPreviewSize,
mViewSize);
final int frameWidth = frameRect.getWidth();
final int frameHeight = frameRect.getHeight();
if (frameWidth < 1 || frameHeight < 1) {
return null;
}
return Utils.decodeLuminanceSource(reader,
new PlanarYUVLuminanceSource(image, imageWidth, imageHeight, frameRect.getLeft(),
frameRect.getTop(), frameWidth, frameHeight, mReverseHorizontal));
}
public Result decodeWithZxing(Bitmap bitmap) {
MultiFormatReader multiFormatReader = new MultiFormatReader();
multiFormatReader.setHints(changeZXingDecodeDataMode());
int width = bitmap.getWidth();
int height = bitmap.getHeight();
int[] pixels = new int[width * height];
bitmap.getPixels(pixels, 0, width, 0, 0, width, height);
Result rawResult = null;
RGBLuminanceSource source = new RGBLuminanceSource(width, height, pixels);
BinaryBitmap binaryBitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
rawResult = multiFormatReader.decodeWithState(binaryBitmap);
} catch (ReaderException re) {
// continue
} finally {
multiFormatReader.reset();
}
return rawResult;
}
private void decode(final byte[] data) {
final PlanarYUVLuminanceSource source = cameraManager.buildLuminanceSource(data);
final BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
hints.put(DecodeHintType.NEED_RESULT_POINT_CALLBACK, (ResultPointCallback) dot -> runOnUiThread(() -> scannerView.addDot(dot)));
final Result scanResult = reader.decode(bitmap, hints);
runOnUiThread(() -> handleResult(scanResult));
} catch (final ReaderException x) {
// retry
cameraHandler.post(fetchAndDecodeRunnable);
} finally {
reader.reset();
}
}
public String decodeWithZxing(byte[] data, int width, int height, Rect crop) {
MultiFormatReader multiFormatReader = new MultiFormatReader();
multiFormatReader.setHints(changeZXingDecodeDataMode());
Result rawResult = null;
PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(data, width, height,
crop.left, crop.top, crop.width(), crop.height(), false);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
rawResult = multiFormatReader.decodeWithState(bitmap);
} catch (ReaderException re) {
// continue
} finally {
multiFormatReader.reset();
}
return rawResult != null ? rawResult.getText() : null;
}
public String decodeWithZxing(Bitmap bitmap) {
MultiFormatReader multiFormatReader = new MultiFormatReader();
multiFormatReader.setHints(changeZXingDecodeDataMode());
int width = bitmap.getWidth();
int height = bitmap.getHeight();
int[] pixels = new int[width * height];
bitmap.getPixels(pixels, 0, width, 0, 0, width, height);
Result rawResult = null;
RGBLuminanceSource source = new RGBLuminanceSource(width, height, pixels);
BinaryBitmap binaryBitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
rawResult = multiFormatReader.decodeWithState(binaryBitmap);
} catch (ReaderException re) {
// continue
} finally {
multiFormatReader.reset();
}
return rawResult != null ? rawResult.getText() : null;
}
private String decode(byte[] data, int width, int height) {
ScannerManager manager = mManager.get();
if (manager == null) {
return null;
}
Rect rect = manager.getFramingRectInPreview();
// The last four int arguments are the crop rectangle's left, top, width and height
PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(data,
width, height, rect.left, rect.top, rect.width(), rect.height(), false);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
QRCodeReader reader = new QRCodeReader();
try {
Result result = reader.decode(bitmap, mHints);
return result.getText();
} catch (ReaderException e) {
// Ignore as we will repeatedly decode the preview frame
return null;
}
}
public DetectorResult[] detectMulti(Map<DecodeHintType,?> hints) throws NotFoundException {
BitMatrix image = getImage();
ResultPointCallback resultPointCallback =
hints == null ? null : (ResultPointCallback) hints.get(DecodeHintType.NEED_RESULT_POINT_CALLBACK);
MultiFinderPatternFinder finder = new MultiFinderPatternFinder(image, resultPointCallback);
FinderPatternInfo[] infos = finder.findMulti(hints);
if (infos.length == 0) {
throw NotFoundException.getNotFoundInstance();
}
List<DetectorResult> result = new ArrayList<>();
for (FinderPatternInfo info : infos) {
try {
result.add(processFinderPatternInfo(info));
} catch (ReaderException e) {
// ignore
}
}
if (result.isEmpty()) {
return EMPTY_DETECTOR_RESULTS;
} else {
return result.toArray(EMPTY_DETECTOR_RESULTS);
}
}
public DetectorResult[] detectMulti(Map<DecodeHintType,?> hints) throws NotFoundException {
BitMatrix image = getImage();
ResultPointCallback resultPointCallback =
hints == null ? null : (ResultPointCallback) hints.get(DecodeHintType.NEED_RESULT_POINT_CALLBACK);
MultiFinderPatternFinder finder = new MultiFinderPatternFinder(image, resultPointCallback);
FinderPatternInfo[] infos = finder.findMulti(hints);
if (infos.length == 0) {
throw NotFoundException.getNotFoundInstance();
}
List<DetectorResult> result = new ArrayList<DetectorResult>();
for (FinderPatternInfo info : infos) {
try {
result.add(processFinderPatternInfo(info));
} catch (ReaderException e) {
// ignore
}
}
if (result.isEmpty()) {
return EMPTY_DETECTOR_RESULTS;
} else {
return result.toArray(new DetectorResult[result.size()]);
}
}
private void decode(final byte[] data)
{
final PlanarYUVLuminanceSource source = cameraManager.buildLuminanceSource(data);
final BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
hints.put(DecodeHintType.NEED_RESULT_POINT_CALLBACK,
(ResultPointCallback) dot -> runOnUiThread(() -> scannerView.addDot(dot)));
final Result scanResult = reader.decode(bitmap, hints);
final int thumbnailWidth = source.getThumbnailWidth();
final int thumbnailHeight = source.getThumbnailHeight();
final float thumbnailScaleFactor = (float) thumbnailWidth / source.getWidth();
final Bitmap thumbnailImage = Bitmap.createBitmap(thumbnailWidth, thumbnailHeight,
Bitmap.Config.ARGB_8888);
thumbnailImage.setPixels(
source.renderThumbnail(), 0, thumbnailWidth, 0, 0, thumbnailWidth, thumbnailHeight);
runOnUiThread(() -> handleResult(scanResult, thumbnailImage, thumbnailScaleFactor));
} catch (final ReaderException x) {
// retry
cameraHandler.post(fetchAndDecodeRunnable);
} finally {
reader.reset();
}
}
private void decode(final byte[] data)
{
final PlanarYUVLuminanceSource source = cameraManager.buildLuminanceSource(data);
final BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
hints.put(DecodeHintType.NEED_RESULT_POINT_CALLBACK,
(ResultPointCallback) dot -> runOnUiThread(() -> scannerView.addDot(dot)));
final Result scanResult = reader.decode(bitmap, hints);
Log.d(TAG,"scanResult " + scanResult);
final int thumbnailWidth = source.getThumbnailWidth();
final int thumbnailHeight = source.getThumbnailHeight();
final float thumbnailScaleFactor = (float) thumbnailWidth / source.getWidth();
final Bitmap thumbnailImage = Bitmap.createBitmap(thumbnailWidth, thumbnailHeight,
Bitmap.Config.ARGB_8888);
thumbnailImage.setPixels(
source.renderThumbnail(), 0, thumbnailWidth, 0, 0, thumbnailWidth, thumbnailHeight);
runOnUiThread(() -> handleResult(scanResult, thumbnailImage, thumbnailScaleFactor));
} catch (final ReaderException x) {
// retry
cameraHandler.post(fetchAndDecodeRunnable);
} finally {
reader.reset();
}
}
@Override
public Result decodeRow(int rowNumber,
BitArray row,
Map<DecodeHintType,?> hints) throws NotFoundException {
for (OneDReader reader : readers) {
try {
return reader.decodeRow(rowNumber, row, hints);
} catch (ReaderException re) {
// continue
}
}
throw NotFoundException.getNotFoundInstance();
}
Result decodeRow(int rowNumber, BitArray row, int rowOffset) throws NotFoundException {
int[] extensionStartRange = UPCEANReader.findGuardPattern(row, rowOffset, false, EXTENSION_START_PATTERN);
try {
return fiveSupport.decodeRow(rowNumber, row, extensionStartRange);
} catch (ReaderException ignored) {
return twoSupport.decodeRow(rowNumber, row, extensionStartRange);
}
}
@Override
public Result[] decodeMultiple(BinaryBitmap image, Map<DecodeHintType,?> hints) throws NotFoundException {
List<Result> results = new ArrayList<>();
DetectorResult[] detectorResults = new MultiDetector(image.getBlackMatrix()).detectMulti(hints);
for (DetectorResult detectorResult : detectorResults) {
try {
DecoderResult decoderResult = getDecoder().decode(detectorResult.getBits(), hints);
ResultPoint[] points = detectorResult.getPoints();
// If the code was mirrored: swap the bottom-left and the top-right points.
if (decoderResult.getOther() instanceof QRCodeDecoderMetaData) {
((QRCodeDecoderMetaData) decoderResult.getOther()).applyMirroredCorrection(points);
}
Result result = new Result(decoderResult.getText(), decoderResult.getRawBytes(), points,
BarcodeFormat.QR_CODE);
List<byte[]> byteSegments = decoderResult.getByteSegments();
if (byteSegments != null) {
result.putMetadata(ResultMetadataType.BYTE_SEGMENTS, byteSegments);
}
String ecLevel = decoderResult.getECLevel();
if (ecLevel != null) {
result.putMetadata(ResultMetadataType.ERROR_CORRECTION_LEVEL, ecLevel);
}
if (decoderResult.hasStructuredAppend()) {
result.putMetadata(ResultMetadataType.STRUCTURED_APPEND_SEQUENCE,
decoderResult.getStructuredAppendSequenceNumber());
result.putMetadata(ResultMetadataType.STRUCTURED_APPEND_PARITY,
decoderResult.getStructuredAppendParity());
}
results.add(result);
} catch (ReaderException re) {
// ignore and continue
}
}
if (results.isEmpty()) {
return EMPTY_RESULT_ARRAY;
} else {
results = processStructuredAppend(results);
return results.toArray(new Result[results.size()]);
}
}
/**
* Decode barcode from RGB pixels array
*
* @param pixels Colors in standard Android ARGB format
* @param width Image width
* @param height Image height
* @param hints Decoder hints
* @return Decode result, if barcode was decoded successfully, {@code null} otherwise
* @see DecodeHintType
* @see Color
*/
@Nullable
public static Result decodeRgb(@NonNull final int[] pixels, final int width, final int height,
@Nullable final Map<DecodeHintType, ?> hints) {
Objects.requireNonNull(pixels);
final MultiFormatReader reader = createReader(hints);
try {
return Utils
.decodeLuminanceSource(reader, new RGBLuminanceSource(width, height, pixels));
} catch (final ReaderException e) {
return null;
}
}
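decodeRgb pairs naturally with pixels pulled from an android.graphics.Bitmap (or from a BufferedImage via getRGB). A sketch assuming a Bitmap named bitmap is already available:
int width = bitmap.getWidth();
int height = bitmap.getHeight();
int[] pixels = new int[width * height];
bitmap.getPixels(pixels, 0, width, 0, 0, width, height); // ARGB values, as RGBLuminanceSource expects
Result result = decodeRgb(pixels, width, height, null);  // null hints = library defaults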
@Nullable
public static Result decodeLuminanceSource(@NonNull final MultiFormatReader reader,
@NonNull final LuminanceSource luminanceSource) throws ReaderException {
try {
return reader.decodeWithState(new BinaryBitmap(new HybridBinarizer(luminanceSource)));
} catch (final NotFoundException e) {
return reader.decodeWithState(
new BinaryBitmap(new HybridBinarizer(luminanceSource.invert())));
} finally {
reader.reset();
}
}
/**
* Decode the data within the viewfinder rectangle, and time how long it took. For efficiency, reuse the same reader
* objects from one decode to the next.
*/
public static Result decodeImage(byte[] data, int width, int height) {
// Process the frame
Result result = null;
try {
Hashtable<DecodeHintType, Object> hints = new Hashtable<DecodeHintType, Object>();
hints.put(DecodeHintType.CHARACTER_SET, "utf-8");
hints.put(DecodeHintType.TRY_HARDER, Boolean.TRUE);
// POSSIBLE_FORMATS expects a collection of formats rather than a single value
hints.put(DecodeHintType.POSSIBLE_FORMATS, EnumSet.of(BarcodeFormat.QR_CODE));
PlanarYUVLuminanceSource source =
new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
/*
 * HybridBinarizer uses a more sophisticated algorithm, but GlobalHistogramBinarizer is
 * noticeably faster in practice.
 *
 * The GlobalHistogram algorithm (http://kuangjianwei.blog.163.com/blog/static/190088953201361015055110/):
 *
 * The key to binarization is picking the threshold between black and white. The image has already
 * been converted to grayscale, so each pixel is a single gray value, and we need a cut-off: values
 * above it count as white (0) and values below it count as black (1).
 * GlobalHistogramBinarizer samples five rows spread evenly over the image height, using the middle
 * four fifths of each row. It builds a histogram with gray value on the X axis and pixel count per
 * gray value on the Y axis, takes the gray value with the most pixels, and then scores every other
 * gray value as its pixel count times the squared distance to that peak, keeping the highest-scoring
 * one. A dividing threshold is then chosen between those two gray values, as close to the middle as
 * possible and preferably at a value with few pixels. Once the threshold is fixed, every pixel in
 * the image is compared against it: gray values below the threshold are black (set to 1 in the new
 * matrix) and the rest are white (0).
 */
BinaryBitmap bitmap1 = new BinaryBitmap(new GlobalHistogramBinarizer(source));
// BinaryBitmap bitmap1 = new BinaryBitmap(new HybridBinarizer(source));
QRCodeReader reader2 = new QRCodeReader();
result = reader2.decode(bitmap1, hints);
} catch (ReaderException e) {
// No QR code was found in this frame; fall through and return null
}
return result;
}
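The comment above weighs GlobalHistogramBinarizer (faster) against HybridBinarizer (more robust to uneven lighting). One way to get both, sketched here as a hypothetical helper rather than anything taken from the snippets above, is to try the hybrid binarizer first and fall back to the histogram one on the same luminance source:
public static Result decodeWithFallback(LuminanceSource source, Map<DecodeHintType, ?> hints) {
    QRCodeReader reader = new QRCodeReader();
    try {
        return reader.decode(new BinaryBitmap(new HybridBinarizer(source)), hints);
    } catch (ReaderException e) {
        // Fall through and retry with the cheaper histogram binarizer.
    } finally {
        reader.reset();
    }
    try {
        return reader.decode(new BinaryBitmap(new GlobalHistogramBinarizer(source)), hints);
    } catch (ReaderException e) {
        return null; // Neither binarization produced a readable QR code.
    } finally {
        reader.reset();
    }
}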