Unable to read the FNC1 character in the first position of a GS1-128 barcode using the ZBar library



I have developed an Android barcode-decoding application that uses the Google Vision library for GS1 DataMatrix and the ZBar library for GS1-128 barcodes. With the ZBar library I am unable to read the FNC1 character in the first position of a GS1-128 barcode.

The ZBar library gives no indication at all of an FNC1 character at the start of the barcode!

Any solutions…

Immediate help would be appreciated…

Below is my ZBar scanner activity:

 @SuppressWarnings("deprecation")
 public class ZBarFirstScannerActivity extends AppCompatActivity{
//TextView tv;
ImageView iv;
LinearLayout ll;
private Camera mCamera;
private CameraPreview mPreview;
private Handler autoFocusHandler;
private ImageScanner scanner;
private boolean barcodeScanned = false;
private boolean previewing = true;
TextView tv;
static {
    System.loadLibrary("iconv");
}
static {
    System.loadLibrary("zbarjni");
}

public void onCreate(Bundle savedInstanceState)
{
    super.onCreate(savedInstanceState);

    setContentView(R.layout.barcode_capture1d);

    tv = (TextView) findViewById(R.id.textVertical);
    tv.setRotation(90);
    initToolbar();

    autoFocusHandler = new Handler();
    mCamera = getCameraInstance();
    // Instance barcode scanner
    scanner = new ImageScanner();
    scanner.setConfig(0, Config.X_DENSITY, 1);
    scanner.setConfig(0, Config.Y_DENSITY, 1);
    scanner.setConfig(Symbol.CODE128, Config.ENABLE,1);
    scanner.setConfig(Symbol.EAN13, Config.ENABLE,1);
    mPreview = new CameraPreview(this, mCamera, previewCb, autoFocusCB);
    FrameLayout preview = (FrameLayout)findViewById(R.id.cameraPreview);
    preview.addView(mPreview);

}
private void initToolbar() {
    final Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);
    final ActionBar actionBar = getSupportActionBar();
    if (actionBar != null) {

        actionBar.setHomeButtonEnabled(true);
        actionBar.setHomeAsUpIndicator(ContextCompat.getDrawable(this, R.drawable.abc_ic_ab_back_mtrl_am_alpha));
        actionBar.setDisplayHomeAsUpEnabled(true);
    }
}
/** A safe way to get an instance of the Camera object. */
public static Camera getCameraInstance()
{
    Camera c = null;
    try
    {
        c = Camera.open();
    } catch (Exception e)
    {
        //nada
    }
    return c;
}
private void releaseCamera()
{
    if (mCamera != null)
    {
        previewing = false;
        mCamera.setPreviewCallback(null);
        mCamera.release();
        mCamera = null;
    }
}
PreviewCallback previewCb = new PreviewCallback()
{
    public void onPreviewFrame(byte[] data, Camera camera)
    {
        Camera.Parameters parameters = camera.getParameters();
        Size size = parameters.getPreviewSize();
        Image barcode = new Image(size.width, size.height, "Y800");
        barcode.setData(data);
        int result = scanner.scanImage(barcode);
        if (result != 0)
        {
            previewing = false;
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            SymbolSet syms = scanner.getResults();
            for (Symbol sym : syms)
            {
                barcodeScanned = true;
                Intent returnIntent = new Intent();
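                // sym.getData() returns only the decoded text; ZBar gives no flag here
                // for a leading FNC1, which is the problem described above.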
                returnIntent.putExtra("BARCODE", sym.getData());
                setResult(MainActivity.BAR_CODE_TYPE_128,returnIntent);
                releaseCamera();
                finish();
                break;
            }
        }
    }
};
// Mimic continuous auto-focusing
AutoFocusCallback autoFocusCB = new AutoFocusCallback()
{
    public void onAutoFocus(boolean success, Camera camera)
    {
        autoFocusHandler.postDelayed(doAutoFocus, 3000);
    }
};
private Runnable doAutoFocus = new Runnable()
{
    public void run()
    {
        if (previewing)
            mCamera.autoFocus(autoFocusCB);
    }
};
public void onPause() {
    super.onPause();
    releaseCamera();
}
public void onResume(){
    super.onResume();
    // Note: constructing a new activity instance here has no effect; the camera
    // released in onPause() is never re-acquired when the activity resumes.
    new ZBarFirstScannerActivity();
}
@Override
public void onBackPressed() {
    releaseCamera();
    finish();
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    if (id == android.R.id.home) {
        onBackPressed();
        return true;
    }
    return super.onOptionsItemSelected(item);
}
}

Below is my Google Vision scanner activity:

public final class GoogleScannerActivity extends AppCompatActivity {
private static final String TAG = "Barcode-reader";
// intent request code to handle updating play services if needed.
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
// constants used to pass extra data in the intent
public static final String AutoFocus = "AutoFocus";
public static final String UseFlash = "UseFlash";
public static final String BarcodeObject = "Barcode";
Bitmap bmp;
FileOutputStream fos = null;
private Camera c;
Switch aSwitch;
private CameraSource mCameraSource;
private CameraSourcePreview mPreview;
private GraphicOverlay<BarcodeGraphic> mGraphicOverlay;
// helper objects for detecting taps and pinches.
private ScaleGestureDetector scaleGestureDetector;
private GestureDetector gestureDetector;
/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.barcode_capture2d);
    initToolbar();
    ActivitySource.caller = this;
    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);
    boolean autoFocus = true;
    boolean useFlash = false;
    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }
    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());
    /*Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom",
            Snackbar.LENGTH_LONG)
            .show();*/
}
private void initToolbar() {
    final Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);
    final ActionBar actionBar = getSupportActionBar();
    if (actionBar != null) {

        actionBar.setHomeButtonEnabled(true);
        actionBar.setHomeAsUpIndicator(ContextCompat.getDrawable(this, R.drawable.abc_ic_ab_back_mtrl_am_alpha));
        actionBar.setDisplayHomeAsUpEnabled(true);
    }
}
private Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters){
    Camera.Size bestSize = null;
    List<Camera.Size> sizeList = parameters.getSupportedPreviewSizes();
    bestSize = sizeList.get(0);
    for(int i = 1; i < sizeList.size(); i++){
        if((sizeList.get(i).width * sizeList.get(i).height) >
                (bestSize.width * bestSize.height)){
            bestSize = sizeList.get(i);
        }
    }
    return bestSize;
}
/**
 * Handles the requesting of the camera permission.  This includes
 * showing a "Snackbar" message of why the permission is needed then
 * sending the request.
 */
private void requestCameraPermission() {
    Log.w(TAG, "Camera permission is not granted. Requesting permission");
    final String[] permissions = new String[]{Manifest.permission.CAMERA};
    if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
            Manifest.permission.CAMERA)) {
        ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
        return;
    }
    final Activity thisActivity = this;
    View.OnClickListener listener = new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            ActivityCompat.requestPermissions(thisActivity, permissions,
                    RC_HANDLE_CAMERA_PERM);
        }
    };
    Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
            Snackbar.LENGTH_INDEFINITE)
            .setAction(R.string.ok, listener)
            .show();
}
@Override
public boolean onTouchEvent(MotionEvent e) {
    boolean b = scaleGestureDetector.onTouchEvent(e);
    boolean c = gestureDetector.onTouchEvent(e);
    return b || c || super.onTouchEvent(e);
}
/**
 * Creates and starts the camera.  Note that this uses a higher resolution in comparison
 * to other detection examples to enable the barcode detector to detect small barcodes
 * at long distances.
 *
 * Suppressing InlinedApi since there is a check that the minimum version is met before using
 * the constant.
 */
@SuppressLint("InlinedApi")
private void createCameraSource(boolean autoFocus, boolean useFlash) {
    Context context = getApplicationContext();
    // A barcode detector is created to track barcodes.  An associated multi-processor instance
    // is set to receive the barcode detection results, track the barcodes, and maintain
    // graphics for each barcode on screen.  The factory is used by the multi-processor to
    // create a separate tracker instance for each barcode.
    BarcodeDetector barcodeDetector = new BarcodeDetector.Builder(context).setBarcodeFormats(Barcode.CODE_128 | Barcode.DATA_MATRIX | Barcode.QR_CODE).build();
    BarcodeTrackerFactory barcodeFactory = new BarcodeTrackerFactory(mGraphicOverlay);
    barcodeDetector.setProcessor(
            new MultiProcessor.Builder<>(barcodeFactory).build());
    if (!barcodeDetector.isOperational()) {
        // Note: The first time that an app using the barcode or face API is installed on a
        // device, GMS will download a native libraries to the device in order to do detection.
        // Usually this completes before the app is run for the first time.  But if that
        // download has not yet completed, then the above call will not detect any barcodes
        // and/or faces.
        //
        // isOperational() can be used to check if the required native libraries are currently
        // available.  The detectors will automatically become operational once the library
        // downloads complete on device.
        Log.w(TAG, "Detector dependencies are not yet available.");
        // Check for low storage.  If there is low storage, the native library will not be
        // downloaded, so detection will not become operational.
        IntentFilter lowstorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
        boolean hasLowStorage = registerReceiver(null, lowstorageFilter) != null;
        if (hasLowStorage) {
            Toast.makeText(this, R.string.low_storage_error, Toast.LENGTH_LONG).show();
            Log.w(TAG, getString(R.string.low_storage_error));
        }
    }
    // Creates and starts the camera.  Note that this uses a higher resolution in comparison
    // to other detection examples to enable the barcode detector to detect small barcodes
    // at long distances.
    CameraSource.Builder builder = new CameraSource.Builder(getApplicationContext(), barcodeDetector)
            .setFacing(CameraSource.CAMERA_FACING_BACK)
            .setRequestedPreviewSize(1100, 844)
            .setRequestedFps(15.0f);
    // make sure that auto focus is an available option
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
        builder = builder.setFocusMode(
                autoFocus ? Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE : null);
    }
    mCameraSource = builder
            .setFlashMode(useFlash ? Camera.Parameters.FLASH_MODE_TORCH : null)
            .build();
}

/**
 * Restarts the camera.
 */
@Override
protected void onResume() {
    super.onResume();
    startCameraSource();
}
/**
 * Stops the camera.
 */
@Override
protected void onPause() {
    super.onPause();
    if (mPreview != null) {
        mPreview.stop();
    }
}
/**
 * Releases the resources associated with the camera source, the associated detectors, and the
 * rest of the processing pipeline.
 */
@Override
protected void onDestroy() {
    super.onDestroy();
    if (mPreview != null) {
        mPreview.release();
    }
}

@Override
public void onRequestPermissionsResult(int requestCode,
                                       @NonNull String[] permissions,
                                       @NonNull int[] grantResults) {
    if (requestCode != RC_HANDLE_CAMERA_PERM) {
        Log.d(TAG, "Got unexpected permission result: " + requestCode);
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        return;
    }
    if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
        Log.d(TAG, "Camera permission granted - initialize the camera source");
        // we have permission, so create the camerasource
        boolean autoFocus = getIntent().getBooleanExtra(AutoFocus,false);
        boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);
        createCameraSource(autoFocus, useFlash);
        return;
    }
    Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
            " Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));
    DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
        public void onClick(DialogInterface dialog, int id) {
            finish();
        }
    };
    AlertDialog.Builder builder = new AlertDialog.Builder(this);
    builder.setTitle("Multitracker sample")
            .setMessage(R.string.no_camera_permission)
            .setPositiveButton(R.string.ok, listener)
            .show();
}
/**
 * Starts or restarts the camera source, if it exists.  If the camera source doesn't exist yet
 * (e.g., because onResume was called before the camera source was created), this will be called
 * again when the camera source is created.
 */
private void startCameraSource() throws SecurityException {
    // check that the device has play services available.
    int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
            getApplicationContext());
    if (code != ConnectionResult.SUCCESS) {
        Dialog dlg =
                GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
        dlg.show();
    }
    if (mCameraSource != null) {
        try {
            mPreview.start(mCameraSource, mGraphicOverlay);
        } catch (IOException e) {
            Log.e(TAG, "Unable to start camera source.", e);
            mCameraSource.release();
            mCameraSource = null;
        }
    }
}
/**
 * onTap is called to capture the oldest barcode currently detected and
 * return it to the caller.
 *
 * @param rawX - the raw position of the tap
 * @param rawY - the raw position of the tap.
 * @return true if the activity is ending.
 */
private boolean onTap(float rawX, float rawY) {
    //TODO: use the tap position to select the barcode.
    BarcodeGraphic graphic = mGraphicOverlay.getFirstGraphic();
    Barcode barcode = null;
    if (graphic != null) {
        barcode = graphic.getBarcode();
        if (barcode != null) {
            Intent data = new Intent();
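            // The whole Barcode parcelable (rawValue, displayValue, format) is handed
            // back to the caller; any leading GS, as discussed below, would therefore
            // appear in barcode.rawValue.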
            data.putExtra(BarcodeObject, barcode);
            setResult(CommonStatusCodes.SUCCESS, data);
            finish();
        }
        else {
            Log.d(TAG, "barcode data is null");
        }
    }
    else {
        Log.d(TAG,"no barcode detected");
    }
    return barcode != null;
}
private class CaptureGestureListener extends GestureDetector.SimpleOnGestureListener {
    @Override
    public boolean onSingleTapConfirmed(MotionEvent e) {
        return onTap(e.getRawX(), e.getRawY()) || super.onSingleTapConfirmed(e);
    }
}
private class ScaleListener implements ScaleGestureDetector.OnScaleGestureListener {
    /**
     * Responds to scaling events for a gesture in progress.
     * Reported by pointer motion.
     *
     * @param detector The detector reporting the event - use this to
     *                 retrieve extended info about event state.
     * @return Whether or not the detector should consider this event
     * as handled. If an event was not handled, the detector
     * will continue to accumulate movement until an event is
     * handled. This can be useful if an application, for example,
     * only wants to update scaling factors if the change is
     * greater than 0.01.
     */
    @Override
    public boolean onScale(ScaleGestureDetector detector) {
        return false;
    }
    /**
     * Responds to the beginning of a scaling gesture. Reported by
     * new pointers going down.
     *
     * @param detector The detector reporting the event - use this to
     *                 retrieve extended info about event state.
     * @return Whether or not the detector should continue recognizing
     * this gesture. For example, if a gesture is beginning
     * with a focal point outside of a region where it makes
     * sense, onScaleBegin() may return false to ignore the
     * rest of the gesture.
     */
    @Override
    public boolean onScaleBegin(ScaleGestureDetector detector) {
        return true;
    }
    /**
     * Responds to the end of a scale gesture. Reported by existing
     * pointers going up.
     * <p/>
     * Once a scale has ended, {@link ScaleGestureDetector#getFocusX()}
     * and {@link ScaleGestureDetector#getFocusY()} will return focal point
     * of the pointers remaining on the screen.
     *
     * @param detector The detector reporting the event - use this to
     *                 retrieve extended info about event state.
     */
    @Override
    public void onScaleEnd(ScaleGestureDetector detector) {
        mCameraSource.doZoom(detector.getScaleFactor());
    }
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    if (id == android.R.id.home) {
        onBackPressed();
        return true;
    }
    return super.onOptionsItemSelected(item);
}
}

When a GS1-128 symbol is scanned, the FNC1 in the first position acts as a flag character indicating that the data is in GS1 Application Identifier standard format and is deliberately omitted from the scanned data, whereas any FNC1 characters used as separators between fields are transmitted as GS (ASCII 29).
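In practice this means the string your scanner hands back contains the embedded separators but not the leading flag. As a minimal sketch (plain Java; the class and method names are hypothetical, and it assumes the reader transmits embedded FNC1 as GS exactly as described above), the GS-delimited groups can be recovered like this; fully parsing Application Identifiers would additionally need a table of AI lengths:

import java.util.ArrayList;
import java.util.List;

// Hypothetical helper: split a decoded GS1-128 string on the GS separator (ASCII 29),
// which stands in for the embedded FNC1 field separators.
public final class Gs1Split {

    private static final char GS = '\u001D'; // ASCII 29

    private Gs1Split() {}

    // Returns the GS-delimited groups of a scanned GS1 string.
    public static List<String> splitOnGs(String scanned) {
        List<String> groups = new ArrayList<>();
        for (String part : scanned.split(String.valueOf(GS))) {
            if (!part.isEmpty()) {
                groups.add(part);
            }
        }
        return groups;
    }

    public static void main(String[] args) {
        // Hypothetical scan: "(10)ABC123" followed by an embedded FNC1, then "(21)12345678"
        String scanned = "10ABC123" + GS + "2112345678";
        System.out.println(splitOnGs(scanned)); // prints [10ABC123, 2112345678]
    }
}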

If your reader is configured to emit symbology identifiers at the start of the scanned data, the implicit leading FNC1 can be inferred: data scanned from a GS1-128 symbol will begin with ]C1 instead of the ]C0 used for plain Code 128.
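If a reader did emit symbology identifiers (which, as noted next, neither library used here does), telling GS1-128 apart from plain Code 128 would reduce to a prefix check. A hypothetical sketch with illustrative method names, following the modifier meanings in the ISO/IEC 15417 excerpt quoted further down:

// Hypothetical sketch only: it assumes the transmitted data is prefixed with a
// Code 128 symbology identifier of the form ]C<m>.
public final class SymbologyId {

    private SymbologyId() {}

    // True if the Code 128 symbology identifier's modifier marks FNC1 in the
    // first or second data position, i.e. GS1-formatted data.
    public static boolean isGs1(String transmitted) {
        if (transmitted == null || transmitted.length() < 3 || !transmitted.startsWith("]C")) {
            return false; // no symbology identifier present
        }
        char modifier = transmitted.charAt(2);
        return modifier == '1' || modifier == '2'; // '0' would mean plain Code 128
    }

    // Strips a leading ]C<m> identifier, if present, leaving only the message.
    public static String stripIdentifier(String transmitted) {
        return (transmitted != null && transmitted.startsWith("]C") && transmitted.length() >= 3)
                ? transmitted.substring(3)
                : transmitted;
    }
}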

Unfortunately it appears that neither the ZBar library nor the Google Vision library can be configured to return symbology identifiers, which is a disappointing limitation. Furthermore, the Google Vision library incorrectly returns a leading GS character to represent the FNC1 in the first position.
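Based on that observed behaviour (and only on it; it is not a documented guarantee), one possible workaround with Google Vision is to treat a CODE_128 result whose rawValue starts with GS (ASCII 29) as GS1-128 and strip the GS before further parsing. A sketch under that assumption:

import com.google.android.gms.vision.barcode.Barcode;

// Workaround sketch: relies on the (undocumented) behaviour described above, namely
// that Google Vision prepends a GS in place of the first-position FNC1.
public final class VisionGs1Heuristic {

    private static final char GS = '\u001D'; // ASCII 29

    private VisionGs1Heuristic() {}

    // True if this looks like a GS1-128 result according to the leading-GS heuristic.
    public static boolean looksLikeGs1_128(Barcode barcode) {
        return barcode != null
                && barcode.format == Barcode.CODE_128
                && barcode.rawValue != null
                && !barcode.rawValue.isEmpty()
                && barcode.rawValue.charAt(0) == GS;
    }

    // Returns the payload with the leading GS removed, if present.
    public static String payload(Barcode barcode) {
        String raw = barcode.rawValue;
        return (raw != null && !raw.isEmpty() && raw.charAt(0) == GS) ? raw.substring(1) : raw;
    }
}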

Reading data in GS1 format is described in detail in this answer.

Specifically, ISO/IEC 15417, the Code 128 barcode symbology specification, says:

"Any application which makes use of Code 128 symbols with FNC1 in the first or second data position should require the transmission of symbology identifiers to be enabled. When FNC1 is used in the first or second position it shall not be represented in the transmitted message, although its presence is indicated by the use of modifier values 1 or 2 respectively in the symbology identifier."
