Huawei Hair Segmentation Returning Null Output

404 views Asked by At

I am trying to run hair segmentation on Android, but it is not working: it returns null results as output, while human body segmentation works perfectly fine.

This is the code.

public class MainActivity extends AppCompatActivity {

MLImageSegmentationAnalyzer analyzer;
Bitmap originialBitmap;
Bitmap foregroundBitmap;
ImageView imageView;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);


    imageView = (ImageView) findViewById(R.id.imageView);

    Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.person);
    bitmap = Bitmap.createScaledBitmap(bitmap, 1000, 1500, true);

    createImageTransactor(bitmap);

}

private void createImageTransactor(Bitmap bitmap) {
    MLImageSegmentationSetting setting = new MLImageSegmentationSetting.Factory()
            .setExact(true)
            .setAnalyzerType(MLImageSegmentationSetting.HAIR_SEG)
            .create();

    this.analyzer = MLAnalyzerFactory.getInstance().getImageSegmentationAnalyzer(setting);
    MLFrame mlFrame = new MLFrame.Creator().setBitmap(bitmap).create();
    Task<MLImageSegmentation> task = this.analyzer.asyncAnalyseFrame(mlFrame);
    task.addOnSuccessListener(new OnSuccessListener<MLImageSegmentation>() {
        @Override
        public void onSuccess(MLImageSegmentation mlImageSegmentationResults) {
            // Transacting logic for segment success.
            if (mlImageSegmentationResults != null) {
                Toast.makeText(MainActivity.this, "Success", Toast.LENGTH_SHORT).show();
                foregroundBitmap = mlImageSegmentationResults.getGrayscale();
                imageView.setImageBitmap(foregroundBitmap);
            } else {
                Toast.makeText(MainActivity.this, "Failure", Toast.LENGTH_SHORT).show();
            }
        }
    }).addOnFailureListener(new OnFailureListener() {
        @Override
        public void onFailure(Exception e) {
            Toast.makeText(MainActivity.this, "Failure", Toast.LENGTH_SHORT).show();

            // Transacting logic for segment failure.
            return;
        }
    });
}


}

And this is the gradle file.

implementation 'com.huawei.hms:ml-computer-vision-cloud:2.0.5.300'
implementation 'com.huawei.hms:ml-computer-vision-segmentation:2.2.0.300'

implementation 'com.huawei.hms:ml-computer-vision-image-segmentation-body-model:2.2.0.300'
implementation 'com.huawei.hms:ml-computer-vision-image-segmentation-multiclass-model:2.2.0.300'
implementation 'com.huawei.hms:ml-computer-vision-image-segmentation-hair-model:2.2.0.300'

But when the code is run, it outputs a null image, and the masks in the segmentation results contain all 0s in the array.

Please let me know what is the issue.

1

There are 1 answers

6
zhangxaochen On

You may refer to the HairSegmentationStillAnalyseActivity class in the sample code:

public class HairSegmentationStillAnalyseActivity extends AppCompatActivity implements View.OnClickListener {
    private static final String TAG = HairSegmentationStillAnalyseActivity.class.getSimpleName();

    private MLImageSegmentationAnalyzer analyzer;

    private ImageView mImageView;

    private Bitmap bitmap;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_image_segmentation_analyse);
        findViewById(R.id.segment_detect).setOnClickListener(this);
        mImageView = findViewById(R.id.image_result);
    }

    @Override
    public void onClick(View v) {
        // Only the detect button is wired to this listener; ignore anything else.
        if (v.getId() == R.id.segment_detect) {
            analyzer();
        }
    }

    /**
     * Builds a hair-segmentation analyzer, feeds it a still image, and hands
     * the asynchronous result to displaySuccess()/displayFailure().
     *
     * Configuration notes:
     *  - setExact(false) selects the speed-priority mode (true = fine mode).
     *  - setAnalyzerType() picks the segmentation model; HAIR_SEG here.
     *  - setScene() only affects BODY_SEG mode, so it is omitted.
     */
    private void analyzer() {
        MLImageSegmentationSetting config = new MLImageSegmentationSetting.Factory()
                .setExact(false)
                .setAnalyzerType(MLImageSegmentationSetting.HAIR_SEG)
                .create();
        analyzer = MLAnalyzerFactory.getInstance().getImageSegmentationAnalyzer(config);
        // Build the MLFrame from a Bitmap; recommended size is larger than 224x224.
        bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.imgseg_foreground);
        MLFrame frame = new MLFrame.Creator().setBitmap(bitmap).create();
        analyzer.asyncAnalyseFrame(frame)
                .addOnSuccessListener(new OnSuccessListener<MLImageSegmentation>() {
                    @Override
                    public void onSuccess(MLImageSegmentation imageSegmentationResult) {
                        // A null result means the engine produced nothing usable.
                        if (imageSegmentationResult == null) {
                            HairSegmentationStillAnalyseActivity.this.displayFailure("imageSegmentationResult is null.");
                        } else {
                            HairSegmentationStillAnalyseActivity.this.displaySuccess(imageSegmentationResult);
                        }
                    }
                })
                .addOnFailureListener(new OnFailureListener() {
                    @Override
                    public void onFailure(Exception e) {
                        // Recognition failed; surface the reason in the log.
                        HairSegmentationStillAnalyseActivity.this.displayFailure(e.getMessage());
                    }
                });
    }

    /**
     * Renders the pixel-level mask returned by the analyzer into the ImageView.
     *
     * @param imageSegmentationResult non-null segmentation result to visualize
     */
    private void displaySuccess(MLImageSegmentation imageSegmentationResult) {
        if (bitmap == null) {
            displayFailure("bitmap is null.");
            return;
        }
        byte[] maskData = imageSegmentationResult.getMasks();
        if (maskData == null) {
            return;
        }
        // Map each mask label to a color, then rebuild a bitmap of the same size.
        int[] colored = byteArrToIntArr(maskData);
        Bitmap rendered = Bitmap.createBitmap(colored, 0, bitmap.getWidth(), bitmap.getWidth(), bitmap.getHeight(), Bitmap.Config.ARGB_8888);

        if (rendered == null) {
            displayFailure("bitmapFore is null.");
        } else {
            mImageView.setImageBitmap(rendered);
        }
    }

    private void displayFailure(String str) {
        Log.e(TAG, str);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release the analyzer's underlying resources when the activity goes away.
        if (analyzer != null) {
            try {
                analyzer.stop();
            } catch (IOException e) {
                Log.e(TAG, "Stop failed: " + e.getMessage());
            }
        }
    }

    /**
     * Translates per-pixel category labels into ARGB colors for display.
     * Labels 1-10 each get a distinct color; anything else is black.
     *
     * @param masks raw label array from MLImageSegmentation.getMasks()
     * @return one ARGB color per input label
     */
    private int[] byteArrToIntArr(byte[] masks) {
        int[] pixels = new int[masks.length];
        for (int i = 0; i < pixels.length; i++) {
            switch (masks[i]) {
                case 1:
                    pixels[i] = Color.WHITE;
                    break;
                case 2:
                    pixels[i] = Color.BLUE;
                    break;
                case 3:
                    pixels[i] = Color.DKGRAY;
                    break;
                case 4:
                    pixels[i] = Color.YELLOW;
                    break;
                case 5:
                    pixels[i] = Color.LTGRAY;
                    break;
                case 6:
                    pixels[i] = Color.CYAN;
                    break;
                case 7:
                    pixels[i] = Color.RED;
                    break;
                case 8:
                    pixels[i] = Color.GRAY;
                    break;
                case 9:
                    pixels[i] = Color.MAGENTA;
                    break;
                case 10:
                    pixels[i] = Color.GREEN;
                    break;
                default:
                    pixels[i] = Color.BLACK;
                    break;
            }
        }
        return pixels;
    }
}