I have made my point cloud using the code from librealsense:
var points = pc.Process(depthFrame).As<Points>();
//float depth = depthFrame.GetDistance(x, y);
//bbox = (287, 23, 86, 320);
// We colorize the depth frame for visualization purposes
var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(frames);
//var org = Cv2.ImRead(colorFrame);
// CopyVertices is extensible, any of these will do:
//var vertices = new float[points.Count * 3];
var vertices = new Intel.RealSense.Math.Vertex[points.Count];
// var vertices = new UnityEngine.Vector3[points.Count];
// var vertices = new System.Numerics.Vector3[points.Count]; // SIMD
// var vertices = new GlmSharp.vec3[points.Count];
//var vertices = new byte[points.Count * 3 * sizeof(float)];
points.CopyVertices(vertices);
And I have converted the point cloud to a Point3DCollection from Media3D:
Point3DCollection pointss = new Point3DCollection();
foreach (var vertex in vertices)
{
    var point3D = new Point3D(vertex.x, vertex.y, vertex.z);
    pointss.Add(point3D);
}
I want to display those points using this markup in the XAML file:
<h:HelixViewport3D Grid.ColumnSpan="1" Grid.Column="1" Margin="2.4,1,0,-0.4">
    <h:DefaultLights/>
    <h:PointsVisual3D Points="{Binding pointss}" Color="Red" Size="2"/>
</h:HelixViewport3D>
But I don't see my point cloud. Is there something wrong with my code? I have added what was given in the answer, but now I get the error "Object reference not set to an instance of an object". The code I am using is below:
namespace Intel.RealSense
{
    /// <summary>
    /// Interaction logic for Window.xaml
    /// </summary>
    public partial class CaptureWindow : System.Windows.Window
    {
        private Pipeline pipeline;
        private Colorizer colorizer;
        private CancellationTokenSource tokenSource = new CancellationTokenSource();
        private Pipeline pipe = new Pipeline();
        private PointCloud pc = new PointCloud();
        private ThresholdFilter threshold;
        private Point3DCollection _pointss;

        public Point3DCollection pointss
        {
            get => _pointss;
            set
            {
                if (_pointss == value)
                    return;
                _pointss = value;
                OnPropertyChanged();
            }
        }

        public event PropertyChangedEventHandler PropertyChanged;

        protected virtual void OnPropertyChanged(string propertyName = null)
        {
            PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
        }

        //static CvTrackbar Track;
        //static OpenCvSharp.Point[][] contours;
        //static HierarchyIndex[] hierarchy;

        static Action<VideoFrame> UpdateImage(Image img)
        {
            var wbmp = img.Source as WriteableBitmap;
            return new Action<VideoFrame>(frame =>
            {
                var rect = new Int32Rect(0, 0, frame.Width, frame.Height);
                wbmp.WritePixels(rect, frame.Data, frame.Stride * frame.Height, frame.Stride);
            });
        }

        public CaptureWindow()
        {
            InitializeComponent();
            ModelImporter import = new ModelImporter();
            try
            {
                Action<VideoFrame> updateDepth;
                Action<VideoFrame> updateColor;

                // The colorizer processing block will be used to visualize the depth frames.
                colorizer = new Colorizer();

                // Create and configure the pipeline to stream color and depth frames.
                pipeline = new Pipeline();
                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480);
                cfg.EnableStream(Stream.Color, Format.Rgb8);
                var pp = pipeline.Start(cfg);
                PipelineProfile selection = pp;
                var depth_stream = selection.GetStream<VideoStreamProfile>(Stream.Depth);
                Intrinsics i = depth_stream.GetIntrinsics();
                float[] fov = i.FOV;

                SetupWindow(pp, out updateDepth, out updateColor);

                Task.Factory.StartNew(() =>
                {
                    while (!tokenSource.Token.IsCancellationRequested)
                    {
                        threshold = new ThresholdFilter();
                        threshold.Options[Option.MinDistance].Value = 0.0F;
                        threshold.Options[Option.MaxDistance].Value = 0.1F;

                        using (var releaser = new FramesReleaser())
                        {
                            using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
                            {
                                var pframes = frames
                                    .ApplyFilter(threshold).DisposeWith(releaser);
                            }
                        }

                        // We wait for the next available FrameSet and use it as a releaser object that tracks
                        // all newly allocated .NET frames and ensures deterministic finalization
                        // at the end of scope.
                        using (var frames = pipeline.WaitForFrames())
                        {
                            var colorFrame = frames.ColorFrame.DisposeWith(frames);
                            var depthFrame = frames.DepthFrame.DisposeWith(frames);
                            var points = pc.Process(depthFrame).As<Points>();
                            //float depth = depthFrame.GetDistance(x, y);
                            //bbox = (287, 23, 86, 320);

                            // We colorize the depth frame for visualization purposes.
                            var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(frames);
                            //var org = Cv2.ImRead(colorFrame);

                            // CopyVertices is extensible, any of these will do:
                            //var vertices = new float[points.Count * 3];
                            var vertices = new Intel.RealSense.Math.Vertex[points.Count];
                            // var vertices = new UnityEngine.Vector3[points.Count];
                            // var vertices = new System.Numerics.Vector3[points.Count]; // SIMD
                            // var vertices = new GlmSharp.vec3[points.Count];
                            //var vertices = new byte[points.Count * 3 * sizeof(float)];
                            points.CopyVertices(vertices);

                            //Point3DCollection pointss = new Point3DCollection();
                            foreach (var vertex in vertices)
                            {
                                var point3D = new Point3D(vertex.x, vertex.y, vertex.z);
                                pointss.Add(point3D);
                            }

                            // Render the frames.
                            Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                            Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                            Dispatcher.Invoke(new Action(() =>
                            {
                                String depth_dev_sn = depthFrame.Sensor.Info[CameraInfo.SerialNumber];
                                txtTimeStamp.Text = depth_dev_sn + " : " + String.Format("{0,-20:0.00}", depthFrame.Timestamp) + "(" + depthFrame.TimestampDomain.ToString() + ")";
                            }));
                            //HelixToolkit.Wpf.
                        }
                    }
                }, tokenSource.Token);
            }
            catch (Exception ex)
            {
                System.Windows.MessageBox.Show(ex.Message);
                System.Windows.Application.Current.Shutdown();
            }
        }

        private void control_Closing(object sender, System.ComponentModel.CancelEventArgs e)
        {
            tokenSource.Cancel();
        }

        private void SetupWindow(PipelineProfile pipelineProfile, out Action<VideoFrame> depth, out Action<VideoFrame> color)
        {
            using (var p = pipelineProfile.GetStream(Stream.Depth).As<VideoStreamProfile>())
                imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            depth = UpdateImage(imgDepth);

            using (var p = pipelineProfile.GetStream(Stream.Color).As<VideoStreamProfile>())
                imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            color = UpdateImage(imgColor);
        }
    }
}
You can only bind to public properties, not to fields, so you have to define it like this:
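A minimal sketch of such a property, reusing the pointss / _pointss names and the OnPropertyChanged helper already shown in the question; initializing the backing field is an addition here so the collection exists before the first frame arrives:

// Backing field, initialized so the collection is never null.
private Point3DCollection _pointss = new Point3DCollection();

public Point3DCollection pointss
{
    get => _pointss;
    set
    {
        if (_pointss == value)
            return;
        _pointss = value;
        OnPropertyChanged(nameof(pointss));
    }
}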
If you want to reassign the collection at runtime, you should also implement INotifyPropertyChanged, otherwise assigning a new collection will not trigger a binding update and the change will not be reflected in the UI.
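A minimal sketch of that wiring, assuming the window itself is used as the binding source (for example by setting DataContext = this; in the constructor, which is not shown in the question) and using [CallerMemberName] so the property name does not have to be passed explicitly; both of those details are assumptions, not part of the original code:

using System.ComponentModel;
using System.Runtime.CompilerServices;

public partial class CaptureWindow : System.Windows.Window, INotifyPropertyChanged
{
    public event PropertyChangedEventHandler PropertyChanged;

    // Raise PropertyChanged; [CallerMemberName] fills in the name of the property that called this.
    protected virtual void OnPropertyChanged([CallerMemberName] string propertyName = null)
    {
        PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
    }
}

Either way, the binding {Binding pointss} can only resolve once the DataContext of the window (or of the HelixViewport3D) refers to the object that exposes the pointss property.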