The file camera option allows you to test the SDK without access to a physical camera. File cameras for each camera model are available in the Sample Data. Each file camera demonstrates a use case within one of the main applications of the corresponding camera model. The example below shows how to create a file camera using the Zivid 2 M70 file camera from the Sample Data.
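As an illustrative sketch (the file name and location of the downloaded file camera are assumptions here), creating a file camera in C++ looks roughly like this:

// Sketch: create a file camera from a .zfc file downloaded from Sample Data.
// Adjust the path to where the file camera is stored on your system.
Zivid::Application zivid;
const auto fileCamera = std::string("FileCameraZivid2M70.zfc"); // assumed file name
auto camera = zivid.createFileCamera(fileCamera);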
We recommend using the Presets, which are available in Zivid Studio and as .yml files (see below).
Presets are designed to work well for most cases right away, making them a great starting point.
If needed, you can easily fine-tune the settings for better results.
You can edit the YAML files in any text editor or code the settings manually.
const auto settingsFile = "Settings.yml";
std::cout << "Loading settings from file: " << settingsFile << std::endl;
const auto settingsFromFile = Zivid::Settings(settingsFile);
std::cout<<"Configuring settings for capture:"<<std::endl;Zivid::Settings2Dsettings2D{Zivid::Settings2D::Sampling::Color::rgb,Zivid::Settings2D::Sampling::Pixel::all,Zivid::Settings2D::Processing::Color::Balance::Blue{1.0},Zivid::Settings2D::Processing::Color::Balance::Green{1.0},Zivid::Settings2D::Processing::Color::Balance::Red{1.0},Zivid::Settings2D::Processing::Color::Gamma{1.0},Zivid::Settings2D::Processing::Color::Experimental::Mode::automatic,};Zivid::Settingssettings{Zivid::Settings::Color{settings2D},Zivid::Settings::Engine::phase,Zivid::Settings::RegionOfInterest::Box::Enabled::yes,Zivid::Settings::RegionOfInterest::Box::PointO{1000,1000,1000},Zivid::Settings::RegionOfInterest::Box::PointA{1000,-1000,1000},Zivid::Settings::RegionOfInterest::Box::PointB{-1000,1000,1000},Zivid::Settings::RegionOfInterest::Box::Extents{-1000,1000},Zivid::Settings::RegionOfInterest::Depth::Enabled::yes,Zivid::Settings::RegionOfInterest::Depth::Range{200,2000},Zivid::Settings::Processing::Filters::Cluster::Removal::Enabled::yes,Zivid::Settings::Processing::Filters::Cluster::Removal::MaxNeighborDistance{10},Zivid::Settings::Processing::Filters::Cluster::Removal::MinArea{100},Zivid::Settings::Processing::Filters::Hole::Repair::Enabled::yes,Zivid::Settings::Processing::Filters::Hole::Repair::HoleSize{0.2},Zivid::Settings::Processing::Filters::Hole::Repair::Strictness{1},Zivid::Settings::Processing::Filters::Noise::Removal::Enabled::yes,Zivid::Settings::Processing::Filters::Noise::Removal::Threshold{7.0},Zivid::Settings::Processing::Filters::Noise::Suppression::Enabled::yes,Zivid::Settings::Processing::Filters::Noise::Repair::Enabled::yes,Zivid::Settings::Processing::Filters::Outlier::Removal::Enabled::yes,Zivid::Settings::Processing::Filters::Outlier::Removal::Threshold{5.0},Zivid::Settings::Processing::Filters::Reflection::Removal::Enabled::yes,Zivid::Settings::Processing::Filters::Reflection::Removal::Mode::global,Zivid::Settings::Processing::Filters::Smoothing::Gaussian::Enabled::yes,Zivid::Settings::Processing::Filters::Smoothing::Gaussian::Sigma{1.5},Zivid::Settings::Processing::Filters::Experimental::ContrastDistortion::Correction::Enabled::yes,Zivid::Settings::Processing::Filters::Experimental::ContrastDistortion::Correction::Strength{0.4},Zivid::Settings::Processing::Filters::Experimental::ContrastDistortion::Removal::Enabled::no,Zivid::Settings::Processing::Filters::Experimental::ContrastDistortion::Removal::Threshold{0.5},Zivid::Settings::Processing::Resampling::Mode::upsample2x2,Zivid::Settings::Diagnostics::Enabled::no,};setSamplingPixel(settings,camera);std::cout<<settings<<std::endl;std::cout<<"Configuring base acquisition with settings same for all HDR acquisition:"<<std::endl;constautobaseAcquisition=Zivid::Settings::Acquisition{};std::cout<<baseAcquisition<<std::endl;constautobaseAquisition2D=Zivid::Settings2D::Acquisition{};std::cout<<"Configuring acquisition settings different for all HDR acquisitions"<<std::endl;autoexposureValues=getExposureValues(camera);conststd::vector<double>aperture=std::get<0>(exposureValues);conststd::vector<double>gain=std::get<1>(exposureValues);conststd::vector<std::chrono::microseconds>exposureTime=std::get<2>(exposureValues);conststd::vector<double>brightness=std::get<3>(exposureValues);for(size_ti=0;i<aperture.size();++i){std::cout<<"Acquisition "<<i+1<<":"<<std::endl;std::cout<<" Exposure Time: "<<exposureTime.at(i).count()<<std::endl;std::cout<<" Aperture: "<<aperture.at(i)<<std::endl;std::cout<<" Gain: "<<gain.at(i)<<std::endl;std::cout<<" Brightness: 
"<<brightness.at(i)<<std::endl;constautoacquisitionSettings=baseAcquisition.copyWith(Zivid::Settings::Acquisition::Aperture{aperture.at(i)},Zivid::Settings::Acquisition::Gain{gain.at(i)},Zivid::Settings::Acquisition::ExposureTime{exposureTime.at(i)},Zivid::Settings::Acquisition::Brightness{brightness.at(i)});settings.acquisitions().emplaceBack(acquisitionSettings);}constautoacquisitionSettings2D=baseAquisition2D.copyWith(Zivid::Settings2D::Acquisition::Aperture{2.83},Zivid::Settings2D::Acquisition::ExposureTime{microseconds{10000}},Zivid::Settings2D::Acquisition::Brightness{1.8},Zivid::Settings2D::Acquisition::Gain{1.0});settings.color().value().acquisitions().emplaceBack(acquisitionSettings2D);
Console.WriteLine("Configuring settings for capture:");varsettings2D=newZivid.NET.Settings2D(){Sampling={Color=Zivid.NET.Settings2D.SamplingGroup.ColorOption.Rgb,Pixel=Zivid.NET.Settings2D.SamplingGroup.PixelOption.All,},Processing={Color={Balance={Blue=1.0,Green=1.0,Red=1.0,},Gamma=1.0,Experimental={Mode=Zivid.NET.Settings2D.ProcessingGroup.ColorGroup.ExperimentalGroup.ModeOption.Automatic},},},};varsettings=newZivid.NET.Settings(){Engine=Zivid.NET.Settings.EngineOption.Phase,RegionOfInterest={Box={Enabled=true,PointO=newZivid.NET.PointXYZ{x=1000,y=1000,z=1000},PointA=newZivid.NET.PointXYZ{x=1000,y=-1000,z=1000},PointB=newZivid.NET.PointXYZ{x=-1000,y=1000,z=1000},Extents=newZivid.NET.Range<double>(-1000,1000),},Depth={Enabled=true,Range=newZivid.NET.Range<double>(200,2000),},},Processing={Filters={Cluster={Removal={Enabled=true,MaxNeighborDistance=10,MinArea=100}},Hole={Repair={Enabled=true,HoleSize=0.2,Strictness=1},},Noise={Removal={Enabled=true,Threshold=7.0},Suppression={Enabled=true},Repair={Enabled=true},},Outlier={Removal={Enabled=true,Threshold=5.0},},Reflection={Removal={Enabled=true,Mode=ReflectionFilterModeOption.Global},},Smoothing={Gaussian={Enabled=true,Sigma=1.5},},Experimental={ContrastDistortion={Correction={Enabled=true,Strength=0.4},Removal={Enabled=true,Threshold=0.5},},},},Resampling={Mode=Zivid.NET.Settings.ProcessingGroup.ResamplingGroup.ModeOption.Upsample2x2},},Diagnostics={Enabled=false},};settings.Color=settings2D;SetSamplingPixel(refsettings,camera);Console.WriteLine(settings);Console.WriteLine("Configuring base acquisition with settings same for all HDR acquisitions:");varbaseAcquisition=newZivid.NET.Settings.Acquisition{};Console.WriteLine(baseAcquisition);varbaseAcquisition2D=newZivid.NET.Settings2D.Acquisition{};Console.WriteLine("Configuring acquisition settings different for all HDR acquisitions:");Tuple<double[],Duration[],double[],double[]>exposureValues=GetExposureValues(camera);double[]aperture=exposureValues.Item1;Duration[]exposureTime=exposureValues.Item2;double[]gain=exposureValues.Item3;double[]brightness=exposureValues.Item4;for(inti=0;i<aperture.Length;i++){Console.WriteLine("Acquisition {0}:",i+1);Console.WriteLine(" Exposure Time: {0}",exposureTime[i].Microseconds);Console.WriteLine(" Aperture: {0}",aperture[i]);Console.WriteLine(" Gain: {0}",gain[i]);Console.WriteLine(" Brightness: {0}",brightness[i]);varacquisitionSettings=baseAcquisition.CopyWith(s=>{s.Aperture=aperture[i];s.ExposureTime=exposureTime[i];s.Gain=gain[i];s.Brightness=brightness[i];});settings.Acquisitions.Add(acquisitionSettings);}varacquisitionSettings2D=baseAcquisition2D.CopyWith(s=>{s.Aperture=2.83;s.ExposureTime=Duration.FromMicroseconds(1000);s.Gain=1.0;s.Brightness=1.8;});settings.Color.Acquisitions.Add(acquisitionSettings2D);
print("Configuring settings for capture:")settings_2d=zivid.Settings2D()settings_2d.sampling.color=zivid.Settings2D.Sampling.Color.rgbsettings_2d.sampling.pixel=zivid.Settings2D.Sampling.Pixel.allsettings_2d.processing.color.balance.red=1.0settings_2d.processing.color.balance.blue=1.0settings_2d.processing.color.balance.green=1.0settings_2d.processing.color.gamma=1.0settings_2d.processing.color.experimental.mode=zivid.Settings2D.Processing.Color.Experimental.Mode.automaticsettings=zivid.Settings()settings.engine=zivid.Settings.Engine.phasesettings.region_of_interest.box.enabled=Truesettings.region_of_interest.box.point_o=[1000,1000,1000]settings.region_of_interest.box.point_a=[1000,-1000,1000]settings.region_of_interest.box.point_b=[-1000,1000,1000]settings.region_of_interest.box.extents=[-1000,1000]settings.region_of_interest.depth.enabled=Truesettings.region_of_interest.depth.range=[200,2000]settings.processing.filters.cluster.removal.enabled=Truesettings.processing.filters.cluster.removal.max_neighbor_distance=10settings.processing.filters.cluster.removal.min_area=100settings.processing.filters.hole.repair.enabled=Truesettings.processing.filters.hole.repair.hole_size=0.2settings.processing.filters.hole.repair.strictness=1settings.processing.filters.noise.removal.enabled=Truesettings.processing.filters.noise.removal.threshold=7.0settings.processing.filters.noise.suppression.enabled=Truesettings.processing.filters.noise.repair.enabled=Truesettings.processing.filters.outlier.removal.enabled=Truesettings.processing.filters.outlier.removal.threshold=5.0settings.processing.filters.reflection.removal.enabled=Truesettings.processing.filters.reflection.removal.mode=(zivid.Settings.Processing.Filters.Reflection.Removal.Mode.global_)settings.processing.filters.smoothing.gaussian.enabled=Truesettings.processing.filters.smoothing.gaussian.sigma=1.5settings.processing.filters.experimental.contrast_distortion.correction.enabled=Truesettings.processing.filters.experimental.contrast_distortion.correction.strength=0.4settings.processing.filters.experimental.contrast_distortion.removal.enabled=Falsesettings.processing.filters.experimental.contrast_distortion.removal.threshold=0.5settings.processing.resampling.mode=zivid.Settings.Processing.Resampling.Mode.upsample2x2settings.diagnostics.enabled=Falsesettings.color=settings_2d_set_sampling_pixel(settings,camera)print(settings)print("Configuring acquisition settings different for all HDR acquisitions")exposure_values=_get_exposure_values(camera)foraperture,gain,exposure_time,brightnessinexposure_values:settings.acquisitions.append(zivid.Settings.Acquisition(aperture=aperture,exposure_time=exposure_time,brightness=brightness,gain=gain,))settings_2d.acquisitions.append(zivid.Settings2D.Acquisition(aperture=2.83,exposure_time=timedelta(microseconds=10000),brightness=1.8,gain=1.0,))
Now we can capture a 2D and 3D image (point cloud with color).
Whether a capture uses a single acquisition or multiple acquisitions (HDR) is determined by the number of acquisitions in the settings.
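As a minimal sketch using the camera and the settings configured above, a combined 2D and 3D capture can be triggered like this:

// Capture a combined 2D+3D frame; one acquisition in the settings gives a single
// capture, several acquisitions give an HDR capture.
const auto frame = camera.capture2D3D(settings);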
The Zivid::Frame contains the point cloud and the color image (stored on compute device memory), as well as the capture and camera information.
The Zivid.NET.Frame contains the point cloud and the color image (stored on compute device memory), as well as the capture and camera information.
The zivid.Frame contains the point cloud and the color image (stored on compute device memory), as well as the capture and camera information.
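For illustration, the point cloud can be accessed from the frame as sketched below (the data stays on the compute device until explicitly copied to host memory):

// Access the point cloud held by the frame.
const auto pointCloud = frame.pointCloud();
std::cout << "Point cloud resolution: " << pointCloud.width() << " x " << pointCloud.height() << std::endl;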
In the next code example, the point cloud is exported to the .ply format.
For other exporting options, see Point Cloud for a list of supported formats.
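The referenced code example is not shown here; a minimal sketch of exporting the frame to .ply (the file name is arbitrary) could look like this:

// Save the frame, including its point cloud, to a PLY file.
const auto dataFile = "Frame.ply";
frame.save(dataFile);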
From a capture2D() you get a Frame2D.
There are two color spaces available for 2D images: linear RGB and sRGB.
The imageRGBA() will return an image in the linear RGB color space.
If you append _SRGB to the function name, the returned image will be in the sRGB color space.
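As a short sketch using the 2D settings configured earlier, capturing and retrieving the image in both color spaces could look like this:

// Capture a 2D frame and get the color image in the two available color spaces.
const auto frame2D = camera.capture2D(settings2D);
const auto imageLinearRGB = frame2D.imageRGBA();    // linear RGB color space
const auto imageSRGB = frame2D.imageRGBA_SRGB();    // sRGB color space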
const auto imageFile = "ImageRGBA_sRGB.png";
std::cout << "Saving 2D color image (sRGB color space) to file: " << imageFile << std::endl;
imageSRGB.save(imageFile);
image_file="ImageRGBA_sRGB.png"print(f"Saving 2D color image (sRGB color space) to file: {image_file}")image_srgb.save(image_file)
We can get the 2D color image directly from the point cloud.
This image will have the same resolution as the point cloud and it will be in the sRGB color space.
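A brief sketch of this, assuming the copyImageRGBA_SRGB() accessor on the point cloud is available in your SDK version:

// Copy the color image from the point cloud; it has the point cloud resolution
// and is in the sRGB color space.
const auto imageFromPointCloud = frame.pointCloud().copyImageRGBA_SRGB();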
We can get the 2D color image from the Frame2D that is part of the Frame object obtained from capture2D3D().
This image will have the resolution given by the 2D settings within the 2D3D settings.
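A minimal sketch of this, assuming frame.frame2D() exposes the embedded Frame2D as in recent SDK versions:

// Get the 2D frame embedded in the combined 2D+3D frame and its sRGB color image.
const auto image2DInFrame3D = frame.frame2D().value().imageRGBA_SRGB();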