package org.openni;
import java.util.Map;
import java.util.HashMap;
import java.util.Iterator;
/**
* The {@link VideoStream} object encapsulates a single video stream from a device. Once created, it
* is used to start data flow from the device, and to read individual frames of data. This is the
* central class used to obtain data in OpenNI. It provides the ability to manually read data in a
* polling loop, as well as providing events and a Listener class that can be used to implement
* event-driven data acquisition.
*
* Aside from the video data frames themselves, the class offers a number of functions used for
* obtaining information about a {@link VideoStream}. Field of view, available video modes, and
* minimum and maximum valid pixel values can all be obtained.
*
* In addition to obtaining data, the {@link VideoStream} object is used to set all configuration
* properties that apply to a specific stream (rather than to an entire device). In particular, it
* is used to control cropping, mirroring, and video modes.
*
* A valid, initialized device that provides the desired stream type is required to create a stream.
*
* Several video streams can be created to stream data from the same sensor. This is useful if
* several components of an application need to read frames separately.
*
* While some device might allow different streams from the same sensor to have different
* configurations, most devices will have a single configuration for the sensor, shared by all
* streams.
*/
public class VideoStream {
/**
 * The {@link CameraSettings} object encapsulates camera settings for a single device. Once
 * created, it is used to get and set auto exposure and auto white balance modes.
 */
public class CameraSettings {
  /**
   * Enable or disable auto exposure for the corresponding sensor.
   *
   * @param enabled true to enable auto exposure, false to disable it
   */
  public void setAutoExposureEnabled(boolean enabled) {
    NativeMethods.checkReturnStatus(NativeMethods.oniStreamSetProperty(mVideoStream.getHandle(),
        NativeMethods.STREAM_PROPERTY_AUTO_EXPOSURE, enabled));
  }

  /**
   * Enable or disable auto white balance for the corresponding sensor.
   *
   * @param enabled true to enable auto white balance, false to disable it
   */
  public void setAutoWhiteBalanceEnabled(boolean enabled) {
    NativeMethods.checkReturnStatus(NativeMethods.oniStreamSetProperty(mVideoStream.getHandle(),
        NativeMethods.STREAM_PROPERTY_AUTO_WHITE_BALANCE, enabled));
  }

  /**
   * Query whether auto exposure is enabled for the corresponding sensor.
   *
   * @return true if auto exposure is currently enabled
   */
  public boolean getAutoExposureEnabled() {
    OutArg<Boolean> val = new OutArg<Boolean>();
    // Fix: the native status was previously ignored here (unlike in the setters),
    // so a failed query would silently yield an unset/stale value instead of
    // surfacing the error through checkReturnStatus.
    NativeMethods.checkReturnStatus(NativeMethods.oniStreamGetBoolProperty(
        mVideoStream.getHandle(), NativeMethods.STREAM_PROPERTY_AUTO_EXPOSURE, val));
    return val.mValue;
  }

  /**
   * Query whether auto white balance is enabled for the corresponding sensor.
   *
   * @return true if auto white balance is currently enabled
   */
  public boolean getAutoWhiteBalanceEnabled() {
    OutArg<Boolean> val = new OutArg<Boolean>();
    // Fix: check the native status, consistent with the setters above.
    NativeMethods.checkReturnStatus(NativeMethods.oniStreamGetBoolProperty(
        mVideoStream.getHandle(), NativeMethods.STREAM_PROPERTY_AUTO_WHITE_BALANCE, val));
    return val.mValue;
  }

  // Instances are created only by the enclosing VideoStream.
  private CameraSettings(VideoStream videoStream) {
    this.mVideoStream = videoStream;
  }

  // Stream whose underlying sensor these settings control.
  private final VideoStream mVideoStream;
}
/**
 * The VideoStream::NewFrameListener interface is provided to allow the implementation of event
 * driven frame reading. To use it, create a class that implements it and override the
 * {@link #onFrameReady(VideoStream)} method. Then, register an instance with an active
 * {@link VideoStream} using the
 * {@link #addNewFrameListener(org.openni.VideoStream.NewFrameListener)} function. Once this is
 * done, the event handler you implemented will be called whenever a new frame becomes available.
 * You may call {@link org.openni.VideoStream#readFrame()} from within the event handler.
 */
public interface NewFrameListener {
  /**
   * Implementing classes should handle newly available frames here.
   *
   * @param stream the stream on which a new frame is ready to be read
   */
  public void onFrameReady(VideoStream stream);
}
/**
 * Creates a stream of frames from a specific sensor type of a specific device. You must supply a
 * reference to a Device that supplies the sensor type requested. You can use
 * {@link org.openni.Device#hasSensor(SensorType)} to check whether a given sensor is available on
 * your target device before calling create().
 *
 * @param device A reference to the {@link Device} you want to create the stream on.
 * @param sensorType The type of sensor the stream should produce data from.
 * @return the newly created stream; callers should eventually call {@link #destroy()} on it.
 */
public static VideoStream create(Device device, SensorType sensorType) {
  VideoStream videoStream = new VideoStream(sensorType);
  // Lazily initialize the shared listener registry under the class lock. The
  // previous unsynchronized null-check could let two concurrent create() calls
  // each install a fresh map, silently dropping listeners registered through
  // the losing map.
  synchronized (VideoStream.class) {
    if (mFrameListeners == null) {
      mFrameListeners = new HashMap<VideoStream, NewFrameListener>();
    }
  }
  NativeMethods.checkReturnStatus(NativeMethods.oniDeviceCreateStream(device.getHandle(),
      sensorType.toNative(), videoStream));
  return videoStream;
}
/**
 * Destroy this stream. This function is currently called automatically by the destructor, but it
 * is considered a best practice for applications to manually call this function on any
 * {@link VideoStream} that they call create() for.
 */
public void destroy() {
  // Release the native stream together with its registered frame callback.
  NativeMethods.oniStreamDestroy(getHandle(), mCallbackHandle);
  // Clear the handle so later calls through getHandle() do not pass a
  // dangling native pointer.
  mStreamHandle = 0;
}
/**
 * Provides the {@link SensorInfo} object associated with the sensor that is producing this
 * {@link VideoStream}.
 *
 * {@link SensorInfo} is useful primarily as a means of learning which video modes are valid for
 * this VideoStream.
 *
 * @return SensorInfo object associated with the sensor providing this stream.
 */
public final SensorInfo getSensorInfo() {
  SensorInfo sensorInfo = NativeMethods.oniStreamGetSensorInfo(getHandle());
  return sensorInfo;
}
/**
 * Starts data generation from this video stream. The native status is routed through
 * {@link NativeMethods#checkReturnStatus}, so a failure to start is reported rather than
 * silently ignored.
 */
public void start() {
  NativeMethods.checkReturnStatus(NativeMethods.oniStreamStart(getHandle()));
}
/**
 * Stops data generation from this video stream.
 */
public void stop() {
  // NOTE(review): unlike start(), the native status is not checked here, so a
  // failing stop is silent — presumably intentional (stop should not throw
  // during teardown), but confirm against upstream intent.
  NativeMethods.oniStreamStop(getHandle());
}
/**
 * Reads the next frame from this video stream, delivered as a {@link VideoFrameRef}. This is the
 * primary method for manually obtaining frames of video data. If no new frame is available yet,
 * the call blocks until one arrives. To avoid blocking, use
 * {@link VideoStream.NewFrameListener} to implement an event driven architecture, or use
 * {@link org.openni.OpenNI#waitForAnyStream(java.util.List, int)} to wait for new frames from
 * several streams.
 *
 * @return VideoFrameRef object which holds the data of the new frame.
 */
public VideoFrameRef readFrame() {
  OutArg<VideoFrameRef> result = new OutArg<VideoFrameRef>();
  NativeMethods.checkReturnStatus(NativeMethods.oniStreamReadFrame(getHandle(), result));
  return result.mValue;
}
/**
 * Adds a new Listener to receive this VideoStream onNewFrame event. See
 * {@link VideoStream.NewFrameListener} for more information on implementing an event driven
 * frame reading architecture.
 *
 * Note: the registry holds a single listener per stream, so registering a second listener
 * replaces the first rather than adding to it.
 *
 * @param streamListener Object which implements {@link VideoStream.NewFrameListener} that will
 *        respond to this event.
 */
public void addNewFrameListener(NewFrameListener streamListener) {
  // Guard against a null registry: create() normally initializes it, but a
  // stream obtained any other way would previously have thrown
  // NullPointerException here.
  synchronized (VideoStream.class) {
    if (mFrameListeners == null) {
      mFrameListeners = new HashMap<VideoStream, NewFrameListener>();
    }
  }
  mFrameListeners.put(this, streamListener);
}
/**
* Removes a Listener from this video stream list. The listener removed will no longer receive new
* frame events from this stream.
*
* @param streamListener Object of the listener to be removed.
*/
public void removeNewFrameListener(NewFrameListener streamListener) {
    // -----------------------------------------------------------------------
    // NOTE(review): everything from this point to the end of the file was
    // destroyed by extraction noise (unrelated web-page text). The body of
    // removeNewFrameListener below is restored minimally, consistent with the
    // java.util.Iterator / Map.Entry imports at the top of the file. The
    // remaining members of VideoStream (video-mode / cropping / mirroring
    // accessors, getHandle(), mStreamHandle, mCallbackHandle, and the static
    // mFrameListeners declaration) are still missing and must be recovered
    // from the upstream OpenNI2 sources.
    Iterator<Map.Entry<VideoStream, NewFrameListener>> iter =
        mFrameListeners.entrySet().iterator();
    while (iter.hasNext()) {
      Map.Entry<VideoStream, NewFrameListener> pair = iter.next();
      if (pair.getValue() == streamListener) {
        // Iterator.remove() is the only safe way to drop an entry while
        // iterating over the map.
        iter.remove();
        return;
      }
    }
  }
}