Spaces:
Sleeping
Sleeping
d305997a2345792a43d4207bad32f62e30aebb4204508478c416af54d9c81106
Browse files- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/shared_memory.h +36 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/shift.h +78 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/split.h +65 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/teli.h +122 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/test.h +65 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/thread.h +111 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/transform.h +90 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/truncate.h +71 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/unpack.h +83 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/uvc.h +114 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/uvc_mediafoundation.h +93 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/v4l.h +128 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/iostream_operators.h +132 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/stream_encoder_factory.h +22 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/stream_info.h +100 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video.h +169 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_exception.h +29 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_help.h +21 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_input.h +140 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_interface.h +184 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_output.h +93 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_output_interface.h +52 -0
- third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_record_repeat.h +31 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/debayer.cpp +392 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/deinterlace.cpp +121 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/depthsense.cpp +656 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/ffmpeg.cpp +419 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/ffmpeg_convert.cpp +158 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/ffmpeg_output.cpp +341 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/firewire.cpp +987 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/gamma.cpp +375 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/images.cpp +315 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/images_out.cpp +141 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/join.cpp +620 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/json.cpp +98 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/merge.cpp +182 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/mjpeg.cpp +152 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/openni.cpp +315 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/openni2.cpp +715 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/pack.cpp +277 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/pango.cpp +255 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/pango_video_output.cpp +319 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/pleora.cpp +756 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/realsense.cpp +115 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/realsense2.cpp +129 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/shared_memory.cpp +114 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/shift.cpp +253 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/split.cpp +180 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/teli.cpp +563 -0
- third-party/DPVO/Pangolin/components/pango_video/src/drivers/test.cpp +128 -0
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/shared_memory.h
ADDED
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#pragma once
|
2 |
+
|
3 |
+
#include <pangolin/video/video_interface.h>
|
4 |
+
#include <pangolin/utils/posix/condition_variable.h>
|
5 |
+
#include <pangolin/utils/posix/shared_memory_buffer.h>
|
6 |
+
|
7 |
+
#include <memory>
|
8 |
+
#include <vector>
|
9 |
+
|
10 |
+
namespace pangolin
|
11 |
+
{
|
12 |
+
|
13 |
+
class SharedMemoryVideo : public VideoInterface
|
14 |
+
{
|
15 |
+
public:
|
16 |
+
SharedMemoryVideo(size_t w, size_t h, std::string pix_fmt,
|
17 |
+
const std::shared_ptr<SharedMemoryBufferInterface>& shared_memory,
|
18 |
+
const std::shared_ptr<ConditionVariableInterface>& buffer_full);
|
19 |
+
~SharedMemoryVideo();
|
20 |
+
|
21 |
+
size_t SizeBytes() const;
|
22 |
+
const std::vector<StreamInfo>& Streams() const;
|
23 |
+
void Start();
|
24 |
+
void Stop();
|
25 |
+
bool GrabNext(unsigned char *image, bool wait);
|
26 |
+
bool GrabNewest(unsigned char *image, bool wait);
|
27 |
+
|
28 |
+
private:
|
29 |
+
PixelFormat _fmt;
|
30 |
+
size_t _frame_size;
|
31 |
+
std::vector<StreamInfo> _streams;
|
32 |
+
std::shared_ptr<SharedMemoryBufferInterface> _shared_memory;
|
33 |
+
std::shared_ptr<ConditionVariableInterface> _buffer_full;
|
34 |
+
};
|
35 |
+
|
36 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/shift.h
ADDED
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <pangolin/video/video_interface.h>
|
31 |
+
#include <set>
|
32 |
+
|
33 |
+
namespace pangolin
|
34 |
+
{
|
35 |
+
|
36 |
+
// Video class that debayers its video input using the given method.
|
37 |
+
class PANGOLIN_EXPORT ShiftVideo : public VideoInterface, public VideoFilterInterface
|
38 |
+
{
|
39 |
+
public:
|
40 |
+
ShiftVideo(std::unique_ptr<VideoInterface>& videoin,
|
41 |
+
const std::map<size_t, int>& shift_right_bits,
|
42 |
+
const std::map<size_t, uint32_t>& masks);
|
43 |
+
~ShiftVideo();
|
44 |
+
|
45 |
+
//! Implement VideoInput::Start()
|
46 |
+
void Start();
|
47 |
+
|
48 |
+
//! Implement VideoInput::Stop()
|
49 |
+
void Stop();
|
50 |
+
|
51 |
+
//! Implement VideoInput::SizeBytes()
|
52 |
+
size_t SizeBytes() const;
|
53 |
+
|
54 |
+
//! Implement VideoInput::Streams()
|
55 |
+
const std::vector<StreamInfo>& Streams() const;
|
56 |
+
|
57 |
+
//! Implement VideoInput::GrabNext()
|
58 |
+
bool GrabNext( uint8_t* image, bool wait = true );
|
59 |
+
|
60 |
+
//! Implement VideoInput::GrabNewest()
|
61 |
+
bool GrabNewest( uint8_t* image, bool wait = true );
|
62 |
+
|
63 |
+
std::vector<VideoInterface*>& InputStreams();
|
64 |
+
|
65 |
+
protected:
|
66 |
+
void Process(uint8_t* buffer_out, const uint8_t* buffer_in);
|
67 |
+
|
68 |
+
std::unique_ptr<VideoInterface> src;
|
69 |
+
std::vector<VideoInterface*> videoin;
|
70 |
+
std::vector<StreamInfo> streams;
|
71 |
+
size_t size_bytes;
|
72 |
+
std::unique_ptr<uint8_t[]> buffer;
|
73 |
+
const std::map<size_t, int> shift_right_bits;
|
74 |
+
const std::map<size_t, uint32_t> masks;
|
75 |
+
std::set<std::string> formats_supported;
|
76 |
+
};
|
77 |
+
|
78 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/split.h
ADDED
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2013 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <vector>
|
31 |
+
#include <pangolin/video/video_interface.h>
|
32 |
+
|
33 |
+
namespace pangolin
|
34 |
+
{
|
35 |
+
|
36 |
+
class PANGOLIN_EXPORT SplitVideo
|
37 |
+
: public VideoInterface, public VideoFilterInterface
|
38 |
+
{
|
39 |
+
public:
|
40 |
+
SplitVideo(std::unique_ptr<VideoInterface>& videoin, const std::vector<StreamInfo>& streams);
|
41 |
+
|
42 |
+
~SplitVideo();
|
43 |
+
|
44 |
+
size_t SizeBytes() const;
|
45 |
+
|
46 |
+
const std::vector<StreamInfo>& Streams() const;
|
47 |
+
|
48 |
+
void Start();
|
49 |
+
|
50 |
+
void Stop();
|
51 |
+
|
52 |
+
bool GrabNext( unsigned char* image, bool wait = true );
|
53 |
+
|
54 |
+
bool GrabNewest( unsigned char* image, bool wait = true );
|
55 |
+
|
56 |
+
std::vector<VideoInterface*>& InputStreams();
|
57 |
+
|
58 |
+
protected:
|
59 |
+
std::unique_ptr<VideoInterface> src;
|
60 |
+
std::vector<VideoInterface*> videoin;
|
61 |
+
std::vector<StreamInfo> streams;
|
62 |
+
};
|
63 |
+
|
64 |
+
|
65 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/teli.h
ADDED
@@ -0,0 +1,122 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2015 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <pangolin/video/video_interface.h>
|
31 |
+
#include <pangolin/utils/timer.h>
|
32 |
+
|
33 |
+
#if defined(_WIN_)
|
34 |
+
# define WIN32_LEAN_AND_MEAN
|
35 |
+
# include <windows.h>
|
36 |
+
#endif
|
37 |
+
|
38 |
+
#include <TeliCamApi.h>
|
39 |
+
|
40 |
+
namespace pangolin
|
41 |
+
{
|
42 |
+
|
43 |
+
// Video class that outputs test video signal.
|
44 |
+
class PANGOLIN_EXPORT TeliVideo : public VideoInterface, public VideoPropertiesInterface,
|
45 |
+
public BufferAwareVideoInterface, public GenicamVideoInterface
|
46 |
+
{
|
47 |
+
public:
|
48 |
+
TeliVideo(const Params &p);
|
49 |
+
~TeliVideo();
|
50 |
+
|
51 |
+
Params OpenCameraAndGetRemainingParameters(Params ¶ms);
|
52 |
+
|
53 |
+
//! Implement VideoInput::Start()
|
54 |
+
void Start();
|
55 |
+
|
56 |
+
//! Implement VideoInput::Stop()
|
57 |
+
void Stop();
|
58 |
+
|
59 |
+
//! Implement VideoInput::SizeBytes()
|
60 |
+
size_t SizeBytes() const;
|
61 |
+
|
62 |
+
//! Implement VideoInput::Streams()
|
63 |
+
const std::vector<StreamInfo>& Streams() const;
|
64 |
+
|
65 |
+
//! Implement VideoInput::GrabNext()
|
66 |
+
bool GrabNext( unsigned char* image, bool wait = true );
|
67 |
+
|
68 |
+
//! Implement VideoInput::GrabNewest()
|
69 |
+
bool GrabNewest( unsigned char* image, bool wait = true );
|
70 |
+
|
71 |
+
inline Teli::CAM_HANDLE GetCameraHandle() {
|
72 |
+
return cam;
|
73 |
+
}
|
74 |
+
|
75 |
+
inline Teli::CAM_STRM_HANDLE GetCameraStreamHandle() {
|
76 |
+
return strm;
|
77 |
+
}
|
78 |
+
|
79 |
+
bool GetParameter(const std::string& name, std::string& result);
|
80 |
+
|
81 |
+
bool SetParameter(const std::string& name, const std::string& value);
|
82 |
+
|
83 |
+
//! Returns number of available frames
|
84 |
+
uint32_t AvailableFrames() const;
|
85 |
+
|
86 |
+
//! Drops N frames in the queue starting from the oldest
|
87 |
+
//! returns false if less than n frames arae available
|
88 |
+
bool DropNFrames(uint32_t n);
|
89 |
+
|
90 |
+
//! Access JSON properties of device
|
91 |
+
const picojson::value& DeviceProperties() const;
|
92 |
+
|
93 |
+
//! Access JSON properties of most recently captured frame
|
94 |
+
const picojson::value& FrameProperties() const;
|
95 |
+
|
96 |
+
void PopulateEstimatedCenterCaptureTime(pangolin::basetime host_reception_time);
|
97 |
+
|
98 |
+
protected:
|
99 |
+
void Initialise();
|
100 |
+
void InitPangoDeviceProperties();
|
101 |
+
void SetDeviceParams(const Params &p);
|
102 |
+
void SetNodeValStr(Teli::CAM_HANDLE cam, Teli::CAM_NODE_HANDLE node, std::string node_str, std::string val_str);
|
103 |
+
|
104 |
+
std::vector<StreamInfo> streams;
|
105 |
+
size_t size_bytes;
|
106 |
+
|
107 |
+
Teli::CAM_HANDLE cam;
|
108 |
+
Teli::CAM_STRM_HANDLE strm;
|
109 |
+
|
110 |
+
#ifdef _WIN_
|
111 |
+
HANDLE hStrmCmpEvt;
|
112 |
+
#endif
|
113 |
+
#ifdef _LINUX_
|
114 |
+
Teli::SIGNAL_HANDLE hStrmCmpEvt;
|
115 |
+
#endif
|
116 |
+
double transfer_bandwidth_gbps;
|
117 |
+
int exposure_us;
|
118 |
+
picojson::value device_properties;
|
119 |
+
picojson::value frame_properties;
|
120 |
+
};
|
121 |
+
|
122 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/test.h
ADDED
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2013 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <pangolin/video/video_interface.h>
|
31 |
+
|
32 |
+
namespace pangolin
|
33 |
+
{
|
34 |
+
|
35 |
+
// Video class that outputs test video signal.
|
36 |
+
class PANGOLIN_EXPORT TestVideo : public VideoInterface
|
37 |
+
{
|
38 |
+
public:
|
39 |
+
TestVideo(size_t w, size_t h, size_t n, std::string pix_fmt);
|
40 |
+
~TestVideo();
|
41 |
+
|
42 |
+
//! Implement VideoInput::Start()
|
43 |
+
void Start() override;
|
44 |
+
|
45 |
+
//! Implement VideoInput::Stop()
|
46 |
+
void Stop() override;
|
47 |
+
|
48 |
+
//! Implement VideoInput::SizeBytes()
|
49 |
+
size_t SizeBytes() const override;
|
50 |
+
|
51 |
+
//! Implement VideoInput::Streams()
|
52 |
+
const std::vector<StreamInfo>& Streams() const override;
|
53 |
+
|
54 |
+
//! Implement VideoInput::GrabNext()
|
55 |
+
bool GrabNext( unsigned char* image, bool wait = true ) override;
|
56 |
+
|
57 |
+
//! Implement VideoInput::GrabNewest()
|
58 |
+
bool GrabNewest( unsigned char* image, bool wait = true ) override;
|
59 |
+
|
60 |
+
protected:
|
61 |
+
std::vector<StreamInfo> streams;
|
62 |
+
size_t size_bytes;
|
63 |
+
};
|
64 |
+
|
65 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/thread.h
ADDED
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <memory>
|
31 |
+
#include <pangolin/video/video_interface.h>
|
32 |
+
#include <pangolin/utils/fix_size_buffer_queue.h>
|
33 |
+
|
34 |
+
namespace pangolin
|
35 |
+
{
|
36 |
+
|
37 |
+
|
38 |
+
// Video class that creates a thread that keeps pulling frames and processing from its children.
|
39 |
+
class PANGOLIN_EXPORT ThreadVideo : public VideoInterface, public VideoPropertiesInterface,
|
40 |
+
public BufferAwareVideoInterface, public VideoFilterInterface
|
41 |
+
{
|
42 |
+
public:
|
43 |
+
ThreadVideo(std::unique_ptr<VideoInterface>& videoin, size_t num_buffers, const std::string& name);
|
44 |
+
~ThreadVideo();
|
45 |
+
|
46 |
+
//! Implement VideoInput::Start()
|
47 |
+
void Start();
|
48 |
+
|
49 |
+
//! Implement VideoInput::Stop()
|
50 |
+
void Stop();
|
51 |
+
|
52 |
+
//! Implement VideoInput::SizeBytes()
|
53 |
+
size_t SizeBytes() const;
|
54 |
+
|
55 |
+
//! Implement VideoInput::Streams()
|
56 |
+
const std::vector<StreamInfo>& Streams() const;
|
57 |
+
|
58 |
+
//! Implement VideoInput::GrabNext()
|
59 |
+
bool GrabNext( unsigned char* image, bool wait = true );
|
60 |
+
|
61 |
+
//! Implement VideoInput::GrabNewest()
|
62 |
+
bool GrabNewest( unsigned char* image, bool wait = true );
|
63 |
+
|
64 |
+
const picojson::value& DeviceProperties() const;
|
65 |
+
|
66 |
+
const picojson::value& FrameProperties() const;
|
67 |
+
|
68 |
+
uint32_t AvailableFrames() const;
|
69 |
+
|
70 |
+
bool DropNFrames(uint32_t n);
|
71 |
+
|
72 |
+
void operator()();
|
73 |
+
|
74 |
+
std::vector<VideoInterface*>& InputStreams();
|
75 |
+
|
76 |
+
protected:
|
77 |
+
struct GrabResult
|
78 |
+
{
|
79 |
+
GrabResult(const size_t buffer_size)
|
80 |
+
: return_status(false),
|
81 |
+
buffer(new unsigned char[buffer_size])
|
82 |
+
{
|
83 |
+
}
|
84 |
+
|
85 |
+
// No copy constructor.
|
86 |
+
GrabResult(const GrabResult& o) = delete;
|
87 |
+
|
88 |
+
// Default move constructor
|
89 |
+
GrabResult(GrabResult&& o) = default;
|
90 |
+
|
91 |
+
bool return_status;
|
92 |
+
std::unique_ptr<unsigned char[]> buffer;
|
93 |
+
picojson::value frame_properties;
|
94 |
+
};
|
95 |
+
|
96 |
+
std::unique_ptr<VideoInterface> src;
|
97 |
+
std::vector<VideoInterface*> videoin;
|
98 |
+
|
99 |
+
bool quit_grab_thread;
|
100 |
+
FixSizeBuffersQueue<GrabResult> queue;
|
101 |
+
|
102 |
+
std::condition_variable cv;
|
103 |
+
std::mutex cvMtx;
|
104 |
+
std::thread grab_thread;
|
105 |
+
std::string thread_name;
|
106 |
+
|
107 |
+
mutable picojson::value device_properties;
|
108 |
+
picojson::value frame_properties;
|
109 |
+
};
|
110 |
+
|
111 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/transform.h
ADDED
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <pangolin/video/video_interface.h>
|
31 |
+
|
32 |
+
namespace pangolin
|
33 |
+
{
|
34 |
+
|
35 |
+
enum class TransformOptions
|
36 |
+
{
|
37 |
+
None, FlipX, FlipY, FlipXY,
|
38 |
+
Transpose, RotateCW, RotateCCW,
|
39 |
+
};
|
40 |
+
|
41 |
+
// Video class that transforms its video input using the specified method.
|
42 |
+
class PANGOLIN_EXPORT TransformVideo :
|
43 |
+
public VideoInterface,
|
44 |
+
public VideoFilterInterface,
|
45 |
+
public BufferAwareVideoInterface
|
46 |
+
{
|
47 |
+
public:
|
48 |
+
TransformVideo(std::unique_ptr<VideoInterface>& videoin, const std::vector<TransformOptions>& flips);
|
49 |
+
~TransformVideo();
|
50 |
+
|
51 |
+
//! Implement VideoInput::Start()
|
52 |
+
void Start();
|
53 |
+
|
54 |
+
//! Implement VideoInput::Stop()
|
55 |
+
void Stop();
|
56 |
+
|
57 |
+
//! Implement VideoInput::SizeBytes()
|
58 |
+
size_t SizeBytes() const;
|
59 |
+
|
60 |
+
//! Implement VideoInput::Streams()
|
61 |
+
const std::vector<StreamInfo>& Streams() const;
|
62 |
+
|
63 |
+
//! Implement VideoInput::GrabNext()
|
64 |
+
bool GrabNext( unsigned char* image, bool wait = true );
|
65 |
+
|
66 |
+
//! Implement VideoInput::GrabNewest()
|
67 |
+
bool GrabNewest( unsigned char* image, bool wait = true );
|
68 |
+
|
69 |
+
//! Implement VideoFilterInterface method
|
70 |
+
std::vector<VideoInterface*>& InputStreams();
|
71 |
+
|
72 |
+
uint32_t AvailableFrames() const;
|
73 |
+
|
74 |
+
bool DropNFrames(uint32_t n);
|
75 |
+
|
76 |
+
protected:
|
77 |
+
void Process(unsigned char* image, const unsigned char* buffer);
|
78 |
+
|
79 |
+
std::unique_ptr<VideoInterface> videoin;
|
80 |
+
std::vector<VideoInterface*> inputs;
|
81 |
+
std::vector<StreamInfo> streams;
|
82 |
+
std::vector<TransformOptions> flips;
|
83 |
+
size_t size_bytes;
|
84 |
+
unsigned char* buffer;
|
85 |
+
|
86 |
+
picojson::value device_properties;
|
87 |
+
picojson::value frame_properties;
|
88 |
+
};
|
89 |
+
|
90 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/truncate.h
ADDED
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2013 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <vector>
|
31 |
+
#include <pangolin/video/video_interface.h>
|
32 |
+
|
33 |
+
namespace pangolin
|
34 |
+
{
|
35 |
+
|
36 |
+
class PANGOLIN_EXPORT TruncateVideo
|
37 |
+
: public VideoInterface, public VideoFilterInterface
|
38 |
+
{
|
39 |
+
public:
|
40 |
+
TruncateVideo(std::unique_ptr<VideoInterface>& videoin, size_t begin, size_t end);
|
41 |
+
|
42 |
+
~TruncateVideo();
|
43 |
+
|
44 |
+
size_t SizeBytes() const;
|
45 |
+
|
46 |
+
const std::vector<StreamInfo>& Streams() const;
|
47 |
+
|
48 |
+
void Start();
|
49 |
+
|
50 |
+
void Stop();
|
51 |
+
|
52 |
+
bool GrabNext( unsigned char* image, bool wait = true );
|
53 |
+
|
54 |
+
bool GrabNewest( unsigned char* image, bool wait = true );
|
55 |
+
|
56 |
+
std::vector<VideoInterface*>& InputStreams();
|
57 |
+
|
58 |
+
protected:
|
59 |
+
std::unique_ptr<VideoInterface> src;
|
60 |
+
std::vector<VideoInterface*> videoin;
|
61 |
+
std::vector<StreamInfo> streams;
|
62 |
+
|
63 |
+
size_t begin;
|
64 |
+
size_t end;
|
65 |
+
size_t next_frame_to_grab;
|
66 |
+
|
67 |
+
inline VideoPlaybackInterface* GetVideoPlaybackInterface(){ return dynamic_cast<VideoPlaybackInterface*>(src.get()); }
|
68 |
+
};
|
69 |
+
|
70 |
+
|
71 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/unpack.h
ADDED
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <pangolin/video/video_interface.h>
|
31 |
+
|
32 |
+
namespace pangolin
|
33 |
+
{
|
34 |
+
|
35 |
+
// Video class that debayers its video input using the given method.
|
36 |
+
class PANGOLIN_EXPORT UnpackVideo :
|
37 |
+
public VideoInterface,
|
38 |
+
public VideoFilterInterface,
|
39 |
+
public BufferAwareVideoInterface
|
40 |
+
{
|
41 |
+
public:
|
42 |
+
UnpackVideo(std::unique_ptr<VideoInterface>& videoin, PixelFormat new_fmt);
|
43 |
+
~UnpackVideo();
|
44 |
+
|
45 |
+
//! Implement VideoInput::Start()
|
46 |
+
void Start();
|
47 |
+
|
48 |
+
//! Implement VideoInput::Stop()
|
49 |
+
void Stop();
|
50 |
+
|
51 |
+
//! Implement VideoInput::SizeBytes()
|
52 |
+
size_t SizeBytes() const;
|
53 |
+
|
54 |
+
//! Implement VideoInput::Streams()
|
55 |
+
const std::vector<StreamInfo>& Streams() const;
|
56 |
+
|
57 |
+
//! Implement VideoInput::GrabNext()
|
58 |
+
bool GrabNext( unsigned char* image, bool wait = true );
|
59 |
+
|
60 |
+
//! Implement VideoInput::GrabNewest()
|
61 |
+
bool GrabNewest( unsigned char* image, bool wait = true );
|
62 |
+
|
63 |
+
//! Implement VideoFilterInterface method
|
64 |
+
std::vector<VideoInterface*>& InputStreams();
|
65 |
+
|
66 |
+
uint32_t AvailableFrames() const;
|
67 |
+
|
68 |
+
bool DropNFrames(uint32_t n);
|
69 |
+
|
70 |
+
protected:
|
71 |
+
void Process(unsigned char* image, const unsigned char* buffer);
|
72 |
+
|
73 |
+
std::unique_ptr<VideoInterface> src;
|
74 |
+
std::vector<VideoInterface*> videoin;
|
75 |
+
std::vector<StreamInfo> streams;
|
76 |
+
size_t size_bytes;
|
77 |
+
unsigned char* buffer;
|
78 |
+
|
79 |
+
picojson::value device_properties;
|
80 |
+
picojson::value frame_properties;
|
81 |
+
};
|
82 |
+
|
83 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/uvc.h
ADDED
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2011 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <pangolin/video/video_interface.h>
|
31 |
+
#include <pangolin/utils/timer.h>
|
32 |
+
|
33 |
+
#ifdef _MSC_VER
|
34 |
+
// Define missing timeval struct
|
35 |
+
typedef struct timeval {
|
36 |
+
long tv_sec;
|
37 |
+
long tv_usec;
|
38 |
+
} timeval;
|
39 |
+
#endif // _MSC_VER
|
40 |
+
|
41 |
+
#include <libuvc/libuvc.h>
|
42 |
+
|
43 |
+
namespace pangolin
|
44 |
+
{
|
45 |
+
|
46 |
+
class PANGOLIN_EXPORT UvcVideo : public VideoInterface, public VideoUvcInterface, public VideoPropertiesInterface
|
47 |
+
{
|
48 |
+
public:
|
49 |
+
UvcVideo(int vendor_id, int product_id, const char* sn, int deviceid, int width, int height, int fps);
|
50 |
+
~UvcVideo();
|
51 |
+
|
52 |
+
void InitDevice(int vid, int pid, const char* sn, int deviceid, int width, int height, int fps);
|
53 |
+
void DeinitDevice();
|
54 |
+
|
55 |
+
//! Implement VideoInput::Start()
|
56 |
+
void Start();
|
57 |
+
|
58 |
+
//! Implement VideoInput::Stop()
|
59 |
+
void Stop();
|
60 |
+
|
61 |
+
//! Implement VideoInput::SizeBytes()
|
62 |
+
size_t SizeBytes() const;
|
63 |
+
|
64 |
+
//! Implement VideoInput::Streams()
|
65 |
+
const std::vector<StreamInfo>& Streams() const;
|
66 |
+
|
67 |
+
//! Implement VideoInput::GrabNext()
|
68 |
+
bool GrabNext( unsigned char* image, bool wait = true );
|
69 |
+
|
70 |
+
//! Implement VideoInput::GrabNewest()
|
71 |
+
bool GrabNewest( unsigned char* image, bool wait = true );
|
72 |
+
|
73 |
+
//! Implement VideoUvcInterface::GetCtrl()
|
74 |
+
int IoCtrl(uint8_t unit, uint8_t ctrl, unsigned char* data, int len, UvcRequestCode req_code);
|
75 |
+
|
76 |
+
//! Implement VideoUvcInterface::GetExposure()
|
77 |
+
bool GetExposure(int& exp_us);
|
78 |
+
|
79 |
+
//! Implement VideoUvcInterface::SetExposure()
|
80 |
+
bool SetExposure(int exp_us);
|
81 |
+
|
82 |
+
//! Implement VideoUvcInterface::GetGain()
|
83 |
+
bool GetGain(float& gain);
|
84 |
+
|
85 |
+
//! Implement VideoUvcInterface::SetGain()
|
86 |
+
bool SetGain(float gain);
|
87 |
+
|
88 |
+
//! Access JSON properties of device
|
89 |
+
const picojson::value& DeviceProperties() const;
|
90 |
+
|
91 |
+
//! Access JSON properties of most recently captured frame
|
92 |
+
const picojson::value& FrameProperties() const;
|
93 |
+
|
94 |
+
protected:
|
95 |
+
void InitPangoDeviceProperties();
|
96 |
+
static uvc_error_t FindDevice(
|
97 |
+
uvc_context_t *ctx, uvc_device_t **dev,
|
98 |
+
int vid, int pid, const char *sn, int device_id);
|
99 |
+
|
100 |
+
std::vector<StreamInfo> streams;
|
101 |
+
size_t size_bytes;
|
102 |
+
|
103 |
+
uvc_context* ctx_;
|
104 |
+
uvc_device* dev_;
|
105 |
+
uvc_device_handle* devh_;
|
106 |
+
uvc_stream_handle* strm_;
|
107 |
+
uvc_stream_ctrl_t ctrl_;
|
108 |
+
uvc_frame_t* frame_;
|
109 |
+
picojson::value device_properties;
|
110 |
+
picojson::value frame_properties;
|
111 |
+
bool is_streaming;
|
112 |
+
};
|
113 |
+
|
114 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/uvc_mediafoundation.h
ADDED
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#pragma once
|
2 |
+
|
3 |
+
#include <pangolin/video/video_interface.h>
|
4 |
+
|
5 |
+
struct IMFActivate;
|
6 |
+
struct IMFMediaSource;
|
7 |
+
struct IMFSourceReader;
|
8 |
+
struct IBaseFilter;
|
9 |
+
struct IKsControl;
|
10 |
+
|
11 |
+
namespace pangolin
|
12 |
+
{
|
13 |
+
|
14 |
+
class AsyncSourceReader;
|
15 |
+
class PANGOLIN_EXPORT UvcMediaFoundationVideo
|
16 |
+
: public pangolin::VideoInterface, public pangolin::VideoUvcInterface, public pangolin::VideoPropertiesInterface
|
17 |
+
{
|
18 |
+
public:
|
19 |
+
UvcMediaFoundationVideo(int vendorId, int productId, const std::string& instanceId, size_t width, size_t height, int fps);
|
20 |
+
~UvcMediaFoundationVideo();
|
21 |
+
|
22 |
+
//! Implement VideoInput::Start()
|
23 |
+
void Start();
|
24 |
+
|
25 |
+
//! Implement VideoInput::Stop()
|
26 |
+
void Stop();
|
27 |
+
|
28 |
+
//! Implement VideoInput::SizeBytes()
|
29 |
+
size_t SizeBytes() const;
|
30 |
+
|
31 |
+
//! Implement VideoInput::Streams()
|
32 |
+
const std::vector<pangolin::StreamInfo>& Streams() const;
|
33 |
+
|
34 |
+
//! Implement VideoInput::GrabNext()
|
35 |
+
bool GrabNext(unsigned char* image, bool wait = true);
|
36 |
+
|
37 |
+
//! Implement VideoInput::GrabNewest()
|
38 |
+
bool GrabNewest(unsigned char* image, bool wait = true);
|
39 |
+
|
40 |
+
//! Implement VideoUvcInterface::GetCtrl()
|
41 |
+
int IoCtrl(uint8_t unit, uint8_t ctrl, unsigned char* data, int len, pangolin::UvcRequestCode req_code);
|
42 |
+
|
43 |
+
//! Implement VideoUvcInterface::GetExposure()
|
44 |
+
bool GetExposure(int& exp_us);
|
45 |
+
|
46 |
+
//! Implement VideoUvcInterface::SetExposure()
|
47 |
+
bool SetExposure(int exp_us);
|
48 |
+
|
49 |
+
//! Implement VideoUvcInterface::GetGain()
|
50 |
+
bool GetGain(float& gain);
|
51 |
+
|
52 |
+
//! Implement VideoUvcInterface::SetGain()
|
53 |
+
bool SetGain(float gain);
|
54 |
+
|
55 |
+
//! Access JSON properties of device
|
56 |
+
const picojson::value& DeviceProperties() const;
|
57 |
+
|
58 |
+
//! Access JSON properties of most recently captured frame
|
59 |
+
const picojson::value& FrameProperties() const;
|
60 |
+
|
61 |
+
protected:
|
62 |
+
bool FindDevice(int vendorId, int productId, const std::string& instanceId);
|
63 |
+
void InitDevice(size_t width, size_t height, bool async);
|
64 |
+
void DeinitDevice();
|
65 |
+
void PopulateGainControls();
|
66 |
+
|
67 |
+
static bool DeviceMatches(const std::wstring& symLink, int vendorId, int productId, std::wstring& instanceId);
|
68 |
+
static bool SymLinkIDMatches(const std::wstring& symLink, const wchar_t* idStr, int id);
|
69 |
+
|
70 |
+
std::vector<pangolin::StreamInfo> streams;
|
71 |
+
size_t size_bytes;
|
72 |
+
|
73 |
+
IMFMediaSource* mediaSource;
|
74 |
+
AsyncSourceReader* asyncSourceReader;
|
75 |
+
IMFSourceReader* sourceReader;
|
76 |
+
IBaseFilter* baseFilter;
|
77 |
+
IKsControl* ksControl;
|
78 |
+
DWORD ksControlNodeId;
|
79 |
+
IAMCameraControl* camera_control;
|
80 |
+
IAMVideoProcAmp* video_control;
|
81 |
+
|
82 |
+
long gainCamMin;
|
83 |
+
long gainCamMax;
|
84 |
+
long gainCamDefault;
|
85 |
+
|
86 |
+
const float gainApiMin = 1.0f;
|
87 |
+
const float gainApiMax = 15.5f;
|
88 |
+
int64_t expected_fps;
|
89 |
+
|
90 |
+
picojson::value device_properties;
|
91 |
+
picojson::value frame_properties;
|
92 |
+
};
|
93 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/drivers/v4l.h
ADDED
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2011 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <pangolin/video/video_interface.h>
|
31 |
+
|
32 |
+
#include <asm/types.h>
|
33 |
+
#include <linux/videodev2.h>
|
34 |
+
|
35 |
+
namespace pangolin
|
36 |
+
{
|
37 |
+
|
38 |
+
typedef enum {
|
39 |
+
IO_METHOD_READ,
|
40 |
+
IO_METHOD_MMAP,
|
41 |
+
IO_METHOD_USERPTR,
|
42 |
+
} io_method;
|
43 |
+
|
44 |
+
struct buffer {
|
45 |
+
void* start;
|
46 |
+
size_t length;
|
47 |
+
};
|
48 |
+
|
49 |
+
class PANGOLIN_EXPORT V4lVideo : public VideoInterface, public VideoUvcInterface, public VideoPropertiesInterface
|
50 |
+
{
|
51 |
+
public:
|
52 |
+
V4lVideo(const char* dev_name, uint32_t period, io_method io = IO_METHOD_MMAP, unsigned iwidth=0, unsigned iheight=0, unsigned v4l_format=V4L2_PIX_FMT_YUYV);
|
53 |
+
~V4lVideo();
|
54 |
+
|
55 |
+
//! Implement VideoInput::Start()
|
56 |
+
void Start();
|
57 |
+
|
58 |
+
//! Implement VideoInput::Stop()
|
59 |
+
void Stop();
|
60 |
+
|
61 |
+
//! Implement VideoInput::SizeBytes()
|
62 |
+
size_t SizeBytes() const;
|
63 |
+
|
64 |
+
//! Implement VideoInput::Streams()
|
65 |
+
const std::vector<StreamInfo>& Streams() const;
|
66 |
+
|
67 |
+
//! Implement VideoInput::GrabNext()
|
68 |
+
bool GrabNext( unsigned char* image, bool wait = true );
|
69 |
+
|
70 |
+
//! Implement VideoInput::GrabNewest()
|
71 |
+
bool GrabNewest( unsigned char* image, bool wait = true );
|
72 |
+
|
73 |
+
//! Implement VideoUvcInterface::IoCtrl()
|
74 |
+
int IoCtrl(uint8_t unit, uint8_t ctrl, unsigned char* data, int len, UvcRequestCode req_code);
|
75 |
+
|
76 |
+
bool GetExposure(int& exp_us);
|
77 |
+
|
78 |
+
bool SetExposure(int exp_us);
|
79 |
+
|
80 |
+
bool GetGain(float& gain);
|
81 |
+
|
82 |
+
bool SetGain(float gain);
|
83 |
+
|
84 |
+
int GetFileDescriptor() const{
|
85 |
+
return fd;
|
86 |
+
}
|
87 |
+
|
88 |
+
//! Access JSON properties of device
|
89 |
+
const picojson::value& DeviceProperties() const;
|
90 |
+
|
91 |
+
//! Access JSON properties of most recently captured frame
|
92 |
+
const picojson::value& FrameProperties() const;
|
93 |
+
|
94 |
+
protected:
|
95 |
+
void InitPangoDeviceProperties();
|
96 |
+
|
97 |
+
|
98 |
+
int ReadFrame(unsigned char* image, bool wait = true);
|
99 |
+
void Mainloop();
|
100 |
+
|
101 |
+
void init_read(unsigned int buffer_size);
|
102 |
+
void init_mmap(const char* dev_name);
|
103 |
+
void init_userp(const char* dev_name, unsigned int buffer_size);
|
104 |
+
|
105 |
+
void init_device(const char* dev_name, unsigned iwidth, unsigned iheight, unsigned ifps, unsigned v4l_format = V4L2_PIX_FMT_YUYV, v4l2_field field = V4L2_FIELD_INTERLACED);
|
106 |
+
void uninit_device();
|
107 |
+
|
108 |
+
void open_device(const char* dev_name);
|
109 |
+
void close_device();
|
110 |
+
|
111 |
+
std::vector<StreamInfo> streams;
|
112 |
+
|
113 |
+
io_method io;
|
114 |
+
int fd;
|
115 |
+
buffer* buffers;
|
116 |
+
unsigned int n_buffers;
|
117 |
+
bool running;
|
118 |
+
unsigned width;
|
119 |
+
unsigned height;
|
120 |
+
float fps;
|
121 |
+
size_t image_size;
|
122 |
+
uint32_t period;
|
123 |
+
|
124 |
+
picojson::value device_properties;
|
125 |
+
picojson::value frame_properties;
|
126 |
+
};
|
127 |
+
|
128 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/iostream_operators.h
ADDED
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2015 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <iostream>
|
31 |
+
#include <cctype>
|
32 |
+
|
33 |
+
#include <pangolin/video/video_exception.h>
|
34 |
+
#include <pangolin/utils/file_utils.h>
|
35 |
+
#include <pangolin/video/stream_info.h>
|
36 |
+
|
37 |
+
namespace pangolin
|
38 |
+
{
|
39 |
+
|
40 |
+
struct PANGOLIN_EXPORT Point
|
41 |
+
{
|
42 |
+
inline Point() : x(0), y(0) {}
|
43 |
+
inline Point(size_t x, size_t y) : x(x), y(y) {}
|
44 |
+
size_t x;
|
45 |
+
size_t y;
|
46 |
+
};
|
47 |
+
|
48 |
+
typedef Point ImageDim;
|
49 |
+
|
50 |
+
struct PANGOLIN_EXPORT ImageRoi
|
51 |
+
{
|
52 |
+
inline ImageRoi() : x(0), y(0), w(0), h(0) {}
|
53 |
+
inline ImageRoi(size_t x, size_t y, size_t w, size_t h) : x(x), y(y), w(w), h(h) {}
|
54 |
+
size_t x; size_t y;
|
55 |
+
size_t w; size_t h;
|
56 |
+
};
|
57 |
+
|
58 |
+
inline std::istream& operator>> (std::istream &is, ImageDim &dim)
|
59 |
+
{
|
60 |
+
if(std::isdigit(is.peek()) ) {
|
61 |
+
// Expect 640x480, 640*480, ...
|
62 |
+
is >> dim.x; is.get(); is >> dim.y;
|
63 |
+
}else{
|
64 |
+
// Expect 'VGA', 'QVGA', etc
|
65 |
+
std::string sdim;
|
66 |
+
is >> sdim;
|
67 |
+
ToUpper(sdim);
|
68 |
+
|
69 |
+
if( !sdim.compare("QQVGA") ) {
|
70 |
+
dim = ImageDim(160,120);
|
71 |
+
}else if( !sdim.compare("HQVGA") ) {
|
72 |
+
dim = ImageDim(240,160);
|
73 |
+
}else if( !sdim.compare("QVGA") ) {
|
74 |
+
dim = ImageDim(320,240);
|
75 |
+
}else if( !sdim.compare("WQVGA") ) {
|
76 |
+
dim = ImageDim(360,240);
|
77 |
+
}else if( !sdim.compare("HVGA") ) {
|
78 |
+
dim = ImageDim(480,320);
|
79 |
+
}else if( !sdim.compare("VGA") ) {
|
80 |
+
dim = ImageDim(640,480);
|
81 |
+
}else if( !sdim.compare("WVGA") ) {
|
82 |
+
dim = ImageDim(720,480);
|
83 |
+
}else if( !sdim.compare("SVGA") ) {
|
84 |
+
dim = ImageDim(800,600);
|
85 |
+
}else if( !sdim.compare("DVGA") ) {
|
86 |
+
dim = ImageDim(960,640);
|
87 |
+
}else if( !sdim.compare("WSVGA") ) {
|
88 |
+
dim = ImageDim(1024,600);
|
89 |
+
}else{
|
90 |
+
throw VideoException("Unrecognised image-size string.");
|
91 |
+
}
|
92 |
+
}
|
93 |
+
return is;
|
94 |
+
}
|
95 |
+
|
96 |
+
inline std::istream& operator>> (std::istream &is, ImageRoi &roi)
|
97 |
+
{
|
98 |
+
is >> roi.x; is.get(); is >> roi.y; is.get();
|
99 |
+
is >> roi.w; is.get(); is >> roi.h;
|
100 |
+
return is;
|
101 |
+
}
|
102 |
+
|
103 |
+
inline std::istream& operator>> (std::istream &is, PixelFormat& fmt)
|
104 |
+
{
|
105 |
+
std::string sfmt;
|
106 |
+
is >> sfmt;
|
107 |
+
fmt = PixelFormatFromString(sfmt);
|
108 |
+
return is;
|
109 |
+
}
|
110 |
+
|
111 |
+
inline std::istream& operator>> (std::istream &is, Image<unsigned char>& img)
|
112 |
+
{
|
113 |
+
size_t offset;
|
114 |
+
is >> offset; is.get();
|
115 |
+
img.ptr = (unsigned char*)(offset);
|
116 |
+
is >> img.w; is.get();
|
117 |
+
is >> img.h; is.get();
|
118 |
+
is >> img.pitch;
|
119 |
+
return is;
|
120 |
+
}
|
121 |
+
|
122 |
+
inline std::istream& operator>> (std::istream &is, StreamInfo &stream)
|
123 |
+
{
|
124 |
+
PixelFormat fmt;
|
125 |
+
Image<unsigned char> img_offset;
|
126 |
+
is >> img_offset; is.get();
|
127 |
+
is >> fmt;
|
128 |
+
stream = StreamInfo(fmt, img_offset);
|
129 |
+
return is;
|
130 |
+
}
|
131 |
+
|
132 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/stream_encoder_factory.h
ADDED
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#pragma once
|
2 |
+
|
3 |
+
#include <memory>
|
4 |
+
|
5 |
+
#include <pangolin/image/image_io.h>
|
6 |
+
|
7 |
+
namespace pangolin {
|
8 |
+
|
9 |
+
using ImageEncoderFunc = std::function<void(std::ostream&, const Image<unsigned char>&)>;
|
10 |
+
using ImageDecoderFunc = std::function<TypedImage(std::istream&)>;
|
11 |
+
|
12 |
+
class StreamEncoderFactory
|
13 |
+
{
|
14 |
+
public:
|
15 |
+
static StreamEncoderFactory& I();
|
16 |
+
|
17 |
+
ImageEncoderFunc GetEncoder(const std::string& encoder_spec, const PixelFormat& fmt);
|
18 |
+
|
19 |
+
ImageDecoderFunc GetDecoder(const std::string& encoder_spec, const PixelFormat& fmt);
|
20 |
+
};
|
21 |
+
|
22 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/stream_info.h
ADDED
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2011 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <pangolin/image/image.h>
|
31 |
+
#include <pangolin/image/pixel_format.h>
|
32 |
+
|
33 |
+
namespace pangolin {
|
34 |
+
|
35 |
+
class PANGOLIN_EXPORT StreamInfo
|
36 |
+
{
|
37 |
+
public:
|
38 |
+
inline StreamInfo()
|
39 |
+
: fmt(PixelFormatFromString("GRAY8")) {}
|
40 |
+
|
41 |
+
inline StreamInfo(PixelFormat fmt, const Image<unsigned char> img_offset )
|
42 |
+
: fmt(fmt), img_offset(img_offset) {}
|
43 |
+
|
44 |
+
inline StreamInfo(PixelFormat fmt, size_t w, size_t h, size_t pitch, unsigned char* offset = 0)
|
45 |
+
: fmt(fmt), img_offset(offset,w,h,pitch) {}
|
46 |
+
|
47 |
+
//! Format representing how image is laid out in memory
|
48 |
+
inline const PixelFormat &PixFormat() const { return fmt; }
|
49 |
+
|
50 |
+
//! Image width in pixels
|
51 |
+
inline size_t Width() const { return img_offset.w; }
|
52 |
+
|
53 |
+
//! Image height in pixels
|
54 |
+
inline size_t Height() const { return img_offset.h; }
|
55 |
+
|
56 |
+
inline double Aspect() const { return (double)Width() / (double)Height(); }
|
57 |
+
|
58 |
+
//! Pitch: Number of bytes between one image row and the next
|
59 |
+
inline size_t Pitch() const { return img_offset.pitch; }
|
60 |
+
|
61 |
+
//! Number of contiguous bytes in memory that the image occupies
|
62 |
+
inline size_t RowBytes() const {
|
63 |
+
// Row size without padding
|
64 |
+
return (fmt.bpp*img_offset.w)/8;
|
65 |
+
}
|
66 |
+
|
67 |
+
//! Returns true iff image contains padding or stridded access
|
68 |
+
//! This implies that the image data is not contiguous in memory.
|
69 |
+
inline bool IsPitched() const {
|
70 |
+
return Pitch() != RowBytes();
|
71 |
+
}
|
72 |
+
|
73 |
+
//! Number of contiguous bytes in memory that the image occupies
|
74 |
+
inline size_t SizeBytes() const {
|
75 |
+
return (img_offset.h-1) * img_offset.pitch + RowBytes();
|
76 |
+
}
|
77 |
+
|
78 |
+
//! Offset in bytes relative to start of frame buffer
|
79 |
+
inline unsigned char* Offset() const { return img_offset.ptr; }
|
80 |
+
|
81 |
+
//! Return Image wrapper around raw base pointer
|
82 |
+
inline Image<unsigned char> StreamImage(unsigned char* base_ptr) const {
|
83 |
+
Image<unsigned char> img = img_offset;
|
84 |
+
img.ptr += (size_t)base_ptr;
|
85 |
+
return img;
|
86 |
+
}
|
87 |
+
|
88 |
+
//! Return Image wrapper around raw base pointer
|
89 |
+
inline const Image<unsigned char> StreamImage(const unsigned char* base_ptr) const {
|
90 |
+
Image<unsigned char> img = img_offset;
|
91 |
+
img.ptr += (size_t)base_ptr;
|
92 |
+
return img;
|
93 |
+
}
|
94 |
+
|
95 |
+
protected:
|
96 |
+
PixelFormat fmt;
|
97 |
+
Image<unsigned char> img_offset;
|
98 |
+
};
|
99 |
+
|
100 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video.h
ADDED
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2011 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <pangolin/utils/uri.h>
|
31 |
+
#include <pangolin/video/video_exception.h>
|
32 |
+
#include <pangolin/video/video_interface.h>
|
33 |
+
#include <pangolin/video/video_output_interface.h>
|
34 |
+
|
35 |
+
namespace pangolin
|
36 |
+
{
|
37 |
+
|
38 |
+
//! Open Video Interface from string specification (as described in this files header)
|
39 |
+
PANGOLIN_EXPORT
|
40 |
+
std::unique_ptr<VideoInterface> OpenVideo(const std::string& uri);
|
41 |
+
|
42 |
+
//! Open Video Interface from Uri specification
|
43 |
+
PANGOLIN_EXPORT
|
44 |
+
std::unique_ptr<VideoInterface> OpenVideo(const Uri& uri);
|
45 |
+
|
46 |
+
//! Open VideoOutput Interface from string specification (as described in this files header)
|
47 |
+
PANGOLIN_EXPORT
|
48 |
+
std::unique_ptr<VideoOutputInterface> OpenVideoOutput(const std::string& str_uri);
|
49 |
+
|
50 |
+
//! Open VideoOutput Interface from Uri specification
|
51 |
+
PANGOLIN_EXPORT
|
52 |
+
std::unique_ptr<VideoOutputInterface> OpenVideoOutput(const Uri& uri);
|
53 |
+
|
54 |
+
//! Create vector of matching interfaces either through direct cast or filter interface.
|
55 |
+
template<typename T>
|
56 |
+
std::vector<T*> FindMatchingVideoInterfaces( VideoInterface& video )
|
57 |
+
{
|
58 |
+
std::vector<T*> matches;
|
59 |
+
|
60 |
+
T* vid = dynamic_cast<T*>(&video);
|
61 |
+
if(vid) {
|
62 |
+
matches.push_back(vid);
|
63 |
+
}
|
64 |
+
|
65 |
+
VideoFilterInterface* vidf = dynamic_cast<VideoFilterInterface*>(&video);
|
66 |
+
if(vidf) {
|
67 |
+
std::vector<T*> fmatches = vidf->FindMatchingStreams<T>();
|
68 |
+
matches.insert(matches.begin(), fmatches.begin(), fmatches.end());
|
69 |
+
}
|
70 |
+
|
71 |
+
return matches;
|
72 |
+
}
|
73 |
+
|
74 |
+
template<typename T>
|
75 |
+
T* FindFirstMatchingVideoInterface( VideoInterface& video )
|
76 |
+
{
|
77 |
+
T* vid = dynamic_cast<T*>(&video);
|
78 |
+
if(vid) {
|
79 |
+
return vid;
|
80 |
+
}
|
81 |
+
|
82 |
+
VideoFilterInterface* vidf = dynamic_cast<VideoFilterInterface*>(&video);
|
83 |
+
if(vidf) {
|
84 |
+
std::vector<T*> fmatches = vidf->FindMatchingStreams<T>();
|
85 |
+
if(fmatches.size()) {
|
86 |
+
return fmatches[0];
|
87 |
+
}
|
88 |
+
}
|
89 |
+
|
90 |
+
return 0;
|
91 |
+
}
|
92 |
+
|
93 |
+
inline
|
94 |
+
picojson::value GetVideoFrameProperties(VideoInterface* video)
|
95 |
+
{
|
96 |
+
VideoPropertiesInterface* pi = dynamic_cast<VideoPropertiesInterface*>(video);
|
97 |
+
VideoFilterInterface* fi = dynamic_cast<VideoFilterInterface*>(video);
|
98 |
+
|
99 |
+
if(pi) {
|
100 |
+
return pi->FrameProperties();
|
101 |
+
}else if(fi){
|
102 |
+
if(fi->InputStreams().size() == 1) {
|
103 |
+
return GetVideoFrameProperties(fi->InputStreams()[0]);
|
104 |
+
}else if(fi->InputStreams().size() > 0){
|
105 |
+
picojson::value streams;
|
106 |
+
|
107 |
+
for(size_t i=0; i< fi->InputStreams().size(); ++i) {
|
108 |
+
const picojson::value dev_props = GetVideoFrameProperties(fi->InputStreams()[i]);
|
109 |
+
if(dev_props.contains("streams")) {
|
110 |
+
const picojson::value& dev_streams = dev_props["streams"];
|
111 |
+
for(size_t j=0; j < dev_streams.size(); ++j) {
|
112 |
+
streams.push_back(dev_streams[j]);
|
113 |
+
}
|
114 |
+
}else{
|
115 |
+
streams.push_back(dev_props);
|
116 |
+
}
|
117 |
+
}
|
118 |
+
|
119 |
+
if(streams.size() > 1) {
|
120 |
+
picojson::value json = streams[0];
|
121 |
+
json["streams"] = streams;
|
122 |
+
return json;
|
123 |
+
}else{
|
124 |
+
return streams[0];
|
125 |
+
}
|
126 |
+
}
|
127 |
+
}
|
128 |
+
return picojson::value();
|
129 |
+
}
|
130 |
+
|
131 |
+
inline
|
132 |
+
picojson::value GetVideoDeviceProperties(VideoInterface* video)
|
133 |
+
{
|
134 |
+
VideoPropertiesInterface* pi = dynamic_cast<VideoPropertiesInterface*>(video);
|
135 |
+
VideoFilterInterface* fi = dynamic_cast<VideoFilterInterface*>(video);
|
136 |
+
|
137 |
+
if(pi) {
|
138 |
+
return pi->DeviceProperties();
|
139 |
+
}else if(fi){
|
140 |
+
if(fi->InputStreams().size() == 1) {
|
141 |
+
return GetVideoDeviceProperties(fi->InputStreams()[0]);
|
142 |
+
}else if(fi->InputStreams().size() > 0){
|
143 |
+
picojson::value streams;
|
144 |
+
|
145 |
+
for(size_t i=0; i< fi->InputStreams().size(); ++i) {
|
146 |
+
const picojson::value dev_props = GetVideoDeviceProperties(fi->InputStreams()[i]);
|
147 |
+
if(dev_props.contains("streams")) {
|
148 |
+
const picojson::value& dev_streams = dev_props["streams"];
|
149 |
+
for(size_t j=0; j < dev_streams.size(); ++j) {
|
150 |
+
streams.push_back(dev_streams[j]);
|
151 |
+
}
|
152 |
+
}else{
|
153 |
+
streams.push_back(dev_props);
|
154 |
+
}
|
155 |
+
}
|
156 |
+
|
157 |
+
if(streams.size() > 1) {
|
158 |
+
picojson::value json = streams[0];
|
159 |
+
json["streams"] = streams;
|
160 |
+
return json;
|
161 |
+
}else{
|
162 |
+
return streams[0];
|
163 |
+
}
|
164 |
+
}
|
165 |
+
}
|
166 |
+
return picojson::value();
|
167 |
+
}
|
168 |
+
|
169 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_exception.h
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#pragma once
|
2 |
+
|
3 |
+
#include <exception>
|
4 |
+
#include <pangolin/platform.h>
|
5 |
+
#include <string>
|
6 |
+
|
7 |
+
namespace pangolin {
|
8 |
+
|
9 |
+
struct PANGOLIN_EXPORT VideoException : std::exception
|
10 |
+
{
|
11 |
+
VideoException(std::string str) : desc(str) {}
|
12 |
+
VideoException(std::string str, std::string detail) {
|
13 |
+
desc = str + "\n\t" + detail;
|
14 |
+
}
|
15 |
+
~VideoException() throw() {}
|
16 |
+
const char* what() const throw() { return desc.c_str(); }
|
17 |
+
std::string desc;
|
18 |
+
};
|
19 |
+
|
20 |
+
struct PANGOLIN_EXPORT VideoExceptionNoKnownHandler : public VideoException
|
21 |
+
{
|
22 |
+
VideoExceptionNoKnownHandler(const std::string& scheme)
|
23 |
+
: VideoException("No known video handler for URI '" + scheme + "'")
|
24 |
+
{
|
25 |
+
}
|
26 |
+
};
|
27 |
+
|
28 |
+
}
|
29 |
+
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_help.h
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#pragma once
|
2 |
+
|
3 |
+
#include <iostream>
|
4 |
+
#include <pangolin/factory/factory_help.h>
|
5 |
+
|
6 |
+
namespace pangolin {
|
7 |
+
|
8 |
+
/// Print to \p out supported pixel format codes
|
9 |
+
/// \p color whether ANSI Color codes should be used for formatting
|
10 |
+
void PrintPixelFormats(std::ostream& out = std::cout, bool color = true);
|
11 |
+
|
12 |
+
/// Print to \p out general Video URL usage and registered VideoFactories
|
13 |
+
/// \p out the stream to stream the help message to
|
14 |
+
/// \p scheme_filter a constraint on schemes to print, or empty if all should be listed
|
15 |
+
/// \p level the level of detail to use when printing (see enum above)
|
16 |
+
void VideoHelp(
|
17 |
+
std::ostream& out = std::cout, const std::string& scheme_filter="",
|
18 |
+
HelpVerbosity verbosity = HelpVerbosity::SYNOPSIS
|
19 |
+
);
|
20 |
+
|
21 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_input.h
ADDED
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2011 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <pangolin/video/video.h>
|
31 |
+
#include <pangolin/video/video_output.h>
|
32 |
+
|
33 |
+
namespace pangolin
|
34 |
+
{
|
35 |
+
|
36 |
+
struct PANGOLIN_EXPORT VideoInput
|
37 |
+
: public VideoInterface,
|
38 |
+
public VideoFilterInterface
|
39 |
+
{
|
40 |
+
/////////////////////////////////////////////////////////////
|
41 |
+
// VideoInterface Methods
|
42 |
+
/////////////////////////////////////////////////////////////
|
43 |
+
|
44 |
+
size_t SizeBytes() const override;
|
45 |
+
const std::vector<StreamInfo>& Streams() const override;
|
46 |
+
void Start() override;
|
47 |
+
void Stop() override;
|
48 |
+
bool GrabNext( unsigned char* image, bool wait = true ) override;
|
49 |
+
bool GrabNewest( unsigned char* image, bool wait = true ) override;
|
50 |
+
|
51 |
+
/////////////////////////////////////////////////////////////
|
52 |
+
// VideoFilterInterface Methods
|
53 |
+
/////////////////////////////////////////////////////////////
|
54 |
+
|
55 |
+
std::vector<VideoInterface*>& InputStreams() override
|
56 |
+
{
|
57 |
+
return videos;
|
58 |
+
}
|
59 |
+
|
60 |
+
/////////////////////////////////////////////////////////////
|
61 |
+
// VideoInput Methods
|
62 |
+
/////////////////////////////////////////////////////////////
|
63 |
+
|
64 |
+
VideoInput();
|
65 |
+
VideoInput(VideoInput&& other) = default;
|
66 |
+
VideoInput(const std::string &input_uri, const std::string &output_uri = "pango:[buffer_size_mb=100]//video_log.pango");
|
67 |
+
~VideoInput();
|
68 |
+
|
69 |
+
void Open(const std::string &input_uri, const std::string &output_uri = "pango:[buffer_size_mb=100]//video_log.pango");
|
70 |
+
void Close();
|
71 |
+
|
72 |
+
// experimental - not stable
|
73 |
+
bool Grab( unsigned char* buffer, std::vector<Image<unsigned char> >& images, bool wait = true, bool newest = false);
|
74 |
+
|
75 |
+
// Return details of first stream
|
76 |
+
unsigned int Width() const {
|
77 |
+
return (unsigned int)Streams()[0].Width();
|
78 |
+
}
|
79 |
+
unsigned int Height() const {
|
80 |
+
return (unsigned int)Streams()[0].Height();
|
81 |
+
}
|
82 |
+
PixelFormat PixFormat() const {
|
83 |
+
return Streams()[0].PixFormat();
|
84 |
+
}
|
85 |
+
const Uri& VideoUri() const {
|
86 |
+
return uri_input;
|
87 |
+
}
|
88 |
+
|
89 |
+
void Reset() {
|
90 |
+
Close();
|
91 |
+
Open(uri_input.full_uri, uri_output.full_uri);
|
92 |
+
}
|
93 |
+
|
94 |
+
// Return pointer to inner video class as VideoType
|
95 |
+
template<typename VideoType>
|
96 |
+
VideoType* Cast() {
|
97 |
+
return dynamic_cast<VideoType*>(video_src.get());
|
98 |
+
}
|
99 |
+
|
100 |
+
const std::string& LogFilename() const;
|
101 |
+
std::string& LogFilename();
|
102 |
+
|
103 |
+
// Switch to live video and record output to file
|
104 |
+
void Record();
|
105 |
+
|
106 |
+
// Switch to live video and record a single frame
|
107 |
+
void RecordOneFrame();
|
108 |
+
|
109 |
+
// Specify that one in n frames are logged to file. Default is 1.
|
110 |
+
void SetTimelapse(size_t one_in_n_frames);
|
111 |
+
|
112 |
+
// True iff grabbed live frames are being logged to file
|
113 |
+
bool IsRecording() const;
|
114 |
+
|
115 |
+
protected:
|
116 |
+
void InitialiseRecorder();
|
117 |
+
|
118 |
+
Uri uri_input;
|
119 |
+
Uri uri_output;
|
120 |
+
|
121 |
+
std::unique_ptr<VideoInterface> video_src;
|
122 |
+
std::unique_ptr<VideoOutputInterface> video_recorder;
|
123 |
+
|
124 |
+
// Use to store either video_src or video_file for VideoFilterInterface,
|
125 |
+
// depending on which is active
|
126 |
+
std::vector<VideoInterface*> videos;
|
127 |
+
|
128 |
+
int buffer_size_bytes;
|
129 |
+
|
130 |
+
int frame_num;
|
131 |
+
size_t record_frame_skip;
|
132 |
+
|
133 |
+
bool record_once;
|
134 |
+
bool record_continuous;
|
135 |
+
};
|
136 |
+
|
137 |
+
// VideoInput subsumes the previous VideoRecordRepeat class.
|
138 |
+
typedef VideoInput VideoRecordRepeat;
|
139 |
+
|
140 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_interface.h
ADDED
@@ -0,0 +1,184 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2011 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <pangolin/utils/picojson.h>
|
31 |
+
#include <pangolin/video/stream_info.h>
|
32 |
+
|
33 |
+
#include <memory>
|
34 |
+
#include <vector>
|
35 |
+
|
36 |
+
#define PANGO_HAS_TIMING_DATA "has_timing_data"
|
37 |
+
#define PANGO_HOST_RECEPTION_TIME_US "host_reception_time_us"
|
38 |
+
#define PANGO_CAPTURE_TIME_US "capture_time_us"
|
39 |
+
#define PANGO_EXPOSURE_US "exposure_us"
|
40 |
+
#define PANGO_GAMMA "gamma"
|
41 |
+
// analog gain is in linear scale and not dB
|
42 |
+
#define PANGO_ANALOG_GAIN "analog_gain"
|
43 |
+
#define PANGO_ANALOG_BLACK_LEVEL "analog_black_level"
|
44 |
+
#define PANGO_SENSOR_TEMPERATURE_C "sensor_temperature_C"
|
45 |
+
#define PANGO_ESTIMATED_CENTER_CAPTURE_TIME_US "estimated_center_capture_time_us"
|
46 |
+
#define PANGO_JOIN_OFFSET_US "join_offset_us"
|
47 |
+
#define PANGO_FRAME_COUNTER "frame_counter"
|
48 |
+
#define PANGO_HAS_LINE0_METADATA "line0_metadata"
|
49 |
+
|
50 |
+
namespace pangolin {
|
51 |
+
|
52 |
+
//! Interface to video capture sources
|
53 |
+
struct PANGOLIN_EXPORT VideoInterface
|
54 |
+
{
|
55 |
+
virtual ~VideoInterface() {}
|
56 |
+
|
57 |
+
//! Required buffer size to store all frames
|
58 |
+
virtual size_t SizeBytes() const = 0;
|
59 |
+
|
60 |
+
//! Get format and dimensions of all video streams
|
61 |
+
virtual const std::vector<StreamInfo>& Streams() const = 0;
|
62 |
+
|
63 |
+
//! Start Video device
|
64 |
+
virtual void Start() = 0;
|
65 |
+
|
66 |
+
//! Stop Video device
|
67 |
+
virtual void Stop() = 0;
|
68 |
+
|
69 |
+
//! Copy the next frame from the camera to image.
|
70 |
+
//! Optionally wait for a frame if one isn't ready
|
71 |
+
//! Returns true iff image was copied
|
72 |
+
virtual bool GrabNext( unsigned char* image, bool wait = true ) = 0;
|
73 |
+
|
74 |
+
//! Copy the newest frame from the camera to image
|
75 |
+
//! discarding all older frames.
|
76 |
+
//! Optionally wait for a frame if one isn't ready
|
77 |
+
//! Returns true iff image was copied
|
78 |
+
virtual bool GrabNewest( unsigned char* image, bool wait = true ) = 0;
|
79 |
+
};
|
80 |
+
|
81 |
+
//! Interface to GENICAM video capture sources
|
82 |
+
struct PANGOLIN_EXPORT GenicamVideoInterface
|
83 |
+
{
|
84 |
+
virtual ~GenicamVideoInterface() {}
|
85 |
+
|
86 |
+
virtual bool GetParameter(const std::string& name, std::string& result) = 0;
|
87 |
+
|
88 |
+
virtual bool SetParameter(const std::string& name, const std::string& value) = 0;
|
89 |
+
|
90 |
+
virtual size_t CameraCount() const
|
91 |
+
{
|
92 |
+
return 1;
|
93 |
+
}
|
94 |
+
};
|
95 |
+
|
96 |
+
struct PANGOLIN_EXPORT BufferAwareVideoInterface
|
97 |
+
{
|
98 |
+
virtual ~BufferAwareVideoInterface() {}
|
99 |
+
|
100 |
+
//! Returns number of available frames
|
101 |
+
virtual uint32_t AvailableFrames() const = 0;
|
102 |
+
|
103 |
+
//! Drops N frames in the queue starting from the oldest
|
104 |
+
//! returns false if less than n frames arae available
|
105 |
+
virtual bool DropNFrames(uint32_t n) = 0;
|
106 |
+
};
|
107 |
+
|
108 |
+
struct PANGOLIN_EXPORT VideoPropertiesInterface
|
109 |
+
{
|
110 |
+
virtual ~VideoPropertiesInterface() {}
|
111 |
+
|
112 |
+
//! Access JSON properties of device
|
113 |
+
virtual const picojson::value& DeviceProperties() const = 0;
|
114 |
+
|
115 |
+
//! Access JSON properties of most recently captured frame
|
116 |
+
virtual const picojson::value& FrameProperties() const = 0;
|
117 |
+
};
|
118 |
+
|
119 |
+
enum UvcRequestCode {
|
120 |
+
UVC_RC_UNDEFINED = 0x00,
|
121 |
+
UVC_SET_CUR = 0x01,
|
122 |
+
UVC_GET_CUR = 0x81,
|
123 |
+
UVC_GET_MIN = 0x82,
|
124 |
+
UVC_GET_MAX = 0x83,
|
125 |
+
UVC_GET_RES = 0x84,
|
126 |
+
UVC_GET_LEN = 0x85,
|
127 |
+
UVC_GET_INFO = 0x86,
|
128 |
+
UVC_GET_DEF = 0x87
|
129 |
+
};
|
130 |
+
|
131 |
+
struct PANGOLIN_EXPORT VideoFilterInterface
|
132 |
+
{
|
133 |
+
virtual ~VideoFilterInterface() {}
|
134 |
+
|
135 |
+
template<typename T>
|
136 |
+
std::vector<T*> FindMatchingStreams()
|
137 |
+
{
|
138 |
+
std::vector<T*> matches;
|
139 |
+
std::vector<VideoInterface*> children = InputStreams();
|
140 |
+
for(size_t c=0; c < children.size(); ++c) {
|
141 |
+
T* concrete_video = dynamic_cast<T*>(children[c]);
|
142 |
+
if(concrete_video) {
|
143 |
+
matches.push_back(concrete_video);
|
144 |
+
}else{
|
145 |
+
VideoFilterInterface* filter_video = dynamic_cast<VideoFilterInterface*>(children[c]);
|
146 |
+
if(filter_video) {
|
147 |
+
std::vector<T*> child_matches = filter_video->FindMatchingStreams<T>();
|
148 |
+
matches.insert(matches.end(), child_matches.begin(), child_matches.end());
|
149 |
+
}
|
150 |
+
}
|
151 |
+
}
|
152 |
+
return matches;
|
153 |
+
}
|
154 |
+
|
155 |
+
virtual std::vector<VideoInterface*>& InputStreams() = 0;
|
156 |
+
};
|
157 |
+
|
158 |
+
struct PANGOLIN_EXPORT VideoUvcInterface
|
159 |
+
{
|
160 |
+
virtual ~VideoUvcInterface() {}
|
161 |
+
virtual int IoCtrl(uint8_t unit, uint8_t ctrl, unsigned char* data, int len, UvcRequestCode req_code) = 0;
|
162 |
+
virtual bool GetExposure(int& exp_us) = 0;
|
163 |
+
virtual bool SetExposure(int exp_us) = 0;
|
164 |
+
virtual bool GetGain(float& gain) = 0;
|
165 |
+
virtual bool SetGain(float gain) = 0;
|
166 |
+
};
|
167 |
+
|
168 |
+
struct PANGOLIN_EXPORT VideoPlaybackInterface
|
169 |
+
{
|
170 |
+
virtual ~VideoPlaybackInterface() {}
|
171 |
+
|
172 |
+
/// Return monotonic id of current frame
|
173 |
+
/// The 'current frame' is the frame returned from the last successful call to Grab
|
174 |
+
virtual size_t GetCurrentFrameId() const = 0;
|
175 |
+
|
176 |
+
/// Return total number of frames to be captured from device,
|
177 |
+
/// or 0 if unknown.
|
178 |
+
virtual size_t GetTotalFrames() const = 0;
|
179 |
+
|
180 |
+
/// Return frameid on success, or next frame on failure
|
181 |
+
virtual size_t Seek(size_t frameid) = 0;
|
182 |
+
};
|
183 |
+
|
184 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_output.h
ADDED
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2011-2013 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
// Pangolin video output supports various formats using
|
31 |
+
// different 3rd party libraries. (Only one right now)
|
32 |
+
//
|
33 |
+
// VideoOutput URI's take the following form:
|
34 |
+
// scheme:[param1=value1,param2=value2,...]//device
|
35 |
+
//
|
36 |
+
// scheme = ffmpeg
|
37 |
+
//
|
38 |
+
// ffmpeg - encode to compressed file using ffmpeg
|
39 |
+
// fps : fps to embed in encoded file.
|
40 |
+
// bps : bits per second
|
41 |
+
// unique_filename : append unique suffix if file already exists
|
42 |
+
//
|
43 |
+
// e.g. ffmpeg://output_file.avi
|
44 |
+
// e.g. ffmpeg:[fps=30,bps=1000000,unique_filename]//output_file.avi
|
45 |
+
|
46 |
+
#include <pangolin/video/video_output_interface.h>
|
47 |
+
#include <pangolin/utils/uri.h>
|
48 |
+
#include <memory>
|
49 |
+
|
50 |
+
namespace pangolin
|
51 |
+
{
|
52 |
+
|
53 |
+
//! VideoOutput wrap to generically construct instances of VideoOutputInterface.
|
54 |
+
class PANGOLIN_EXPORT VideoOutput : public VideoOutputInterface
|
55 |
+
{
|
56 |
+
public:
|
57 |
+
VideoOutput();
|
58 |
+
VideoOutput(VideoOutput&& other) = default;
|
59 |
+
VideoOutput(const std::string& uri);
|
60 |
+
~VideoOutput();
|
61 |
+
|
62 |
+
bool IsOpen() const;
|
63 |
+
void Open(const std::string& uri);
|
64 |
+
void Close();
|
65 |
+
|
66 |
+
const std::vector<StreamInfo>& Streams() const override;
|
67 |
+
|
68 |
+
void SetStreams(const std::vector<StreamInfo>& streams, const std::string& uri = "", const picojson::value& properties = picojson::value() ) override;
|
69 |
+
|
70 |
+
int WriteStreams(const unsigned char* data, const picojson::value& frame_properties = picojson::value() ) override;
|
71 |
+
|
72 |
+
bool IsPipe() const override;
|
73 |
+
|
74 |
+
void AddStream(const PixelFormat& pf, size_t w,size_t h,size_t pitch);
|
75 |
+
|
76 |
+
void AddStream(const PixelFormat& pf, size_t w,size_t h);
|
77 |
+
|
78 |
+
void SetStreams(const std::string& uri = "", const picojson::value& properties = picojson::value() );
|
79 |
+
|
80 |
+
size_t SizeBytes(void) const ;
|
81 |
+
|
82 |
+
std::vector<Image<unsigned char>> GetOutputImages(unsigned char* buffer) const ;
|
83 |
+
|
84 |
+
std::vector<Image<unsigned char>> GetOutputImages(std::vector<unsigned char>& buffer) const ;
|
85 |
+
|
86 |
+
|
87 |
+
protected:
|
88 |
+
std::vector<StreamInfo> streams;
|
89 |
+
Uri uri;
|
90 |
+
std::unique_ptr<VideoOutputInterface> recorder;
|
91 |
+
};
|
92 |
+
|
93 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_output_interface.h
ADDED
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2011-2013 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
#include <vector>
|
31 |
+
#include <pangolin/platform.h>
|
32 |
+
#include <pangolin/video/stream_info.h>
|
33 |
+
#include <pangolin/utils/picojson.h>
|
34 |
+
|
35 |
+
namespace pangolin {
|
36 |
+
|
37 |
+
//! Interface to video recording destinations
|
38 |
+
struct PANGOLIN_EXPORT VideoOutputInterface
|
39 |
+
{
|
40 |
+
virtual ~VideoOutputInterface() {}
|
41 |
+
|
42 |
+
//! Get format and dimensions of all video streams
|
43 |
+
virtual const std::vector<StreamInfo>& Streams() const = 0;
|
44 |
+
|
45 |
+
virtual void SetStreams(const std::vector<StreamInfo>& streams, const std::string& uri ="", const picojson::value& properties = picojson::value() ) = 0;
|
46 |
+
|
47 |
+
virtual int WriteStreams(const unsigned char* data, const picojson::value& frame_properties = picojson::value() ) = 0;
|
48 |
+
|
49 |
+
virtual bool IsPipe() const = 0;
|
50 |
+
};
|
51 |
+
|
52 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/include/pangolin/video/video_record_repeat.h
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2011 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#pragma once
|
29 |
+
|
30 |
+
// VideoInput subsumes the previous VideoRecordRepeat class.
|
31 |
+
#include <pangolin/video/video_input.h>
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/debayer.cpp
ADDED
@@ -0,0 +1,392 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/video/drivers/debayer.h>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/iostream_operators.h>
|
31 |
+
#include <pangolin/video/video.h>
|
32 |
+
|
33 |
+
#ifdef HAVE_DC1394
|
34 |
+
# include <dc1394/conversions.h>
|
35 |
+
const bool have_dc1394 = true;
|
36 |
+
#else
|
37 |
+
const bool have_dc1394 = false;
|
38 |
+
#endif
|
39 |
+
|
40 |
+
namespace pangolin
|
41 |
+
{
|
42 |
+
|
43 |
+
pangolin::StreamInfo BayerOutputFormat( const StreamInfo& stream_in, bayer_method_t method, size_t start_offset)
|
44 |
+
{
|
45 |
+
const bool downsample = (method == BAYER_METHOD_DOWNSAMPLE) || (method == BAYER_METHOD_DOWNSAMPLE_MONO);
|
46 |
+
|
47 |
+
const size_t w = downsample ? stream_in.Width() / 2 : stream_in.Width();
|
48 |
+
const size_t h = downsample ? stream_in.Height() / 2 : stream_in.Height();
|
49 |
+
|
50 |
+
pangolin::PixelFormat fmt =
|
51 |
+
(method == BAYER_METHOD_NONE) ?
|
52 |
+
stream_in.PixFormat() :
|
53 |
+
pangolin::PixelFormatFromString(
|
54 |
+
(stream_in.PixFormat().bpp == 16) ?
|
55 |
+
(method == BAYER_METHOD_DOWNSAMPLE_MONO ? "GRAY16LE" : "RGB48") :
|
56 |
+
(method == BAYER_METHOD_DOWNSAMPLE_MONO ? "GRAY8" : "RGB24")
|
57 |
+
);
|
58 |
+
|
59 |
+
fmt.channel_bit_depth = stream_in.PixFormat().channel_bit_depth;
|
60 |
+
|
61 |
+
return pangolin::StreamInfo( fmt, w, h, w*fmt.bpp / 8, reinterpret_cast<unsigned char*>(start_offset) );
|
62 |
+
}
|
63 |
+
|
64 |
+
DebayerVideo::DebayerVideo(std::unique_ptr<VideoInterface> &src_, const std::vector<bayer_method_t>& bayer_method, color_filter_t tile, const WbGains& input_wb_gains)
|
65 |
+
: src(std::move(src_)), size_bytes(0), methods(bayer_method), tile(tile), wb_gains(input_wb_gains)
|
66 |
+
{
|
67 |
+
if(!src.get()) {
|
68 |
+
throw VideoException("DebayerVideo: VideoInterface in must not be null");
|
69 |
+
}
|
70 |
+
videoin.push_back(src.get());
|
71 |
+
|
72 |
+
while(methods.size() < src->Streams().size()) {
|
73 |
+
methods.push_back(BAYER_METHOD_NONE);
|
74 |
+
}
|
75 |
+
|
76 |
+
for(size_t s=0; s< src->Streams().size(); ++s) {
|
77 |
+
if( (methods[s] < BAYER_METHOD_NONE) && (!have_dc1394 || src->Streams()[0].IsPitched()) ) {
|
78 |
+
pango_print_warn("debayer: Switching to simple downsampling method because No DC1394 or image is pitched.\n");
|
79 |
+
methods[s] = BAYER_METHOD_DOWNSAMPLE;
|
80 |
+
}
|
81 |
+
|
82 |
+
const StreamInfo& stin = src->Streams()[s];
|
83 |
+
streams.push_back(BayerOutputFormat(stin, methods[s], size_bytes));
|
84 |
+
size_bytes += streams.back().SizeBytes();
|
85 |
+
}
|
86 |
+
buffer = std::unique_ptr<unsigned char[]>(new unsigned char[src->SizeBytes()]);
|
87 |
+
}
|
88 |
+
|
89 |
+
DebayerVideo::~DebayerVideo()
|
90 |
+
{
|
91 |
+
}
|
92 |
+
|
93 |
+
//! Implement VideoInput::Start()
|
94 |
+
void DebayerVideo::Start()
|
95 |
+
{
|
96 |
+
videoin[0]->Start();
|
97 |
+
}
|
98 |
+
|
99 |
+
//! Implement VideoInput::Stop()
|
100 |
+
void DebayerVideo::Stop()
|
101 |
+
{
|
102 |
+
videoin[0]->Stop();
|
103 |
+
}
|
104 |
+
|
105 |
+
//! Implement VideoInput::SizeBytes()
|
106 |
+
size_t DebayerVideo::SizeBytes() const
|
107 |
+
{
|
108 |
+
return size_bytes;
|
109 |
+
}
|
110 |
+
|
111 |
+
//! Implement VideoInput::Streams()
|
112 |
+
const std::vector<StreamInfo>& DebayerVideo::Streams() const
|
113 |
+
{
|
114 |
+
return streams;
|
115 |
+
}
|
116 |
+
|
117 |
+
|
118 |
+
unsigned int DebayerVideo::AvailableFrames() const
|
119 |
+
{
|
120 |
+
BufferAwareVideoInterface* vpi = dynamic_cast<BufferAwareVideoInterface*>(videoin[0]);
|
121 |
+
if(!vpi)
|
122 |
+
{
|
123 |
+
pango_print_warn("Debayer: child interface is not buffer aware.");
|
124 |
+
return 0;
|
125 |
+
}
|
126 |
+
else
|
127 |
+
{
|
128 |
+
return vpi->AvailableFrames();
|
129 |
+
}
|
130 |
+
}
|
131 |
+
|
132 |
+
bool DebayerVideo::DropNFrames(uint32_t n)
|
133 |
+
{
|
134 |
+
BufferAwareVideoInterface* vpi = dynamic_cast<BufferAwareVideoInterface*>(videoin[0]);
|
135 |
+
if(!vpi)
|
136 |
+
{
|
137 |
+
pango_print_warn("Debayer: child interface is not buffer aware.");
|
138 |
+
return false;
|
139 |
+
}
|
140 |
+
else
|
141 |
+
{
|
142 |
+
return vpi->DropNFrames(n);
|
143 |
+
}
|
144 |
+
}
|
145 |
+
|
146 |
+
template<typename Tup, typename Tout, typename Tin>
|
147 |
+
void DownsampleToMono(Image<Tout>& out, const Image<Tin>& in)
|
148 |
+
{
|
149 |
+
for(int y=0; y< (int)out.h; ++y) {
|
150 |
+
Tout* pixout = out.RowPtr(y);
|
151 |
+
const Tin* irow0 = in.RowPtr(2*y);
|
152 |
+
const Tin* irow1 = in.RowPtr(2*y+1);
|
153 |
+
for(size_t x=0; x<out.w; ++x) {
|
154 |
+
Tup val = ((Tup)irow0[0] + (Tup)irow0[1] + (Tup)irow1[0] + (Tup)irow1[1]) / 4;
|
155 |
+
*(pixout++) = (Tout)std::min(std::max(static_cast<Tup>(0), val), static_cast<Tup>(std::numeric_limits<Tout>::max()));
|
156 |
+
irow0 += 2;
|
157 |
+
irow1 += 2;
|
158 |
+
}
|
159 |
+
}
|
160 |
+
}
|
161 |
+
|
162 |
+
template<typename Tout, typename Tin>
|
163 |
+
void DownsampleDebayer(Image<Tout>& out, const Image<Tin>& in, color_filter_t tile, WbGains wb_gains, const bool has_metadata_line)
|
164 |
+
{
|
165 |
+
int y_offset = 0;
|
166 |
+
if (has_metadata_line) {
|
167 |
+
++y_offset;
|
168 |
+
}
|
169 |
+
|
170 |
+
switch(tile) {
|
171 |
+
case DC1394_COLOR_FILTER_RGGB:
|
172 |
+
for(int y=0; y< (int)out.h; ++y) {
|
173 |
+
Tout* pixout = out.RowPtr(y);
|
174 |
+
const Tin* irow0 = in.RowPtr(2*y+y_offset);
|
175 |
+
const Tin* irow1 = in.RowPtr(2*y+1+y_offset);
|
176 |
+
for(size_t x=0; x<out.w; ++x) {
|
177 |
+
*(pixout++) = irow0[2*x] * wb_gains.r;
|
178 |
+
*(pixout++) = ((irow0[2*x+1] + irow1[2*x]) >> 1) * wb_gains.g;
|
179 |
+
*(pixout++) = irow1[2*x+1] * wb_gains.b;
|
180 |
+
}
|
181 |
+
}
|
182 |
+
break;
|
183 |
+
case DC1394_COLOR_FILTER_GBRG:
|
184 |
+
for(int y=0; y< (int)out.h; ++y) {
|
185 |
+
Tout* pixout = out.RowPtr(y);
|
186 |
+
const Tin* irow0 = in.RowPtr(2*y+y_offset);
|
187 |
+
const Tin* irow1 = in.RowPtr(2*y+1+y_offset);
|
188 |
+
for(size_t x=0; x<out.w; ++x) {
|
189 |
+
*(pixout++) = irow1[2*x] * wb_gains.r;
|
190 |
+
*(pixout++) = ((irow0[2*x] + irow1[2*x+1]) >> 1) * wb_gains.g;
|
191 |
+
*(pixout++) = irow0[2*x+1] * wb_gains.b;
|
192 |
+
}
|
193 |
+
}
|
194 |
+
break;
|
195 |
+
case DC1394_COLOR_FILTER_GRBG:
|
196 |
+
for(int y=0; y< (int)out.h; ++y) {
|
197 |
+
Tout* pixout = out.RowPtr(y);
|
198 |
+
const Tin* irow0 = in.RowPtr(2*y+y_offset);
|
199 |
+
const Tin* irow1 = in.RowPtr(2*y+1+y_offset);
|
200 |
+
for(size_t x=0; x<out.w; ++x) {
|
201 |
+
*(pixout++) = irow0[2*x+1] * wb_gains.r;
|
202 |
+
*(pixout++) = ((irow0[2*x] + irow1[2*x+1]) >> 1) * wb_gains.g;
|
203 |
+
*(pixout++) = irow1[2*x] * wb_gains.b;
|
204 |
+
}
|
205 |
+
}
|
206 |
+
break;
|
207 |
+
case DC1394_COLOR_FILTER_BGGR:
|
208 |
+
for(int y=0; y< (int)out.h; ++y) {
|
209 |
+
Tout* pixout = out.RowPtr(y);
|
210 |
+
const Tin* irow0 = in.RowPtr(2*y+y_offset);
|
211 |
+
const Tin* irow1 = in.RowPtr(2*y+1+y_offset);
|
212 |
+
for(size_t x=0; x<out.w; ++x) {
|
213 |
+
*(pixout++) = irow1[2*x+1] * wb_gains.r;
|
214 |
+
*(pixout++) = ((irow0[2*x+1] + irow1[2*x]) >> 1) * wb_gains.g;
|
215 |
+
*(pixout++) = irow0[2*x] * wb_gains.b;
|
216 |
+
}
|
217 |
+
}
|
218 |
+
break;
|
219 |
+
}
|
220 |
+
}
|
221 |
+
|
222 |
+
template<typename T>
|
223 |
+
void PitchedImageCopy( Image<T>& img_out, const Image<T>& img_in ) {
|
224 |
+
if( img_out.w != img_in.w || img_out.h != img_in.h || sizeof(T) * img_in.w > img_out.pitch) {
|
225 |
+
throw std::runtime_error("PitchedImageCopy: Incompatible image sizes");
|
226 |
+
}
|
227 |
+
|
228 |
+
for(size_t y=0; y < img_out.h; ++y) {
|
229 |
+
std::memcpy(img_out.RowPtr((int)y), img_in.RowPtr((int)y), sizeof(T) * img_in.w);
|
230 |
+
}
|
231 |
+
}
|
232 |
+
|
233 |
+
template<typename Tout, typename Tin>
|
234 |
+
void ProcessImage(Image<Tout>& img_out, const Image<Tin>& img_in, bayer_method_t method, color_filter_t tile, WbGains wb_gains, const bool has_metadata_line)
|
235 |
+
{
|
236 |
+
if(method == BAYER_METHOD_NONE) {
|
237 |
+
PitchedImageCopy(img_out, img_in.template UnsafeReinterpret<Tout>() );
|
238 |
+
}else if(method == BAYER_METHOD_DOWNSAMPLE_MONO) {
|
239 |
+
if( sizeof(Tout) == 1) {
|
240 |
+
DownsampleToMono<int,Tout, Tin>(img_out, img_in);
|
241 |
+
}else{
|
242 |
+
DownsampleToMono<double,Tout, Tin>(img_out, img_in);
|
243 |
+
}
|
244 |
+
}else if(method == BAYER_METHOD_DOWNSAMPLE) {
|
245 |
+
DownsampleDebayer(img_out, img_in, tile, wb_gains, has_metadata_line);
|
246 |
+
}else{
|
247 |
+
#ifdef HAVE_DC1394
|
248 |
+
if(sizeof(Tout) == 1) {
|
249 |
+
dc1394_bayer_decoding_8bit(
|
250 |
+
(uint8_t*)img_in.ptr, (uint8_t*)img_out.ptr, img_in.w, img_in.h,
|
251 |
+
(dc1394color_filter_t)tile, (dc1394bayer_method_t)method
|
252 |
+
);
|
253 |
+
}else if(sizeof(Tout) == 2) {
|
254 |
+
dc1394_bayer_decoding_16bit(
|
255 |
+
(uint16_t*)img_in.ptr, (uint16_t*)img_out.ptr, img_in.w, img_in.h,
|
256 |
+
(dc1394color_filter_t)tile, (dc1394bayer_method_t)method,
|
257 |
+
16
|
258 |
+
);
|
259 |
+
}
|
260 |
+
#endif
|
261 |
+
}
|
262 |
+
}
|
263 |
+
|
264 |
+
void DebayerVideo::ProcessStreams(unsigned char* out, const unsigned char *in)
|
265 |
+
{
|
266 |
+
const bool has_metadata_line = frame_properties.get_value<bool>(PANGO_HAS_LINE0_METADATA, false);
|
267 |
+
|
268 |
+
for(size_t s=0; s<streams.size(); ++s) {
|
269 |
+
const StreamInfo& stin = videoin[0]->Streams()[s];
|
270 |
+
Image<unsigned char> img_in = stin.StreamImage(in);
|
271 |
+
Image<unsigned char> img_out = Streams()[s].StreamImage(out);
|
272 |
+
|
273 |
+
if(methods[s] == BAYER_METHOD_NONE) {
|
274 |
+
const size_t num_bytes = std::min(img_in.w, img_out.w) * stin.PixFormat().bpp / 8;
|
275 |
+
for(size_t y=0; y < img_out.h; ++y) {
|
276 |
+
std::memcpy(img_out.RowPtr((int)y), img_in.RowPtr((int)y), num_bytes);
|
277 |
+
}
|
278 |
+
}else if(stin.PixFormat().bpp == 8) {
|
279 |
+
ProcessImage(img_out, img_in, methods[s], tile, wb_gains, has_metadata_line);
|
280 |
+
}else if(stin.PixFormat().bpp == 16){
|
281 |
+
Image<uint16_t> img_in16 = img_in.UnsafeReinterpret<uint16_t>();
|
282 |
+
Image<uint16_t> img_out16 = img_out.UnsafeReinterpret<uint16_t>();
|
283 |
+
ProcessImage(img_out16, img_in16, methods[s], tile, wb_gains, has_metadata_line);
|
284 |
+
}else {
|
285 |
+
throw std::runtime_error("debayer: unhandled format combination: " + stin.PixFormat().format );
|
286 |
+
}
|
287 |
+
}
|
288 |
+
}
|
289 |
+
|
290 |
+
//! Implement VideoInput::GrabNext()
|
291 |
+
bool DebayerVideo::GrabNext( unsigned char* image, bool wait )
|
292 |
+
{
|
293 |
+
if(videoin[0]->GrabNext(buffer.get(),wait)) {
|
294 |
+
frame_properties = GetVideoFrameProperties(videoin[0]);
|
295 |
+
ProcessStreams(image, buffer.get());
|
296 |
+
return true;
|
297 |
+
}else{
|
298 |
+
return false;
|
299 |
+
}
|
300 |
+
}
|
301 |
+
|
302 |
+
//! Implement VideoInput::GrabNewest()
|
303 |
+
bool DebayerVideo::GrabNewest( unsigned char* image, bool wait )
|
304 |
+
{
|
305 |
+
if(videoin[0]->GrabNewest(buffer.get(),wait)) {
|
306 |
+
frame_properties = GetVideoFrameProperties(videoin[0]);
|
307 |
+
ProcessStreams(image, buffer.get());
|
308 |
+
return true;
|
309 |
+
}else{
|
310 |
+
return false;
|
311 |
+
}
|
312 |
+
}
|
313 |
+
|
314 |
+
std::vector<VideoInterface*>& DebayerVideo::InputStreams()
|
315 |
+
{
|
316 |
+
return videoin;
|
317 |
+
}
|
318 |
+
|
319 |
+
color_filter_t DebayerVideo::ColorFilterFromString(std::string str)
|
320 |
+
{
|
321 |
+
if(!str.compare("rggb") || !str.compare("RGGB")) return DC1394_COLOR_FILTER_RGGB;
|
322 |
+
else if(!str.compare("gbrg") || !str.compare("GBRG")) return DC1394_COLOR_FILTER_GBRG;
|
323 |
+
else if(!str.compare("grbg") || !str.compare("GRBG")) return DC1394_COLOR_FILTER_GRBG;
|
324 |
+
else if(!str.compare("bggr") || !str.compare("BGGR")) return DC1394_COLOR_FILTER_BGGR;
|
325 |
+
else {
|
326 |
+
pango_print_error("Debayer error, %s is not a valid tile type using RGGB\n", str.c_str());
|
327 |
+
return DC1394_COLOR_FILTER_RGGB;
|
328 |
+
}
|
329 |
+
}
|
330 |
+
|
331 |
+
bayer_method_t DebayerVideo::BayerMethodFromString(std::string str)
|
332 |
+
{
|
333 |
+
if(!str.compare("nearest")) return BAYER_METHOD_NEAREST;
|
334 |
+
else if(!str.compare("simple")) return BAYER_METHOD_SIMPLE;
|
335 |
+
else if(!str.compare("bilinear")) return BAYER_METHOD_BILINEAR;
|
336 |
+
else if(!str.compare("hqlinear")) return BAYER_METHOD_HQLINEAR;
|
337 |
+
else if(!str.compare("downsample")) return BAYER_METHOD_DOWNSAMPLE;
|
338 |
+
else if(!str.compare("edgesense")) return BAYER_METHOD_EDGESENSE;
|
339 |
+
else if(!str.compare("vng")) return BAYER_METHOD_VNG;
|
340 |
+
else if(!str.compare("ahd")) return BAYER_METHOD_AHD;
|
341 |
+
else if(!str.compare("mono")) return BAYER_METHOD_DOWNSAMPLE_MONO;
|
342 |
+
else if(!str.compare("none")) return BAYER_METHOD_NONE;
|
343 |
+
else {
|
344 |
+
pango_print_error("Debayer error, %s is not a valid debayer method using downsample\n", str.c_str());
|
345 |
+
return BAYER_METHOD_DOWNSAMPLE;
|
346 |
+
}
|
347 |
+
}
|
348 |
+
|
349 |
+
PANGOLIN_REGISTER_FACTORY(DebayerVideo)
|
350 |
+
{
|
351 |
+
struct DebayerVideoFactory final : public TypedFactoryInterface<VideoInterface> {
|
352 |
+
std::map<std::string,Precedence> Schemes() const override
|
353 |
+
{
|
354 |
+
return {{"debayer",10}};
|
355 |
+
}
|
356 |
+
const char* Description() const override
|
357 |
+
{
|
358 |
+
return "Demosaics raw RGB sensor data (one or multiple streams) to RGB images";
|
359 |
+
}
|
360 |
+
ParamSet Params() const override
|
361 |
+
{
|
362 |
+
return {{
|
363 |
+
{"tile","rggb","Tiling pattern: possible values: rggb,gbrg,grbg,bggr"},
|
364 |
+
{"method(\\d+)?","none","method, or methodN for multiple sub-streams, N >= 1. Possible values: nearest,simple,bilinear,hqlinear,downsample,edgesense,vng,ahd,mono,none. For methodN, the default values are set to the value of method."},
|
365 |
+
{"wb_r","1.0","White balance - red component"},
|
366 |
+
{"wb_g","1.0","White balance - green component"},
|
367 |
+
{"wb_b","1.0","White balance - blue component"}
|
368 |
+
}};
|
369 |
+
}
|
370 |
+
std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
|
371 |
+
ParamReader reader(DebayerVideoFactory::Params(),uri);
|
372 |
+
|
373 |
+
std::unique_ptr<VideoInterface> subvid = pangolin::OpenVideo(uri.url);
|
374 |
+
const std::string tile_string = reader.Get<std::string>("tile");
|
375 |
+
const std::string method = reader.Get<std::string>("method");
|
376 |
+
const color_filter_t tile = DebayerVideo::ColorFilterFromString(tile_string);
|
377 |
+
WbGains input_wb_gains(reader.Get<float>("wb_r"), reader.Get<float>("wb_g"), reader.Get<float>("wb_b"));
|
378 |
+
|
379 |
+
std::vector<bayer_method_t> methods;
|
380 |
+
for(size_t s=0; s < subvid->Streams().size(); ++s) {
|
381 |
+
const std::string key = std::string("method") + ToString(s+1);
|
382 |
+
std::string method_s = reader.Get<std::string>(key, method);
|
383 |
+
methods.push_back(DebayerVideo::BayerMethodFromString(method_s));
|
384 |
+
}
|
385 |
+
return std::unique_ptr<VideoInterface>( new DebayerVideo(subvid, methods, tile, input_wb_gains) );
|
386 |
+
}
|
387 |
+
};
|
388 |
+
|
389 |
+
return FactoryRegistry::I()->RegisterFactory<VideoInterface>( std::make_shared<DebayerVideoFactory>());
|
390 |
+
}
|
391 |
+
|
392 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/deinterlace.cpp
ADDED
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2013 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/video/drivers/deinterlace.h>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/iostream_operators.h>
|
31 |
+
#include <dc1394/conversions.h>
|
32 |
+
#include <pangolin/video/video.h>
|
33 |
+
|
34 |
+
namespace pangolin
|
35 |
+
{
|
36 |
+
|
37 |
+
DeinterlaceVideo::DeinterlaceVideo(std::unique_ptr<VideoInterface> &videoin_)
|
38 |
+
: videoin(std::move(videoin_)), buffer(0)
|
39 |
+
{
|
40 |
+
if(videoin->Streams().size() != 1)
|
41 |
+
throw VideoException("FirewireDeinterlace input must have exactly one stream");
|
42 |
+
|
43 |
+
const StreamInfo& stmin = videoin->Streams()[0];
|
44 |
+
|
45 |
+
StreamInfo stm1(PixelFormatFromString("GRAY8"), stmin.Width(), stmin.Height(), stmin.Width(), 0);
|
46 |
+
StreamInfo stm2(PixelFormatFromString("GRAY8"), stmin.Width(), stmin.Height(), stmin.Width(), (unsigned char*)0 + stmin.Width()*stmin.Height());
|
47 |
+
streams.push_back(stm1);
|
48 |
+
streams.push_back(stm2);
|
49 |
+
|
50 |
+
buffer = new unsigned char[videoin->SizeBytes()];
|
51 |
+
|
52 |
+
std::cout << videoin->Streams()[0].Width() << ", " << videoin->Streams()[0].Height() << std::endl;
|
53 |
+
}
|
54 |
+
|
55 |
+
DeinterlaceVideo::~DeinterlaceVideo()
|
56 |
+
{
|
57 |
+
delete[] buffer;
|
58 |
+
}
|
59 |
+
|
60 |
+
size_t DeinterlaceVideo::SizeBytes() const
|
61 |
+
{
|
62 |
+
return videoin->SizeBytes();
|
63 |
+
}
|
64 |
+
|
65 |
+
const std::vector<StreamInfo>& DeinterlaceVideo::Streams() const
|
66 |
+
{
|
67 |
+
return streams;
|
68 |
+
}
|
69 |
+
|
70 |
+
void DeinterlaceVideo::Start()
|
71 |
+
{
|
72 |
+
videoin->Start();
|
73 |
+
}
|
74 |
+
|
75 |
+
void DeinterlaceVideo::Stop()
|
76 |
+
{
|
77 |
+
videoin->Stop();
|
78 |
+
}
|
79 |
+
|
80 |
+
bool DeinterlaceVideo::GrabNext( unsigned char* image, bool wait )
|
81 |
+
{
|
82 |
+
if(videoin->GrabNext(buffer, wait)) {
|
83 |
+
return ( dc1394_deinterlace_stereo(buffer,image, videoin->Streams()[0].Width(), 2*videoin->Streams()[0].Height() ) == DC1394_SUCCESS );
|
84 |
+
}
|
85 |
+
return false;
|
86 |
+
}
|
87 |
+
|
88 |
+
bool DeinterlaceVideo::GrabNewest( unsigned char* image, bool wait )
|
89 |
+
{
|
90 |
+
if(videoin->GrabNewest(buffer, wait)) {
|
91 |
+
return ( dc1394_deinterlace_stereo(buffer,image, videoin->Streams()[0].Width(), 2*videoin->Streams()[0].Height() ) == DC1394_SUCCESS );
|
92 |
+
}
|
93 |
+
return false;
|
94 |
+
}
|
95 |
+
|
96 |
+
PANGOLIN_REGISTER_FACTORY(DeinterlaceVideo)
|
97 |
+
{
|
98 |
+
struct DeinterlaceVideoFactory final : public TypedFactoryInterface<VideoInterface> {
|
99 |
+
std::map<std::string,Precedence> Schemes() const override
|
100 |
+
{
|
101 |
+
return {{"deinterlace",10}};
|
102 |
+
}
|
103 |
+
const char* Description() const override
|
104 |
+
{
|
105 |
+
return "Deinterlace sub-video.";
|
106 |
+
}
|
107 |
+
ParamSet Params() const override
|
108 |
+
{
|
109 |
+
return {{
|
110 |
+
}};
|
111 |
+
}
|
112 |
+
std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
|
113 |
+
std::unique_ptr<VideoInterface> subvid = pangolin::OpenVideo(uri.url);
|
114 |
+
return std::unique_ptr<VideoInterface>( new DeinterlaceVideo(subvid) );
|
115 |
+
}
|
116 |
+
};
|
117 |
+
|
118 |
+
return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<DeinterlaceVideoFactory>());
|
119 |
+
}
|
120 |
+
|
121 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/depthsense.cpp
ADDED
@@ -0,0 +1,656 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/video/drivers/depthsense.h>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/iostream_operators.h>
|
31 |
+
#include <iomanip>
|
32 |
+
|
33 |
+
namespace pangolin
|
34 |
+
{
|
35 |
+
|
36 |
+
const size_t ROGUE_ADDR = 0x01;
|
37 |
+
const double MAX_DELTA_TIME = 20000.0; //u_s
|
38 |
+
|
39 |
+
DepthSenseContext& DepthSenseContext::I()
|
40 |
+
{
|
41 |
+
static DepthSenseContext s;
|
42 |
+
return s;
|
43 |
+
}
|
44 |
+
|
45 |
+
DepthSense::Context& DepthSenseContext::Context()
|
46 |
+
{
|
47 |
+
return g_context;
|
48 |
+
}
|
49 |
+
|
50 |
+
void DepthSenseContext::NewDeviceRunning()
|
51 |
+
{
|
52 |
+
running_devices++;
|
53 |
+
if(running_devices == 1) {
|
54 |
+
StartNodes();
|
55 |
+
}
|
56 |
+
}
|
57 |
+
|
58 |
+
void DepthSenseContext::DeviceClosing()
|
59 |
+
{
|
60 |
+
running_devices--;
|
61 |
+
if(running_devices == 0) {
|
62 |
+
StopNodes();
|
63 |
+
|
64 |
+
// Force destruction of current context
|
65 |
+
g_context = DepthSense::Context();
|
66 |
+
}
|
67 |
+
}
|
68 |
+
|
69 |
+
DepthSenseVideo* DepthSenseContext::GetDepthSenseVideo(size_t device_num, DepthSenseSensorType s1, DepthSenseSensorType s2, ImageDim dim1, ImageDim dim2, unsigned int fps1, unsigned int fps2, const Uri& uri)
|
70 |
+
{
|
71 |
+
if(running_devices == 0) {
|
72 |
+
// Initialise SDK
|
73 |
+
g_context = DepthSense::Context::create("localhost");
|
74 |
+
}
|
75 |
+
|
76 |
+
// Get the list of currently connected devices
|
77 |
+
std::vector<DepthSense::Device> da = g_context.getDevices();
|
78 |
+
|
79 |
+
if( da.size() > device_num )
|
80 |
+
{
|
81 |
+
return new DepthSenseVideo(da[device_num], s1, s2, dim1, dim2, fps1, fps2, uri);
|
82 |
+
}
|
83 |
+
|
84 |
+
throw VideoException("DepthSense device not connected.");
|
85 |
+
}
|
86 |
+
|
87 |
+
DepthSenseContext::DepthSenseContext()
|
88 |
+
: is_running(false), running_devices(0)
|
89 |
+
{
|
90 |
+
}
|
91 |
+
|
92 |
+
DepthSenseContext::~DepthSenseContext()
|
93 |
+
{
|
94 |
+
StopNodes();
|
95 |
+
}
|
96 |
+
|
97 |
+
|
98 |
+
void DepthSenseContext::StartNodes()
|
99 |
+
{
|
100 |
+
if(!is_running) {
|
101 |
+
// Launch EventLoop thread
|
102 |
+
event_thread = std::thread(&DepthSenseContext::EventLoop, this );
|
103 |
+
}
|
104 |
+
}
|
105 |
+
|
106 |
+
void DepthSenseContext::StopNodes()
|
107 |
+
{
|
108 |
+
if(is_running && event_thread.joinable()) {
|
109 |
+
g_context.quit();
|
110 |
+
event_thread.join();
|
111 |
+
}
|
112 |
+
}
|
113 |
+
|
114 |
+
void DepthSenseContext::EventLoop()
|
115 |
+
{
|
116 |
+
is_running = true;
|
117 |
+
g_context.startNodes();
|
118 |
+
g_context.run();
|
119 |
+
g_context.stopNodes();
|
120 |
+
is_running = false;
|
121 |
+
}
|
122 |
+
|
123 |
+
DepthSenseVideo::DepthSenseVideo(DepthSense::Device device, DepthSenseSensorType s1, DepthSenseSensorType s2, ImageDim dim1, ImageDim dim2, unsigned int fps1, unsigned int fps2, const Uri& uri)
|
124 |
+
: device(device), fill_image(0), depthmap_stream(-1), rgb_stream(-1), gotDepth(0), gotColor(0),
|
125 |
+
enableDepth(false), enableColor(false), depthTs(0.0), colorTs(0.0), size_bytes(0)
|
126 |
+
{
|
127 |
+
streams_properties = &frame_properties["streams"];
|
128 |
+
*streams_properties = picojson::value(picojson::array_type, false);
|
129 |
+
streams_properties->get<picojson::array>().resize(2);
|
130 |
+
|
131 |
+
sensorConfig[0] = {s1, dim1, fps1};
|
132 |
+
sensorConfig[1] = {s2, dim2, fps2};
|
133 |
+
ConfigureNodes(uri);
|
134 |
+
|
135 |
+
DepthSenseContext::I().NewDeviceRunning();
|
136 |
+
}
|
137 |
+
|
138 |
+
DepthSenseVideo::~DepthSenseVideo()
|
139 |
+
{
|
140 |
+
if (g_cnode.isSet()) DepthSenseContext::I().Context().unregisterNode(g_cnode);
|
141 |
+
if (g_dnode.isSet()) DepthSenseContext::I().Context().unregisterNode(g_dnode);
|
142 |
+
|
143 |
+
fill_image = (unsigned char*)ROGUE_ADDR;
|
144 |
+
cond_image_requested.notify_all();
|
145 |
+
|
146 |
+
DepthSenseContext::I().DeviceClosing();
|
147 |
+
}
|
148 |
+
|
149 |
+
picojson::value Json(DepthSense::IntrinsicParameters& p)
|
150 |
+
{
|
151 |
+
picojson::value js;
|
152 |
+
js["model"] = "polynomial";
|
153 |
+
js["width"] = p.width;
|
154 |
+
js["height"] = p.height;
|
155 |
+
js["RDF"] = "[1,0,0; 0,1,0; 0,0,1]";
|
156 |
+
|
157 |
+
js["fx"] = p.fx;
|
158 |
+
js["fy"] = p.fy;
|
159 |
+
js["u0"] = p.cx;
|
160 |
+
js["v0"] = p.cy;
|
161 |
+
js["k1"] = p.k1;
|
162 |
+
js["k2"] = p.k2;
|
163 |
+
js["k3"] = p.k3;
|
164 |
+
js["p1"] = p.p1;
|
165 |
+
js["p2"] = p.p2;
|
166 |
+
|
167 |
+
return js;
|
168 |
+
}
|
169 |
+
|
170 |
+
picojson::value Json(DepthSense::ExtrinsicParameters& p)
|
171 |
+
{
|
172 |
+
picojson::value js;
|
173 |
+
js["rows"] = "3";
|
174 |
+
js["cols"] = "4";
|
175 |
+
|
176 |
+
std::ostringstream oss;
|
177 |
+
oss << std::setprecision(17);
|
178 |
+
oss << "[" << p.r11 << "," << p.r12 << "," << p.r13 << "," << p.t1 << ";";
|
179 |
+
oss << p.r21 << "," << p.r22 << "," << p.r23 << "," << p.t2 << ";";
|
180 |
+
oss << p.r31 << "," << p.r32 << "," << p.r33 << "," << p.t3 << "]";
|
181 |
+
|
182 |
+
js["data"] = oss.str();
|
183 |
+
return js;
|
184 |
+
}
|
185 |
+
|
186 |
+
// Enumerate the device's nodes and bind the first unclaimed depth / colour
// node to each requested sensor slot, then publish camera intrinsics and
// extrinsics into the device property map as JSON.
void DepthSenseVideo::ConfigureNodes(const Uri& uri)
{
    std::vector<DepthSense::Node> nodes = device.getNodes();

    // Two logical sensor slots (img1 / img2); each may be depth, rgb or unassigned.
    for (int i = 0; i<2; ++i)
    {
        switch (sensorConfig[i].type)
        {
        case DepthSenseDepth:
        {
            for (int n = 0; n < (int)nodes.size(); n++)
            {
                DepthSense::Node node = nodes[n];
                // Claim the first depth node only once (g_dnode.isSet() is the guard).
                if ((node.is<DepthSense::DepthNode>()) && (!g_dnode.isSet()))
                {
                    depthmap_stream = i;
                    g_dnode = node.as<DepthSense::DepthNode>();
                    ConfigureDepthNode(sensorConfig[i], uri);
                    DepthSenseContext::I().Context().registerNode(node);
                }
            }
            break;
        }
        case DepthSenseRgb:
        {
            for (int n = 0; n < (int)nodes.size(); n++)
            {
                DepthSense::Node node = nodes[n];
                // Claim the first colour node only once (g_cnode.isSet() is the guard).
                if ((node.is<DepthSense::ColorNode>()) && (!g_cnode.isSet()))
                {
                    rgb_stream = i;
                    g_cnode = node.as<DepthSense::ColorNode>();
                    ConfigureColorNode(sensorConfig[i], uri);
                    DepthSenseContext::I().Context().registerNode(node);
                }
            }
            break;
        }
        default:
            // Slot unassigned - nothing to bind.
            continue;
        }
    }

    DepthSense::StereoCameraParameters scp = device.getStereoCameraParameters();

    //Set json device properties for intrinsics and extrinsics
    // Only populate once: if another code path already filled these keys,
    // leave them untouched.
    picojson::value& jsintrinsics = device_properties["intrinsics"];
    if (jsintrinsics.is<picojson::null>()) {
        jsintrinsics = picojson::value(picojson::array_type, false);
        jsintrinsics.get<picojson::array>().resize(streams.size());
        // depthmap_stream / rgb_stream are -1 when the sensor was not requested.
        if (depthmap_stream >= 0) jsintrinsics[depthmap_stream] = Json(scp.depthIntrinsics);
        if (rgb_stream >= 0) jsintrinsics[rgb_stream] = Json(scp.colorIntrinsics);
    }

    picojson::value& jsextrinsics = device_properties["extrinsics"];
    if(jsextrinsics.is<picojson::null>()){
        jsextrinsics = Json(scp.extrinsics);
    }
}
|
245 |
+
|
246 |
+
// Map a requested image size onto the matching DepthSense frame format,
// or FRAME_FORMAT_UNKNOWN if the size is not a recognised mode.
inline DepthSense::FrameFormat ImageDim2FrameFormat(const ImageDim& dim)
{
    // Table of the sizes the SDK supports, checked in the original order.
    static const struct { int w; int h; DepthSense::FrameFormat fmt; } known_formats[] = {
        {  160, 120, DepthSense::FRAME_FORMAT_QQVGA  },
        {  176, 144, DepthSense::FRAME_FORMAT_QCIF   },
        {  240, 160, DepthSense::FRAME_FORMAT_HQVGA  },
        {  320, 240, DepthSense::FRAME_FORMAT_QVGA   },
        {  352, 288, DepthSense::FRAME_FORMAT_CIF    },
        {  480, 320, DepthSense::FRAME_FORMAT_HVGA   },
        {  640, 480, DepthSense::FRAME_FORMAT_VGA    },
        { 1280, 720, DepthSense::FRAME_FORMAT_WXGA_H },
        {  320, 120, DepthSense::FRAME_FORMAT_DS311  },
        { 1024, 768, DepthSense::FRAME_FORMAT_XGA    },
        {  800, 600, DepthSense::FRAME_FORMAT_SVGA   },
        {  636, 480, DepthSense::FRAME_FORMAT_OVVGA  },
        {  640, 240, DepthSense::FRAME_FORMAT_WHVGA  },
        {  640, 360, DepthSense::FRAME_FORMAT_NHD    },
    };

    for(const auto& k : known_formats) {
        if(dim.x == k.w && dim.y == k.h) {
            return k.fmt;
        }
    }
    return DepthSense::FRAME_FORMAT_UNKNOWN;
}
|
307 |
+
|
308 |
+
// Mirror every property of `node` into the device property JSON, and for any
// property also named in the URI options, first try to write the URI value to
// the device. Writes to read-only properties only produce a warning.
void DepthSenseVideo::UpdateParameters(const DepthSense::Node& node, const Uri& uri)
{
    DepthSense::Type type = node.getType();
    picojson::value& jsnode = device_properties[type.name()];

    std::vector<DepthSense::PropertyBase> properties = type.getProperties();
    for(std::vector<DepthSense::PropertyBase>::const_iterator it = properties.begin(); it != properties.end(); ++it) {
        const DepthSense::PropertyBase& prop = *it;

        // The four branches below are identical except for the property's
        // value type (int32_t / float / bool / std::string).
        if (prop.is<DepthSense::Property<int32_t> >()) {
            DepthSense::Property<int32_t> tprop = prop.as<DepthSense::Property<int32_t> >();
            if (uri.Contains(prop.name())) {
                if (!prop.isReadOnly()) {
                    // Current device value doubles as the URI-parse default.
                    tprop.setValue(node, uri.Get<int32_t>(prop.name(), tprop.getValue(node)));
                } else {
                    pango_print_warn("DepthSense property '%s' is read-only\n", prop.name().c_str() );
                }
            }
            // Record the (possibly updated) value for user inspection.
            jsnode[prop.name()] = tprop.getValue(node);
        } else if (prop.is<DepthSense::Property<float> >()) {
            DepthSense::Property<float> tprop = prop.as<DepthSense::Property<float> >();
            if (uri.Contains(prop.name())) {
                if (!prop.isReadOnly()) {
                    tprop.setValue(node, uri.Get<float>(prop.name(), tprop.getValue(node)));
                } else {
                    pango_print_warn("DepthSense property '%s' is read-only\n", prop.name().c_str() );
                }
            }
            jsnode[prop.name()] = tprop.getValue(node);
        } else if (prop.is<DepthSense::Property<bool> >()) {
            DepthSense::Property<bool> tprop = prop.as<DepthSense::Property<bool> >();
            if (uri.Contains(prop.name())) {
                if (!prop.isReadOnly()) {
                    tprop.setValue(node, uri.Get<bool>(prop.name(), tprop.getValue(node)));
                } else {
                    pango_print_warn("DepthSense property '%s' is read-only\n", prop.name().c_str() );
                }
            }
            jsnode[prop.name()] = tprop.getValue(node);
        } else if (prop.is<DepthSense::Property<std::string> >()){
            DepthSense::Property<std::string> tprop = prop.as<DepthSense::Property<std::string> >();
            if (uri.Contains(prop.name())) {
                if (!prop.isReadOnly()) {
                    // .c_str(): setValue presumably takes a C string here - TODO confirm.
                    tprop.setValue(node, uri.Get<std::string>(prop.name(), tprop.getValue(node)).c_str() );
                } else {
                    pango_print_warn("DepthSense property '%s' is read-only\n", prop.name().c_str() );
                }
            }
            jsnode[prop.name()] = tprop.getValue(node);
        }
    }
}
|
360 |
+
|
361 |
+
void DepthSenseVideo::ConfigureDepthNode(const SensorConfig& sensorConfig, const Uri& uri)
|
362 |
+
{
|
363 |
+
g_dnode.newSampleReceivedEvent().connect(this, &DepthSenseVideo::onNewDepthSample);
|
364 |
+
|
365 |
+
DepthSense::DepthNode::Configuration config = g_dnode.getConfiguration();
|
366 |
+
|
367 |
+
config.frameFormat = ImageDim2FrameFormat(sensorConfig.dim);
|
368 |
+
config.framerate = sensorConfig.fps;
|
369 |
+
config.mode = DepthSense::DepthNode::CAMERA_MODE_CLOSE_MODE;
|
370 |
+
config.saturation = true;
|
371 |
+
|
372 |
+
try {
|
373 |
+
DepthSenseContext::I().Context().requestControl(g_dnode, 0);
|
374 |
+
g_dnode.setConfiguration(config);
|
375 |
+
g_dnode.setEnableDepthMap(true);
|
376 |
+
} catch (DepthSense::Exception& e) {
|
377 |
+
throw pangolin::VideoException("DepthSense exception whilst configuring node", e.what());
|
378 |
+
}
|
379 |
+
|
380 |
+
//Set pangolin stream for this channel
|
381 |
+
const int w = sensorConfig.dim.x;
|
382 |
+
const int h = sensorConfig.dim.y;
|
383 |
+
|
384 |
+
const PixelFormat pfmt = PixelFormatFromString("GRAY16LE");
|
385 |
+
|
386 |
+
const StreamInfo stream_info(pfmt, w, h, (w*pfmt.bpp) / 8, (unsigned char*)0);
|
387 |
+
streams.push_back(stream_info);
|
388 |
+
|
389 |
+
size_bytes += stream_info.SizeBytes();
|
390 |
+
|
391 |
+
enableDepth = true;
|
392 |
+
|
393 |
+
UpdateParameters(g_dnode, uri);
|
394 |
+
}
|
395 |
+
|
396 |
+
// Configure the claimed colour node from the requested sensor settings,
// enable colour-map streaming, and append the corresponding pangolin stream.
void DepthSenseVideo::ConfigureColorNode(const SensorConfig& sensorConfig, const Uri& uri)
{
    // connect new color sample handler
    g_cnode.newSampleReceivedEvent().connect(this, &DepthSenseVideo::onNewColorSample);

    DepthSense::ColorNode::Configuration config = g_cnode.getConfiguration();
    config.frameFormat = ImageDim2FrameFormat(sensorConfig.dim);
    config.compression = DepthSense::COMPRESSION_TYPE_MJPEG;
    config.powerLineFrequency = DepthSense::POWER_LINE_FREQUENCY_50HZ;
    config.framerate = sensorConfig.fps;

    try {
        DepthSenseContext::I().Context().requestControl(g_cnode,0);
        g_cnode.setConfiguration(config);
        g_cnode.setEnableColorMap(true);
        UpdateParameters(g_cnode, uri);
    } catch (DepthSense::Exception& e) {
        throw pangolin::VideoException("DepthSense exception whilst configuring node", e.what());
    }

    //Set pangolin stream for this channel
    const int w = sensorConfig.dim.x;
    const int h = sensorConfig.dim.y;

    // Colour samples are delivered (and kept) as BGR, see onNewColorSample.
    const PixelFormat pfmt = PixelFormatFromString("BGR24");

    // Offset past any previously configured stream in the packed frame buffer.
    const StreamInfo stream_info(pfmt, w, h, (w*pfmt.bpp) / 8, (unsigned char*)0 + size_bytes);
    streams.push_back(stream_info);

    size_bytes += stream_info.SizeBytes();

    enableColor = true;
}
|
429 |
+
|
430 |
+
// DepthSense SDK callback: a new colour frame has arrived.
// Blocks until GrabNext requests a fill, copies the colour data into its slot
// of the packed output buffer, and signals GrabNext once both enabled streams
// (within MAX_DELTA_TIME of each other) have been written.
void DepthSenseVideo::onNewColorSample(DepthSense::ColorNode node, DepthSense::ColorNode::NewSampleReceivedData data)
{
    {
        std::unique_lock<std::mutex> lock(update_mutex);

        // Wait for fill request
        while (!fill_image) {
            cond_image_requested.wait(lock);
        }

        // Update per-frame parameters
        //printf("Color delta: %.1f\n", fabs(colorTs - data.timeOfCapture));
        colorTs = data.timeOfCapture;
        picojson::value& jsstream = frame_properties["streams"][rgb_stream];
        jsstream["time_us"] = data.timeOfCapture;

        // ROGUE_ADDR is a sentinel meaning "consume the sample but discard data".
        if (fill_image != (unsigned char*)ROGUE_ADDR) {
            // Fill with data
            unsigned char* imagePtr = fill_image;
            bool copied = false;
            // Walk the sensor slots in declared order to locate this stream's
            // byte offset within the packed output buffer.
            for (int i = 0; !copied && i < 2; ++i)
            {
                switch (sensorConfig[i].type)
                {
                case DepthSenseDepth:
                {
                    // Skip over the depth stream's bytes.
                    imagePtr += streams[i].SizeBytes();
                    break;
                }
                case DepthSenseRgb:
                {
                    // Leave as BGR
                    std::memcpy(imagePtr, data.colorMap, streams[i].SizeBytes());
                    copied = true;
                    break;
                }
                default:
                    continue;
                }
            }
            gotColor++;
        }

        //printf("Got color at: %.1f\n", colorTs);

        // If the pending depth frame is too far from this colour frame in time,
        // drop it and wait for a better-synchronised pair.
        if(gotDepth)
        {
            double delta = fabs(GetDeltaTime());
            if(delta > MAX_DELTA_TIME)
            {
                //printf("**** Waiting for another depth, delta: %.1f ****\n", delta);
                gotDepth = 0;
                return;
            }
        }
    }

    // Wake GrabNext (lock released above so it can proceed immediately).
    cond_image_filled.notify_one();
}
|
489 |
+
|
490 |
+
// DepthSense SDK callback: a new depth frame has arrived.
// Mirror image of onNewColorSample: waits for a fill request, copies the
// depth map into its slot of the packed output buffer, and signals GrabNext
// when a time-synchronised pair is available.
void DepthSenseVideo::onNewDepthSample(DepthSense::DepthNode node, DepthSense::DepthNode::NewSampleReceivedData data)
{
    {
        std::unique_lock<std::mutex> lock(update_mutex);

        // Wait for fill request
        while(!fill_image) {
            cond_image_requested.wait(lock);
        }

        // Update per-frame parameters
        //printf("Depth delta: %.1f\n", fabs(depthTs - data.timeOfCapture));
        depthTs = data.timeOfCapture;

        picojson::value& jsstream = frame_properties["streams"][depthmap_stream];
        jsstream["time_us"] = depthTs;

        // ROGUE_ADDR is a sentinel meaning "consume the sample but discard data".
        if(fill_image != (unsigned char*)ROGUE_ADDR) {
            // Fill with data
            unsigned char* imagePtr = fill_image;
            bool copied = false;
            // Walk the sensor slots in declared order to locate this stream's
            // byte offset within the packed output buffer.
            for (int i = 0; i < 2; ++i)
            {
                switch (sensorConfig[i].type)
                {
                case DepthSenseDepth:
                {
                    memcpy(imagePtr, data.depthMap, streams[i].SizeBytes());
                    copied = true;
                    break;
                }
                case DepthSenseRgb:
                {
                    // Skip over the colour stream's bytes.
                    imagePtr += streams[i].SizeBytes();
                    break;
                }
                default:
                    continue;
                }
                if(copied)
                {
                    break;
                }
            }
            gotDepth++;
        }

        //printf("Got depth at: %.1f\n", depthTs);

        // If the pending colour frame is too far from this depth frame in time,
        // drop it and wait for a better-synchronised pair.
        if(gotColor)
        {
            double delta = fabs(GetDeltaTime());
            if(delta > MAX_DELTA_TIME)
            {
                //printf("**** Waiting for another color, delta: %.1f ****\n", delta);
                gotColor = 0;
                return;
            }
        }
    }

    // Wake GrabNext (lock released above so it can proceed immediately).
    cond_image_filled.notify_one();
}
|
553 |
+
|
554 |
+
// Intentional no-op: capture appears to be driven by DepthSenseContext and the
// SDK callbacks registered in ConfigureNodes - TODO confirm.
void DepthSenseVideo::Start()
{
}
|
557 |
+
|
558 |
+
// Intentional no-op: see Start(); node teardown is handled elsewhere.
void DepthSenseVideo::Stop()
{
}
|
561 |
+
|
562 |
+
// Total size in bytes of one packed frame (all enabled streams).
size_t DepthSenseVideo::SizeBytes() const
{
    return size_bytes;
}
|
566 |
+
|
567 |
+
// Per-stream descriptions (format, size, offset) populated during configuration.
const std::vector<StreamInfo>& DepthSenseVideo::Streams() const
{
    return streams;
}
|
571 |
+
|
572 |
+
// Request that the SDK callbacks fill `image` with the next synchronised
// frame pair, then block until every enabled stream has been written.
// Not re-entrant: a concurrent call while a fill is pending throws.
bool DepthSenseVideo::GrabNext( unsigned char* image, bool /*wait*/ )
{
    if(fill_image) {
        throw std::runtime_error("GrabNext Cannot be called concurrently");
    }

    //printf("#### Grab Next ####\n");

    // Request that image is filled with data
    fill_image = image;
    cond_image_requested.notify_one();

    // Wait until it has been filled successfully.
    {
        std::unique_lock<std::mutex> lock(update_mutex);
        // Wait for each enabled stream to report its copy complete.
        while ((enableDepth && !gotDepth) || (enableColor && !gotColor))
        {
            cond_image_filled.wait(lock);
        }

        // Reset completion flags for the next grab.
        if (gotDepth)
        {
            gotDepth = 0;
        }
        if (gotColor)
        {
            gotColor = 0;
        }
        // Clearing fill_image re-arms the callbacks' wait loop.
        fill_image = 0;
    }

    //printf("Delta time: %.1f\n", fabs(GetDeltaTime()));

    return true;
}
|
607 |
+
|
608 |
+
// Live device: the next frame is always the newest, so simply delegate.
bool DepthSenseVideo::GrabNewest( unsigned char* image, bool wait )
{
    return GrabNext(image,wait);
}
|
612 |
+
|
613 |
+
// Signed capture-time difference (depth minus colour) of the latest samples;
// callers compare fabs() of this against MAX_DELTA_TIME for synchronisation.
double DepthSenseVideo::GetDeltaTime() const
{
    return depthTs - colorTs;
}
|
617 |
+
|
618 |
+
// Parse a URI sensor-type option: "rgb", "depth", or empty (unassigned).
// Any other value is an error.
DepthSenseSensorType depthsense_sensor(const std::string& str)
{
    if (str == "rgb") {
        return DepthSenseRgb;
    }
    if (str == "depth") {
        return DepthSenseDepth;
    }
    if (str.empty()) {
        return DepthSenseUnassigned;
    }
    throw pangolin::VideoException("Unknown DepthSense sensor", str);
}
|
633 |
+
|
634 |
+
// Register the "depthsense://" video scheme. URI options:
//   img1/img2 : sensor per slot ("depth", "rgb", or empty) - img1 defaults to depth
//   size1/size2 : requested resolutions (defaults depend on sensor type)
//   fps1/fps2 : requested frame rates (default 30)
PANGOLIN_REGISTER_FACTORY(DepthSenseVideo)
{
    struct DepthSenseVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            DepthSenseSensorType img1 = depthsense_sensor(uri.Get<std::string>("img1", "depth"));
            DepthSenseSensorType img2 = depthsense_sensor(uri.Get<std::string>("img2", ""));

            // Default resolution differs per sensor: QVGA for depth, VGA for colour.
            const ImageDim dim1 = uri.Get<ImageDim>("size1", img1 == DepthSenseDepth ? ImageDim(320, 240) : ImageDim(640, 480) );
            const ImageDim dim2 = uri.Get<ImageDim>("size2", img2 == DepthSenseDepth ? ImageDim(320, 240) : ImageDim(640, 480) );

            const unsigned int fps1 = uri.Get<unsigned int>("fps1", 30);
            const unsigned int fps2 = uri.Get<unsigned int>("fps2", 30);

            // Device index 0: the context owns device lifetime - TODO confirm.
            return std::unique_ptr<VideoInterface>(
                DepthSenseContext::I().GetDepthSenseVideo(0, img1, img2, dim1, dim2, fps1, fps2, uri)
            );
        }
    };

    FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<DepthSenseVideoFactory>(), 10, "depthsense");
}
|
655 |
+
|
656 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/ffmpeg.cpp
ADDED
@@ -0,0 +1,419 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2011 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
// It is impossible to keep up with ffmpeg deprecations, so ignore these warnings.
|
29 |
+
#if defined(__GNUC__)
|
30 |
+
# pragma GCC diagnostic ignored "-Wdeprecated"
|
31 |
+
# pragma GCC diagnostic ignored "-Wdeprecated-declarations"
|
32 |
+
#endif
|
33 |
+
|
34 |
+
#include <array>
|
35 |
+
#include <pangolin/factory/factory_registry.h>
|
36 |
+
#include <pangolin/video/iostream_operators.h>
|
37 |
+
#include <pangolin/video/drivers/ffmpeg.h>
|
38 |
+
#include <pangolin/utils/file_extension.h>
|
39 |
+
|
40 |
+
extern "C"
|
41 |
+
{
|
42 |
+
#include <libavformat/avio.h>
|
43 |
+
#include <libavutil/mathematics.h>
|
44 |
+
#include <libavdevice/avdevice.h>
|
45 |
+
#include <libavcodec/avcodec.h>
|
46 |
+
#include <libavformat/avformat.h>
|
47 |
+
}
|
48 |
+
|
49 |
+
// Found https://github.com/leandromoreira/ffmpeg-libav-tutorial
|
50 |
+
// Best reference I've seen for ffmpeg api
|
51 |
+
|
52 |
+
namespace pangolin
|
53 |
+
{
|
54 |
+
|
55 |
+
std::string ffmpeg_error_string(int err)
|
56 |
+
{
|
57 |
+
std::string ret(256, '\0');
|
58 |
+
av_make_error_string(ret.data(), ret.size(), err);
|
59 |
+
return ret;
|
60 |
+
}
|
61 |
+
|
62 |
+
std::ostream& operator<<(std::ostream& os, const AVRational& v)
|
63 |
+
{
|
64 |
+
os << v.num << "/" << v.den;
|
65 |
+
return os;
|
66 |
+
}
|
67 |
+
|
68 |
+
// Implementation of sws_scale_frame for versions which don't have it.
|
69 |
+
int pango_sws_scale_frame(struct SwsContext *c, AVFrame *dst, const AVFrame *src)
{
    // Convert the entire source frame (rows 0..src->height) into dst's
    // already-allocated buffers. Returns libswscale's result (output slice
    // height, or negative on error).
    return sws_scale(c,
        src->data, src->linesize, 0, src->height,
        dst->data, dst->linesize
    );
}
|
76 |
+
|
77 |
+
// Construct from a filename/URL; PathExpand resolves ~ and environment
// variables before all real set-up is delegated to InitUrl.
FfmpegVideo::FfmpegVideo(const std::string filename, const std::string strfmtout, const std::string codec_hint, bool dump_info, int user_video_stream, ImageDim size)
    :pFormatCtx(nullptr), pCodecContext(nullptr)
{
    InitUrl(PathExpand(filename), strfmtout, codec_hint, dump_info, user_video_stream, size);
}
|
82 |
+
|
83 |
+
void FfmpegVideo::InitUrl(const std::string url, const std::string strfmtout, const std::string codec_hint, bool dump_info, int user_video_stream, ImageDim size)
|
84 |
+
{
|
85 |
+
if( url.find('*') != url.npos )
|
86 |
+
throw VideoException("Wildcards not supported. Please use ffmpegs printf style formatting for image sequences. e.g. img-000000%04d.ppm");
|
87 |
+
|
88 |
+
// Register all devices
|
89 |
+
avdevice_register_all();
|
90 |
+
|
91 |
+
#if (LIBAVFORMAT_VERSION_MAJOR >= 59)
|
92 |
+
const AVInputFormat* fmt = nullptr;
|
93 |
+
#else
|
94 |
+
AVInputFormat* fmt = nullptr;
|
95 |
+
#endif
|
96 |
+
|
97 |
+
if( !codec_hint.empty() ) {
|
98 |
+
fmt = av_find_input_format(codec_hint.c_str());
|
99 |
+
}
|
100 |
+
|
101 |
+
AVDictionary* options = nullptr;
|
102 |
+
if(size.x != 0 && size.y != 0) {
|
103 |
+
std::string s = std::to_string(size.x) + "x" + std::to_string(size.y);
|
104 |
+
av_dict_set(&options, "video_size", s.c_str(), 0);
|
105 |
+
}
|
106 |
+
if( avformat_open_input(&pFormatCtx, url.c_str(), fmt, &options) )
|
107 |
+
throw VideoException("Couldn't open stream");
|
108 |
+
|
109 |
+
if( !ToLowerCopy(codec_hint).compare("mjpeg") )
|
110 |
+
#ifdef HAVE_FFMPEG_MAX_ANALYZE_DURATION2
|
111 |
+
pFormatCtx->max_analyze_duration2 = AV_TIME_BASE * 0.0;
|
112 |
+
#else
|
113 |
+
pFormatCtx->max_analyze_duration = AV_TIME_BASE * 0.0;
|
114 |
+
#endif
|
115 |
+
|
116 |
+
|
117 |
+
// Retrieve stream information
|
118 |
+
if(avformat_find_stream_info(pFormatCtx, 0)<0)
|
119 |
+
throw VideoException("Couldn't find stream information");
|
120 |
+
|
121 |
+
if(dump_info) {
|
122 |
+
// Dump information about file onto standard error
|
123 |
+
av_dump_format(pFormatCtx, 0, url.c_str(), false);
|
124 |
+
}
|
125 |
+
|
126 |
+
const AVCodec *pCodec = nullptr;
|
127 |
+
const AVCodecParameters *pCodecParameters = NULL;
|
128 |
+
int found_video_streams = 0;
|
129 |
+
|
130 |
+
// loop though all the streams and print its main information
|
131 |
+
for (int i = 0; i < pFormatCtx->nb_streams; i++)
|
132 |
+
{
|
133 |
+
AVStream * stream = pFormatCtx->streams[i];
|
134 |
+
const AVCodecParameters *pLocalCodecParameters = stream->codecpar;
|
135 |
+
|
136 |
+
// finds the registered decoder for a codec ID
|
137 |
+
const AVCodec *pLocalCodec = avcodec_find_decoder(pLocalCodecParameters->codec_id);
|
138 |
+
if (!pLocalCodec) {
|
139 |
+
pango_print_debug("Skipping stream with unsupported codec.\n");
|
140 |
+
stream->discard = AVDISCARD_ALL;
|
141 |
+
continue;
|
142 |
+
}
|
143 |
+
|
144 |
+
// When the stream is a video we store its index, codec parameters and codec
|
145 |
+
if (pLocalCodecParameters->codec_type == AVMEDIA_TYPE_VIDEO) {
|
146 |
+
if (found_video_streams == user_video_stream) {
|
147 |
+
pCodec = pLocalCodec;
|
148 |
+
pCodecParameters = pLocalCodecParameters;
|
149 |
+
videoStream = i;
|
150 |
+
}
|
151 |
+
++found_video_streams;
|
152 |
+
}else{
|
153 |
+
stream->discard = AVDISCARD_ALL;
|
154 |
+
pango_print_debug("Skipping stream with supported but non-video codec.\n");
|
155 |
+
}
|
156 |
+
}
|
157 |
+
|
158 |
+
if(found_video_streams==0 || user_video_stream >= found_video_streams)
|
159 |
+
throw VideoException("Couldn't find appropriate video stream");
|
160 |
+
|
161 |
+
packet = av_packet_alloc();
|
162 |
+
if (!packet)
|
163 |
+
throw VideoException("Failed to allocated memory for AVPacket");
|
164 |
+
|
165 |
+
// try to work out how many frames we have in video and conversion from frames to pts
|
166 |
+
auto vid_stream = pFormatCtx->streams[videoStream];
|
167 |
+
|
168 |
+
auto set_or_check = [this](int64_t& var, int64_t val){
|
169 |
+
if(!var) {
|
170 |
+
var = val;
|
171 |
+
}else if(var != val) {
|
172 |
+
pango_print_warn("Inconsistent calculation for video.");
|
173 |
+
}
|
174 |
+
};
|
175 |
+
|
176 |
+
numFrames = vid_stream->nb_frames;
|
177 |
+
|
178 |
+
// try to fix missing duration if we have numFrames
|
179 |
+
if(numFrames > 0 && vid_stream->duration <= 0 && vid_stream->avg_frame_rate.num > 0) {
|
180 |
+
auto duration_s = av_div_q( av_make_q(numFrames,1), vid_stream->avg_frame_rate);
|
181 |
+
auto duration_pts = av_div_q( duration_s, vid_stream->time_base);
|
182 |
+
if(duration_pts.den == 1) {
|
183 |
+
vid_stream->duration = duration_pts.num;
|
184 |
+
}else{
|
185 |
+
pango_print_warn("Non integral result for duration in pts. Ignoring.\n");
|
186 |
+
}
|
187 |
+
}
|
188 |
+
|
189 |
+
// try to fix numFrames if we have no duration
|
190 |
+
if(numFrames <= 0 && vid_stream->duration > 0 && vid_stream->avg_frame_rate.num > 0 && vid_stream->time_base.num > 0) {
|
191 |
+
auto duration_s = av_mul_q( av_make_q(vid_stream->duration,1), vid_stream->time_base);
|
192 |
+
auto frames_rational = av_mul_q(duration_s, vid_stream->avg_frame_rate);
|
193 |
+
if(frames_rational.num > 0 && frames_rational.den == 1) {
|
194 |
+
numFrames = frames_rational.num;
|
195 |
+
}else{
|
196 |
+
pango_print_warn("Non integral result for numFrames. Ignoring.\n");
|
197 |
+
}
|
198 |
+
}
|
199 |
+
|
200 |
+
if(numFrames && vid_stream->duration && vid_stream->duration % numFrames == 0) {
|
201 |
+
ptsPerFrame = vid_stream->duration / numFrames;
|
202 |
+
}else{
|
203 |
+
ptsPerFrame = 0;
|
204 |
+
numFrames = 0;
|
205 |
+
pango_print_warn("Video Doesn't contain seeking information\n");
|
206 |
+
}
|
207 |
+
|
208 |
+
next_frame = 0;
|
209 |
+
|
210 |
+
// Find the decoder for the video stream
|
211 |
+
pVidCodec = pCodec;
|
212 |
+
if(pVidCodec==0)
|
213 |
+
throw VideoException("Codec not found");
|
214 |
+
|
215 |
+
// Allocate video frames
|
216 |
+
pFrame = av_frame_alloc();
|
217 |
+
pFrameOut = av_frame_alloc();
|
218 |
+
if(!pFrame || !pFrameOut)
|
219 |
+
throw VideoException("Couldn't allocate frames");
|
220 |
+
|
221 |
+
fmtout = FfmpegFmtFromString(strfmtout);
|
222 |
+
if(fmtout == AV_PIX_FMT_NONE )
|
223 |
+
throw VideoException("Output format not recognised",strfmtout);
|
224 |
+
|
225 |
+
pCodecContext = avcodec_alloc_context3(pCodec);
|
226 |
+
if (!pCodecContext)
|
227 |
+
throw VideoException("failed to allocated memory for AVCodecContext");
|
228 |
+
|
229 |
+
if (avcodec_parameters_to_context(pCodecContext, pCodecParameters) < 0)
|
230 |
+
throw VideoException("failed to copy codec params to codec context");
|
231 |
+
|
232 |
+
if (avcodec_open2(pCodecContext, pCodec, NULL) < 0)
|
233 |
+
throw VideoException("failed to open codec through avcodec_open2");
|
234 |
+
|
235 |
+
|
236 |
+
// Image dimensions
|
237 |
+
const int w = pCodecContext->width;
|
238 |
+
const int h = pCodecContext->height;
|
239 |
+
|
240 |
+
pFrameOut->width = w;
|
241 |
+
pFrameOut->height = h;
|
242 |
+
pFrameOut->format = fmtout;
|
243 |
+
if(av_frame_get_buffer(pFrameOut, 0) != 0) {
|
244 |
+
throw VideoException("");
|
245 |
+
}
|
246 |
+
|
247 |
+
// Allocate SWS for converting pixel formats
|
248 |
+
img_convert_ctx = sws_getContext(w, h,
|
249 |
+
pCodecContext->pix_fmt,
|
250 |
+
w, h, fmtout, SWS_FAST_BILINEAR,
|
251 |
+
NULL, NULL, NULL);
|
252 |
+
if(!img_convert_ctx) {
|
253 |
+
throw VideoException("Cannot initialize the conversion context");
|
254 |
+
}
|
255 |
+
|
256 |
+
// Populate stream info for users to query
|
257 |
+
numBytesOut = 0;
|
258 |
+
{
|
259 |
+
const PixelFormat strm_fmt = PixelFormatFromString(FfmpegFmtToString(fmtout));
|
260 |
+
const size_t pitch = (w*strm_fmt.bpp)/8;
|
261 |
+
const size_t size_bytes = h*pitch;
|
262 |
+
streams.emplace_back(strm_fmt, w, h, pitch, (unsigned char*)0 + numBytesOut);
|
263 |
+
numBytesOut += size_bytes;
|
264 |
+
}
|
265 |
+
|
266 |
+
auto s = pFormatCtx->streams[videoStream];
|
267 |
+
}
|
268 |
+
|
269 |
+
// Release all FFMPEG resources.
// Fixes: av_free() on AVFrames leaked their internal buffers (use
// av_frame_free); `packet` from av_packet_alloc was never freed; avcodec_close
// left the AVCodecContext itself allocated (use avcodec_free_context).
FfmpegVideo::~FfmpegVideo()
{
    av_frame_free(&pFrameOut);
    av_frame_free(&pFrame);
    av_packet_free(&packet);

    avcodec_free_context(&pCodecContext);
    avformat_close_input(&pFormatCtx);
    sws_freeContext(img_convert_ctx);
}
|
278 |
+
|
279 |
+
// Single-element stream description (format/size/pitch) built in InitUrl.
const std::vector<StreamInfo>& FfmpegVideo::Streams() const
{
    return streams;
}
|
283 |
+
|
284 |
+
// Size in bytes of one converted output frame.
size_t FfmpegVideo::SizeBytes() const
{
    return numBytesOut;
}
|
288 |
+
|
289 |
+
// Intentional no-op: decoding is pull-based via GrabNext.
void FfmpegVideo::Start()
{
}
|
292 |
+
|
293 |
+
// Intentional no-op: see Start(); resources are released in the destructor.
void FfmpegVideo::Stop()
{
}
|
296 |
+
|
297 |
+
bool FfmpegVideo::GrabNext(unsigned char* image, bool /*wait*/)
|
298 |
+
{
|
299 |
+
auto vid_stream = pFormatCtx->streams[videoStream];
|
300 |
+
|
301 |
+
while(true)
|
302 |
+
{
|
303 |
+
const int rx_res = avcodec_receive_frame(pCodecContext, pFrame);
|
304 |
+
if(rx_res == 0) {
|
305 |
+
const int expected_pts = vid_stream->start_time + next_frame * ptsPerFrame;
|
306 |
+
if(ptsPerFrame > 0 && expected_pts > pFrame->pts) {
|
307 |
+
// We dont have the right frame, probably from seek to keyframe.
|
308 |
+
continue;
|
309 |
+
}
|
310 |
+
pango_sws_scale_frame(img_convert_ctx, pFrameOut, pFrame);
|
311 |
+
av_image_copy_to_buffer(image, numBytesOut, pFrameOut->data, pFrameOut->linesize, fmtout, pFrameOut->width, pFrameOut->height, 1);
|
312 |
+
next_frame++;
|
313 |
+
return true;
|
314 |
+
}else{
|
315 |
+
while(true) {
|
316 |
+
const int read_res = av_read_frame(pFormatCtx, packet);
|
317 |
+
if(read_res == 0) {
|
318 |
+
if(packet->stream_index==videoStream) {
|
319 |
+
if(avcodec_send_packet(pCodecContext, packet) == 0) {
|
320 |
+
break; // have frame for codex
|
321 |
+
}
|
322 |
+
}
|
323 |
+
av_packet_unref(packet);
|
324 |
+
}else{
|
325 |
+
// No more packets for codec
|
326 |
+
return false;
|
327 |
+
}
|
328 |
+
}
|
329 |
+
}
|
330 |
+
}
|
331 |
+
}
|
332 |
+
|
333 |
+
bool FfmpegVideo::GrabNewest(unsigned char *image, bool wait)
|
334 |
+
{
|
335 |
+
return GrabNext(image,wait);
|
336 |
+
}
|
337 |
+
|
338 |
+
// Index of the most recently grabbed frame (next_frame is post-incremented
// in GrabNext).
// NOTE(review): wraps (size_t underflow) if called before the first
// successful GrabNext - confirm callers only query after a grab.
size_t FfmpegVideo::GetCurrentFrameId() const
{
    return next_frame-1;
}
|
342 |
+
|
343 |
+
// Total frame count derived in InitUrl; 0 when the container provided no
// reliable seeking information.
size_t FfmpegVideo::GetTotalFrames() const
{
    return numFrames;
}
|
347 |
+
|
348 |
+
// Seek so that the next GrabNext returns `frameid`. Requires the
// frames-to-pts mapping derived in InitUrl (ptsPerFrame != 0); otherwise the
// call is a no-op. Returns the (possibly unchanged) next frame index.
size_t FfmpegVideo::Seek(size_t frameid)
{
    if(ptsPerFrame && frameid != next_frame) {
        const int64_t pts = ptsPerFrame*frameid;
        // Seek to a keyframe at or before the target pts; GrabNext then
        // decodes forward, discarding frames until the expected pts.
        const int res = avformat_seek_file(pFormatCtx, videoStream, 0, pts, pts, 0);
        // Drop any buffered frames from the old position.
        avcodec_flush_buffers(pCodecContext);

        if(res >= 0) {
            // success - next frame to read will be frameid, so 'current frame' is one before that.
            next_frame = frameid;
        }else{
            pango_print_info("error whilst seeking. %u, %s\n", (unsigned)frameid, ffmpeg_error_string(res).data());
        }
    }

    return next_frame;
}
|
365 |
+
|
366 |
+
|
367 |
+
// Register the "ffmpeg://" scheme (plus low-precedence file/files handlers
// for known video extensions).
// Fix: the documented "size" URI option was parsed but never forwarded to the
// FfmpegVideo constructor, so it was silently ignored.
PANGOLIN_REGISTER_FACTORY(FfmpegVideo)
{
    struct FfmpegVideoFactory : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"ffmpeg",0}, {"file",15}, {"files",15}};
        }
        const char* Description() const override
        {
            return "Use the FFMPEG library to decode videos.";
        }
        ParamSet Params() const override
        {
            return {{
                {"fmt","RGB24","Use FFMPEG to decode to this output format."},
                {"stream","0","Decode stream with this index."},
                {"codec_hint","","Apply a hint to FFMPEG on codec. Examples include {MJPEG,video4linux,...}"},
                {"size","","Request a particular size output from FFMPEG"},
                {"verbose","0","Output FFMPEG instantiation information."},
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            // Extensions we are willing to open without an explicit ffmpeg:// scheme.
            const std::array<std::string,43> ffmpeg_ext = {{
                ".3g2",".3gp", ".amv", ".asf", ".avi", ".drc", ".flv", ".f4v",
                ".f4p", ".f4a", ".f4b", ".gif", ".gifv", ".m4v", ".mkv", ".mng", ".mov", ".qt",
                ".mp4", ".m4p", ".m4v", ".mpg", ".mp2", ".mpeg", ".mpe", ".mpv", ".mpg", ".mpeg",
                ".m2v", ".mxf", ".nsv", ".ogv", ".ogg", ".rm", ".rmvb", ".roq", ".svi", ".vob",
                ".webm", ".wmv", ".yuv", ".h264", ".h265"
            }};

            if(!uri.scheme.compare("file") || !uri.scheme.compare("files")) {
                const std::string ext = FileLowercaseExtention(uri.url);
                if(std::find(ffmpeg_ext.begin(), ffmpeg_ext.end(), ext) == ffmpeg_ext.end()) {
                    // Don't try to load unknown files without the ffmpeg:// scheme.
                    return std::unique_ptr<VideoInterface>();
                }
            }

            const bool verbose = uri.Get<bool>("verbose",false);
            std::string outfmt = uri.Get<std::string>("fmt","RGB24");
            std::string codec_hint = uri.Get<std::string>("codec_hint","");
            ToUpper(outfmt);
            ToUpper(codec_hint);
            const int video_stream = uri.Get<int>("stream",0);
            const ImageDim size = uri.Get<ImageDim>("size",ImageDim(0,0));
            // BUGFIX: forward the requested size (previously parsed but dropped).
            return std::unique_ptr<VideoInterface>( new FfmpegVideo(uri.url.c_str(), outfmt, codec_hint, verbose, video_stream, size) );
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<FfmpegVideoFactory>());
}
|
418 |
+
|
419 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/ffmpeg_convert.cpp
ADDED
@@ -0,0 +1,158 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
// It is impossible to keep up with ffmpeg deprecations, so ignore these warnings.
|
2 |
+
#if defined(__GNUC__)
|
3 |
+
# pragma GCC diagnostic ignored "-Wdeprecated"
|
4 |
+
# pragma GCC diagnostic ignored "-Wdeprecated-declarations"
|
5 |
+
#endif
|
6 |
+
|
7 |
+
#include <pangolin/video/video.h>
|
8 |
+
#include <pangolin/factory/factory_registry.h>
|
9 |
+
#include <pangolin/video/drivers/ffmpeg_convert.h>
|
10 |
+
|
11 |
+
namespace pangolin {
|
12 |
+
|
13 |
+
// Defined in ffmpeg.cpp
|
14 |
+
int pango_sws_scale_frame(struct SwsContext *c, AVFrame *dst, const AVFrame *src);
|
15 |
+
|
16 |
+
// Convert one stream's pixels: copy the stream's slice of the user buffer
// into the ffmpeg source frame, run the swscale conversion, and write the
// converted pixels back out at the stream's offset in the destination.
// NOTE(review): assumes the source image occupies a single tightly-packed
// plane (buf[0]) with no row padding - confirm for planar input formats.
void FfmpegConverter::ConvertContext::convert(const unsigned char* src, unsigned char* dst)
{
    // Copy into ffmpeg src buffer from user buffer
    memcpy(avsrc->buf[0]->data, src + src_buffer_offset, avsrc->buf[0]->size);
    pango_sws_scale_frame(img_convert_ctx, avdst, avsrc);
    av_image_copy_to_buffer(dst + dst_buffer_offset, avdst->buf[0]->size, avdst->data, avdst->linesize, fmtdst, avdst->width, avdst->height, 1);
}
|
23 |
+
|
24 |
+
// Wrap 'videoin_' so that every stream is pixel-format converted to
// 'sfmtdst' using libswscale; 'method' selects the scaling algorithm.
// Takes ownership of the wrapped video interface.
FfmpegConverter::FfmpegConverter(std::unique_ptr<VideoInterface> &videoin_, const std::string sfmtdst, FfmpegMethod method )
    :videoin(std::move(videoin_))
{
    if( !videoin )
        throw VideoException("Source video interface not specified");

    // Staging buffer that raw frames are grabbed into before conversion.
    input_buffer = std::unique_ptr<unsigned char[]>(new unsigned char[videoin->SizeBytes()]);

    converters.resize(videoin->Streams().size());

    dst_buffer_size = 0;

    for(size_t i=0; i < videoin->Streams().size(); ++i) {
        const StreamInfo instrm = videoin->Streams()[i];

        converters[i].w=instrm.Width();
        converters[i].h=instrm.Height();

        // One SwsContext per stream. Input and output dimensions are the
        // same; only the pixel format changes.
        converters[i].fmtdst = FfmpegFmtFromString(sfmtdst);
        converters[i].fmtsrc = FfmpegFmtFromString(instrm.PixFormat());
        converters[i].img_convert_ctx = sws_getContext(
            instrm.Width(), instrm.Height(), converters[i].fmtsrc,
            instrm.Width(), instrm.Height(), converters[i].fmtdst,
            method, NULL, NULL, NULL
        );
        if(!converters[i].img_convert_ctx)
            throw VideoException("Could not create SwScale context for pixel conversion");

        converters[i].dst_buffer_offset=dst_buffer_size;
        // StreamInfo encodes the stream's byte offset as a fake pointer;
        // recover the offset by subtracting the null base.
        converters[i].src_buffer_offset=instrm.Offset() - (unsigned char*)0;
        //converters[i].src_buffer_offset=src_buffer_size;

        // NOTE(review): av_frame_get_buffer return values are unchecked;
        // an allocation failure here would only surface later as a crash.
        converters[i].avsrc = av_frame_alloc();
        converters[i].avsrc->width = instrm.Width();
        converters[i].avsrc->height = instrm.Height();
        converters[i].avsrc->format = FfmpegFmtFromString(instrm.PixFormat());
        av_frame_get_buffer(converters[i].avsrc, 0);

        converters[i].avdst = av_frame_alloc();
        converters[i].avdst->width = instrm.Width();
        converters[i].avdst->height = instrm.Height();
        converters[i].avdst->format = FfmpegFmtFromString(sfmtdst);
        av_frame_get_buffer(converters[i].avdst, 0);

        // Publish the converted stream layout to our own callers.
        const PixelFormat pxfmtdst = PixelFormatFromString(sfmtdst);
        const StreamInfo sdst( pxfmtdst, instrm.Width(), instrm.Height(), (instrm.Width()*pxfmtdst.bpp)/8, (unsigned char*)0 + converters[i].dst_buffer_offset );
        streams.push_back(sdst);

        dst_buffer_size += av_image_get_buffer_size(converters[i].fmtdst, instrm.Width(), instrm.Height(), 0);
    }

}
|
76 |
+
|
77 |
+
// Release per-stream ffmpeg resources.
// Fixes two leaks in the original: av_free() on an AVFrame released only
// the struct, not its reference-counted data buffers (av_frame_free does
// both), and the SwsContext allocated in the constructor was never freed.
FfmpegConverter::~FfmpegConverter()
{
    for(ConvertContext& c : converters)
    {
        av_frame_free(&c.avsrc);
        av_frame_free(&c.avdst);
        if(c.img_convert_ctx) {
            sws_freeContext(c.img_convert_ctx);
            c.img_convert_ctx = nullptr;
        }
    }
}
|
85 |
+
|
86 |
+
// Starting is delegated implicitly to the wrapped source; nothing to do.
void FfmpegConverter::Start()
{
}
|
90 |
+
|
91 |
+
// Stopping is delegated implicitly to the wrapped source; nothing to do.
void FfmpegConverter::Stop()
{
}
|
95 |
+
|
96 |
+
// Total size of one converted multi-stream frame, computed at construction.
size_t FfmpegConverter::SizeBytes() const
{
    return dst_buffer_size;
}
|
100 |
+
|
101 |
+
// Layouts of the converted output streams (not the wrapped source's).
const std::vector<StreamInfo>& FfmpegConverter::Streams() const
{
    return streams;
}
|
105 |
+
|
106 |
+
bool FfmpegConverter::GrabNext( unsigned char* image, bool wait )
|
107 |
+
{
|
108 |
+
if( videoin->GrabNext(input_buffer.get(),wait) )
|
109 |
+
{
|
110 |
+
for(ConvertContext&c:converters) {
|
111 |
+
c.convert(input_buffer.get(), image);
|
112 |
+
}
|
113 |
+
return true;
|
114 |
+
}
|
115 |
+
return false;
|
116 |
+
}
|
117 |
+
|
118 |
+
bool FfmpegConverter::GrabNewest( unsigned char* image, bool wait )
|
119 |
+
{
|
120 |
+
if( videoin->GrabNewest(input_buffer.get(),wait) )
|
121 |
+
{
|
122 |
+
for(ConvertContext&c:converters) {
|
123 |
+
c.convert(input_buffer.get(),image);
|
124 |
+
}
|
125 |
+
return true;
|
126 |
+
}
|
127 |
+
return false;
|
128 |
+
}
|
129 |
+
|
130 |
+
// Register the ffmpeg-based pixel-format conversion filter. It wraps any
// other video URI ("ffmpeg_convert://[fmt=..]//<sub-uri>") and also claims
// the generic "convert" scheme at low priority (20).
PANGOLIN_REGISTER_FACTORY(FfmpegVideoConvert)
{
    struct FfmpegVideoFactory : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"ffmpeg_convert", 0}, {"convert", 20}};
        }
        const char* Description() const override
        {
            return "Use FFMPEG library to convert pixel format.";
        }
        ParamSet Params() const override
        {
            return {{
                {"fmt","RGB24","Pixel format: see pixel format help for all possible values"}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            std::string outfmt = uri.Get<std::string>("fmt","RGB24");
            ToUpper(outfmt);
            // Open the wrapped (inner) video source, then layer the
            // converter on top. FFMPEG_POINT selects nearest-neighbour
            // scaling, which is fine since dimensions are unchanged.
            std::unique_ptr<VideoInterface> subvid = pangolin::OpenVideo(uri.url);
            return std::unique_ptr<VideoInterface>( new FfmpegConverter(subvid,outfmt,FFMPEG_POINT) );
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<FfmpegVideoFactory>());
}
|
157 |
+
|
158 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/ffmpeg_output.cpp
ADDED
@@ -0,0 +1,341 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
// It is impossible to keep up with ffmpeg deprecations, so ignore these warnings.
|
2 |
+
#if defined(__GNUC__)
|
3 |
+
# pragma GCC diagnostic ignored "-Wdeprecated"
|
4 |
+
# pragma GCC diagnostic ignored "-Wdeprecated-declarations"
|
5 |
+
#endif
|
6 |
+
|
7 |
+
#include <pangolin/video/drivers/ffmpeg_output.h>
|
8 |
+
#include <pangolin/factory/factory_registry.h>
|
9 |
+
|
10 |
+
namespace pangolin {
|
11 |
+
|
12 |
+
// Defined in ffmpeg.cpp
|
13 |
+
int pango_sws_scale_frame(struct SwsContext *c, AVFrame *dst, const AVFrame *src);
|
14 |
+
|
15 |
+
// Find and open a video encoder for 'codec_id', configured with the given
// frame rate, bit rate, pixel format and dimensions. Returns an opened
// AVCodecContext owned by the caller. Throws VideoException on any failure.
// Fix: the original leaked the allocated context when avcodec_open2 failed;
// it is now freed before throwing.
AVCodecContext* CreateVideoCodecContext(AVCodecID codec_id, uint64_t frame_rate, int bit_rate, AVPixelFormat EncoderFormat, int width, int height)
{
    const AVCodec* codec = avcodec_find_encoder(codec_id);
    if (!codec)
        throw VideoException("Could not find encoder");

    if(codec->type != AVMEDIA_TYPE_VIDEO)
        throw VideoException("Encoder is not a video encoder");

    AVCodecContext* codec_context = avcodec_alloc_context3(codec);
    if(!codec_context)
        throw VideoException("Unable to create codec context");

    codec_context->codec_id = codec_id;
    codec_context->bit_rate = bit_rate;
    codec_context->width = width;
    codec_context->height = height;
    // Timestamps are counted in whole frames: 1/frame_rate per tick.
    codec_context->time_base = av_make_q(1,frame_rate);
    codec_context->framerate = av_make_q(frame_rate,1);
    codec_context->gop_size = 10;
    codec_context->max_b_frames = 1;
    codec_context->pix_fmt = EncoderFormat;

    /* open the codec */
    const int ret = avcodec_open2(codec_context, nullptr, nullptr);
    if (ret < 0) {
        avcodec_free_context(&codec_context);   // don't leak on failure
        throw VideoException("Could not open video codec");
    }

    return codec_context;
}
|
44 |
+
|
45 |
+
// Based on this example
|
46 |
+
// http://cekirdek.pardus.org.tr/~ismail/ffmpeg-docs/output-example_8c-source.html
|
47 |
+
static AVStream* CreateStream(AVFormatContext *oc, AVCodecContext* codec_context)
|
48 |
+
{
|
49 |
+
/* Some formats want stream headers to be separate. */
|
50 |
+
if (oc->oformat->flags & AVFMT_GLOBALHEADER)
|
51 |
+
codec_context->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
|
52 |
+
|
53 |
+
AVStream* stream = avformat_new_stream(oc, codec_context->codec);
|
54 |
+
if (!stream) throw VideoException("Could not allocate stream");
|
55 |
+
|
56 |
+
stream->id = oc->nb_streams-1;
|
57 |
+
stream->time_base = codec_context->time_base;
|
58 |
+
stream->avg_frame_rate = codec_context->framerate;
|
59 |
+
stream->r_frame_rate = stream->avg_frame_rate;
|
60 |
+
stream->duration = codec_context->framerate.num * 60 / codec_context->framerate.den;
|
61 |
+
avcodec_parameters_from_context(stream->codecpar, codec_context);
|
62 |
+
|
63 |
+
return stream;
|
64 |
+
}
|
65 |
+
|
66 |
+
// Send one frame (or nullptr to enter drain mode) to the encoder and write
// every packet it produces into the output container.
// Fixes over the original: the AVPacket was leaked on the receive-error
// return path and on the write-error throw; last_pts was updated from
// pkt->pts *after* av_interleaved_write_frame (which takes ownership of
// and resets the packet) instead of the value saved before the call; the
// deprecated and redundant av_init_packet call and the dead ret/got_packet
// locals are removed (av_packet_alloc already zero-initialises the packet).
void FfmpegVideoOutputStream::WriteFrame(AVFrame* frame)
{
    AVPacket* pkt = av_packet_alloc();
    if (!pkt) {
        pango_print_error("Could not allocate packet.\n");
        return;
    }

    /* encode the image, then drain all resulting packets */
    int response = avcodec_send_frame(codec_context, frame);
    while (response >= 0) {
        response = avcodec_receive_packet(codec_context, pkt);
        if (response == AVERROR(EAGAIN) || response == AVERROR_EOF) {
            break;
        } else if (response < 0) {
            pango_print_error("Error while receiving packet from encoder.\n");
            av_packet_free(&pkt);
            return;
        }

        pkt->stream_index = stream->index;
        pkt->duration = 1; //av_div_q(codec_context->framerate, codec_context->time_base).num;

        if (pkt->size) {
            // Save pts before the write: av_interleaved_write_frame takes
            // ownership of the packet's contents and resets it.
            const int64_t pts = pkt->pts;
            const int ret = av_interleaved_write_frame(recorder.oc, pkt);
            if (ret < 0) {
                av_packet_free(&pkt);
                throw VideoException("Error writing video frame");
            }
            if(pts != (int64_t)AV_NOPTS_VALUE) last_pts = pts;
        }
    }
    av_packet_free(&pkt);
}
|
104 |
+
|
105 |
+
// Encode one input image: copy it into the staging frame, convert to the
// encoder's format/size if necessary, stamp a pts, and hand it to WriteFrame.
void FfmpegVideoOutputStream::WriteImage(const uint8_t* img, int w, int h)
{
    // NOTE(review): this pts counter is a function-local static, so it is
    // shared across ALL output streams and all instances - confirm this is
    // intended for multi-stream or multi-file recordings.
    static int64_t pts = 0;

    av_frame_make_writable(src_frame);
    // assumes 'img' is a single tightly-packed plane matching src_frame's
    // allocation size - TODO confirm for planar input formats.
    memcpy(src_frame->buf[0]->data, img, src_frame->buf[0]->size);

    // Lazily writes the container header on the first frame.
    recorder.StartStream();

    AVFrame* frame_to_write = nullptr;

    if (codec_context->pix_fmt != input_format || codec_context->width != w || codec_context->height != h) {
        // Input does not match the encoder's pixel format or size:
        // convert through swscale into the encoder-format frame.
        if(!sws_ctx) {
            sws_ctx = sws_getCachedContext( sws_ctx,
                w, h, input_format,
                codec_context->width, codec_context->height, codec_context->pix_fmt,
                SWS_BICUBIC, NULL, NULL, NULL
            );
            if (!sws_ctx) throw VideoException("Could not initialize the conversion context");
        }
        av_frame_make_writable(frame);
        pango_sws_scale_frame(sws_ctx, frame, src_frame);
        frame_to_write = frame;
    } else {
        // Already in the encoder's native format: encode directly.
        frame_to_write = src_frame;
    }

    if(frame_to_write) {
        frame_to_write->pts = pts;
        WriteFrame(frame_to_write);
        ++pts;
    }
}
|
138 |
+
|
139 |
+
void FfmpegVideoOutputStream::Flush()
|
140 |
+
{
|
141 |
+
WriteFrame(nullptr);
|
142 |
+
}
|
143 |
+
|
144 |
+
// Layout of the *input* images this stream accepts (not the encoded output).
const StreamInfo& FfmpegVideoOutputStream::GetStreamInfo() const
{
    return input_info;
}
|
148 |
+
|
149 |
+
double FfmpegVideoOutputStream::BaseFrameTime()
|
150 |
+
{
|
151 |
+
return (double)codec_context->time_base.num / (double)codec_context->time_base.den;
|
152 |
+
}
|
153 |
+
|
154 |
+
// Create one encoder stream inside 'recorder' for a single input image
// stream. The encoder always works in YUV420P; inputs are converted in
// WriteImage() when needed. When 'flip_image' is set, the staging frame is
// configured so swscale reads it bottom-up, flipping the output vertically.
FfmpegVideoOutputStream::FfmpegVideoOutputStream(
    FfmpegVideoOutput& recorder, CodecID codec_id, uint64_t frame_rate,
    int bit_rate, const StreamInfo& input_info, bool flip_image
)
    : recorder(recorder), input_info(input_info),
      input_format(FfmpegFmtFromString(input_info.PixFormat())),
      output_format( FfmpegFmtFromString("YUV420P") ),
      last_pts(-1), sws_ctx(NULL), frame(NULL), flip(flip_image)
{
    codec_context = CreateVideoCodecContext(codec_id, frame_rate, bit_rate, output_format, input_info.Width(), input_info.Height());
    stream = CreateStream(recorder.oc, codec_context);

    // Allocate frame: encoder-format buffer that WriteImage converts into.
    frame = av_frame_alloc();
    frame->format = codec_context->pix_fmt;
    frame->width = codec_context->width;
    frame->height = codec_context->height;
    if(av_frame_get_buffer(frame,0)) {
        throw VideoException("Could not allocate picture");
    }

    // Staging frame in the caller's input pixel format.
    src_frame = av_frame_alloc();
    src_frame->format = input_format;
    src_frame->width = input_info.Width();
    src_frame->height = input_info.Height();
    if(av_frame_get_buffer(src_frame,0)) {
        throw VideoException("Could not allocate picture");
    }

    if(flip) {
        // setup data pointer to end of memory, and negate line sizes.
        // With data[] pointing at the last row and negative linesize,
        // consumers walk the image bottom-up. The underlying buf[] refs
        // are untouched, so the frame can still be freed normally.
        for(int i=0; i<4; ++i) {
            if(src_frame->data[i]) {
                src_frame->data[i] += (src_frame->height-1) * src_frame->linesize[i];
            }
            if(src_frame->linesize[i]) {
                src_frame->linesize[i] *= -1;
            }
        }
    }
}
|
195 |
+
|
196 |
+
// Drain the encoder, then release all ffmpeg resources.
// Fixes over the original: src_frame was never freed at all (leak);
// av_free(frame) released only the struct, not its data buffers
// (av_frame_free releases both); avcodec_close left the context struct
// allocated by avcodec_alloc_context3 leaked (avcodec_free_context closes
// and frees it).
FfmpegVideoOutputStream::~FfmpegVideoOutputStream()
{
    Flush();

    if(sws_ctx) {
        sws_freeContext(sws_ctx);
    }

    av_frame_free(&frame);
    av_frame_free(&src_frame);
    avcodec_free_context(&codec_context);
}
|
207 |
+
|
208 |
+
// Open an ffmpeg muxer for 'filename'. Streams are added later through
// SetStreams(); the container header is written lazily on the first frame
// (see StartStream), so an output with no frames stays header-less.
FfmpegVideoOutput::FfmpegVideoOutput(const std::string& filename, int base_frame_rate, int bit_rate, bool flip_image)
    : filename(filename), started(false), oc(NULL),
      frame_count(0), base_frame_rate(base_frame_rate), bit_rate(bit_rate), is_pipe(pangolin::IsPipe(filename)), flip(flip_image)
{
    Initialise(filename);
}
|
214 |
+
|
215 |
+
// Flush all streams and finalise the container on destruction.
FfmpegVideoOutput::~FfmpegVideoOutput()
{
    Close();
}
|
219 |
+
|
220 |
+
bool FfmpegVideoOutput::IsPipe() const
|
221 |
+
{
|
222 |
+
return is_pipe;
|
223 |
+
}
|
224 |
+
|
225 |
+
// Allocate the output AVFormatContext for 'filename', guessing the
// container format from the file extension (falling back to MPEG), and
// open the underlying file when the muxer requires one.
void FfmpegVideoOutput::Initialise(std::string filename)
{
    int ret = avformat_alloc_output_context2(&oc, NULL, NULL, filename.c_str());

    if (ret < 0 || !oc) {
        pango_print_error("Could not deduce output format from file extension: using MPEG.\n");
        ret = avformat_alloc_output_context2(&oc, NULL, "mpeg", filename.c_str());
        if (ret < 0 || !oc) throw VideoException("Couldn't create AVFormatContext");
    }

    /* open the output file, if needed */
    if (!(oc->oformat->flags & AVFMT_NOFILE)) {
        ret = avio_open(&oc->pb, filename.c_str(), AVIO_FLAG_WRITE);
        if (ret < 0) throw VideoException("Could not open '%s'\n", filename);
    }
}
|
241 |
+
|
242 |
+
void FfmpegVideoOutput::StartStream()
|
243 |
+
{
|
244 |
+
if(!started) {
|
245 |
+
av_dump_format(oc, 0, filename.c_str(), 1);
|
246 |
+
|
247 |
+
/* Write the stream header, if any. */
|
248 |
+
int ret = avformat_write_header(oc, NULL);
|
249 |
+
if (ret < 0) throw VideoException("Error occurred when opening output file");
|
250 |
+
|
251 |
+
started = true;
|
252 |
+
}
|
253 |
+
}
|
254 |
+
|
255 |
+
void FfmpegVideoOutput::Close()
|
256 |
+
{
|
257 |
+
for(std::vector<FfmpegVideoOutputStream*>::iterator i = streams.begin(); i!=streams.end(); ++i)
|
258 |
+
{
|
259 |
+
(*i)->Flush();
|
260 |
+
delete *i;
|
261 |
+
}
|
262 |
+
|
263 |
+
av_write_trailer(oc);
|
264 |
+
|
265 |
+
if (!(oc->oformat->flags & AVFMT_NOFILE)) avio_close(oc->pb);
|
266 |
+
|
267 |
+
avformat_free_context(oc);
|
268 |
+
}
|
269 |
+
|
270 |
+
// Layouts of the input streams registered via SetStreams().
const std::vector<StreamInfo>& FfmpegVideoOutput::Streams() const
{
    return strs;
}
|
274 |
+
|
275 |
+
// Declare the set of image streams to be recorded: one encoder stream is
// created per input StreamInfo. May be called more than once - streams
// accumulate. Attached container-level properties are not supported and
// are warned about.
void FfmpegVideoOutput::SetStreams(const std::vector<StreamInfo>& str, const std::string& /*uri*/, const picojson::value& properties)
{
    strs.insert(strs.end(), str.begin(), str.end());

    for(std::vector<StreamInfo>::const_iterator i = str.begin(); i!= str.end(); ++i)
    {
        // Each stream uses the container's default video codec.
        streams.push_back( new FfmpegVideoOutputStream(
            *this, oc->oformat->video_codec, base_frame_rate, bit_rate, *i, flip
        ) );
    }

    if(!properties.is<picojson::null>()) {
        pango_print_warn("Ignoring attached video properties.");
    }
}
|
290 |
+
|
291 |
+
// Encode one multi-stream frame: each encoder stream extracts its own
// slice of the interleaved input buffer and encodes it. Returns the index
// of the frame just written.
int FfmpegVideoOutput::WriteStreams(const unsigned char* data, const picojson::value& /*frame_properties*/)
{
    for(FfmpegVideoOutputStream* s : streams)
    {
        const Image<unsigned char> img = s->GetStreamInfo().StreamImage(data);
        s->WriteImage(img.ptr, img.w, img.h);
    }
    return frame_count++;
}
|
301 |
+
|
302 |
+
// Register the ffmpeg video *output* (recording) driver under the
// "ffmpeg" scheme for VideoOutputInterface.
PANGOLIN_REGISTER_FACTORY(FfmpegVideoOutput)
{
    struct FfmpegVideoFactory final : public TypedFactoryInterface<VideoOutputInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"ffmpeg",10}};
        }
        const char* Description() const override
        {
            return "Use FFMPEG lib to encode video";
        }
        ParamSet Params() const override
        {
            return {{
                {"fps","60","Playback frames-per-second to recommend in meta-data"},
                {"bps","20000*1024","desired bitrate (hint)"},
                {"flip","0","Flip the output vertically before recording"},
                {"unique_filename","","Automatically append a unique number instead of overwriting files"},
            }};
        }
        std::unique_ptr<VideoOutputInterface> Open(const Uri& uri) override {
            const int desired_frame_rate = uri.Get("fps", 60);
            const int desired_bit_rate = uri.Get("bps", 20000*1024);
            const bool flip = uri.Get("flip", false);
            std::string filename = uri.url;

            // Optionally avoid clobbering an existing recording by
            // appending a unique suffix.
            if(uri.Contains("unique_filename")) {
                filename = MakeUniqueFilename(filename);
            }

            return std::unique_ptr<VideoOutputInterface>(
                new FfmpegVideoOutput(filename, desired_frame_rate, desired_bit_rate, flip)
            );
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoOutputInterface>(std::make_shared<FfmpegVideoFactory>());
}
|
340 |
+
|
341 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/firewire.cpp
ADDED
@@ -0,0 +1,987 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2011 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/video/drivers/firewire.h>
|
29 |
+
#include <pangolin/video/drivers/deinterlace.h>
|
30 |
+
#include <pangolin/factory/factory_registry.h>
|
31 |
+
#include <pangolin/video/iostream_operators.h>
|
32 |
+
|
33 |
+
#include <stdio.h>
|
34 |
+
#include <stdint.h>
|
35 |
+
#include <stdlib.h>
|
36 |
+
#include <inttypes.h>
|
37 |
+
|
38 |
+
using namespace std;
|
39 |
+
|
40 |
+
namespace pangolin
|
41 |
+
{
|
42 |
+
|
43 |
+
// Initialise a non-format7 IIDC camera identified by 'guid': stop any
// in-progress transmission, configure iso speed / video mode / framerate,
// set up DMA capture with 'dma_frames' buffers, then start streaming.
// Throws VideoException on any libdc1394 failure.
void FirewireVideo::init_camera(
    uint64_t guid, int dma_frames,
    dc1394speed_t iso_speed,
    dc1394video_mode_t video_mode,
    dc1394framerate_t framerate
) {

    // Format7 modes carry an ROI and must go through init_format7_camera.
    if(video_mode>=DC1394_VIDEO_MODE_FORMAT7_0)
        throw VideoException("format7 modes need to be initialized through the constructor that allows for specifying the roi");

    camera = dc1394_camera_new (d, guid);
    if (!camera)
        throw VideoException("Failed to initialize camera");

    // Attempt to stop camera if it is already running
    dc1394switch_t is_iso_on = DC1394_OFF;
    dc1394_video_get_transmission(camera, &is_iso_on);
    if (is_iso_on==DC1394_ON) {
        dc1394_video_set_transmission(camera, DC1394_OFF);
    }


    cout << "Using camera with GUID " << camera->guid << endl;

    //-----------------------------------------------------------------------
    // setup capture
    //-----------------------------------------------------------------------

    // Speeds of 800Mbps and above require 1394B operation mode.
    if( iso_speed >= DC1394_ISO_SPEED_800)
    {
        err=dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B);
        if( err != DC1394_SUCCESS )
            throw VideoException("Could not set DC1394_OPERATION_MODE_1394B");
    }

    err=dc1394_video_set_iso_speed(camera, iso_speed);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not set iso speed");

    err=dc1394_video_set_mode(camera, video_mode);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not set video mode");

    err=dc1394_video_set_framerate(camera, framerate);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not set framerate");

    err=dc1394_capture_setup(camera,dma_frames, DC1394_CAPTURE_FLAGS_DEFAULT);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not setup camera - check settings");

    //-----------------------------------------------------------------------
    // initialise width and height from mode
    //-----------------------------------------------------------------------
    dc1394_get_image_size_from_video_mode(camera, video_mode, &width, &height);

    init_stream_info();
    Start();
}
|
102 |
+
|
103 |
+
|
104 |
+
// Note:
|
105 |
+
// the following was tested on a IIDC camera over USB therefore might not work as
|
106 |
+
// well on a camera over proper firewire transport
|
107 |
+
void FirewireVideo::init_format7_camera(
|
108 |
+
uint64_t guid, int dma_frames,
|
109 |
+
dc1394speed_t iso_speed,
|
110 |
+
dc1394video_mode_t video_mode,
|
111 |
+
float framerate,
|
112 |
+
uint32_t width, uint32_t height,
|
113 |
+
uint32_t left, uint32_t top, bool reset_at_boot
|
114 |
+
) {
|
115 |
+
|
116 |
+
if(video_mode< DC1394_VIDEO_MODE_FORMAT7_0)
|
117 |
+
throw VideoException("roi can be specified only for format7 modes");
|
118 |
+
|
119 |
+
camera = dc1394_camera_new (d, guid);
|
120 |
+
if (!camera)
|
121 |
+
throw VideoException("Failed to initialize camera");
|
122 |
+
|
123 |
+
// Attempt to stop camera if it is already running
|
124 |
+
dc1394switch_t is_iso_on = DC1394_OFF;
|
125 |
+
dc1394_video_get_transmission(camera, &is_iso_on);
|
126 |
+
if (is_iso_on==DC1394_ON) {
|
127 |
+
dc1394_video_set_transmission(camera, DC1394_OFF);
|
128 |
+
}
|
129 |
+
|
130 |
+
cout << "Using camera with GUID " << camera->guid << endl;
|
131 |
+
|
132 |
+
if(reset_at_boot){
|
133 |
+
dc1394_camera_reset(camera);
|
134 |
+
}
|
135 |
+
|
136 |
+
//-----------------------------------------------------------------------
|
137 |
+
// setup mode and roi
|
138 |
+
//-----------------------------------------------------------------------
|
139 |
+
|
140 |
+
if(iso_speed >= DC1394_ISO_SPEED_800)
|
141 |
+
{
|
142 |
+
err=dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B);
|
143 |
+
if( err != DC1394_SUCCESS )
|
144 |
+
throw VideoException("Could not set DC1394_OPERATION_MODE_1394B");
|
145 |
+
}
|
146 |
+
|
147 |
+
err=dc1394_video_set_iso_speed(camera, iso_speed);
|
148 |
+
if( err != DC1394_SUCCESS )
|
149 |
+
throw VideoException("Could not set iso speed");
|
150 |
+
|
151 |
+
// check that the required mode is actually supported
|
152 |
+
dc1394format7mode_t format7_info;
|
153 |
+
|
154 |
+
err = dc1394_format7_get_mode_info(camera, video_mode, &format7_info);
|
155 |
+
if( err != DC1394_SUCCESS )
|
156 |
+
throw VideoException("Could not get format7 mode info");
|
157 |
+
|
158 |
+
// safely set the video mode
|
159 |
+
err=dc1394_video_set_mode(camera, video_mode);
|
160 |
+
if( err != DC1394_SUCCESS )
|
161 |
+
throw VideoException("Could not set format7 video mode");
|
162 |
+
|
163 |
+
// set position to 0,0 so that setting any size within min and max is a valid command
|
164 |
+
err = dc1394_format7_set_image_position(camera, video_mode,0,0);
|
165 |
+
if( err != DC1394_SUCCESS )
|
166 |
+
throw VideoException("Could not set format7 image position");
|
167 |
+
|
168 |
+
// work out the desired image size
|
169 |
+
width = nearest_value(width, format7_info.unit_pos_x, 0, format7_info.max_size_x - left);
|
170 |
+
height = nearest_value(height, format7_info.unit_pos_y, 0, format7_info.max_size_y - top);
|
171 |
+
|
172 |
+
// set size
|
173 |
+
err = dc1394_format7_set_image_size(camera,video_mode,width,height);
|
174 |
+
if( err != DC1394_SUCCESS )
|
175 |
+
throw VideoException("Could not set format7 size");
|
176 |
+
|
177 |
+
// get the info again since many parameters depend on image size
|
178 |
+
err = dc1394_format7_get_mode_info(camera, video_mode, &format7_info);
|
179 |
+
if( err != DC1394_SUCCESS )
|
180 |
+
throw VideoException("Could not get format7 mode info");
|
181 |
+
|
182 |
+
// work out position of roi
|
183 |
+
left = nearest_value(left, format7_info.unit_size_x, format7_info.unit_size_x, format7_info.max_size_x - width);
|
184 |
+
top = nearest_value(top, format7_info.unit_size_y, format7_info.unit_size_y, format7_info.max_size_y - height);
|
185 |
+
|
186 |
+
// set roi position
|
187 |
+
err = dc1394_format7_set_image_position(camera,video_mode,left,top);
|
188 |
+
if( err != DC1394_SUCCESS )
|
189 |
+
throw VideoException("Could not set format7 size");
|
190 |
+
|
191 |
+
this->width = width;
|
192 |
+
this->height = height;
|
193 |
+
this->top = top;
|
194 |
+
this->left = left;
|
195 |
+
|
196 |
+
cout<<"roi: "<<left<<" "<<top<<" "<<width<<" "<<height<<" ";
|
197 |
+
|
198 |
+
|
199 |
+
//-----------------------------------------------------------------------
|
200 |
+
// setup frame rate
|
201 |
+
//-----------------------------------------------------------------------
|
202 |
+
|
203 |
+
err=dc1394_format7_set_packet_size(camera,video_mode, format7_info.max_packet_size);
|
204 |
+
if( err != DC1394_SUCCESS )
|
205 |
+
throw VideoException("Could not set format7 packet size");
|
206 |
+
|
207 |
+
if((framerate != MAX_FR) && (framerate != EXT_TRIG)){
|
208 |
+
//set the framerate by using the absolute feature as suggested by the
|
209 |
+
//folks at PointGrey
|
210 |
+
err = dc1394_feature_set_absolute_control(camera,DC1394_FEATURE_FRAME_RATE,DC1394_ON);
|
211 |
+
if( err != DC1394_SUCCESS )
|
212 |
+
throw VideoException("Could not turn on absolute frame rate control");
|
213 |
+
|
214 |
+
err = dc1394_feature_set_mode(camera,DC1394_FEATURE_FRAME_RATE,DC1394_FEATURE_MODE_MANUAL);
|
215 |
+
if( err != DC1394_SUCCESS )
|
216 |
+
throw VideoException("Could not make frame rate manual ");
|
217 |
+
|
218 |
+
err=dc1394_feature_set_absolute_value(camera,DC1394_FEATURE_FRAME_RATE,framerate);
|
219 |
+
if( err != DC1394_SUCCESS )
|
220 |
+
throw VideoException("Could not set format7 framerate ");
|
221 |
+
}
|
222 |
+
|
223 |
+
// ask the camera what is the resulting framerate (this assume that such a rate is actually
|
224 |
+
// allowed by the shutter time)
|
225 |
+
float value;
|
226 |
+
err=dc1394_feature_get_absolute_value(camera,DC1394_FEATURE_FRAME_RATE,&value);
|
227 |
+
if( err != DC1394_SUCCESS )
|
228 |
+
throw VideoException("Could not get framerate");
|
229 |
+
|
230 |
+
cout<<" framerate(shutter permitting):"<<value<<endl;
|
231 |
+
|
232 |
+
//-----------------------------------------------------------------------
|
233 |
+
// setup capture
|
234 |
+
//-----------------------------------------------------------------------
|
235 |
+
|
236 |
+
err=dc1394_capture_setup(camera,dma_frames, DC1394_CAPTURE_FLAGS_DEFAULT);
|
237 |
+
if( err != DC1394_SUCCESS )
|
238 |
+
throw VideoException("Could not setup camera - check settings");
|
239 |
+
|
240 |
+
init_stream_info();
|
241 |
+
Start();
|
242 |
+
}
|
243 |
+
|
244 |
+
std::string Dc1394ColorCodingToString(dc1394color_coding_t coding)
|
245 |
+
{
|
246 |
+
switch(coding)
|
247 |
+
{
|
248 |
+
case DC1394_COLOR_CODING_RGB8 : return "RGB24";
|
249 |
+
case DC1394_COLOR_CODING_MONO8 : return "GRAY8";
|
250 |
+
|
251 |
+
case DC1394_COLOR_CODING_MONO16 : return "GRAY16LE";
|
252 |
+
case DC1394_COLOR_CODING_RGB16 : return "RGB48LE";
|
253 |
+
|
254 |
+
case DC1394_COLOR_CODING_MONO16S : return "GRAY16BE";
|
255 |
+
case DC1394_COLOR_CODING_RGB16S : return "RGB48BE";
|
256 |
+
|
257 |
+
case DC1394_COLOR_CODING_YUV411 : return "YUV411P";
|
258 |
+
case DC1394_COLOR_CODING_YUV422 : return "YUV422P";
|
259 |
+
case DC1394_COLOR_CODING_YUV444 : return "YUV444P";
|
260 |
+
|
261 |
+
case DC1394_COLOR_CODING_RAW8 : return "GRAY8";
|
262 |
+
case DC1394_COLOR_CODING_RAW16 : return "GRAY16LE";
|
263 |
+
|
264 |
+
default:
|
265 |
+
throw VideoException("Unknown colour coding");
|
266 |
+
}
|
267 |
+
}
|
268 |
+
|
269 |
+
// Inverse of Dc1394ColorCodingToString. Since GRAY8/GRAY16LE map back to the
// MONO codings, the RAW codings are not reachable from a string name.
dc1394color_coding_t Dc1394ColorCodingFromString(std::string coding)
{
    if(coding == "RGB24")    return DC1394_COLOR_CODING_RGB8;
    if(coding == "GRAY8")    return DC1394_COLOR_CODING_MONO8;

    if(coding == "GRAY16LE") return DC1394_COLOR_CODING_MONO16;
    if(coding == "RGB48LE")  return DC1394_COLOR_CODING_RGB16;
    if(coding == "GRAY16BE") return DC1394_COLOR_CODING_MONO16S;
    if(coding == "RGB48BE")  return DC1394_COLOR_CODING_RGB16S;

    if(coding == "YUV411P")  return DC1394_COLOR_CODING_YUV411;
    if(coding == "YUV422P")  return DC1394_COLOR_CODING_YUV422;
    if(coding == "YUV444P")  return DC1394_COLOR_CODING_YUV444;
//  if(coding == "RAW8")     return DC1394_COLOR_CODING_RAW8;
//  if(coding == "RAW16")    return DC1394_COLOR_CODING_RAW16;
    throw VideoException("Unknown colour coding");
}
|
286 |
+
|
287 |
+
// Look up the image dimensions and Pangolin pixel-format name for a fixed
// (non-Format7) dc1394 video mode. Throws VideoException for modes not
// covered here (e.g. the Format7 and scalable modes).
void Dc1394ModeDetails(dc1394video_mode_t mode, unsigned& w, unsigned& h, string& format )
{
    switch( mode )
    {
        // RGB Modes
    case DC1394_VIDEO_MODE_1024x768_RGB8:
        w=1024; h=768; format = "RGB24";
        break;
    case DC1394_VIDEO_MODE_640x480_RGB8:
        w=640; h=480; format = "RGB24";
        break;
    case DC1394_VIDEO_MODE_800x600_RGB8:
        w=800; h=600; format = "RGB24";
        break;
    case DC1394_VIDEO_MODE_1280x960_RGB8:
        w=1280; h=960; format = "RGB24";
        break;
    case DC1394_VIDEO_MODE_1600x1200_RGB8:
        w=1600; h=1200; format = "RGB24";
        break;

        // Greyscale modes
    case DC1394_VIDEO_MODE_640x480_MONO8:
        w=640; h=480; format = "GRAY8";
        break;
    case DC1394_VIDEO_MODE_800x600_MONO8:
        w=800; h=600; format = "GRAY8";
        break;
    case DC1394_VIDEO_MODE_1024x768_MONO8:
        w=1024; h=768; format = "GRAY8";
        break;
    case DC1394_VIDEO_MODE_1280x960_MONO8:
        w=1280; h=960; format = "GRAY8";
        break;
    case DC1394_VIDEO_MODE_1600x1200_MONO8:
        w=1600; h=1200; format = "GRAY8";
        break;
    case DC1394_VIDEO_MODE_640x480_MONO16:
        w=640; h=480; format = "GRAY16";
        break;
    case DC1394_VIDEO_MODE_800x600_MONO16:
        w=800; h=600; format = "GRAY16";
        break;
    case DC1394_VIDEO_MODE_1024x768_MONO16:
        w=1024; h=768; format = "GRAY16";
        break;
    case DC1394_VIDEO_MODE_1280x960_MONO16:
        w=1280; h=960; format = "GRAY16";
        break;
    case DC1394_VIDEO_MODE_1600x1200_MONO16:
        w=1600; h=1200; format = "GRAY16";
        break;

        // Chrome modes
    case DC1394_VIDEO_MODE_640x480_YUV411:
        w=640; h=480; format = "YUV411P";
        break;
    case DC1394_VIDEO_MODE_160x120_YUV444:
        w=160; h=120; format = "YUV444P";
        break;
    case DC1394_VIDEO_MODE_320x240_YUV422:
        w=320; h=240; format = "YUV422P";
        break;
    case DC1394_VIDEO_MODE_640x480_YUV422:
        w=640; h=480; format = "YUV422P";
        break;
    case DC1394_VIDEO_MODE_800x600_YUV422:
        w=800; h=600; format = "YUV422P";
        break;
    case DC1394_VIDEO_MODE_1024x768_YUV422:
        w=1024; h=768; format = "YUV422P";
        break;
    case DC1394_VIDEO_MODE_1600x1200_YUV422:
        w=1600; h=1200; format = "YUV422P";
        break;
    case DC1394_VIDEO_MODE_1280x960_YUV422:
        w=1280; h=960; format = "YUV422P";
        break;
    default:
        throw VideoException("Unknown colour coding");
    }
}
|
369 |
+
|
370 |
+
void FirewireVideo::init_stream_info()
|
371 |
+
{
|
372 |
+
streams.clear();
|
373 |
+
|
374 |
+
dc1394video_mode_t video_mode;
|
375 |
+
dc1394color_coding_t color_coding;
|
376 |
+
dc1394_video_get_mode(camera,&video_mode);
|
377 |
+
dc1394_get_color_coding_from_video_mode(camera,video_mode,&color_coding);
|
378 |
+
const std::string strformat = Dc1394ColorCodingToString(color_coding);
|
379 |
+
const PixelFormat fmt = PixelFormatFromString(strformat);
|
380 |
+
|
381 |
+
StreamInfo stream(fmt, width, height, (width*fmt.bpp)/8, 0 );
|
382 |
+
streams.push_back( stream );
|
383 |
+
|
384 |
+
frame_size_bytes = stream.Pitch() * stream.Height();
|
385 |
+
}
|
386 |
+
|
387 |
+
// Describe the streams provided (always exactly one for this driver;
// see init_stream_info).
const std::vector<StreamInfo>& FirewireVideo::Streams() const
{
    return streams;
}
|
391 |
+
|
392 |
+
// Total size in bytes of one video frame, computed in init_stream_info.
size_t FirewireVideo::SizeBytes() const
{
    return frame_size_bytes;
}
|
396 |
+
|
397 |
+
void FirewireVideo::Start()
|
398 |
+
{
|
399 |
+
if( !running )
|
400 |
+
{
|
401 |
+
err=dc1394_video_set_transmission(camera, DC1394_ON);
|
402 |
+
if( err != DC1394_SUCCESS )
|
403 |
+
throw VideoException("Could not start camera iso transmission");
|
404 |
+
running = true;
|
405 |
+
}
|
406 |
+
}
|
407 |
+
|
408 |
+
void FirewireVideo::Stop()
|
409 |
+
{
|
410 |
+
if( running )
|
411 |
+
{
|
412 |
+
// Stop transmission
|
413 |
+
err=dc1394_video_set_transmission(camera,DC1394_OFF);
|
414 |
+
if( err != DC1394_SUCCESS )
|
415 |
+
throw VideoException("Could not stop the camera");
|
416 |
+
running = false;
|
417 |
+
}
|
418 |
+
}
|
419 |
+
|
420 |
+
// Open a specific camera (by GUID) in a fixed-size video mode with a
// discrete dc1394 framerate.
FirewireVideo::FirewireVideo(
    Guid guid,
    dc1394video_mode_t video_mode,
    dc1394framerate_t framerate,
    dc1394speed_t iso_speed,
    int dma_buffers
) :running(false),top(0),left(0)
{
    d = dc1394_new ();
    if (!d)
        throw VideoException("Failed to get 1394 bus");

    init_camera(guid.guid,dma_buffers,iso_speed,video_mode,framerate);
}
|
434 |
+
|
435 |
+
// Open a specific camera (by GUID) in a Format7 (ROI) mode.
// top/left members are initialised by init_format7_camera.
FirewireVideo::FirewireVideo(
    Guid guid,
    dc1394video_mode_t video_mode,
    float framerate,
    uint32_t width, uint32_t height,
    uint32_t left, uint32_t top,
    dc1394speed_t iso_speed,
    int dma_buffers, bool reset_at_boot
) :running(false)
{
    d = dc1394_new ();
    if (!d)
        throw VideoException("Failed to get 1394 bus");

    init_format7_camera(guid.guid,dma_buffers,iso_speed,video_mode,framerate,width,height,left,top, reset_at_boot);
}
|
451 |
+
|
452 |
+
// Open the deviceid-th camera on the bus in a fixed-size video mode,
// resolving the bus index to a GUID via camera enumeration.
FirewireVideo::FirewireVideo(
    unsigned deviceid,
    dc1394video_mode_t video_mode,
    dc1394framerate_t framerate,
    dc1394speed_t iso_speed,
    int dma_buffers
) :running(false),top(0),left(0)
{
    d = dc1394_new ();
    if (!d)
        throw VideoException("Failed to get 1394 bus");

    err=dc1394_camera_enumerate (d, &list);
    if( err != DC1394_SUCCESS )
        throw VideoException("Failed to enumerate cameras");

    if (list->num == 0)
        throw VideoException("No cameras found");

    if( deviceid >= list->num )
        throw VideoException("Invalid camera index");

    // Capture the GUID before releasing the enumeration list.
    const uint64_t guid = list->ids[deviceid].guid;

    dc1394_camera_free_list (list);

    init_camera(guid,dma_buffers,iso_speed,video_mode,framerate);

}
|
481 |
+
|
482 |
+
// Open the deviceid-th camera on the bus in a Format7 (ROI) mode,
// resolving the bus index to a GUID via camera enumeration.
FirewireVideo::FirewireVideo(
    unsigned deviceid,
    dc1394video_mode_t video_mode,
    float framerate,
    uint32_t width, uint32_t height,
    uint32_t left, uint32_t top,
    dc1394speed_t iso_speed,
    int dma_buffers, bool reset_at_boot
) :running(false)
{
    d = dc1394_new ();
    if (!d)
        throw VideoException("Failed to get 1394 bus");

    err=dc1394_camera_enumerate (d, &list);
    if( err != DC1394_SUCCESS )
        throw VideoException("Failed to enumerate cameras");

    if (list->num == 0)
        throw VideoException("No cameras found");

    if( deviceid >= list->num )
        throw VideoException("Invalid camera index");

    // Capture the GUID before releasing the enumeration list.
    const uint64_t guid = list->ids[deviceid].guid;

    dc1394_camera_free_list (list);

    init_format7_camera(guid,dma_buffers,iso_speed,video_mode,framerate,width,height,left,top, reset_at_boot);

}
|
513 |
+
|
514 |
+
bool FirewireVideo::GrabNext( unsigned char* image, bool wait )
|
515 |
+
{
|
516 |
+
const dc1394capture_policy_t policy =
|
517 |
+
wait ? DC1394_CAPTURE_POLICY_WAIT : DC1394_CAPTURE_POLICY_POLL;
|
518 |
+
|
519 |
+
dc1394video_frame_t *frame;
|
520 |
+
err = dc1394_capture_dequeue(camera, policy, &frame);
|
521 |
+
if( err != DC1394_SUCCESS)
|
522 |
+
throw VideoException("Could not capture frame", dc1394_error_get_string(err) );
|
523 |
+
|
524 |
+
if( frame )
|
525 |
+
{
|
526 |
+
memcpy(image,frame->image,frame->image_bytes);
|
527 |
+
dc1394_capture_enqueue(camera,frame);
|
528 |
+
return true;
|
529 |
+
}
|
530 |
+
return false;
|
531 |
+
}
|
532 |
+
|
533 |
+
// Copy the most recent frame into 'image', draining (and returning to the
// driver) any older frames queued in the DMA ring. If the ring is empty and
// 'wait' is true, falls back to a blocking GrabNext.
bool FirewireVideo::GrabNewest( unsigned char* image, bool wait )
{
    dc1394video_frame_t *f;
    err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_POLL, &f);
    if( err != DC1394_SUCCESS)
        throw VideoException("Could not capture frame", dc1394_error_get_string(err) );

    if( f ) {
        // Keep polling: each time a newer frame exists, release the older one.
        while( true )
        {
            dc1394video_frame_t *nf;
            err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_POLL, &nf);
            if( err != DC1394_SUCCESS)
                throw VideoException("Could not capture frame", dc1394_error_get_string(err) );

            if( nf )
            {
                err=dc1394_capture_enqueue(camera,f);
                f = nf;
            }else{
                break;
            }
        }
        memcpy(image,f->image,f->image_bytes);
        err=dc1394_capture_enqueue(camera,f);
        return true;
    }else if(wait){
        return GrabNext(image,true);
    }
    return false;
}
|
564 |
+
|
565 |
+
// Dequeue the next DMA frame without copying. The caller must return the
// slot with PutFrame(). When polling ('wait' false) and nothing is ready,
// the returned FirewireFrame wraps a null pointer.
FirewireFrame FirewireVideo::GetNext(bool wait)
{
    const dc1394capture_policy_t policy =
        wait ? DC1394_CAPTURE_POLICY_WAIT : DC1394_CAPTURE_POLICY_POLL;

    // BUGFIX: 'frame' was previously left uninitialised and the dequeue
    // result was ignored, so a failed dequeue could wrap a garbage pointer.
    // Check the error as GrabNext()/GetNewest() already do.
    dc1394video_frame_t *frame = 0;
    err = dc1394_capture_dequeue(camera, policy, &frame);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not capture frame", dc1394_error_get_string(err) );

    return FirewireFrame(frame);
}
|
574 |
+
|
575 |
+
// Zero-copy variant of GrabNewest: drain older frames back to the driver and
// return the newest one. Caller must return the frame with PutFrame(). Falls
// back to a blocking GetNext when the ring is empty and 'wait' is true;
// otherwise returns a FirewireFrame wrapping a null pointer.
FirewireFrame FirewireVideo::GetNewest(bool wait)
{
    dc1394video_frame_t *f;
    err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_POLL, &f);
    if( err != DC1394_SUCCESS)
        throw VideoException("Could not capture frame", dc1394_error_get_string(err) );

    if( f ) {
        // Keep polling: each time a newer frame exists, release the older one.
        while( true )
        {
            dc1394video_frame_t *nf;
            err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_POLL, &nf);
            if( err != DC1394_SUCCESS)
                throw VideoException("Could not capture frame", dc1394_error_get_string(err) );

            if( nf )
            {
                err=dc1394_capture_enqueue(camera,f);
                f = nf;
            }else{
                break;
            }
        }
        return FirewireFrame(f);
    }else if(wait){
        return GetNext(true);
    }
    return FirewireFrame(0);
}
|
604 |
+
|
605 |
+
// Return a frame obtained via GetNext/GetNewest to the DMA ring and clear
// the caller's handle. Safe to call with an empty (null) frame.
void FirewireVideo::PutFrame(FirewireFrame& f)
{
    if( !f.frame )
        return;

    dc1394_capture_enqueue(camera, f.frame);
    f.frame = 0;
}
|
613 |
+
|
614 |
+
float FirewireVideo::GetGain() const
|
615 |
+
{
|
616 |
+
float gain;
|
617 |
+
err = dc1394_feature_get_absolute_value(camera,DC1394_FEATURE_GAIN,&gain);
|
618 |
+
if( err != DC1394_SUCCESS )
|
619 |
+
throw VideoException("Failed to read gain");
|
620 |
+
|
621 |
+
return gain;
|
622 |
+
|
623 |
+
}
|
624 |
+
|
625 |
+
void FirewireVideo::SetAutoGain()
|
626 |
+
{
|
627 |
+
|
628 |
+
dc1394error_t err = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_AUTO);
|
629 |
+
if (err < 0) {
|
630 |
+
throw VideoException("Could not set auto gain mode");
|
631 |
+
}
|
632 |
+
}
|
633 |
+
|
634 |
+
void FirewireVideo::SetGain(float val)
|
635 |
+
{
|
636 |
+
dc1394error_t err = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_MANUAL);
|
637 |
+
if (err < 0) {
|
638 |
+
throw VideoException("Could not set manual gain mode");
|
639 |
+
}
|
640 |
+
|
641 |
+
err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_GAIN, DC1394_ON);
|
642 |
+
if (err < 0) {
|
643 |
+
throw VideoException("Could not set absolute control for gain");
|
644 |
+
}
|
645 |
+
|
646 |
+
err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_GAIN, val);
|
647 |
+
if (err < 0) {
|
648 |
+
throw VideoException("Could not set gain value");
|
649 |
+
}
|
650 |
+
}
|
651 |
+
|
652 |
+
|
653 |
+
float FirewireVideo::GetBrightness() const
|
654 |
+
{
|
655 |
+
float brightness;
|
656 |
+
err = dc1394_feature_get_absolute_value(camera,DC1394_FEATURE_BRIGHTNESS,&brightness);
|
657 |
+
if( err != DC1394_SUCCESS )
|
658 |
+
throw VideoException("Failed to read brightness");
|
659 |
+
|
660 |
+
return brightness;
|
661 |
+
|
662 |
+
}
|
663 |
+
|
664 |
+
void FirewireVideo::SetAutoBrightness()
|
665 |
+
{
|
666 |
+
dc1394error_t err = dc1394_feature_set_mode(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_AUTO);
|
667 |
+
if (err < 0) {
|
668 |
+
throw VideoException("Could not set auto brightness mode");
|
669 |
+
}
|
670 |
+
}
|
671 |
+
|
672 |
+
void FirewireVideo::SetBrightness(float val)
|
673 |
+
{
|
674 |
+
dc1394error_t err = dc1394_feature_set_mode(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_MANUAL);
|
675 |
+
if (err < 0) {
|
676 |
+
throw VideoException("Could not set manual brightness mode");
|
677 |
+
}
|
678 |
+
|
679 |
+
err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_ON);
|
680 |
+
if (err < 0) {
|
681 |
+
throw VideoException("Could not set absolute control for brightness");
|
682 |
+
}
|
683 |
+
|
684 |
+
err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_BRIGHTNESS, val);
|
685 |
+
if (err < 0) {
|
686 |
+
throw VideoException("Could not set brightness value");
|
687 |
+
}
|
688 |
+
}
|
689 |
+
|
690 |
+
float FirewireVideo::GetShutterTime() const
|
691 |
+
{
|
692 |
+
float shutter;
|
693 |
+
err = dc1394_feature_get_absolute_value(camera,DC1394_FEATURE_SHUTTER,&shutter);
|
694 |
+
if( err != DC1394_SUCCESS )
|
695 |
+
throw VideoException("Failed to read shutter");
|
696 |
+
|
697 |
+
return shutter;
|
698 |
+
}
|
699 |
+
|
700 |
+
void FirewireVideo::SetAutoShutterTime()
|
701 |
+
{
|
702 |
+
dc1394error_t err = dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_AUTO);
|
703 |
+
if (err < 0) {
|
704 |
+
throw VideoException("Could not set auto shutter mode");
|
705 |
+
}
|
706 |
+
}
|
707 |
+
|
708 |
+
void FirewireVideo::SetShutterTime(float val)
|
709 |
+
{
|
710 |
+
dc1394error_t err = dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL);
|
711 |
+
if (err < 0) {
|
712 |
+
throw VideoException("Could not set manual shutter mode");
|
713 |
+
}
|
714 |
+
|
715 |
+
err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_SHUTTER, DC1394_ON);
|
716 |
+
if (err < 0) {
|
717 |
+
throw VideoException("Could not set absolute control for shutter");
|
718 |
+
}
|
719 |
+
|
720 |
+
err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_SHUTTER, val);
|
721 |
+
if (err < 0) {
|
722 |
+
throw VideoException("Could not set shutter value");
|
723 |
+
}
|
724 |
+
}
|
725 |
+
|
726 |
+
// Set the shutter via the quantised (integer register) interface rather than
// the absolute floating-point one used by SetShutterTime.
void FirewireVideo::SetShutterTimeQuant(int shutter)
{
    // TODO: Set mode as well

    err = dc1394_feature_set_value(camera,DC1394_FEATURE_SHUTTER,shutter);

    if( err != DC1394_SUCCESS )
        throw VideoException("Failed to set shutter");
}
|
735 |
+
|
736 |
+
float FirewireVideo::GetGamma() const
|
737 |
+
{
|
738 |
+
float gamma;
|
739 |
+
err = dc1394_feature_get_absolute_value(camera,DC1394_FEATURE_GAMMA,&gamma);
|
740 |
+
if( err != DC1394_SUCCESS )
|
741 |
+
throw VideoException("Failed to read gamma");
|
742 |
+
return gamma;
|
743 |
+
}
|
744 |
+
|
745 |
+
void FirewireVideo::SetInternalTrigger()
|
746 |
+
{
|
747 |
+
dc1394error_t err = dc1394_external_trigger_set_power(camera, DC1394_OFF);
|
748 |
+
if (err < 0) {
|
749 |
+
throw VideoException("Could not set internal trigger mode");
|
750 |
+
}
|
751 |
+
}
|
752 |
+
|
753 |
+
// Configure external triggering: polarity, mode and source are applied
// first, and the trigger is only powered on once fully configured.
void FirewireVideo::SetExternalTrigger(dc1394trigger_mode_t mode, dc1394trigger_polarity_t polarity, dc1394trigger_source_t source)
{
    dc1394error_t result = dc1394_external_trigger_set_polarity(camera, polarity);
    if (result < 0) {
        throw VideoException("Could not set external trigger polarity");
    }

    result = dc1394_external_trigger_set_mode(camera, mode);
    if (result < 0) {
        throw VideoException("Could not set external trigger mode");
    }

    result = dc1394_external_trigger_set_source(camera, source);
    if (result < 0) {
        throw VideoException("Could not set external trigger source");
    }

    result = dc1394_external_trigger_set_power(camera, DC1394_ON);
    if (result < 0) {
        throw VideoException("Could not set external trigger power");
    }
}
|
775 |
+
|
776 |
+
|
777 |
+
// Stop streaming, then release capture resources, the camera handle and
// the bus context, in that order.
FirewireVideo::~FirewireVideo()
{
    Stop();

    // Close camera
    dc1394_video_set_transmission(camera, DC1394_OFF);
    dc1394_capture_stop(camera);
    dc1394_camera_free(camera);
    dc1394_free (d);
}
|
787 |
+
|
788 |
+
void FirewireVideo::SetRegister(uint64_t offset, uint32_t value){
|
789 |
+
dc1394error_t err = dc1394_set_register (camera, offset, value);
|
790 |
+
if (err < 0) {
|
791 |
+
throw VideoException("Could not set camera register");
|
792 |
+
}
|
793 |
+
}
|
794 |
+
|
795 |
+
uint32_t FirewireVideo::GetRegister(uint64_t offset)
|
796 |
+
{
|
797 |
+
uint32_t value = 0;
|
798 |
+
dc1394error_t err = dc1394_get_register (camera, offset, &value);
|
799 |
+
if (err < 0) {
|
800 |
+
throw VideoException("Could not get camera register");
|
801 |
+
}
|
802 |
+
return value;
|
803 |
+
}
|
804 |
+
|
805 |
+
void FirewireVideo::SetControlRegister(uint64_t offset, uint32_t value)
|
806 |
+
{
|
807 |
+
dc1394error_t err = dc1394_set_control_register (camera, offset, value);
|
808 |
+
if (err < 0) {
|
809 |
+
throw VideoException("Could not set camera control register");
|
810 |
+
}
|
811 |
+
}
|
812 |
+
|
813 |
+
uint32_t FirewireVideo::GetControlRegister(uint64_t offset)
|
814 |
+
{
|
815 |
+
uint32_t value = 0;
|
816 |
+
dc1394error_t err = dc1394_get_control_register(camera, offset, &value);
|
817 |
+
if (err < 0) {
|
818 |
+
throw VideoException("Could not get camera control register");
|
819 |
+
}
|
820 |
+
return value;
|
821 |
+
}
|
822 |
+
|
823 |
+
// Snap 'value' to the nearest multiple of 'step', with both candidate
// multiples clamped into [min, max] before choosing the closer one.
int FirewireVideo::nearest_value(int value, int step, int min, int max)
{
    // Step-multiples immediately at/below and above the requested value.
    int below = value - (value % step);
    int above = below + step;

    if (below < min)
        below = min;
    if (above > max)
        above = max;

    return (abs(below - value) < abs(above - value)) ? below : above;
}
|
839 |
+
|
840 |
+
// Return the bus cycle period (in seconds) for a given iso speed: each
// doubling of speed halves the period. Throws for unknown speeds.
double FirewireVideo::bus_period_from_iso_speed(dc1394speed_t iso_speed)
{
    double bus_period;

    switch(iso_speed){
    case DC1394_ISO_SPEED_3200:
        bus_period = 15.625e-6;
        break;
    case DC1394_ISO_SPEED_1600:
        bus_period = 31.25e-6;
        break;
    case DC1394_ISO_SPEED_800:
        bus_period = 62.5e-6;
        break;
    case DC1394_ISO_SPEED_400:
        bus_period = 125e-6;
        break;
    case DC1394_ISO_SPEED_200:
        bus_period = 250e-6;
        break;
    case DC1394_ISO_SPEED_100:
        bus_period = 500e-6;
        break;
    default:
        throw VideoException("iso speed not valid");
    }

    return bus_period;
}
|
869 |
+
|
870 |
+
// Parse a "FORMAT7_<n>" string into the corresponding dc1394 Format7 mode
// enum. Throws VideoException when the string does not match that pattern.
dc1394video_mode_t get_firewire_format7_mode(const std::string fmt)
{
    const std::string FMT7_prefix = "FORMAT7_";

    if( StartsWith(fmt, FMT7_prefix) )
    {
        // Extract the numeric suffix, e.g. "FORMAT7_2" -> 2.
        std::istringstream suffix_stream( fmt.substr(FMT7_prefix.size()) );
        int mode_index = 0;
        suffix_stream >> mode_index;
        if( !suffix_stream.fail() ) {
            return (dc1394video_mode_t)(DC1394_VIDEO_MODE_FORMAT7_0 + mode_index);
        }
    }

    throw VideoException("Unknown video mode");
}
|
886 |
+
|
887 |
+
// Find the fixed dc1394 video mode matching the requested dimensions and
// pixel-format name by scanning all modes with Dc1394ModeDetails.
// Throws VideoException when no mode matches.
dc1394video_mode_t get_firewire_mode(unsigned width, unsigned height, const std::string fmt)
{
    for( dc1394video_mode_t video_mode=DC1394_VIDEO_MODE_MIN; video_mode<DC1394_VIDEO_MODE_MAX; video_mode = (dc1394video_mode_t)(video_mode +1) )
    {
        try {
            unsigned w,h;
            std::string format;
            Dc1394ModeDetails(video_mode,w,h,format);

            if( w == width && h==height && !fmt.compare(format) )
                return video_mode;
        } catch (const VideoException& e) {}  // mode not described; keep scanning
    }

    throw VideoException("Unknown video mode");
}
|
903 |
+
|
904 |
+
// Map a requested frame rate onto the discrete dc1394 framerate enum.
// Exact float comparison is intentional: all supported rates (1.875 doubling
// up to 240) are exactly representable in binary floating point.
dc1394framerate_t get_firewire_framerate(float framerate)
{
    if(framerate==1.875) return DC1394_FRAMERATE_1_875;
    else if(framerate==3.75) return DC1394_FRAMERATE_3_75;
    else if(framerate==7.5) return DC1394_FRAMERATE_7_5;
    else if(framerate==15) return DC1394_FRAMERATE_15;
    else if(framerate==30) return DC1394_FRAMERATE_30;
    else if(framerate==60) return DC1394_FRAMERATE_60;
    else if(framerate==120) return DC1394_FRAMERATE_120;
    else if(framerate==240) return DC1394_FRAMERATE_240;
    else throw VideoException("Invalid framerate");
}
|
916 |
+
|
917 |
+
// Register the dc1394/firewire URI schemes with Pangolin's video factory.
PANGOLIN_REGISTER_FACTORY(FirewireVideo)
{
    struct FirewireVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"dc1394",10}, {"firewire",10}};
        }
        const char* Description() const override
        {
            return "Access Firewire (dc1394) cameras";
        }
        ParamSet Params() const override
        {
            return {{
                {"fmt","RGB24","Pixel format: see pixel format help for all possible values. "},
                {"size","640x480","Image dimensions in pixels. Will be crop from full sensor resolution."},
                {"pos","0+0","top-left requested pixel offset"},
                {"dma","10","Direct Memory Access (DMA) device queue size"},
                {"iso","400","ISO sensitivity"},
                {"fps","30","Frame per Second"},
                {"deinterlace","0","Apply deinterlacing on video stream (0 off, 1 on)"}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            std::string desired_format = uri.Get<std::string>("fmt","RGB24");
            ToUpper(desired_format);
            const ImageDim desired_dim = uri.Get<ImageDim>("size", ImageDim(640,480));
            const ImageDim desired_xy  = uri.Get<ImageDim>("pos", ImageDim(0,0));
            const int desired_dma = uri.Get<int>("dma", 10);
            const int desired_iso = uri.Get<int>("iso", 400);
            const float desired_fps = uri.Get<float>("fps", 30);
            const bool deinterlace = uri.Get<bool>("deinterlace", 0);

            Guid guid = 0;
            unsigned deviceid = 0;
            dc1394framerate_t framerate = get_firewire_framerate(desired_fps);
            // iso is a power-of-two multiple of 100; map e.g. 400 -> SPEED_400.
            dc1394speed_t iso_speed = (dc1394speed_t)(log(desired_iso/100) / log(2));
            int dma_buffers = desired_dma;

            VideoInterface* video_raw = nullptr;

            if( StartsWith(desired_format, "FORMAT7") )
            {
                // Scalable (ROI) Format7 mode: size/pos select the region.
                dc1394video_mode_t video_mode = get_firewire_format7_mode(desired_format);
                if( guid.guid == 0 ) {
                    video_raw = new FirewireVideo(deviceid,video_mode,FirewireVideo::MAX_FR, desired_dim.x, desired_dim.y, desired_xy.x, desired_xy.y, iso_speed, dma_buffers,true);
                }else{
                    video_raw = new FirewireVideo(guid,video_mode,FirewireVideo::MAX_FR, desired_dim.x, desired_dim.y, desired_xy.x, desired_xy.y, iso_speed, dma_buffers,true);
                }
            }else{
                // Fixed-size mode matching the requested dimensions/format.
                dc1394video_mode_t video_mode = get_firewire_mode(desired_dim.x, desired_dim.y,desired_format);
                if( guid.guid == 0 ) {
                    video_raw = new FirewireVideo(deviceid,video_mode,framerate,iso_speed,dma_buffers);
                }else{
                    video_raw = new FirewireVideo(guid,video_mode,framerate,iso_speed,dma_buffers);
                }
            }

            if(deinterlace) {
                // Wrap the camera; DeinterlaceVideo takes ownership of it.
                std::unique_ptr<VideoInterface> video(video_raw);
                video_raw = new DeinterlaceVideo(video);
            }

            return std::unique_ptr<VideoInterface>(video_raw);
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<FirewireVideoFactory>());
}
|
986 |
+
|
987 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/gamma.cpp
ADDED
@@ -0,0 +1,375 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/video/drivers/gamma.h>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/iostream_operators.h>
|
31 |
+
#include <pangolin/utils/avx_math.h>
|
32 |
+
|
33 |
+
namespace pangolin
|
34 |
+
{
|
35 |
+
|
36 |
+
|
37 |
+
// Filter that applies per-stream gamma correction to frames pulled from a
// child VideoInterface. |stream_gammas| maps stream index -> gamma exponent;
// streams with no entry, or with a gamma of exactly 0 or 1, are passed
// through unchanged. Throws VideoException if a non-trivial gamma is
// requested for a stream whose pixel format is not in the supported set.
GammaVideo::GammaVideo(std::unique_ptr<VideoInterface>& src_, const std::map<size_t, float> &stream_gammas)
    : src(std::move(src_)), size_bytes(0), stream_gammas(stream_gammas)
{
    if(!src.get()) {
        throw VideoException("GammaVideo: VideoInterface in must not be null");
    }

    videoin.push_back(src.get());

    // Pixel formats ApplyGamma knows how to process: 8-bit and 16-bit,
    // 1/3/4 channel variants.
    formats_supported.insert("GRAY8");
    formats_supported.insert("GRAY16LE");
    formats_supported.insert("RGB24");
    formats_supported.insert("BGR24");
    formats_supported.insert("RGB48");
    formats_supported.insert("BGR48");
    formats_supported.insert("RGBA32");
    formats_supported.insert("BGRA32");
    formats_supported.insert("RGBA64");
    formats_supported.insert("BGRA64");

    // Mirror the child's stream layout. Only reject a stream's format when a
    // non-identity gamma was actually requested for that stream.
    for(size_t s = 0; s < src->Streams().size(); s++)
    {
        auto i = stream_gammas.find(s);

        if(i != stream_gammas.end() && i->second != 0.0f && i->second != 1.0f &&
           formats_supported.count(src->Streams()[s].PixFormat().format) == 0)
        {
            throw VideoException("GammaVideo: Stream format not supported");
        }

        streams.push_back(src->Streams()[s]);
        size_bytes += streams.back().SizeBytes();
    }

    // Intermediate buffer holding one complete raw frame from the child.
    buffer.reset(new uint8_t[src->SizeBytes()]);
}

GammaVideo::~GammaVideo()
{
}

//! Implement VideoInput::Start() - delegates to the child device.
void GammaVideo::Start()
{
    videoin[0]->Start();
}

//! Implement VideoInput::Stop() - delegates to the child device.
void GammaVideo::Stop()
{
    videoin[0]->Stop();
}

//! Implement VideoInput::SizeBytes() - total bytes of one frame over all streams.
size_t GammaVideo::SizeBytes() const
{
    return size_bytes;
}

//! Implement VideoInput::Streams() - same layout as the child's streams.
const std::vector<StreamInfo>& GammaVideo::Streams() const
{
    return streams;
}
|
101 |
+
|
102 |
+
template <typename T>
|
103 |
+
void ApplyGamma(Image<uint8_t>& out,
|
104 |
+
const Image<uint8_t>& in,
|
105 |
+
const float gamma,
|
106 |
+
const float channel_max_value);
|
107 |
+
|
108 |
+
#ifdef __AVX2__
|
109 |
+
template <>
|
110 |
+
void ApplyGamma<uint8_t>(Image<uint8_t>& out,
|
111 |
+
const Image<uint8_t>& in,
|
112 |
+
const float gamma,
|
113 |
+
const float channel_max_value)
|
114 |
+
{
|
115 |
+
//Special shuffling constants for bytes across lanes
|
116 |
+
const __m256i K0 = _mm256_setr_epi8(
|
117 |
+
0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u,
|
118 |
+
0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u);
|
119 |
+
|
120 |
+
const __m256i K1 = _mm256_setr_epi8(
|
121 |
+
0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u, 0xF0u,
|
122 |
+
0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u, 0x70u);
|
123 |
+
|
124 |
+
const __m256i shuffle = _mm256_setr_epi8(0, 4, 8, 12, 16, 20, 24, 28, 2, 9, 10, 11, 3, 13, 14, 15, 4,
|
125 |
+
17, 18, 19, 5, 21, 22, 23, 6, 25, 26, 27, 7, 29, 30, 31);
|
126 |
+
|
127 |
+
for(size_t r = 0; r < out.h; ++r)
|
128 |
+
{
|
129 |
+
uint8_t* pout = out.ptr + r * out.pitch;
|
130 |
+
uint8_t* pin = in.ptr + r * in.pitch;
|
131 |
+
const uint8_t* pin_end = in.ptr + (r + 1) * in.pitch;
|
132 |
+
const uint32_t numElems = in.pitch / sizeof(uint8_t);
|
133 |
+
|
134 |
+
constexpr int vecElems = 8;
|
135 |
+
|
136 |
+
const int numVecs = (numElems / vecElems) * vecElems;
|
137 |
+
|
138 |
+
//Processing eight at a time (max number of floats per vector)
|
139 |
+
for(int vec = 0; vec < numVecs; vec += vecElems, pin += vecElems, pout += vecElems)
|
140 |
+
{
|
141 |
+
//Convert bytes to floats
|
142 |
+
const __m256 floatVals = _mm256_cvtepi32_ps(_mm256_cvtepu8_epi32(_mm_cvtsi64_si128(*(uint64_t*)pin)));
|
143 |
+
|
144 |
+
//Apply gamma and prepare for truncation (rounding) to integer
|
145 |
+
const __m256 gammaValues = _mm256_add_ps(_mm256_mul_ps(pow256_ps(_mm256_div_ps(floatVals, _mm256_set1_ps(channel_max_value)), _mm256_set1_ps(gamma)), _mm256_set1_ps(channel_max_value)), _mm256_set1_ps(0.5f));
|
146 |
+
|
147 |
+
//Clamp and convert to integer
|
148 |
+
__m256i gammaValuesI = _mm256_min_epi32(_mm256_max_epi32(_mm256_cvtps_epi32(gammaValues), _mm256_set1_epi32(0)), _mm256_set1_epi32(255));
|
149 |
+
|
150 |
+
//Unshuffle bytes to end of vector
|
151 |
+
gammaValuesI = _mm256_or_si256(_mm256_shuffle_epi8(gammaValuesI, _mm256_add_epi8(shuffle, K0)),
|
152 |
+
_mm256_shuffle_epi8(_mm256_permute4x64_epi64(gammaValuesI, 0x4E), _mm256_add_epi8(shuffle, K1)));
|
153 |
+
|
154 |
+
//Copy result out
|
155 |
+
*(uint64_t*)pout = _mm_cvtsi128_si64(_mm256_castsi256_si128(gammaValuesI));
|
156 |
+
}
|
157 |
+
|
158 |
+
//Remainder loose ends
|
159 |
+
while(pin != pin_end) {
|
160 |
+
*(pout++) = uint8_t(std::pow(float(*(pin++)) / channel_max_value, gamma) * channel_max_value + 0.5f);
|
161 |
+
}
|
162 |
+
}
|
163 |
+
}
|
164 |
+
|
165 |
+
template <>
|
166 |
+
void ApplyGamma<uint16_t>(Image<uint8_t>& out,
|
167 |
+
const Image<uint8_t>& in,
|
168 |
+
const float gamma,
|
169 |
+
const float channel_max_value)
|
170 |
+
{
|
171 |
+
for(size_t r = 0; r < out.h; ++r)
|
172 |
+
{
|
173 |
+
uint16_t* pout = (uint16_t*)(out.ptr + r * out.pitch);
|
174 |
+
uint16_t* pin = (uint16_t*)(in.ptr + r * in.pitch);
|
175 |
+
const uint16_t* pin_end = (uint16_t*)(in.ptr + (r + 1) * in.pitch);
|
176 |
+
const uint32_t numElems = in.pitch / sizeof(uint16_t);
|
177 |
+
|
178 |
+
constexpr int vecElems = 8;
|
179 |
+
|
180 |
+
const int numVecs = (numElems / vecElems) * vecElems;
|
181 |
+
|
182 |
+
//Processing eight at a time (max number of floats per vector)
|
183 |
+
for(int vec = 0; vec < numVecs; vec += vecElems, pin += vecElems, pout += vecElems)
|
184 |
+
{
|
185 |
+
//Convert shorts to floats
|
186 |
+
const __m256 floatVals = _mm256_cvtepi32_ps(_mm256_cvtepu16_epi32(_mm_loadu_si128((const __m128i*)pin)));
|
187 |
+
|
188 |
+
//Apply gamma and prepare for truncation (rounding) to integer
|
189 |
+
const __m256 gammaValues = _mm256_add_ps(_mm256_mul_ps(pow256_ps(_mm256_div_ps(floatVals, _mm256_set1_ps(channel_max_value)), _mm256_set1_ps(gamma)), _mm256_set1_ps(channel_max_value)), _mm256_set1_ps(0.5f));
|
190 |
+
|
191 |
+
//Clamp and convert to integer
|
192 |
+
__m256i gammaValuesI = _mm256_min_epi32(_mm256_max_epi32(_mm256_cvtps_epi32(gammaValues), _mm256_set1_epi32(0)), _mm256_set1_epi32(65535));
|
193 |
+
|
194 |
+
//Unshuffle shorts to end of vector
|
195 |
+
gammaValuesI = _mm256_packs_epi32(gammaValuesI, _mm256_setzero_si256());
|
196 |
+
gammaValuesI = _mm256_permute4x64_epi64(gammaValuesI, 0xD8);
|
197 |
+
|
198 |
+
// Copy result out
|
199 |
+
*(__m128i*)pout = _mm256_castsi256_si128(gammaValuesI);
|
200 |
+
}
|
201 |
+
|
202 |
+
//Remainder loose ends
|
203 |
+
while(pin != pin_end) {
|
204 |
+
*(pout++) = uint16_t(std::pow(float(*(pin++)) / channel_max_value, gamma) * channel_max_value + 0.5f);
|
205 |
+
}
|
206 |
+
}
|
207 |
+
}
|
208 |
+
#else
|
209 |
+
template <typename T>
|
210 |
+
void ApplyGamma(Image<uint8_t>& out,
|
211 |
+
const Image<uint8_t>& in,
|
212 |
+
const float gamma,
|
213 |
+
const float channel_max_value)
|
214 |
+
{
|
215 |
+
for(size_t r = 0; r < out.h; ++r)
|
216 |
+
{
|
217 |
+
T* pout = (T*)(out.ptr + r*out.pitch);
|
218 |
+
T* pin = (T*)(in.ptr + r*in.pitch);
|
219 |
+
const T* pin_end = (T*)(in.ptr + (r+1)*in.pitch);
|
220 |
+
while(pin != pin_end) {
|
221 |
+
*(pout++) = T(std::pow(float(*(pin++)) / channel_max_value, gamma) * channel_max_value + 0.5f);
|
222 |
+
}
|
223 |
+
}
|
224 |
+
}
|
225 |
+
#endif
|
226 |
+
|
227 |
+
// Transform one complete frame from |buffer_in| into |buffer_out|: each
// stream is either gamma-corrected (dispatching on 8- vs 16-bit channel
// depth) or, when no non-identity gamma is configured, copied row-by-row.
void GammaVideo::Process(uint8_t* buffer_out, const uint8_t* buffer_in)
{
    for(size_t s=0; s<streams.size(); ++s) {
        Image<uint8_t> img_out = Streams()[s].StreamImage(buffer_out);
        const Image<uint8_t> img_in = videoin[0]->Streams()[s].StreamImage(buffer_in);
        const size_t bytes_per_pixel = Streams()[s].PixFormat().bpp / 8;

        auto i = stream_gammas.find(s);

        // Gamma of exactly 0 or 1 is treated as identity: fall through to copy.
        if(i != stream_gammas.end() && i->second != 0.0f && i->second != 1.0f)
        {
            const float gamma = i->second;

            // 8-bit-per-channel formats
            if(Streams()[s].PixFormat().format == "GRAY8" ||
               Streams()[s].PixFormat().format == "RGB24" ||
               Streams()[s].PixFormat().format == "BGR24" ||
               Streams()[s].PixFormat().format == "RGBA32" ||
               Streams()[s].PixFormat().format == "BGRA32")
            {
                ApplyGamma<uint8_t>(img_out, img_in, gamma, std::pow(2, Streams()[s].PixFormat().channel_bit_depth) - 1);
            }
            // 16-bit-per-channel formats
            else if(Streams()[s].PixFormat().format == "GRAY16LE" ||
                    Streams()[s].PixFormat().format == "RGB48" ||
                    Streams()[s].PixFormat().format == "BGR48" ||
                    Streams()[s].PixFormat().format == "RGBA64" ||
                    Streams()[s].PixFormat().format == "BGRA64")
            {
                ApplyGamma<uint16_t>(img_out, img_in, gamma, std::pow(2, Streams()[s].PixFormat().channel_bit_depth) - 1);
            }
            else
            {
                // Should be unreachable: the constructor already validated
                // formats for streams with a non-identity gamma.
                throw VideoException("GammaVideo: Stream format not supported");
            }
        }
        else
        {
            //straight copy
            if( img_out.w != img_in.w || img_out.h != img_in.h ) {
                throw std::runtime_error("GammaVideo: Incompatible image sizes");
            }

            for(size_t y=0; y < img_out.h; ++y) {
                std::memcpy(img_out.RowPtr((int)y), img_in.RowPtr((int)y), bytes_per_pixel * img_in.w);
            }
        }
    }
}
|
274 |
+
|
275 |
+
//! Implement VideoInput::GrabNext()
|
276 |
+
bool GammaVideo::GrabNext( uint8_t* image, bool wait )
|
277 |
+
{
|
278 |
+
if(videoin[0]->GrabNext(buffer.get(),wait)) {
|
279 |
+
Process(image, buffer.get());
|
280 |
+
return true;
|
281 |
+
}else{
|
282 |
+
return false;
|
283 |
+
}
|
284 |
+
}
|
285 |
+
|
286 |
+
//! Implement VideoInput::GrabNewest()
|
287 |
+
bool GammaVideo::GrabNewest( uint8_t* image, bool wait )
|
288 |
+
{
|
289 |
+
if(videoin[0]->GrabNewest(buffer.get(),wait)) {
|
290 |
+
Process(image, buffer.get());
|
291 |
+
return true;
|
292 |
+
}else{
|
293 |
+
return false;
|
294 |
+
}
|
295 |
+
}
|
296 |
+
|
297 |
+
std::vector<VideoInterface*>& GammaVideo::InputStreams()
|
298 |
+
{
|
299 |
+
return videoin;
|
300 |
+
}
|
301 |
+
|
302 |
+
uint32_t GammaVideo::AvailableFrames() const
|
303 |
+
{
|
304 |
+
BufferAwareVideoInterface* vpi = dynamic_cast<BufferAwareVideoInterface*>(videoin[0]);
|
305 |
+
if(!vpi)
|
306 |
+
{
|
307 |
+
pango_print_warn("Gamma: child interface is not buffer aware.");
|
308 |
+
return 0;
|
309 |
+
}
|
310 |
+
else
|
311 |
+
{
|
312 |
+
return vpi->AvailableFrames();
|
313 |
+
}
|
314 |
+
}
|
315 |
+
|
316 |
+
bool GammaVideo::DropNFrames(uint32_t n)
|
317 |
+
{
|
318 |
+
BufferAwareVideoInterface* vpi = dynamic_cast<BufferAwareVideoInterface*>(videoin[0]);
|
319 |
+
if(!vpi)
|
320 |
+
{
|
321 |
+
pango_print_warn("Gamma: child interface is not buffer aware.");
|
322 |
+
return false;
|
323 |
+
}
|
324 |
+
else
|
325 |
+
{
|
326 |
+
return vpi->DropNFrames(n);
|
327 |
+
}
|
328 |
+
}
|
329 |
+
|
330 |
+
// Registers the "gamma" URI scheme: gamma:[gamma1=G1,gamma2=G2,...]//suburi
// opens the sub-video and wraps it in a GammaVideo.
PANGOLIN_REGISTER_FACTORY(GammaVideo)
{
    struct GammaVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        GammaVideoFactory()
        {
            param_set_ = {{
                {"gamma\\d+","1.0","gammaK, where 1 <= K <= N where N is the number of streams"}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {

            ParamReader reader(param_set_, uri);
            // Gamma for each stream
            std::map<size_t, float> stream_gammas;
            // gammaK params are 1-based in the URI; stored 0-based by stream
            // index. Only the first 100 streams are scanned.
            for(size_t i=0; i<100; ++i)
            {
                // FormatString substitutes i+1 for the '%' placeholder,
                // producing "gamma1", "gamma2", ...
                const std::string gamma_key = pangolin::FormatString("gamma%",i+1);

                if(uri.Contains(gamma_key))
                {
                    stream_gammas[i] = reader.Get<float>(gamma_key, 1.0f);
                }
            }

            std::unique_ptr<VideoInterface> subvid = pangolin::OpenVideo(uri.url);

            return std::unique_ptr<VideoInterface> (new GammaVideo(subvid, stream_gammas));
        }
        FactoryUseInfo Help( const std::string& scheme ) const override {
            return FactoryUseInfo(scheme, "Gamma corrects a set of video streams", param_set_);
        }

        bool ValidateUri( const std::string& scheme, const Uri& uri, std::unordered_set<std::string>& unrecognized_params) const override {
            return ValidateUriAgainstParamSet(scheme, param_set_, uri, unrecognized_params );
        }

        bool IsValidated( const std::string& ) const override {return true;}

        ParamSet param_set_;
    };

    auto factory = std::make_shared<GammaVideoFactory>();
    FactoryRegistry::I()->RegisterFactory<VideoInterface>(factory, 10, "gamma");
}
|
374 |
+
|
375 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/images.cpp
ADDED
@@ -0,0 +1,315 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2013 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/factory/factory_registry.h>
|
29 |
+
#include <pangolin/utils/file_utils.h>
|
30 |
+
#include <pangolin/video/drivers/images.h>
|
31 |
+
#include <pangolin/video/iostream_operators.h>
|
32 |
+
|
33 |
+
#include <cstring>
|
34 |
+
#include <fstream>
|
35 |
+
|
36 |
+
namespace pangolin
|
37 |
+
{
|
38 |
+
|
39 |
+
// Decode every channel image of frame i into loaded[i]. Returns false when
// i is out of range. Files with an unrecognised extension are optionally
// interpreted as raw pixel data using the raw_* parameters captured at
// construction.
bool ImagesVideo::LoadFrame(size_t i)
{
    if( i < num_files) {
        Frame& frame = loaded[i];
        for(size_t c=0; c< num_channels; ++c) {
            const std::string& filename = Filename(i,c);
            const ImageFileType file_type = FileType(filename);

            if(file_type == ImageFileTypeUnknown && unknowns_are_raw) {
                // if raw_pitch is zero, assume image is packed.
                const size_t pitch = raw_pitch ? raw_pitch : raw_fmt.bpp * raw_width / 8;
                frame.push_back( LoadImage( filename, raw_fmt, raw_width, raw_height, pitch) );
            }else{
                frame.push_back( LoadImage( filename, file_type ) );
            }
        }
        return true;
    }
    return false;
}
|
59 |
+
|
60 |
+
// Populate filenames/num_files/num_channels from a json archive. Relative
// stream-file paths are resolved against the json file's own folder;
// absolute paths (leading '/') are used as-is. Throws VideoException on
// parse errors or an empty archive.
void ImagesVideo::PopulateFilenamesFromJson(const std::string& filename)
{
    std::ifstream ifs( PathExpand(filename));
    picojson::value json;
    const std::string err = picojson::parse(json, ifs);
    if(err.empty()) {
        const std::string folder = PathParent(filename) + "/";
        device_properties = json["device_properties"];
        json_frames = json["frames"];

        num_files = json_frames.size();
        if(num_files == 0) {
            throw VideoException("Empty Json Image archive.");
        }

        // Channel count is taken from the first frame's stream_files entry.
        num_channels = json_frames[0]["stream_files"].size();
        if(num_channels == 0) {
            throw VideoException("Empty Json Image archive.");
        }

        filenames.resize(num_channels);
        for(size_t c=0; c < num_channels; ++c) {
            filenames[c].resize(num_files);
            for(size_t i = 0; i < num_files; ++i) {
                const std::string path = json_frames[i]["stream_files"][c].get<std::string>();
                filenames[c][i] = (path.size() && path[0] == '/') ? path : (folder + path);
            }
        }
        // Reserve one (initially empty) Frame slot per file for lazy loading.
        loaded.resize(num_files);
    }else{
        throw VideoException(err);
    }
}
|
93 |
+
|
94 |
+
// Resolve the input specification into per-channel filename lists.
// wildcard_path may be a '[a,b,...]' bracket expansion (one wildcard per
// channel), a json archive, or a folder containing archive.json. When
// channels have unequal file counts, the shortest wins (with a warning).
void ImagesVideo::PopulateFilenames(const std::string& wildcard_path)
{
    const std::vector<std::string> wildcards = Expand(wildcard_path, '[', ']', ',');
    num_channels = wildcards.size();

    // Single path: check for a json archive before treating it as a wildcard.
    if(wildcards.size() == 1 ) {
        const std::string expanded_path = PathExpand(wildcards[0]);
        const std::string possible_archive_path = expanded_path + "/archive.json";

        if (FileLowercaseExtention(expanded_path) == ".json" ) {
            PopulateFilenamesFromJson(wildcards[0]);
            return;
        }else if(FileExists(possible_archive_path)){
            PopulateFilenamesFromJson(possible_archive_path);
            return;
        }
    }

    filenames.resize(num_channels);

    for(size_t i = 0; i < wildcards.size(); ++i) {
        const std::string channel_wildcard = PathExpand(wildcards[i]);
        FilesMatchingWildcard(channel_wildcard, filenames[i], SortMethod::NATURAL);
        // num_files starts as size_t(-1) (sentinel set in the constructor).
        if(num_files == size_t(-1)) {
            num_files = filenames[i].size();
        }else{
            if( num_files != filenames[i].size() ) {
                std::cerr << "Warning: Video Channels have unequal number of files" << std::endl;
            }
            num_files = std::min(num_files, filenames[i].size());
        }
        if(num_files == 0) {
            throw VideoException("No files found for wildcard '" + channel_wildcard + "'");
        }
    }

    // Resize empty frames vector to hold future images.
    loaded.resize(num_files);
}
|
133 |
+
|
134 |
+
// Derive StreamInfo entries (format, size, pitch, byte offset within one
// concatenated frame) from the already-loaded first frame's images.
void ImagesVideo::ConfigureStreamSizes()
{
    size_bytes = 0;
    for(size_t c=0; c < num_channels; ++c) {
        const TypedImage& img = loaded[0][c];
        // The running byte offset is smuggled through the pointer parameter,
        // matching StreamInfo's offset convention used elsewhere in Pangolin.
        const StreamInfo stream_info(img.fmt, img.w, img.h, img.pitch, (unsigned char*)(size_bytes));
        streams.push_back(stream_info);
        size_bytes += img.h*img.pitch;
    }
}

// Open an image sequence from a wildcard/json path; formats are inferred
// from file contents/extensions.
ImagesVideo::ImagesVideo(const std::string& wildcard_path)
    : num_files(-1), num_channels(0), next_frame_id(0),
      unknowns_are_raw(false)
{
    // Work out which files to sequence
    PopulateFilenames(wildcard_path);

    // Load first image in order to determine stream sizes etc
    LoadFrame(next_frame_id);

    ConfigureStreamSizes();

    // TODO: Queue frames in another thread.
}

// As above, but files with unknown extensions are decoded as raw pixel data
// with the given format/dimensions/pitch.
ImagesVideo::ImagesVideo(const std::string& wildcard_path,
                         const PixelFormat& raw_fmt,
                         size_t raw_width, size_t raw_height, size_t raw_pitch
                         ) : num_files(-1), num_channels(0), next_frame_id(0),
    unknowns_are_raw(true), raw_fmt(raw_fmt),
    raw_width(raw_width), raw_height(raw_height), raw_pitch(raw_pitch)
{
    // Work out which files to sequence
    PopulateFilenames(wildcard_path);

    // Load first image in order to determine stream sizes etc
    LoadFrame(next_frame_id);

    ConfigureStreamSizes();

    // TODO: Queue frames in another thread.
}
|
177 |
+
|
178 |
+
ImagesVideo::~ImagesVideo()
{
}

//! Implement VideoInput::Start() - no-op: frames are loaded on demand.
void ImagesVideo::Start()
{

}

//! Implement VideoInput::Stop() - no-op.
void ImagesVideo::Stop()
{

}

//! Implement VideoInput::SizeBytes() - bytes of one frame over all streams.
size_t ImagesVideo::SizeBytes() const
{
    return size_bytes;
}

//! Implement VideoInput::Streams()
const std::vector<StreamInfo>& ImagesVideo::Streams() const
{
    return streams;
}
|
205 |
+
|
206 |
+
//! Implement VideoInput::GrabNext()
|
207 |
+
//! Implement VideoInput::GrabNext()
//! Copies the next frame's channel images into |image| back-to-back at each
//! stream's offset, then releases the decoded images. Returns false at end
//! of sequence or if a channel image is missing / has unexpected dimensions.
bool ImagesVideo::GrabNext( unsigned char* image, bool /*wait*/ )
{
    if(next_frame_id < loaded.size()) {
        Frame& frame = loaded[next_frame_id];

        // Lazily load this frame if it wasn't preloaded.
        if(frame.size() != num_channels) {
            LoadFrame(next_frame_id);
        }

        for(size_t c=0; c < num_channels; ++c){
            TypedImage& img = frame[c];
            if(!img.ptr || img.w != streams[c].Width() || img.h != streams[c].Height() ) {
                return false;
            }
            const StreamInfo& si = streams[c];
            std::memcpy(image + (size_t)si.Offset(), img.ptr, si.SizeBytes());
            // Free the decoded image immediately after copying out.
            img.Deallocate();
        }
        frame.clear();

        next_frame_id++;
        return true;
    }

    return false;
}

//! Implement VideoInput::GrabNewest() - identical to GrabNext for a file
//! sequence, which has no notion of "newest".
bool ImagesVideo::GrabNewest( unsigned char* image, bool wait )
{
    return GrabNext(image,wait);
}
|
239 |
+
|
240 |
+
size_t ImagesVideo::GetCurrentFrameId() const
|
241 |
+
{
|
242 |
+
return (int)next_frame_id - 1;
|
243 |
+
}
|
244 |
+
|
245 |
+
//! Total number of frames in the sequence (min file count over channels).
size_t ImagesVideo::GetTotalFrames() const
{
    return num_files;
}
|
249 |
+
|
250 |
+
size_t ImagesVideo::Seek(size_t frameid)
|
251 |
+
{
|
252 |
+
next_frame_id = std::max(size_t(0), std::min(frameid, num_files));
|
253 |
+
return next_frame_id;
|
254 |
+
}
|
255 |
+
|
256 |
+
// Device-level metadata from the json archive (empty when opened from a
// plain wildcard).
const picojson::value& ImagesVideo::DeviceProperties() const
{
    return device_properties;
}

// Per-frame metadata for the most recently grabbed frame, or null_props when
// no archive / no entry exists. Safe before the first grab: GetCurrentFrameId()
// wraps to a huge value that fails the bounds check below.
const picojson::value& ImagesVideo::FrameProperties() const
{
    const size_t frame = GetCurrentFrameId();

    if( json_frames.evaluate_as_boolean() && frame < json_frames.size()) {
        const picojson::value& frame_props = json_frames[frame];
        if(frame_props.contains("frame_properties")) {
            return frame_props["frame_properties"];
        }
    }

    return null_props;
}
|
274 |
+
|
275 |
+
// Registers the file/files/image/images URI schemes for opening image
// sequences as video. With "fmt" set, unknown-extension files are decoded
// as raw pixels of the given format/size/pitch.
PANGOLIN_REGISTER_FACTORY(ImagesVideo)
{
    struct ImagesVideoVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"file",20}, {"files",20}, {"image",10}, {"images",10}};
        }
        const char* Description() const override
        {
            return "Load an image collection as a video. Supports one or more synchronized streams.";
        }
        ParamSet Params() const override
        {
            return {{
                {"fmt","GRAY8","RAW files only. Pixel format, see pixel format help for all possible values"},
                {"size","640x480","RAW files only. Image size, required if fmt is specified"},
                {"pitch","0","RAW files only. Specify distance from the start of one row to the next in bytes. If not specified, assumed image is packed."}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            ParamReader reader(Params(),uri);

            // Presence of "fmt" selects the raw-file constructor.
            const bool raw = reader.Contains("fmt");
            const std::string path = PathExpand(uri.url);

            if(raw) {
                const std::string sfmt = reader.Get<std::string>("fmt");
                const PixelFormat fmt = PixelFormatFromString(sfmt);
                const ImageDim dim = reader.Get<ImageDim>("size");
                const size_t image_pitch = reader.Get<int>("pitch");
                return std::unique_ptr<VideoInterface>( new ImagesVideo(path, fmt, dim.x, dim.y, image_pitch) );
            }else{
                return std::unique_ptr<VideoInterface>( new ImagesVideo(path) );
            }
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<ImagesVideoVideoFactory>());
}
|
314 |
+
|
315 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/images_out.cpp
ADDED
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <iomanip>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/image/image_io.h>
|
31 |
+
#include <pangolin/utils/file_utils.h>
|
32 |
+
#include <pangolin/video/drivers/images_out.h>
|
33 |
+
|
34 |
+
namespace pangolin {
|
35 |
+
|
36 |
+
// Prepare an image-sequence writer rooted at 'image_folder'. When
// 'json_file_out' is non-empty the index file is opened immediately, so a
// bad output location fails fast rather than at destruction time.
ImagesVideoOutput::ImagesVideoOutput(const std::string& image_folder, const std::string& json_file_out, const std::string& image_file_extension)
    : json_frames(picojson::array_type,true),
      image_index(0), image_folder( PathExpand(image_folder) + "/" ), image_file_extension(image_file_extension)
{
    if(json_file_out.empty()) {
        // No json index requested; frames will still be written, just not catalogued.
        return;
    }

    file.open(json_file_out);
    if(!file.is_open()) {
        throw std::runtime_error("Unable to open json file for writing, " + json_file_out + ". Make sure output folder already exists.");
    }
}
|
47 |
+
|
48 |
+
// Finalize the dataset: if a json index was requested at construction,
// assemble the document (device properties, per-frame records, URIs) and
// serialize it to disk.
ImagesVideoOutput::~ImagesVideoOutput()
{
    if(!file.is_open())
    {
        // No index file was requested; nothing to flush.
        return;
    }

    const std::string video_uri = "images://" + image_folder + "archive.json";

    picojson::value json_file;
    json_file["device_properties"] = device_properties;
    json_file["frames"] = json_frames;
    json_file["input_uri"] = input_uri;
    json_file["video_uri"] = video_uri;

    // Serialize json to file (pretty-printed).
    file << json_file.serialize(true);
}
|
63 |
+
|
64 |
+
// Returns the stream layout previously registered via SetStreams().
const std::vector<StreamInfo>& ImagesVideoOutput::Streams() const
{
    return streams;
}
|
68 |
+
|
69 |
+
// Record the stream layout, originating URI and device properties that will
// be written into the json index on destruction.
void ImagesVideoOutput::SetStreams(const std::vector<StreamInfo>& streams, const std::string& uri, const picojson::value& device_properties)
{
    // The three assignments are independent of one another.
    this->device_properties = device_properties;
    this->input_uri = uri;
    this->streams = streams;
}
|
75 |
+
|
76 |
+
// Write one frame: saves each stream's image as an individual file under
// image_folder and appends a record (filenames + frame properties) to the
// in-memory json index, which is serialized by the destructor.
int ImagesVideoOutput::WriteStreams(const unsigned char* data, const picojson::value& frame_properties)
{
    picojson::value json_filenames(picojson::array_type, true);

    // Write each stream image to file.
    for(size_t s=0; s < streams.size(); ++s) {
        const pangolin::StreamInfo& si = streams[s];
        // NOTE(review): the '%' placeholders (including the setfill/setw
        // manipulators passed as arguments) are consumed positionally by
        // pangolin::FormatString - presumably yielding names like
        // "image_0000000042_0.png"; confirm against FormatString semantics.
        const std::string filename = pangolin::FormatString("image_%%%_%.%",std::setfill('0'),std::setw(10),image_index, s, image_file_extension);
        json_filenames.push_back(filename);
        // StreamImage() yields a view into 'data' at this stream's offset.
        const Image<unsigned char> img = si.StreamImage(data);
        pangolin::SaveImage(img, si.PixFormat(), image_folder + filename);
    }

    // Add frame_properties to json file.
    picojson::value json_frame;
    json_frame["frame_properties"] = frame_properties;
    json_frame["stream_files"] = json_filenames;
    json_frames.push_back(json_frame);

    ++image_index;
    // Always returns 0.
    return 0;
}
|
98 |
+
|
99 |
+
// This output writes to regular files, never to a pipe.
bool ImagesVideoOutput::IsPipe() const
{
    return false;
}
|
103 |
+
|
104 |
+
// Registers the "images://" video-output scheme: each frame is written as one
// image file per stream, indexed by an "archive.json" file in the same folder.
PANGOLIN_REGISTER_FACTORY(ImagesVideoOutput)
{
    struct ImagesVideoFactory final : public TypedFactoryInterface<VideoOutputInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"images",10}};
        }
        const char* Description() const override
        {
            // BUGFIX: corrected typo "sequance" -> "sequence" in help text.
            return "Writes video frames out to sequence of images + json index file.";
        }
        ParamSet Params() const override
        {
            return {{
                {"fmt","png","Output image format. Possible values are all Pangolin image formats e.g.: png,jpg,jpeg,ppm,pgm,pxm,pdm,zstd,lzf,p12b,exr,pango"}
            }};
        }
        std::unique_ptr<VideoOutputInterface> Open(const Uri& uri) override {
            ParamReader reader(Params(),uri);

            const std::string images_folder = PathExpand(uri.url);
            const std::string json_filename = images_folder + "/archive.json";
            const std::string image_extension = reader.Get<std::string>("fmt");

            // Refuse to clobber a dataset already present in this directory.
            if(FileExists(json_filename)) {
                throw std::runtime_error("Dataset already exists in directory.");
            }

            return std::unique_ptr<VideoOutputInterface>(
                new ImagesVideoOutput(images_folder, json_filename, image_extension)
            );
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoOutputInterface>(std::make_shared<ImagesVideoFactory>());
}
|
140 |
+
|
141 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/join.cpp
ADDED
@@ -0,0 +1,620 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <thread>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/drivers/join.h>
|
31 |
+
#include <pangolin/video/iostream_operators.h>
|
32 |
+
#include <pangolin/video/video.h>
|
33 |
+
|
34 |
+
//#define DEBUGJOIN
|
35 |
+
|
36 |
+
#ifdef DEBUGJOIN
|
37 |
+
#include <pangolin/utils/timer.h>
|
38 |
+
#define TSTART() \
|
39 |
+
pangolin::basetime start, last, now; \
|
40 |
+
start = pangolin::TimeNow(); \
|
41 |
+
last = start;
|
42 |
+
#define TGRABANDPRINT(...) \
|
43 |
+
now = pangolin::TimeNow(); \
|
44 |
+
fprintf(stderr, "JOIN: "); \
|
45 |
+
fprintf(stderr, __VA_ARGS__); \
|
46 |
+
fprintf(stderr, " %fms.\n", 1000 * pangolin::TimeDiff_s(last, now)); \
|
47 |
+
last = now;
|
48 |
+
#define DBGPRINT(...) \
|
49 |
+
fprintf(stderr, "JOIN: "); \
|
50 |
+
fprintf(stderr, __VA_ARGS__); \
|
51 |
+
fprintf(stderr, "\n");
|
52 |
+
#else
|
53 |
+
#define TSTART()
|
54 |
+
#define TGRABANDPRINT(...)
|
55 |
+
#define DBGPRINT(...)
|
56 |
+
#endif
|
57 |
+
|
58 |
+
namespace pangolin
|
59 |
+
{
|
60 |
+
// Takes ownership of the child video sources and builds the combined stream
// list. Children are laid out back-to-back in one contiguous buffer; each
// child's streams are re-based onto its offset within that buffer.
JoinVideo::JoinVideo(std::vector<std::unique_ptr<VideoInterface>>& src_, const bool verbose)
    : storage(std::move(src_)), size_bytes(0), sync_tolerance_us(0), verbose(verbose)
{
    for(auto& p : storage)
    {
        src.push_back(p.get());       // non-owning view used throughout the class
        frame_seen.push_back(false);  // per-source "has ever delivered a frame" flag
    }

    // Add individual streams
    for(size_t s = 0; s < src.size(); ++s)
    {
        VideoInterface& vid = *src[s];
        for(size_t i = 0; i < vid.Streams().size(); ++i)
        {
            const StreamInfo si = vid.Streams()[i];
            const PixelFormat fmt = si.PixFormat();
            // Trick: pass the running byte offset cast to a pointer as the base,
            // so the resulting Image's pointer encodes this stream's offset
            // within the joined buffer.
            const Image<unsigned char> img_offset = si.StreamImage((unsigned char*)size_bytes);
            streams.push_back(StreamInfo(fmt, img_offset));
        }
        size_bytes += src[s]->SizeBytes();
    }
}
|
83 |
+
|
84 |
+
// Stop all child sources. The 'storage' unique_ptrs then release them.
JoinVideo::~JoinVideo()
{
    for(VideoInterface* vid : src)
    {
        vid->Stop();
    }
}
|
91 |
+
|
92 |
+
// Total byte size of one joined frame: the sum of all child frame sizes.
size_t JoinVideo::SizeBytes() const
{
    return size_bytes;
}
|
96 |
+
|
97 |
+
// Combined stream list (all children's streams, re-based at construction).
const std::vector<StreamInfo>& JoinVideo::Streams() const
{
    return streams;
}
|
101 |
+
|
102 |
+
void JoinVideo::Start()
|
103 |
+
{
|
104 |
+
for(size_t s = 0; s < src.size(); ++s)
|
105 |
+
{
|
106 |
+
src[s]->Start();
|
107 |
+
}
|
108 |
+
}
|
109 |
+
|
110 |
+
void JoinVideo::Stop()
|
111 |
+
{
|
112 |
+
for(size_t s = 0; s < src.size(); ++s)
|
113 |
+
{
|
114 |
+
src[s]->Stop();
|
115 |
+
}
|
116 |
+
}
|
117 |
+
|
118 |
+
// Enable timestamp-based frame synchronization with the given tolerance.
// Returns false (and disables syncing) if any child source does not advertise
// timing data via PANGO_HAS_TIMING_DATA, either at device level or for every
// one of its streams. transfer_bandwidth_gbps (gigabits/s) is used by
// GetAdjustedCaptureTime() to estimate the host transfer delay.
bool JoinVideo::Sync(int64_t tolerance_us, double transfer_bandwidth_gbps)
{
    // 1 Gbit/s == 125 bytes/us, hence gbps * 1e3 / 8.
    transfer_bandwidth_bytes_per_us = int64_t((transfer_bandwidth_gbps * 1E3) / 8.0);
    // std::cout << "transfer_bandwidth_gbps: " << transfer_bandwidth_gbps << std::endl;

    for(size_t s = 0; s < src.size(); ++s)
    {
        picojson::value props = GetVideoDeviceProperties(src[s]);
        if(!props.get_value(PANGO_HAS_TIMING_DATA, false))
        {
            // The device itself doesn't claim timing data; accept it only if
            // every individual stream does.
            if(props.contains("streams"))
            {
                picojson::value streams = props["streams"];
                for(size_t i = 0; i < streams.size(); ++i)
                {
                    if(!streams[i].get_value(PANGO_HAS_TIMING_DATA, false))
                    {
                        sync_tolerance_us = 0;
                        return false;
                    }
                }
            }
            else
            {
                sync_tolerance_us = 0;
                return false;
            }
        }
    }

    sync_tolerance_us = tolerance_us;

    // std::cout << "transfer_bandwidth_bytes_per_us: " << transfer_bandwidth_bytes_per_us << std::endl;
    return true;
}
|
153 |
+
|
154 |
+
// Assuming that src_index supports VideoPropertiesInterface and has a valid PANGO_HOST_RECEPTION_TIME_US, or
|
155 |
+
// PANGO_ESTIMATED_CENTER_CAPTURE_TIME_US
|
156 |
+
// returns a capture time adjusted for transfer time and when possible also for exposure.
|
157 |
+
int64_t JoinVideo::GetAdjustedCaptureTime(size_t src_index)
|
158 |
+
{
|
159 |
+
picojson::value props = GetVideoFrameProperties(src[src_index]);
|
160 |
+
if(props.contains(PANGO_ESTIMATED_CENTER_CAPTURE_TIME_US))
|
161 |
+
{
|
162 |
+
// great, the driver already gave us an estimated center of capture
|
163 |
+
if(props.contains(PANGO_JOIN_OFFSET_US))
|
164 |
+
{
|
165 |
+
// apply join offset if the driver gave it to us
|
166 |
+
return props[PANGO_ESTIMATED_CENTER_CAPTURE_TIME_US].get<int64_t>() +
|
167 |
+
props[PANGO_JOIN_OFFSET_US].get<int64_t>();
|
168 |
+
}
|
169 |
+
else
|
170 |
+
{
|
171 |
+
return props[PANGO_ESTIMATED_CENTER_CAPTURE_TIME_US].get<int64_t>();
|
172 |
+
}
|
173 |
+
}
|
174 |
+
else
|
175 |
+
{
|
176 |
+
if(props.contains(PANGO_HOST_RECEPTION_TIME_US))
|
177 |
+
{
|
178 |
+
int64_t transfer_time_us = 0;
|
179 |
+
if(transfer_bandwidth_bytes_per_us > 0)
|
180 |
+
{
|
181 |
+
transfer_time_us = src[src_index]->SizeBytes() / transfer_bandwidth_bytes_per_us;
|
182 |
+
}
|
183 |
+
std::cerr << "JoinVideo: Stream " << src_index << " does contain PANGO_ESTIMATED_CENTER_CAPTURE_TIME_US using incorrect fallback. " << std::endl;
|
184 |
+
return props[PANGO_HOST_RECEPTION_TIME_US].get<int64_t>() - transfer_time_us;
|
185 |
+
}
|
186 |
+
else
|
187 |
+
{
|
188 |
+
if(props.contains("streams"))
|
189 |
+
{
|
190 |
+
picojson::value streams = props["streams"];
|
191 |
+
|
192 |
+
if(streams.size() > 0)
|
193 |
+
{
|
194 |
+
if(streams[0].contains(PANGO_ESTIMATED_CENTER_CAPTURE_TIME_US))
|
195 |
+
{
|
196 |
+
// great, the driver already gave us an estimated center of capture
|
197 |
+
return streams[0][PANGO_ESTIMATED_CENTER_CAPTURE_TIME_US].get<int64_t>();
|
198 |
+
}
|
199 |
+
else if(streams[0].contains(PANGO_HOST_RECEPTION_TIME_US))
|
200 |
+
{
|
201 |
+
int64_t transfer_time_us = 0;
|
202 |
+
if(transfer_bandwidth_bytes_per_us > 0)
|
203 |
+
{
|
204 |
+
transfer_time_us = src[src_index]->SizeBytes() / transfer_bandwidth_bytes_per_us;
|
205 |
+
}
|
206 |
+
std::cerr << "JoinVideo: Stream " << src_index << " does contain PANGO_ESTIMATED_CENTER_CAPTURE_TIME_US using incorrect fallback. " << std::endl;
|
207 |
+
return streams[0][PANGO_HOST_RECEPTION_TIME_US].get<int64_t>() - transfer_time_us;
|
208 |
+
}
|
209 |
+
}
|
210 |
+
}
|
211 |
+
}
|
212 |
+
|
213 |
+
PANGO_ENSURE(false,
|
214 |
+
"JoinVideo: Stream % does contain suffcient timing info to obtain or estimate the host center "
|
215 |
+
"capture time.\n",
|
216 |
+
src_index);
|
217 |
+
return 0;
|
218 |
+
}
|
219 |
+
}
|
220 |
+
|
221 |
+
// Grab one frame from every child source into 'image', with the children laid
// out back-to-back. When sync_tolerance_us > 0, all frames must additionally
// have adjusted capture times within tolerance of each other; lagging sources
// are re-grabbed in an attempt to re-sync. Returns true only when a complete
// (and, if enabled, synchronized) joined frame was delivered.
bool JoinVideo::GrabNext(unsigned char* image, bool wait)
{
    // Per-source byte offset into 'image' and adjusted capture timestamp.
    // capture_us[s] == 0 doubles as the "not yet grabbed" sentinel.
    // NOTE(review): a genuine capture time of exactly 0 would be re-grabbed;
    // presumed impossible in practice - confirm.
    std::vector<size_t> offsets(src.size(), 0);
    std::vector<int64_t> capture_us(src.size(), 0);

    TSTART()
    DBGPRINT("Entering GrabNext:")

    constexpr size_t loop_sleep_us = 500;
    size_t total_sleep_us = 0;
    // Arbitrary length of time larger than any reasonble period/exposure.
    const size_t total_sleep_threshold_us = 200000;
    size_t unfilled_images = src.size();

    // Poll all sources non-blockingly until each has delivered one frame (or
    // we give up / the caller didn't ask to wait).
    while (true)
    {
        size_t offset = 0;
        for(size_t s = 0; s < src.size(); ++s)
        {
            if (capture_us[s] == 0) {
                if(src[s]->GrabNext(image + offset, false))
                {
                    if(sync_tolerance_us > 0)
                    {
                        capture_us[s] = GetAdjustedCaptureTime(s);
                    }
                    else
                    {
                        // No syncing: any non-zero marker will do.
                        capture_us[s] = std::numeric_limits<int64_t>::max();
                    }

                    frame_seen[s] = true;
                    offsets[s] = offset;
                    unfilled_images -= 1;
                }
            }

            offset += src[s]->SizeBytes();
        }

        if (!wait || unfilled_images == 0)
        {
            // If the caller did not request to wait, or all images were retrieved, we are done.
            break;
        }

        // Sleep to simulate the blocking behavior of wait == true.
        std::this_thread::sleep_for(std::chrono::microseconds(loop_sleep_us));

        total_sleep_us += loop_sleep_us;
        if (sync_tolerance_us != 0 && total_sleep_us > total_sleep_threshold_us)
        {
            // We've waited long enough. Report on which cameras were not responding.
            pango_print_warn(
                "JoinVideo: Not all frames were delivered within the threshold of %zuus. Cameras not reporting:\n",
                total_sleep_threshold_us);
            for(size_t blocked = 0; blocked < src.size(); ++blocked)
            {
                if (capture_us[blocked] == 0)
                {
                    // Unfortunately, at this level we don't have any good label/description for the stream.
                    pango_print_warn(" Stream %zu%s\n",
                                     blocked,
                                     frame_seen[blocked] ? "" : " [never reported]");
                }
            }
            // Give up on this frame.
            break;
        }
    }

    // Check if any streams didn't return an image. This means a stream is waiting on data or has finished.
    if(std::any_of(capture_us.begin(), capture_us.end(), [](int64_t v) { return v == 0; }))
    {
        return false;
    }

    // Check Sync if a tolerence has been specified.
    if(sync_tolerance_us > 0)
    {
        auto range = std::minmax_element(capture_us.begin(), capture_us.end());
        if((*range.second - *range.first) > sync_tolerance_us)
        {
            if(verbose)
            {
                pango_print_warn(
                    "JoinVideo: Source timestamps span %lu us, not within %lu us. Ignoring frames, trying to "
                    "sync...\n",
                    (unsigned long)((*range.second - *range.first)),
                    (unsigned long)sync_tolerance_us);
            }

            // Attempt to resync...
            // Up to 10 rounds of blocking re-grabs for sources that lag behind
            // the newest timestamp by more than the tolerance.
            for(size_t n = 0; n < 10; ++n)
            {
                for(size_t s = 0; s < src.size(); ++s)
                {
                    // Catch up frames that are behind
                    if(capture_us[s] < (*range.second - sync_tolerance_us))
                    {
                        if(src[s]->GrabNext(image + offsets[s], true))
                        {
                            capture_us[s] = GetAdjustedCaptureTime(s);
                        }
                    }
                }
            }
        }

        // Check sync again
        range = std::minmax_element(capture_us.begin(), capture_us.end());
        if((*range.second - *range.first) > sync_tolerance_us)
        {
            TGRABANDPRINT("NOT IN SYNC oldest:%ld newest:%ld delta:%ld",
                          *range.first,
                          *range.second,
                          (*range.second - *range.first));
            return false;
        }
        else
        {
            TGRABANDPRINT(" IN SYNC oldest:%ld newest:%ld delta:%ld",
                          *range.first,
                          *range.second,
                          (*range.second - *range.first));
            return true;
        }
    }
    else
    {
        pango_print_warn("JoinVideo: sync_tolerance_us = 0, frames are not synced!\n");
        return true;
    }
}
|
355 |
+
|
356 |
+
// True iff every child stream implements BufferAwareVideoInterface, i.e. can
// report its buffered frame count and drop frames on demand.
bool AllInterfacesAreBufferAware(std::vector<VideoInterface*>& src)
{
    return std::all_of(src.begin(), src.end(), [](VideoInterface* vid) {
        return dynamic_cast<BufferAwareVideoInterface*>(vid) != nullptr;
    });
}
|
365 |
+
|
366 |
+
// Grab the most recent frame from every child while trying to keep the
// children in correspondence. If all children are buffer-aware, the smallest
// common backlog is dropped first and the call reduces to GrabNext().
// Otherwise the first stream is drained and the same number of frames is
// pulled from every other stream.
bool JoinVideo::GrabNewest(unsigned char* image, bool wait)
{
    // TODO: Tidy to correspond to GrabNext()
    TSTART()
    DBGPRINT("Entering GrabNewest:");
    if(AllInterfacesAreBufferAware(src))
    {
        DBGPRINT("All interfaces are BufferAwareVideoInterface.")
        unsigned int minN = std::numeric_limits<unsigned int>::max();
        // Find smallest number of frames it is safe to drop.
        for(size_t s = 0; s < src.size(); ++s)
        {
            auto bai = dynamic_cast<BufferAwareVideoInterface*>(src[s]);
            unsigned int n = bai->AvailableFrames();
            minN = std::min(n, minN);
            DBGPRINT("Interface %ld has %u frames available.", s, n)
        }
        TGRABANDPRINT("Quering avalable frames took ")
        DBGPRINT("Safe number of buffers to drop: %d.", ((minN > 1) ? (minN - 1) : 0));

        // Safely drop minN-1 frames on each interface.
        if(minN > 1)
        {
            for(size_t s = 0; s < src.size(); ++s)
            {
                auto bai = dynamic_cast<BufferAwareVideoInterface*>(src[s]);
                if(!bai->DropNFrames(minN - 1))
                {
                    pango_print_error(
                        "Stream %lu did not drop %u frames altough available.\n", (unsigned long)s, (minN - 1));
                    return false;
                }
            }
            TGRABANDPRINT("Dropping %u frames on each interface took ", (minN - 1));
        }
        // Each child now has at most one pending frame; grab it in sync.
        return GrabNext(image, wait);
    }
    else
    {
        DBGPRINT("NOT all interfaces are BufferAwareVideoInterface.")
        // Simply calling GrabNewest on the child streams might cause loss of sync,
        // instead we perform as many GrabNext as possible on the first stream and
        // then pull the same number of frames from every other stream.
        size_t offset = 0;
        std::vector<size_t> offsets;
        std::vector<int64_t> reception_times;
        int64_t newest = std::numeric_limits<int64_t>::min();
        int64_t oldest = std::numeric_limits<int64_t>::max();
        bool grabbed_any = false;  // NOTE(review): set below but never read afterwards - confirm intent.
        int first_stream_backlog = 0;
        int64_t rt = 0;
        bool got_frame = false;

        // Drain the first stream non-blockingly, counting its backlog.
        do
        {
            got_frame = src[0]->GrabNext(image + offset, false);
            if(got_frame)
            {
                if(sync_tolerance_us > 0)
                {
                    rt = GetAdjustedCaptureTime(0);
                }
                first_stream_backlog++;
                grabbed_any = true;
            }
        } while(got_frame);
        offsets.push_back(offset);
        offset += src[0]->SizeBytes();
        if(sync_tolerance_us > 0)
        {
            // NOTE(review): if the first stream delivered nothing, rt is still
            // its initial 0 here - verify this path against callers.
            reception_times.push_back(rt);
            if(newest < rt)
                newest = rt;
            if(oldest > rt)
                oldest = rt;
        }
        TGRABANDPRINT("Stream 0 grab took ");

        // Pull the same number of frames (blocking) from every other stream.
        for(size_t s = 1; s < src.size(); ++s)
        {
            for(int i = 0; i < first_stream_backlog; i++)
            {
                grabbed_any |= src[s]->GrabNext(image + offset, true);
                // NOTE(review): the capture time is queried even if the grab
                // above returned false - confirm this is intended.
                if(sync_tolerance_us > 0)
                {
                    rt = GetAdjustedCaptureTime(s);
                }
            }
            offsets.push_back(offset);
            offset += src[s]->SizeBytes();
            if(sync_tolerance_us > 0)
            {
                reception_times.push_back(rt);
                if(newest < rt)
                    newest = rt;
                if(oldest > rt)
                    oldest = rt;
            }
        }
        TGRABANDPRINT("Stream >=1 grab took ");

        if(sync_tolerance_us > 0)
        {
            if(std::abs(newest - oldest) > sync_tolerance_us)
            {
                if(verbose)
                {
                    pango_print_warn("Join timestamps not within %lu us trying to sync\n",
                                     (unsigned long)sync_tolerance_us);
                }
                // Up to 10 rounds: re-grab (newest-frame) from lagging streams.
                for(size_t n = 0; n < 10; ++n)
                {
                    for(size_t s = 0; s < src.size(); ++s)
                    {
                        if(reception_times[s] < (newest - sync_tolerance_us))
                        {
                            VideoInterface& vid = *src[s];
                            if(vid.GrabNewest(image + offsets[s], false))
                            {
                                rt = GetAdjustedCaptureTime(s);
                                if(newest < rt)
                                    newest = rt;
                                if(oldest > rt)
                                    oldest = rt;
                                reception_times[s] = rt;
                            }
                        }
                    }
                }
            }

            if(std::abs(newest - oldest) > sync_tolerance_us)
            {
                TGRABANDPRINT(
                    "NOT IN SYNC newest:%ld oldest:%ld delta:%ld syncing took ", newest, oldest, (newest - oldest));
                return false;
            }
            else
            {
                TGRABANDPRINT(
                    " IN SYNC newest:%ld oldest:%ld delta:%ld syncing took ", newest, oldest, (newest - oldest));
                return true;
            }
        }
        else
        {
            return true;
        }
    }
}
|
516 |
+
|
517 |
+
// Non-owning access to the child video sources (ownership stays in 'storage').
std::vector<VideoInterface*>& JoinVideo::InputStreams()
{
    return src;
}
|
521 |
+
|
522 |
+
// Extracts the top-level substrings enclosed by matching 'open'/'close'
// bracket pairs. Nested brackets are preserved verbatim inside their
// enclosing top-level pair, e.g. "{a}{b{c}}" -> {"a", "b{c}"}.
std::vector<std::string> SplitBrackets(const std::string src, char open = '{', char close = '}')
{
    std::vector<std::string> groups;

    int depth = 0;        // current bracket nesting level
    int group_start = -1; // index of the most recent top-level 'open'

    size_t pos = 0;
    while(pos < src.length())
    {
        const char c = src[pos];
        if(c == open)
        {
            if(depth == 0)
            {
                group_start = (int)pos;
            }
            ++depth;
        }
        else if(c == close)
        {
            --depth;
            if(depth == 0)
            {
                // Matching top-level close bracket: emit the enclosed content.
                const int content_begin = group_start + 1;
                groups.push_back(src.substr(content_begin, pos - content_begin));
            }
        }
        ++pos;
    }

    return groups;
}
|
553 |
+
|
554 |
+
// Registers the "join://" / "zip://" schemes: opens each brace-delimited
// sub-URI and wraps them in a JoinVideo, optionally enabling timestamp sync.
PANGOLIN_REGISTER_FACTORY(JoinVideo)
{
    struct JoinVideoFactory final : public TypedFactoryInterface<VideoInterface>
    {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"join",10}, {"zip",10}};
        }
        const char* Description() const override
        {
            return "Zips two or more videos together to create a new video containing all of constituent streams in correspondence.";
        }
        ParamSet Params() const override
        {
            return {{
                {"sync_tolerance_us", "0", "The maximum timestamp difference (in microsecs) between images that are considered to be in sync for joining"},
                {"transfer_bandwidth_gbps","0", "Bandwidth used to compute exposure end time from reception time for sync logic"},
                {"Verbose","false","For verbose error/warning messages"}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override
        {
            // Sub-video URIs are brace-delimited, e.g. join://{test://}{test://}
            std::vector<std::string> uris = SplitBrackets(uri.url);

            ParamReader reader(Params(), uri);

            // Standard by which we should measure if frames are in sync.
            const unsigned long sync_tol_us = reader.Get<unsigned long>("sync_tolerance_us");

            // Bandwidth used to compute exposure end time from reception time for sync logic
            const double transfer_bandwidth_gbps = reader.Get<double>("transfer_bandwidth_gbps");
            const bool verbose = reader.Get<bool>("Verbose");
            if(uris.size() == 0)
            {
                throw VideoException("No VideoSources found in join URL.",
                                     "Specify videos to join with curly braces, e.g. join://{test://}{test://}");
            }

            std::vector<std::unique_ptr<VideoInterface>> src;
            for(size_t i = 0; i < uris.size(); ++i)
            {
                src.push_back(pangolin::OpenVideo(uris[i]));
            }

            JoinVideo* video_raw = new JoinVideo(src, verbose);

            // Sync() reports false when a child lacks timing data; joining
            // still proceeds, just without timestamp-based syncing.
            if(sync_tol_us > 0)
            {
                if(!video_raw->Sync(sync_tol_us, transfer_bandwidth_gbps))
                {
                    pango_print_error(
                        "WARNING: not all streams in join support sync_tolerance_us option. Not using "
                        "tolerance.\n");
                }
            }

            return std::unique_ptr<VideoInterface>(video_raw);
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<JoinVideoFactory>());
}
|
616 |
+
}
|
617 |
+
|
618 |
+
#undef TSTART
|
619 |
+
#undef TGRABANDPRINT
|
620 |
+
#undef DBGPRINT
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/json.cpp
ADDED
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/video/video.h>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/utils/file_utils.h>
|
31 |
+
#include <pangolin/utils/file_extension.h>
|
32 |
+
#include <pangolin/utils/transform.h>
|
33 |
+
|
34 |
+
#include <fstream>
|
35 |
+
#include <functional>
|
36 |
+
|
37 |
+
namespace pangolin {
|
38 |
+
|
39 |
+
// Registers the "json://" scheme (and ".json" files opened via "file://"):
// reads a json file containing a "video_uri" template plus optional
// "video_uri_defaults" substitutions, expands it, and opens the result.
PANGOLIN_REGISTER_FACTORY(JsonVideo)
{
    struct JsonVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"json",0}, {"file",5}};
        }
        const char* Description() const override
        {
            return "Construct Video URI from supplied json file. Json file should contain video_uri string and video_uri_defaults map for overridable substitutions.";
        }
        ParamSet Params() const override
        {
            return {{
                {"*","","Override any video_uri_defaults keys in the json file."},
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            if(uri.scheme == "json" || (uri.scheme == "file" && FileLowercaseExtention(uri.url) == ".json")) {
                const std::string json_filename = PathExpand(uri.url);
                std::ifstream f( json_filename );

                // Parse json file to determine sub-video
                if(f.is_open())
                {
                    picojson::value file_json(picojson::object_type,true);
                    const std::string err = picojson::parse(file_json,f);
                    if(err.empty())
                    {
                        // Json loaded. Parse output.
                        std::string input_uri = file_json.get_value<std::string>("video_uri", "");
                        if(!input_uri.empty())
                        {
                            // Transform input_uri based on sub args.
                            // URI parameters override the file's defaults; unknown
                            // keys expand to "#".
                            const picojson::value input_uri_params = file_json.get_value<picojson::object>("video_uri_defaults", picojson::object());
                            input_uri = Transform(input_uri, [&](const std::string& k) {
                                return uri.Get<std::string>(k, input_uri_params.contains(k) ? input_uri_params[k].to_str() : "#");
                            });

                            return pangolin::OpenVideo(input_uri);
                        }else{
                            throw VideoException("JsonVideo failed.", "Bad input URI.");
                        }
                    }else{
                        // Json parse error: report picojson's message.
                        throw VideoException("JsonVideo failed.", err);
                    }
                }else{
                    throw VideoException("JsonVideo failed. Unable to load file.", json_filename);
                }
            }else{
                // Not applicable for this factory.
                return std::unique_ptr<VideoInterface>();
            }
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<JsonVideoFactory>());
}
|
97 |
+
|
98 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/merge.cpp
ADDED
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2013 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/video/drivers/merge.h>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/video.h>
|
31 |
+
#include <pangolin/video/iostream_operators.h>
|
32 |
+
#include <pangolin/utils/range.h>
|
33 |
+
#include <assert.h>
|
34 |
+
|
35 |
+
#include <assert.h>
|
36 |
+
|
37 |
+
namespace pangolin
|
38 |
+
{
|
39 |
+
|
40 |
+
// Composite several streams from one source into a single merged image.
// Takes ownership of 'src_' (moved from). 'stream_pos' gives the destination
// x,y of each input stream; w/h give the output size, where 0,0 means
// "bounding box of all placed streams".
//
// Fix: the default arguments (w = 0, h = 0) must appear only on the
// declaration in merge.h; repeating them on this out-of-line definition is
// ill-formed C++ ([dcl.fct.default]), so they are omitted here.
MergeVideo::MergeVideo(std::unique_ptr<VideoInterface>& src_, const std::vector<Point>& stream_pos, size_t w, size_t h )
    // NOTE(review): 'buffer' sizing reads src->SizeBytes(), which requires
    // 'src' to be declared before 'buffer' in merge.h so it is initialized
    // first — TODO confirm member order in the header.
    : src( std::move(src_) ), buffer(new uint8_t[src->SizeBytes()]), stream_pos(stream_pos)
{
    videoin.push_back(src.get());

    // Must be a stream_pos for each stream.
    // Each stream must have the same format.
    assert(stream_pos.size() == src->Streams().size());
    assert(src->Streams().size() > 0);
    const PixelFormat fmt = src->Streams()[0].PixFormat();
    for(size_t i=1; i < src->Streams().size(); ++i) {
        assert(src->Streams()[i].PixFormat().format == fmt.format);
    }

    // Compute the bounding box of all placed streams for data copying.
    XYRange<size_t> r = XYRange<size_t>::Empty();
    for(size_t i=0; i < src->Streams().size(); ++i) {
        const StreamInfo& si = src->Streams()[i];
        const size_t x = stream_pos[i].x;
        const size_t y = stream_pos[i].y;
        XYRange<size_t> sr(x, x + si.Width(), y, y + si.Height());
        r.Insert(sr);
    }

    // Use implied min / max based on points when no explicit size was given.
    if(!w && !h) {
        w = r.x.max;
        h = r.y.max;
    }

    // Single packed output stream covering the merged canvas.
    size_bytes = w*h*fmt.bpp/8;
    streams.emplace_back(fmt,w,h,w*fmt.bpp/8,(unsigned char*)0);
}
|
73 |
+
|
74 |
+
MergeVideo::~MergeVideo()
{
    // Nothing explicit: 'src' and 'buffer' are released by their unique_ptrs.
}

//! Implement VideoInput::Start()
void MergeVideo::Start()
{
    // Forward to the wrapped multi-stream source.
    src->Start();
}

//! Implement VideoInput::Stop()
void MergeVideo::Stop()
{
    src->Stop();
}

//! Implement VideoInput::SizeBytes()
size_t MergeVideo::SizeBytes() const
{
    // Size of one merged output frame (computed once in the constructor).
    return size_bytes;
}

//! Implement VideoInput::Streams()
const std::vector<StreamInfo>& MergeVideo::Streams() const
{
    // Exactly one output stream: the merged canvas.
    return streams;
}
|
102 |
+
|
103 |
+
// Blit every source stream into its configured x,y position within the single
// merged destination image. 'dst_bytes' must be at least SizeBytes() large;
// 'src_bytes' must hold one complete frame from the wrapped source.
void MergeVideo::CopyBuffer(unsigned char* dst_bytes, unsigned char* src_bytes)
{
    Image<unsigned char> dst_image = Streams()[0].StreamImage(dst_bytes);
    const size_t dst_pix_bytes = Streams()[0].PixFormat().bpp / 8;

    for(size_t i=0; i < stream_pos.size(); ++i) {
        const StreamInfo& src_stream = src->Streams()[i];
        const Image<unsigned char> src_image = src_stream.StreamImage(src_bytes);
        const Point& p = stream_pos[i];
        // Row-by-row copy because source and destination pitches differ.
        // NOTE(review): no clipping is done here — the constructor's bounding
        // box normally guarantees each stream fits, but a user-supplied 'size'
        // smaller than the layout would write out of bounds; confirm callers.
        for(size_t y=0; y < src_stream.Height(); ++y) {
            // Copy row from src to dst
            std::memcpy(
                dst_image.RowPtr(y + p.y) + p.x * dst_pix_bytes,
                src_image.RowPtr(y), src_stream.RowBytes()
            );
        }
    }
}
|
121 |
+
|
122 |
+
//! Implement VideoInput::GrabNext()
bool MergeVideo::GrabNext( unsigned char* image, bool wait )
{
    // Grab the wrapped source into the scratch buffer, then composite the
    // individual streams into the caller's single merged image.
    const bool success = src->GrabNext(buffer.get(), wait);
    if(success) CopyBuffer(image, buffer.get());
    return success;
}

//! Implement VideoInput::GrabNewest()
bool MergeVideo::GrabNewest( unsigned char* image, bool wait )
{
    // Same as GrabNext(), but drains to the most recent frame in the source.
    const bool success = src->GrabNewest(buffer.get(), wait);
    if(success) CopyBuffer(image, buffer.get());
    return success;
}

// Expose the wrapped source so filter chains can be introspected.
std::vector<VideoInterface*>& MergeVideo::InputStreams()
{
    return videoin;
}
|
142 |
+
|
143 |
+
PANGOLIN_REGISTER_FACTORY(MergeVideo)
{
    // Factory for the "merge" filter scheme: composites the sub-video's
    // streams into one larger stream at configurable positions.
    struct MergeVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"merge",10}};
        }
        const char* Description() const override
        {
            // Fix: "seperate" -> "separate" in user-facing help text.
            return "Merges separate video streams into one larger stream with configurable position.";
        }
        ParamSet Params() const override
        {
            return {{
                {"size","0x0","Destination image size. 0x0 will dynamically create a bounding box size from all the streams and their x,y positions"},
                // Fix: Open() below reads pos1..posN ("pos"+std::to_string(s+1)),
                // so the documented index range is 1 <= K <= N, not 0 <= K < N.
                {"pos\\d+","0x0","posK, 1 <= K <= N, where N is the number of streams. Destination x,y positions to merge video streams into."}
            }};
        }

        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            ParamReader reader(Params(), uri);
            const ImageDim dim = reader.Get<ImageDim>("size", ImageDim(0,0));
            std::unique_ptr<VideoInterface> subvid = pangolin::OpenVideo(uri.url);

            // Default layout tiles streams left-to-right; any stream K may be
            // placed explicitly via posK, and later streams continue from the
            // last placed position.
            std::vector<Point> points;
            Point p(0,0);
            for(size_t s=0; s < subvid->Streams().size(); ++s) {
                const StreamInfo& si = subvid->Streams()[s];
                p = reader.Get<Point>("pos"+std::to_string(s+1), p);
                points.push_back(p);
                p.x += si.Width();
            }

            return std::unique_ptr<VideoInterface>(new MergeVideo(subvid, points, dim.x, dim.y));
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<MergeVideoFactory>());
}
|
181 |
+
|
182 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/mjpeg.cpp
ADDED
@@ -0,0 +1,152 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#include <pangolin/platform.h>
|
2 |
+
#include <pangolin/video/drivers/mjpeg.h>
|
3 |
+
#include <pangolin/video/video_exception.h>
|
4 |
+
#include <pangolin/factory/factory_registry.h>
|
5 |
+
#include <pangolin/image/image_io.h>
|
6 |
+
#include <pangolin/utils/file_utils.h>
|
7 |
+
|
8 |
+
namespace pangolin
|
9 |
+
{
|
10 |
+
|
11 |
+
// this is defined in image_io_jpg.cpp but not in any public headers.
|
12 |
+
std::vector<std::streampos> GetMJpegOffsets(std::ifstream& is);
|
13 |
+
|
14 |
+
// Open 'filename' as a motion-jpeg stream: a plain concatenation of JPEG
// images in one file. Eagerly decodes the first frame to establish the
// stream's pixel format and dimensions.
MjpegVideo::MjpegVideo(const std::string& filename)
{
    const std::string full_path = PathExpand(filename);
    if(!FileExists(full_path)) {
        throw VideoException("No such file, " + full_path);
    }

    // Sanity check: the file must at least start with a JPEG header.
    const ImageFileType file_type = FileType(full_path);
    if(file_type != ImageFileType::ImageFileTypeJpg) {
        throw VideoException(full_path + " has no jpeg header when attempting to open as mjpeg stream.");
    }

    bFile.open( full_path.c_str(), std::ios::in | std::ios::binary );
    if(!bFile.is_open()) {
        throw VideoException("Unable to open " + full_path);
    }

    // Index the start offset of every JPEG in the file, enabling Seek().
    offsets = GetMJpegOffsets(bFile);

    // Decode frame 0 now; it stays cached in next_image until the first grab.
    next_image = LoadImage(bFile, ImageFileType::ImageFileTypeJpg);
    if(!next_image.IsValid()) {
        throw VideoException("Unable to load first jpeg in mjpeg stream");
    }

    // Single output stream whose geometry matches the first decoded frame.
    // NOTE(review): assumes every frame shares the first frame's dimensions
    // and format — confirm for streams with variable-size frames.
    streams.emplace_back(next_image.fmt, next_image.w, next_image.h, next_image.pitch, nullptr);
    size_bytes = next_image.SizeBytes();
}
|
41 |
+
|
42 |
+
MjpegVideo::~MjpegVideo()
{
    // ifstream and the cached image clean up via their own destructors.
}

//! Implement VideoInput::Start()
void MjpegVideo::Start()
{
    // File-backed stream: nothing to start.
}

//! Implement VideoInput::Stop()
void MjpegVideo::Stop()
{
    // Nothing to stop.
}

//! Implement VideoInput::SizeBytes()
size_t MjpegVideo::SizeBytes() const
{
    // Bytes per frame, fixed from the first decoded image.
    return size_bytes;
}

//! Implement VideoInput::Streams()
const std::vector<StreamInfo>& MjpegVideo::Streams() const
{
    return streams;
}
|
70 |
+
|
71 |
+
bool MjpegVideo::LoadNext()
|
72 |
+
{
|
73 |
+
if(!next_image.IsValid() && bFile.good()) {
|
74 |
+
try {
|
75 |
+
next_image = LoadImage(bFile, ImageFileType::ImageFileTypeJpg);
|
76 |
+
} catch (const std::runtime_error&) {
|
77 |
+
return false;
|
78 |
+
}
|
79 |
+
}
|
80 |
+
return next_image.IsValid();
|
81 |
+
}
|
82 |
+
|
83 |
+
//! Implement VideoInput::GrabNext()
bool MjpegVideo::GrabNext( unsigned char* image, bool wait )
{
    if( LoadNext() ) {
        // Hand out the cached frame, then drop it so LoadNext() decodes anew.
        memcpy(image, next_image.ptr, size_bytes);
        next_image.Deallocate();
        ++next_frame_id;
        return true;
    }
    return false;
}

//! Implement VideoInput::GrabNewest()
bool MjpegVideo::GrabNewest( unsigned char* image, bool wait )
{
    // A file stream has no notion of "newest"; identical to GrabNext().
    return GrabNext(image, wait);
}

// Id of the most recently grabbed frame.
// NOTE(review): wraps to SIZE_MAX if called before the first successful
// GrabNext() (next_frame_id == 0) — confirm callers query only after grabbing.
size_t MjpegVideo::GetCurrentFrameId() const
{
    return next_frame_id - 1;
}

// Total number of JPEG frames indexed in the file.
size_t MjpegVideo::GetTotalFrames() const
{
    return offsets.size();
}
|
110 |
+
|
111 |
+
// Reposition the stream so the next grab decodes frame 'frameid'.
// Returns the (possibly clamped) frame id that will be decoded next.
size_t MjpegVideo::Seek(size_t frameid)
{
    if(frameid != next_frame_id) {
        // Clamp to within range
        next_frame_id = std::min(frameid, offsets.size()-1);
        // Clear any eof markers etc
        bFile.clear();
        bFile.seekg(offsets[next_frame_id]);
        // Remove any cached image data
        next_image.Deallocate();
    }else{
        // Do nothing
    }
    return next_frame_id;
}
|
126 |
+
|
127 |
+
|
128 |
+
PANGOLIN_REGISTER_FACTORY(MjpegVideo)
{
    // Registers the "mjpeg" URI scheme, mapping it onto MjpegVideo.
    struct MjpegVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"mjpeg",0}};
        }

        const char* Description() const override
        {
            return "Load Motion Jpeg video streams";
        }

        ParamSet Params() const override
        {
            // No driver-specific options.
            return {{
            }};
        }

        std::unique_ptr<VideoInterface> Open(const Uri& uri) override
        {
            std::unique_ptr<VideoInterface> video(new MjpegVideo(uri.url));
            return video;
        }
    };

    auto factory = std::make_shared<MjpegVideoFactory>();
    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(factory);
}
|
151 |
+
|
152 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/openni.cpp
ADDED
@@ -0,0 +1,315 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2013 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/video/drivers/openni.h>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/iostream_operators.h>
|
31 |
+
|
32 |
+
namespace pangolin
|
33 |
+
{
|
34 |
+
|
35 |
+
// Configure up to two OpenNI v1 sensor streams (depth / IR / RGB variants) at
// the requested resolution and frame rate, then start generating immediately.
OpenNiVideo::OpenNiVideo(OpenNiSensorType s1, OpenNiSensorType s2, ImageDim dim, int fps)
{
    sensor_type[0] = s1;
    sensor_type[1] = s2;

    // Context init failure is reported but not fatal here; node creation
    // below throws if the device is truly unavailable.
    XnStatus nRetVal = XN_STATUS_OK;
    nRetVal = context.Init();
    if (nRetVal != XN_STATUS_OK) {
        std::cerr << "context.Init: " << xnGetStatusString(nRetVal) << std::endl;
    }

    // Same output mode requested for every node.
    XnMapOutputMode mapMode;
    mapMode.nXRes = dim.x;
    mapMode.nYRes = dim.y;
    mapMode.nFPS = fps;

    sizeBytes = 0;

    // Which production nodes the two requested sensors require.
    bool use_depth = false;
    bool use_ir = false;
    bool use_rgb = false;
    bool depth_to_color = false;

    for(int i=0; i<2; ++i) {
        PixelFormat fmt;

        // Establish output pixel format for sensor streams
        switch( sensor_type[i] ) {
        case OpenNiDepth_1mm_Registered:
        case OpenNiDepth_1mm:
        case OpenNiIr:
        case OpenNiIrProj:
            fmt = PixelFormatFromString("GRAY16LE");
            break;
        case OpenNiIr8bit:
        case OpenNiIr8bitProj:
            fmt = PixelFormatFromString("GRAY8");
            break;
        case OpenNiRgb:
            fmt = PixelFormatFromString("RGB24");
            break;
        default:
            // Unassigned sensor slot: no stream is added for it.
            continue;
        }

        // Record which hardware nodes this sensor type depends on.
        switch( sensor_type[i] ) {
        case OpenNiDepth_1mm_Registered:
            depth_to_color = true;
            use_depth = true;
            break;
        case OpenNiDepth_1mm:
            use_depth = true;
            break;
        case OpenNiIr:
        case OpenNiIr8bit:
            use_ir = true;
            break;
        case OpenNiIrProj:
        case OpenNiIr8bitProj:
            // IR image with the structured-light projector active; the
            // projector is controlled through the depth node.
            use_ir = true;
            use_depth = true;
            break;
        case OpenNiRgb:
            use_rgb = true;
            break;
        default:
            break;
        }

        // Streams are packed back-to-back: the pointer is the byte offset of
        // this stream within the combined frame buffer.
        const StreamInfo stream(fmt, mapMode.nXRes, mapMode.nYRes, (mapMode.nXRes * fmt.bpp) / 8, (unsigned char*)0 + sizeBytes);
        sizeBytes += stream.SizeBytes();
        streams.push_back(stream);
    }

    if( use_depth ) {
        nRetVal = depthNode.Create(context);
        if (nRetVal != XN_STATUS_OK) {
            throw VideoException( (std::string)"Unable to create DepthNode: " + xnGetStatusString(nRetVal) );
        }else{
            nRetVal = depthNode.SetMapOutputMode(mapMode);
            if (nRetVal != XN_STATUS_OK) {
                throw VideoException( (std::string)"Invalid DepthNode mode: " + xnGetStatusString(nRetVal) );
            }
        }
    }

    if( use_rgb ) {
        nRetVal = imageNode.Create(context);
        if (nRetVal != XN_STATUS_OK) {
            throw VideoException( (std::string)"Unable to create ImageNode: " + xnGetStatusString(nRetVal) );
        }else{
            nRetVal = imageNode.SetMapOutputMode(mapMode);
            if (nRetVal != XN_STATUS_OK) {
                throw VideoException( (std::string)"Invalid ImageNode mode: " + xnGetStatusString(nRetVal) );
            }
        }
    }

    if (depth_to_color && use_rgb) {
        // Registration: reproject depth into the RGB camera's viewpoint.
        // Failure is non-fatal (logged only).
        if( depthNode.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT) ) {
            nRetVal = depthNode.GetAlternativeViewPointCap().SetViewPoint( imageNode );
            if (nRetVal != XN_STATUS_OK) {
                std::cerr << "depthNode.GetAlternativeViewPointCap().SetViewPoint(imageNode): " << xnGetStatusString(nRetVal) << std::endl;
            }
        }

        // Frame Sync: align depth and colour frame timing when supported.
        if (depthNode.IsCapabilitySupported(XN_CAPABILITY_FRAME_SYNC))
        {
            if (depthNode.GetFrameSyncCap().CanFrameSyncWith(imageNode))
            {
                nRetVal = depthNode.GetFrameSyncCap().FrameSyncWith(imageNode);
                if (nRetVal != XN_STATUS_OK) {
                    std::cerr << "depthNode.GetFrameSyncCap().FrameSyncWith(imageNode): " << xnGetStatusString(nRetVal) << std::endl;
                }
            }
        }
    }

    if( use_ir ) {
        nRetVal = irNode.Create(context);
        if (nRetVal != XN_STATUS_OK) {
            throw VideoException( (std::string)"Unable to create IrNode: " + xnGetStatusString(nRetVal) );
        }else{
            nRetVal = irNode.SetMapOutputMode(mapMode);
            if (nRetVal != XN_STATUS_OK) {
                throw VideoException( (std::string)"Invalid IrNode mode: " + xnGetStatusString(nRetVal) );
            }
        }
    }

    // Begin streaming straight away.
    Start();
}
|
169 |
+
|
170 |
+
OpenNiVideo::~OpenNiVideo()
{
    // Releases the OpenNI context and all production nodes it owns.
    context.Release();
}

size_t OpenNiVideo::SizeBytes() const
{
    // Total bytes of one combined frame (all configured sensor streams).
    return sizeBytes;
}

const std::vector<StreamInfo>& OpenNiVideo::Streams() const
{
    return streams;
}

void OpenNiVideo::Start()
{
    // Return status intentionally ignored here.
//    XnStatus nRetVal =
    context.StartGeneratingAll();
}

void OpenNiVideo::Stop()
{
    context.StopGeneratingAll();
}
|
195 |
+
|
196 |
+
// Wait for updated data on any node, then pack each configured sensor's frame
// back-to-back into 'image'. Returns false (with a log line) on update failure.
bool OpenNiVideo::GrabNext( unsigned char* image, bool /*wait*/ )
{
//    XnStatus nRetVal = context.WaitAndUpdateAll();
    XnStatus nRetVal = context.WaitAnyUpdateAll();
//    nRetVal = context.WaitOneUpdateAll(imageNode);

    if (nRetVal != XN_STATUS_OK) {
        std::cerr << "Failed updating data: " << xnGetStatusString(nRetVal) << std::endl;
        return false;
    }else{
        unsigned char* out_img = image;

        // NOTE(review): the constructor skips unassigned sensor slots when
        // building 'streams', so streams[i] only lines up with sensor_type[i]
        // when the unassigned slot (if any) is the second one — confirm that
        // img1=unassigned/img2=valid configurations cannot reach here.
        for(int i=0; i<2; ++i) {
            switch (sensor_type[i]) {
            case OpenNiDepth_1mm:
            case OpenNiDepth_1mm_Registered:
            {
                // 16-bit depth map, copied verbatim.
                const XnDepthPixel* pDepthMap = depthNode.GetDepthMap();
                memcpy(out_img,pDepthMap, streams[i].SizeBytes() );
                break;
            }
            case OpenNiIr:
            case OpenNiIrProj:
            {
                // 16-bit IR image, copied verbatim.
                const XnIRPixel* pIrMap = irNode.GetIRMap();
                memcpy(out_img, pIrMap, streams[i].SizeBytes() );
                break;
            }
            case OpenNiIr8bit:
            case OpenNiIr8bitProj:
            {
                const XnIRPixel* pIr16Map = irNode.GetIRMap();

                // rescale from 16-bit (10 effective) to 8-bit
                xn::IRMetaData meta_data;
                irNode.GetMetaData(meta_data);
                int w = meta_data.XRes();
                int h = meta_data.YRes();

                // Copy to out_img with conversion
                XnUInt8* pIrMapScaled = (XnUInt8*)out_img;
                for (int v = 0; v < h; ++v)
                for (int u = 0; u < w; ++u) {
                    int val = *pIr16Map >> 2; // 10bit to 8 bit
                    pIrMapScaled[w * v + u] = val;
                    pIr16Map++;
                }

                break;
            }
            case OpenNiRgb:
            {
                // Packed RGB24, copied verbatim.
                const XnUInt8* pImageMap = imageNode.GetImageMap();
                memcpy(out_img,pImageMap, streams[i].SizeBytes());
                break;
            }
            default:
                // Unassigned slot: nothing to copy, and do not advance
                // out_img ('continue' skips the increment below).
                continue;
                break;
            }

            out_img += streams[i].SizeBytes();
        }

        return true;
    }
}
|
263 |
+
|
264 |
+
bool OpenNiVideo::GrabNewest( unsigned char* image, bool wait )
{
    // No internal queue to drain; identical to GrabNext().
    return GrabNext(image,wait);
}
|
268 |
+
|
269 |
+
PANGOLIN_REGISTER_FACTORY(OpenNiVideo)
{
    // Registers the OpenNI v1 driver under "openni1" (preferred) and the
    // lower-precedence generic "openni"/"oni" schemes.
    struct OpenNiVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"openni1",10}, {"openni",100}, {"oni",100}};
        }
        const char* Description() const override
        {
            return "OpenNI v1 Driver to access Kinect / Primesense devices.";
        }
        ParamSet Params() const override
        {
            return {{
                {"size","640x480","Image dimension"},
                {"fps","30","Frames per second"},
                {"autoexposure","1","enable (1) or disable (0) RGB autoexposure"},
                {"img1","depth","Camera stream to use for stream 1 {depth,rgb,ir}"},
                {"img2","","Camera stream to use for stream 2 {depth,rgb,ir}"}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            const ImageDim dim = uri.Get<ImageDim>("size", ImageDim(640,480));
            const unsigned int fps = uri.Get<unsigned int>("fps", 30);
            const bool autoexposure = uri.Get<bool>("autoexposure", true);

            // NOTE(review): when img1 is absent the effective default is
            // OpenNiRgb, but the ParamSet above advertises "depth" — confirm
            // which default is intended before relying on the help text.
            OpenNiSensorType img1 = OpenNiRgb;
            OpenNiSensorType img2 = OpenNiUnassigned;

            if( uri.Contains("img1") ){
                img1 = openni_sensor(uri.Get<std::string>("img1", "depth"));
            }

            if( uri.Contains("img2") ){
                img2 = openni_sensor(uri.Get<std::string>("img2","rgb"));
            }

            OpenNiVideo* oniv = new OpenNiVideo(img1, img2, dim, fps);
            oniv->SetAutoExposure(autoexposure);
            return std::unique_ptr<VideoInterface>(oniv);
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>( std::make_shared<OpenNiVideoFactory>());
}
|
314 |
+
|
315 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/openni2.cpp
ADDED
@@ -0,0 +1,715 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Richard Newcombe
|
5 |
+
* 2014 Steven Lovegrove
|
6 |
+
*
|
7 |
+
* Permission is hereby granted, free of charge, to any person
|
8 |
+
* obtaining a copy of this software and associated documentation
|
9 |
+
* files (the "Software"), to deal in the Software without
|
10 |
+
* restriction, including without limitation the rights to use,
|
11 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
12 |
+
* copies of the Software, and to permit persons to whom the
|
13 |
+
* Software is furnished to do so, subject to the following
|
14 |
+
* conditions:
|
15 |
+
*
|
16 |
+
* The above copyright notice and this permission notice shall be
|
17 |
+
* included in all copies or substantial portions of the Software.
|
18 |
+
*
|
19 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
20 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
21 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
22 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
23 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
24 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
25 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
26 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
27 |
+
*/
|
28 |
+
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/drivers/openni2.h>
|
31 |
+
|
32 |
+
#include <OniVersion.h>
|
33 |
+
#include <PS1080.h>
|
34 |
+
|
35 |
+
// OpenNI must be including a horrid windows header
|
36 |
+
// which defines these nasty macros.
|
37 |
+
#undef max
|
38 |
+
#undef min
|
39 |
+
|
40 |
+
namespace pangolin
|
41 |
+
{
|
42 |
+
|
43 |
+
// Map an OpenNI2 pixel-format enum onto the equivalent Pangolin PixelFormat.
// Throws VideoException for formats Pangolin has no name for.
PixelFormat VideoFormatFromOpenNI2(openni::PixelFormat fmt)
{
    const char* pango_fmt = nullptr;

    switch (fmt) {
    // All 16-bit single-channel variants map to little-endian 16-bit grey.
    case openni::PIXEL_FORMAT_DEPTH_1_MM:
    case openni::PIXEL_FORMAT_DEPTH_100_UM:
    case openni::PIXEL_FORMAT_SHIFT_9_2:   // ?
    case openni::PIXEL_FORMAT_SHIFT_9_3:   // ?
    case openni::PIXEL_FORMAT_GRAY16:
        pango_fmt = "GRAY16LE";
        break;
    case openni::PIXEL_FORMAT_RGB888:
        pango_fmt = "RGB24";
        break;
    case openni::PIXEL_FORMAT_GRAY8:
        pango_fmt = "GRAY8";
        break;
    case openni::PIXEL_FORMAT_YUV422:
        pango_fmt = "YUYV422";
        break;
#if ONI_VERSION_MAJOR >= 2 && ONI_VERSION_MINOR >= 2
    case openni::PIXEL_FORMAT_YUYV:
        pango_fmt = "Y400A";
        break;
#endif
    default:
        throw VideoException("Unknown OpenNI pixel format");
    }

    return PixelFormatFromString(pango_fmt);
}
|
66 |
+
|
67 |
+
// Dump all video modes the given sensor advertises, one line per mode,
// together with the Pangolin format name when a mapping exists.
void OpenNi2Video::PrintOpenNI2Modes(openni::SensorType sensorType)
{
    const openni::Array<openni::VideoMode>& modes =
        devices[0].getSensorInfo(sensorType)->getSupportedVideoModes();

    switch (sensorType) {
    case openni::SENSOR_COLOR: pango_print_info("OpenNI Colour Modes:\n"); break;
    case openni::SENSOR_DEPTH: pango_print_info("OpenNI Depth Modes:\n"); break;
    case openni::SENSOR_IR:    pango_print_info("OpenNI IR Modes:\n"); break;
    }

    for(int m = 0; m < modes.getSize(); ++m) {
        const openni::VideoMode& vm = modes[m];
        std::string sfmt = "PangolinUnknown";
        try {
            sfmt = VideoFormatFromOpenNI2(vm.getPixelFormat()).format;
        } catch(const VideoException&) {
            // Unmapped format: keep the placeholder name.
        }
        pango_print_info( " %dx%d, %d fps, %s\n",
            vm.getResolutionX(), vm.getResolutionY(), vm.getFps(), sfmt.c_str()
        );
    }
}
|
90 |
+
|
91 |
+
// Select a supported video mode matching the requested parameters.
// A parameter of 0 means "don't care". When several modes match, the
// last one in the driver's list wins (preserved by scanning backwards).
// Throws when nothing matches.
openni::VideoMode OpenNi2Video::FindOpenNI2Mode(
    openni::Device & device,
    openni::SensorType sensorType,
    int width, int height,
    int fps, openni::PixelFormat fmt
) {
    const openni::Array<openni::VideoMode>& modes =
        device.getSensorInfo(sensorType)->getSupportedVideoModes();

    for(int m = modes.getSize()-1; m >= 0; --m) {
        const openni::VideoMode& vm = modes[m];
        const bool matches =
            (!width  || vm.getResolutionX() == width)  &&
            (!height || vm.getResolutionY() == height) &&
            (!fps    || vm.getFps() == fps)            &&
            (!fmt    || vm.getPixelFormat() == fmt);
        if(matches) {
            return vm;
        }
    }

    throw pangolin::VideoException("Video mode not supported");
}
|
119 |
+
|
120 |
+
// Map a Pangolin OpenNiSensorType onto the OpenNI sensor category it
// is served by. Throws for unknown/unassigned sensor types.
inline openni::SensorType SensorType(const OpenNiSensorType sensor)
{
    if(sensor == OpenNiRgb || sensor == OpenNiGrey) {
        return openni::SENSOR_COLOR;
    }
    if(sensor == OpenNiDepth_1mm || sensor == OpenNiDepth_100um ||
       sensor == OpenNiDepth_1mm_Registered) {
        return openni::SENSOR_DEPTH;
    }
    if(sensor == OpenNiIr || sensor == OpenNiIr8bit || sensor == OpenNiIr24bit ||
       sensor == OpenNiIrProj || sensor == OpenNiIr8bitProj) {
        return openni::SENSOR_IR;
    }
    throw std::invalid_argument("OpenNI: Bad sensor type");
}
|
140 |
+
|
141 |
+
// Open every connected OpenNI device with a default depth + rgb stream
// pair at the requested resolution/roi/framerate.
OpenNi2Video::OpenNi2Video(ImageDim dim, ImageRoi roi, int fps)
{
    InitialiseOpenNI();

    openni::Array<openni::DeviceInfo> available;
    openni::OpenNI::enumerateDevices(&available);

    if (available.getSize() < 1) {
        throw VideoException("No OpenNI Devices available. Ensure your camera is plugged in.");
    }

    for(int d = 0; d < available.getSize(); ++d) {
        const int dev_id = AddDevice(available[d].getUri());
        AddStream(OpenNiStreamMode(OpenNiDepth_1mm, dim, roi, fps, dev_id));
        AddStream(OpenNiStreamMode(OpenNiRgb,       dim, roi, fps, dev_id));
    }

    SetupStreamModes();
}
|
161 |
+
|
162 |
+
// Open a single device (or recording) by URI with the default
// configuration: one depth and one rgb stream at 30Hz.
OpenNi2Video::OpenNi2Video(const std::string& device_uri)
{
    InitialiseOpenNI();

    const int dev_id = AddDevice(device_uri);
    AddStream(OpenNiStreamMode(OpenNiDepth_1mm, ImageDim(), ImageRoi(), 30, dev_id));
    AddStream(OpenNiStreamMode(OpenNiRgb,       ImageDim(), ImageRoi(), 30, dev_id));

    SetupStreamModes();
}
|
172 |
+
|
173 |
+
// Open one device by URI and create exactly the streams the caller asks for.
OpenNi2Video::OpenNi2Video(const std::string& device_uri, std::vector<OpenNiStreamMode> &stream_modes)
{
    InitialiseOpenNI();

    AddDevice(device_uri);

    for(const OpenNiStreamMode& requested : stream_modes) {
        AddStream(requested);
    }

    SetupStreamModes();
}
|
186 |
+
|
187 |
+
// Open every connected device, then create the caller-specified streams.
// Stream modes reference devices by index into the enumeration order.
OpenNi2Video::OpenNi2Video(std::vector<OpenNiStreamMode>& stream_modes)
{
    InitialiseOpenNI();

    openni::Array<openni::DeviceInfo> available;
    openni::OpenNI::enumerateDevices(&available);

    if (available.getSize() < 1) {
        throw VideoException("OpenNI2: No devices available. Ensure your camera is plugged in.");
    }

    for(int d = 0; d < available.getSize(); ++d) {
        AddDevice(available[d].getUri());
    }

    for(const OpenNiStreamMode& requested : stream_modes) {
        AddStream(requested);
    }

    SetupStreamModes();
}
|
210 |
+
|
211 |
+
void OpenNi2Video::InitialiseOpenNI()
|
212 |
+
{
|
213 |
+
// Initialise member variables
|
214 |
+
numDevices = 0;
|
215 |
+
numStreams = 0;
|
216 |
+
current_frame_index = 0;
|
217 |
+
total_frames = std::numeric_limits<size_t>::max();
|
218 |
+
|
219 |
+
openni::Status rc = openni::STATUS_OK;
|
220 |
+
|
221 |
+
rc = openni::OpenNI::initialize();
|
222 |
+
if (rc != openni::STATUS_OK) {
|
223 |
+
throw VideoException( "Unable to initialise OpenNI library", openni::OpenNI::getExtendedError() );
|
224 |
+
}
|
225 |
+
}
|
226 |
+
|
227 |
+
int OpenNi2Video::AddDevice(const std::string& device_uri)
|
228 |
+
{
|
229 |
+
const size_t dev_id = numDevices;
|
230 |
+
openni::Status rc = devices[dev_id].open(device_uri.c_str());
|
231 |
+
if (rc != openni::STATUS_OK) {
|
232 |
+
throw VideoException( "OpenNI2: Couldn't open device.", openni::OpenNI::getExtendedError() );
|
233 |
+
}
|
234 |
+
++numDevices;
|
235 |
+
return dev_id;
|
236 |
+
}
|
237 |
+
|
238 |
+
void OpenNi2Video::AddStream(const OpenNiStreamMode& mode)
|
239 |
+
{
|
240 |
+
sensor_type[numStreams] = mode;
|
241 |
+
openni::Device& device = devices[mode.device];
|
242 |
+
openni::VideoStream& stream = video_stream[numStreams];
|
243 |
+
openni::Status rc = stream.create(device, SensorType(mode.sensor_type));
|
244 |
+
if (rc != openni::STATUS_OK) {
|
245 |
+
throw VideoException( "OpenNI2: Couldn't create stream.", openni::OpenNI::getExtendedError() );
|
246 |
+
}
|
247 |
+
|
248 |
+
openni::PlaybackControl* control = device.getPlaybackControl();
|
249 |
+
if(control && numStreams==0) {
|
250 |
+
total_frames = std::min(total_frames, (size_t)control->getNumberOfFrames(stream));
|
251 |
+
}
|
252 |
+
|
253 |
+
numStreams++;
|
254 |
+
}
|
255 |
+
|
256 |
+
void OpenNi2Video::SetupStreamModes()
|
257 |
+
{
|
258 |
+
streams_properties = &frame_properties["streams"];
|
259 |
+
*streams_properties = picojson::value(picojson::array_type,false);
|
260 |
+
streams_properties->get<picojson::array>().resize(numStreams);
|
261 |
+
|
262 |
+
use_depth = false;
|
263 |
+
use_ir = false;
|
264 |
+
use_rgb = false;
|
265 |
+
depth_to_color = false;
|
266 |
+
use_ir_and_rgb = false;
|
267 |
+
|
268 |
+
sizeBytes =0;
|
269 |
+
for(size_t i=0; i<numStreams; ++i) {
|
270 |
+
const OpenNiStreamMode& mode = sensor_type[i];
|
271 |
+
openni::SensorType nisensortype;
|
272 |
+
openni::PixelFormat nipixelfmt;
|
273 |
+
|
274 |
+
switch( mode.sensor_type ) {
|
275 |
+
case OpenNiDepth_1mm_Registered:
|
276 |
+
depth_to_color = true;
|
277 |
+
nisensortype = openni::SENSOR_DEPTH;
|
278 |
+
nipixelfmt = openni::PIXEL_FORMAT_DEPTH_1_MM;
|
279 |
+
use_depth = true;
|
280 |
+
break;
|
281 |
+
case OpenNiDepth_1mm:
|
282 |
+
nisensortype = openni::SENSOR_DEPTH;
|
283 |
+
nipixelfmt = openni::PIXEL_FORMAT_DEPTH_1_MM;
|
284 |
+
use_depth = true;
|
285 |
+
break;
|
286 |
+
case OpenNiDepth_100um:
|
287 |
+
nisensortype = openni::SENSOR_DEPTH;
|
288 |
+
nipixelfmt = openni::PIXEL_FORMAT_DEPTH_100_UM;
|
289 |
+
use_depth = true;
|
290 |
+
break;
|
291 |
+
case OpenNiIrProj:
|
292 |
+
case OpenNiIr:
|
293 |
+
nisensortype = openni::SENSOR_IR;
|
294 |
+
nipixelfmt = openni::PIXEL_FORMAT_GRAY16;
|
295 |
+
use_ir = true;
|
296 |
+
break;
|
297 |
+
case OpenNiIr24bit:
|
298 |
+
nisensortype = openni::SENSOR_IR;
|
299 |
+
nipixelfmt = openni::PIXEL_FORMAT_RGB888;
|
300 |
+
use_ir = true;
|
301 |
+
break;
|
302 |
+
case OpenNiIr8bitProj:
|
303 |
+
case OpenNiIr8bit:
|
304 |
+
nisensortype = openni::SENSOR_IR;
|
305 |
+
nipixelfmt = openni::PIXEL_FORMAT_GRAY8;
|
306 |
+
use_ir = true;
|
307 |
+
break;
|
308 |
+
case OpenNiRgb:
|
309 |
+
nisensortype = openni::SENSOR_COLOR;
|
310 |
+
nipixelfmt = openni::PIXEL_FORMAT_RGB888;
|
311 |
+
use_rgb = true;
|
312 |
+
break;
|
313 |
+
case OpenNiGrey:
|
314 |
+
nisensortype = openni::SENSOR_COLOR;
|
315 |
+
nipixelfmt = openni::PIXEL_FORMAT_GRAY8;
|
316 |
+
use_rgb = true;
|
317 |
+
break;
|
318 |
+
case OpenNiUnassigned:
|
319 |
+
default:
|
320 |
+
continue;
|
321 |
+
}
|
322 |
+
|
323 |
+
openni::VideoMode onivmode;
|
324 |
+
try {
|
325 |
+
onivmode = FindOpenNI2Mode(devices[mode.device], nisensortype, mode.dim.x, mode.dim.y, mode.fps, nipixelfmt);
|
326 |
+
}catch(const VideoException& e) {
|
327 |
+
pango_print_error("Unable to find compatible OpenNI Video Mode. Please choose from:\n");
|
328 |
+
PrintOpenNI2Modes(nisensortype);
|
329 |
+
fflush(stdout);
|
330 |
+
throw e;
|
331 |
+
}
|
332 |
+
|
333 |
+
openni::Status rc;
|
334 |
+
if(!devices[mode.device].isFile()){//trying to setVideoMode on a file results in an OpenNI error
|
335 |
+
rc = video_stream[i].setVideoMode(onivmode);
|
336 |
+
if(rc != openni::STATUS_OK)
|
337 |
+
throw VideoException("Couldn't set OpenNI VideoMode", openni::OpenNI::getExtendedError());
|
338 |
+
}
|
339 |
+
|
340 |
+
int outputWidth = onivmode.getResolutionX();
|
341 |
+
int outputHeight = onivmode.getResolutionY();
|
342 |
+
|
343 |
+
if (mode.roi.w && mode.roi.h) {
|
344 |
+
rc = video_stream[i].setCropping(mode.roi.x,mode.roi.y,mode.roi.w,mode.roi.h);
|
345 |
+
if(rc != openni::STATUS_OK)
|
346 |
+
throw VideoException("Couldn't set OpenNI cropping", openni::OpenNI::getExtendedError());
|
347 |
+
|
348 |
+
outputWidth = mode.roi.w;
|
349 |
+
outputHeight = mode.roi.h;
|
350 |
+
}
|
351 |
+
|
352 |
+
const PixelFormat fmt = VideoFormatFromOpenNI2(nipixelfmt);
|
353 |
+
const StreamInfo stream(
|
354 |
+
fmt, outputWidth, outputHeight,
|
355 |
+
(outputWidth * fmt.bpp) / 8,
|
356 |
+
(unsigned char*)0 + sizeBytes
|
357 |
+
);
|
358 |
+
|
359 |
+
sizeBytes += stream.SizeBytes();
|
360 |
+
streams.push_back(stream);
|
361 |
+
}
|
362 |
+
|
363 |
+
SetRegisterDepthToImage(depth_to_color);
|
364 |
+
|
365 |
+
use_ir_and_rgb = use_rgb && use_ir;
|
366 |
+
}
|
367 |
+
|
368 |
+
void OpenNi2Video::UpdateProperties()
|
369 |
+
{
|
370 |
+
picojson::value& jsopenni = device_properties["openni"];
|
371 |
+
|
372 |
+
picojson::value& jsdevices = jsopenni["devices"];
|
373 |
+
jsdevices = picojson::value(picojson::array_type,false);
|
374 |
+
jsdevices.get<picojson::array>().resize(numDevices);
|
375 |
+
for (size_t i=0; i<numDevices; ++i) {
|
376 |
+
picojson::value& jsdevice = jsdevices[i];
|
377 |
+
#define SET_PARAM(param_type, param) \
|
378 |
+
{ \
|
379 |
+
param_type val; \
|
380 |
+
if(devices[i].getProperty(param, &val) == openni::STATUS_OK) { \
|
381 |
+
jsdevice[#param] = val; \
|
382 |
+
} \
|
383 |
+
}
|
384 |
+
SET_PARAM( unsigned long long, XN_MODULE_PROPERTY_USB_INTERFACE );
|
385 |
+
SET_PARAM( bool, XN_MODULE_PROPERTY_MIRROR );
|
386 |
+
char serialNumber[1024];
|
387 |
+
devices[i].getProperty(ONI_DEVICE_PROPERTY_SERIAL_NUMBER, &serialNumber);
|
388 |
+
jsdevice["ONI_DEVICE_PROPERTY_SERIAL_NUMBER"] = std::string(serialNumber);
|
389 |
+
#undef SET_PARAM
|
390 |
+
}
|
391 |
+
|
392 |
+
picojson::value& stream = jsopenni["streams"];
|
393 |
+
stream = picojson::value(picojson::array_type,false);
|
394 |
+
stream.get<picojson::array>().resize(Streams().size());
|
395 |
+
for(unsigned int i=0; i<Streams().size(); ++i) {
|
396 |
+
if(sensor_type[i].sensor_type != OpenNiUnassigned)
|
397 |
+
{
|
398 |
+
#define SET_PARAM(param_type, param) \
|
399 |
+
{\
|
400 |
+
param_type val; \
|
401 |
+
if(video_stream[i].getProperty(param, &val) == openni::STATUS_OK) { \
|
402 |
+
jsstream[#param] = val; \
|
403 |
+
} \
|
404 |
+
}
|
405 |
+
|
406 |
+
picojson::value& jsstream = stream[i];
|
407 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_INPUT_FORMAT );
|
408 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_CROPPING_MODE );
|
409 |
+
|
410 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_CLOSE_RANGE );
|
411 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_WHITE_BALANCE_ENABLED );
|
412 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_GAIN );
|
413 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_HOLE_FILTER );
|
414 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_REGISTRATION_TYPE );
|
415 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_CONST_SHIFT );
|
416 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_PIXEL_SIZE_FACTOR );
|
417 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_MAX_SHIFT );
|
418 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_PARAM_COEFF );
|
419 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_SHIFT_SCALE );
|
420 |
+
SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE );
|
421 |
+
SET_PARAM( double, XN_STREAM_PROPERTY_ZERO_PLANE_PIXEL_SIZE );
|
422 |
+
SET_PARAM( double, XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE );
|
423 |
+
SET_PARAM( double, XN_STREAM_PROPERTY_DCMOS_RCMOS_DISTANCE );
|
424 |
+
#undef SET_PARAM
|
425 |
+
}
|
426 |
+
}
|
427 |
+
}
|
428 |
+
|
429 |
+
void OpenNi2Video::SetMirroring(bool enable)
|
430 |
+
{
|
431 |
+
// Set this property on all streams. It doesn't matter if it fails.
|
432 |
+
for(unsigned int i=0; i<Streams().size(); ++i) {
|
433 |
+
video_stream[i].setMirroringEnabled(enable);
|
434 |
+
}
|
435 |
+
}
|
436 |
+
|
437 |
+
void OpenNi2Video::SetAutoExposure(bool enable)
|
438 |
+
{
|
439 |
+
// Set this property on all streams exposing CameraSettings
|
440 |
+
for(unsigned int i=0; i<Streams().size(); ++i) {
|
441 |
+
openni::CameraSettings* cam = video_stream[i].getCameraSettings();
|
442 |
+
if(cam) cam->setAutoExposureEnabled(enable);
|
443 |
+
}
|
444 |
+
}
|
445 |
+
|
446 |
+
void OpenNi2Video::SetAutoWhiteBalance(bool enable)
|
447 |
+
{
|
448 |
+
// Set this property on all streams exposing CameraSettings
|
449 |
+
for(unsigned int i=0; i<Streams().size(); ++i) {
|
450 |
+
openni::CameraSettings* cam = video_stream[i].getCameraSettings();
|
451 |
+
if(cam) cam->setAutoWhiteBalanceEnabled(enable);
|
452 |
+
}
|
453 |
+
}
|
454 |
+
|
455 |
+
void OpenNi2Video::SetDepthCloseRange(bool enable)
|
456 |
+
{
|
457 |
+
// Set this property on all streams. It doesn't matter if it fails.
|
458 |
+
for(unsigned int i=0; i<Streams().size(); ++i) {
|
459 |
+
video_stream[i].setProperty(XN_STREAM_PROPERTY_CLOSE_RANGE, enable);
|
460 |
+
}
|
461 |
+
}
|
462 |
+
|
463 |
+
void OpenNi2Video::SetDepthHoleFilter(bool enable)
|
464 |
+
{
|
465 |
+
// Set this property on all streams. It doesn't matter if it fails.
|
466 |
+
for(unsigned int i=0; i<Streams().size(); ++i) {
|
467 |
+
video_stream[i].setProperty(XN_STREAM_PROPERTY_HOLE_FILTER, enable);
|
468 |
+
video_stream[i].setProperty(XN_STREAM_PROPERTY_GAIN,50);
|
469 |
+
}
|
470 |
+
}
|
471 |
+
|
472 |
+
void OpenNi2Video::SetDepthColorSyncEnabled(bool enable)
|
473 |
+
{
|
474 |
+
for(size_t i = 0 ; i < numDevices; i++) {
|
475 |
+
devices[i].setDepthColorSyncEnabled(enable);
|
476 |
+
}
|
477 |
+
}
|
478 |
+
|
479 |
+
void OpenNi2Video::SetFastCrop(bool enable)
|
480 |
+
{
|
481 |
+
const uint32_t pango_XN_STREAM_PROPERTY_FAST_ZOOM_CROP = 0x1080F009;
|
482 |
+
for (unsigned int i = 0; i < Streams().size(); ++i) {
|
483 |
+
video_stream[i].setProperty(pango_XN_STREAM_PROPERTY_FAST_ZOOM_CROP, enable);
|
484 |
+
video_stream[i].setProperty(XN_STREAM_PROPERTY_CROPPING_MODE, enable ? XN_CROPPING_MODE_INCREASED_FPS : XN_CROPPING_MODE_NORMAL);
|
485 |
+
}
|
486 |
+
}
|
487 |
+
|
488 |
+
void OpenNi2Video::SetRegisterDepthToImage(bool enable)
|
489 |
+
{
|
490 |
+
if(enable) {
|
491 |
+
for(size_t i = 0 ; i < numDevices; i++) {
|
492 |
+
devices[i].setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
|
493 |
+
}
|
494 |
+
}else{
|
495 |
+
for(size_t i = 0 ; i < numDevices ; i++) {
|
496 |
+
devices[i].setImageRegistrationMode(openni::IMAGE_REGISTRATION_OFF);
|
497 |
+
}
|
498 |
+
}
|
499 |
+
}
|
500 |
+
|
501 |
+
void OpenNi2Video::SetPlaybackSpeed(float speed)
|
502 |
+
{
|
503 |
+
for(size_t i = 0 ; i < numDevices; i++) {
|
504 |
+
openni::PlaybackControl* control = devices[i].getPlaybackControl();
|
505 |
+
if(control) control->setSpeed(speed);
|
506 |
+
}
|
507 |
+
}
|
508 |
+
|
509 |
+
void OpenNi2Video::SetPlaybackRepeat(bool enabled)
|
510 |
+
{
|
511 |
+
for(size_t i = 0 ; i < numDevices; i++) {
|
512 |
+
openni::PlaybackControl* control = devices[i].getPlaybackControl();
|
513 |
+
if(control) control->setRepeatEnabled(enabled);
|
514 |
+
}
|
515 |
+
}
|
516 |
+
|
517 |
+
// Stop all streams, release every valid stream object, then shut the
// OpenNI runtime down.
OpenNi2Video::~OpenNi2Video()
{
    Stop();

    for(size_t s=0; s<numStreams; ++s) {
        if(video_stream[s].isValid()) {
            video_stream[s].destroy();
        }
    }

    openni::OpenNI::shutdown();
}
|
529 |
+
|
530 |
+
size_t OpenNi2Video::SizeBytes() const
|
531 |
+
{
|
532 |
+
return sizeBytes;
|
533 |
+
}
|
534 |
+
|
535 |
+
const std::vector<StreamInfo>& OpenNi2Video::Streams() const
|
536 |
+
{
|
537 |
+
return streams;
|
538 |
+
}
|
539 |
+
|
540 |
+
void OpenNi2Video::Start()
|
541 |
+
{
|
542 |
+
for(unsigned int i=0; i<Streams().size(); ++i) {
|
543 |
+
video_stream[i].start();
|
544 |
+
}
|
545 |
+
}
|
546 |
+
|
547 |
+
void OpenNi2Video::Stop()
|
548 |
+
{
|
549 |
+
for(unsigned int i=0; i<Streams().size(); ++i) {
|
550 |
+
video_stream[i].stop();
|
551 |
+
}
|
552 |
+
}
|
553 |
+
|
554 |
+
// Return a pointer to the underlying OpenNI stream, or NULL (after
// logging the extended error) when the slot holds no valid stream.
openni::VideoStream * OpenNi2Video::GetVideoStream(int stream){
    if(!video_stream[stream].isValid()) {
        pango_print_error("Error getting stream: %d \n%s",stream, openni::OpenNI::getExtendedError() );
        return NULL;
    }
    return &video_stream[stream];
}
|
562 |
+
|
563 |
+
bool OpenNi2Video::GrabNext( unsigned char* image, bool /*wait*/ )
|
564 |
+
{
|
565 |
+
unsigned char* out_img = image;
|
566 |
+
|
567 |
+
openni::Status rc = openni::STATUS_OK;
|
568 |
+
|
569 |
+
for(unsigned int i=0; i<Streams().size(); ++i) {
|
570 |
+
if(sensor_type[i].sensor_type == OpenNiUnassigned) {
|
571 |
+
rc = openni::STATUS_OK;
|
572 |
+
continue;
|
573 |
+
}
|
574 |
+
|
575 |
+
if(!video_stream[i].isValid()) {
|
576 |
+
rc = openni::STATUS_NO_DEVICE;
|
577 |
+
continue;
|
578 |
+
}
|
579 |
+
|
580 |
+
if(use_ir_and_rgb) video_stream[i].start();
|
581 |
+
|
582 |
+
rc = video_stream[i].readFrame(&video_frame[i]);
|
583 |
+
video_frame[0].getFrameIndex();
|
584 |
+
if(rc != openni::STATUS_OK) {
|
585 |
+
pango_print_error("Error reading frame:\n%s", openni::OpenNI::getExtendedError() );
|
586 |
+
}
|
587 |
+
|
588 |
+
const bool toGreyscale = false;
|
589 |
+
if(toGreyscale) {
|
590 |
+
const int w = streams[i].Width();
|
591 |
+
const int h = streams[i].Height();
|
592 |
+
|
593 |
+
openni::RGB888Pixel* pColour = (openni::RGB888Pixel*)video_frame[i].getData();
|
594 |
+
for(int i = 0 ; i < w*h;i++){
|
595 |
+
openni::RGB888Pixel rgb = pColour[i];
|
596 |
+
int grey = ((int)(rgb.r&0xFF) + (int)(rgb.g&0xFF) + (int)(rgb.b&0xFF))/3;
|
597 |
+
grey = std::min(255,std::max(0,grey));
|
598 |
+
out_img[i] = grey;
|
599 |
+
}
|
600 |
+
}else{
|
601 |
+
memcpy(out_img, video_frame[i].getData(), streams[i].SizeBytes());
|
602 |
+
}
|
603 |
+
|
604 |
+
// update frame properties
|
605 |
+
(*streams_properties)[i]["devtime_us"] = video_frame[i].getTimestamp();
|
606 |
+
|
607 |
+
if(use_ir_and_rgb) video_stream[i].stop();
|
608 |
+
|
609 |
+
out_img += streams[i].SizeBytes();
|
610 |
+
}
|
611 |
+
|
612 |
+
current_frame_index = video_frame[0].getFrameIndex();
|
613 |
+
|
614 |
+
return rc == openni::STATUS_OK;
|
615 |
+
}
|
616 |
+
|
617 |
+
bool OpenNi2Video::GrabNewest( unsigned char* image, bool wait )
|
618 |
+
{
|
619 |
+
return GrabNext(image,wait);
|
620 |
+
}
|
621 |
+
|
622 |
+
size_t OpenNi2Video::GetCurrentFrameId() const
|
623 |
+
{
|
624 |
+
return current_frame_index;
|
625 |
+
}
|
626 |
+
|
627 |
+
size_t OpenNi2Video::GetTotalFrames() const
|
628 |
+
{
|
629 |
+
return total_frames;
|
630 |
+
}
|
631 |
+
|
632 |
+
size_t OpenNi2Video::Seek(size_t frameid)
|
633 |
+
{
|
634 |
+
openni::PlaybackControl* control = devices[0].getPlaybackControl();
|
635 |
+
if(control) {
|
636 |
+
control->seek(video_stream[0], frameid);
|
637 |
+
return frameid;
|
638 |
+
}else{
|
639 |
+
return -1;
|
640 |
+
}
|
641 |
+
}
|
642 |
+
|
643 |
+
// Register the openni2/openni/oni URI schemes with the video factory.
// Fix vs previous version: the driver was held as a raw pointer from
// `new` until the final return, so any exception thrown while applying
// the post-construction configuration (SetDepthCloseRange, Start, ...)
// leaked the device object. It is now owned by a unique_ptr immediately.
PANGOLIN_REGISTER_FACTORY(OpenNi2Video)
{
    struct OpenNI2VideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"openni2",10}, {"openni",10}, {"oni",10}};
        }
        const char* Description() const override
        {
            return "OpenNI v2 Driver to access Kinect / Primesense devices.";
        }
        ParamSet Params() const override
        {
            return {{
                {"size","640x480","Image dimension"},
                {"fps","30","Framerate"},
                {"roi","0+0+0x0","Region of interest"},
                {"realtime","","If playback rate should be slowed to real-time speed."},
                {"img\\d+", "","Device stream for nth image. Choose from [gray,rgb,ir,depth1mm,depth100um,depth_reg,ir8,ir24,ir+,ir8+]"},
                {"closerange","false","Use close-range mode (lower projector intensity)"},
                {"holefilter","false","Enable hole filter"},
                {"fastcrop","false","?"}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            const bool realtime = uri.Contains("realtime");
            const ImageDim default_dim = uri.Get<ImageDim>("size", ImageDim(640,480));
            const ImageRoi default_roi = uri.Get<ImageRoi>("roi", ImageRoi(0,0,0,0));
            const unsigned int default_fps = uri.Get<unsigned int>("fps", 30);

            // Collect explicit per-stream configuration: img1, img2, ...
            std::vector<OpenNiStreamMode> stream_modes;
            int num_streams = 0;
            std::string simg = "img1";
            while(uri.Contains(simg)) {
                stream_modes.push_back(
                    uri.Get<OpenNiStreamMode>(simg, OpenNiStreamMode(OpenNiRgb,default_dim,default_roi,default_fps,0))
                );
                ++num_streams;
                simg = "img" + ToString(num_streams+1);
            }

            std::unique_ptr<OpenNi2Video> nivid;
            if(!uri.url.empty()) {
                nivid = std::make_unique<OpenNi2Video>(pangolin::PathExpand(uri.url));
            }else if(stream_modes.size()) {
                nivid = std::make_unique<OpenNi2Video>(stream_modes);
            }else{
                nivid = std::make_unique<OpenNi2Video>(default_dim, default_roi, default_fps);
            }

            nivid->SetDepthCloseRange( uri.Get<bool>("closerange",false) );
            nivid->SetDepthHoleFilter( uri.Get<bool>("holefilter",false) );
            nivid->SetDepthColorSyncEnabled( uri.Get<bool>("coloursync",false) );
            nivid->SetFastCrop( uri.Get<bool>("fastcrop",false) );
            nivid->SetPlaybackSpeed(realtime ? 1.0f : -1.0f);
            nivid->SetAutoExposure(true);
            nivid->SetAutoWhiteBalance(true);
            nivid->SetMirroring(false);

            nivid->UpdateProperties();

            nivid->Start();

            return nivid;
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(
        std::make_shared<OpenNI2VideoFactory>()
    );
}
|
714 |
+
|
715 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/pack.cpp
ADDED
@@ -0,0 +1,277 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/video/drivers/pack.h>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/iostream_operators.h>
|
31 |
+
#include <pangolin/video/video.h>
|
32 |
+
|
33 |
+
#ifdef DEBUGUNPACK
|
34 |
+
#include <pangolin/utils/timer.h>
|
35 |
+
#define TSTART() pangolin::basetime start,last,now; start = pangolin::TimeNow(); last = start;
|
36 |
+
#define TGRABANDPRINT(...) now = pangolin::TimeNow(); fprintf(stderr,"UNPACK: "); fprintf(stderr, __VA_ARGS__); fprintf(stderr, " %fms.\n",1000*pangolin::TimeDiff_s(last, now)); last = now;
|
37 |
+
#define DBGPRINT(...) fprintf(stderr,"UNPACK: "); fprintf(stderr, __VA_ARGS__); fprintf(stderr,"\n");
|
38 |
+
#else
|
39 |
+
#define TSTART()
|
40 |
+
#define TGRABANDPRINT(...)
|
41 |
+
#define DBGPRINT(...)
|
42 |
+
#endif
|
43 |
+
|
44 |
+
namespace pangolin
|
45 |
+
{
|
46 |
+
|
47 |
+
// Wrap a single-channel child source, exposing each of its streams
// re-packed into the (single channel, <=16bpp) output format out_fmt.
// Takes ownership of the child and allocates an intermediate buffer
// sized for one unpacked frame.
PackVideo::PackVideo(std::unique_ptr<VideoInterface> &src_, PixelFormat out_fmt)
    : src(std::move(src_)), size_bytes(0), buffer(0)
{
    if( !src || out_fmt.channels != 1) {
        throw VideoException("PackVideo: Only supports single channel input.");
    }

    videoin.push_back(src.get());

    for(size_t s=0; s< src->Streams().size(); ++s) {
        const StreamInfo& in_stream = src->Streams()[s];
        const size_t w = in_stream.Width();
        const size_t h = in_stream.Height();

        // Check compatibility of formats
        const PixelFormat in_fmt = in_stream.PixFormat();
        if(in_fmt.channels > 1 || in_fmt.bpp > 16) {
            throw VideoException("PackVideo: Only supports one channel input.");
        }

        // Round the packed row up to a whole number of bytes.
        const size_t row_bits = w * out_fmt.bpp;
        const size_t pitch = row_bits / 8 + (row_bits % 8 ? 1 : 0);
        streams.push_back(pangolin::StreamInfo( out_fmt, w, h, pitch, reinterpret_cast<uint8_t*>(size_bytes) ));
        size_bytes += h*pitch;
    }

    buffer = new unsigned char[src->SizeBytes()];
}
|
74 |
+
|
75 |
+
// Release the intermediate unpacked-frame buffer; the wrapped source
// is owned by the `src` unique_ptr and cleans itself up.
PackVideo::~PackVideo()
{
    delete[] buffer;
}
|
79 |
+
|
80 |
+
//! Implement VideoInput::Start()
|
81 |
+
void PackVideo::Start()
|
82 |
+
{
|
83 |
+
videoin[0]->Start();
|
84 |
+
}
|
85 |
+
|
86 |
+
//! Implement VideoInput::Stop()
|
87 |
+
void PackVideo::Stop()
|
88 |
+
{
|
89 |
+
videoin[0]->Stop();
|
90 |
+
}
|
91 |
+
|
92 |
+
//! Implement VideoInput::SizeBytes()
|
93 |
+
size_t PackVideo::SizeBytes() const
|
94 |
+
{
|
95 |
+
return size_bytes;
|
96 |
+
}
|
97 |
+
|
98 |
+
//! Implement VideoInput::Streams()
|
99 |
+
const std::vector<StreamInfo>& PackVideo::Streams() const
|
100 |
+
{
|
101 |
+
return streams;
|
102 |
+
}
|
103 |
+
|
104 |
+
template<typename T>
|
105 |
+
void ConvertTo8bit(
|
106 |
+
Image<unsigned char>& out,
|
107 |
+
const Image<unsigned char>& in
|
108 |
+
) {
|
109 |
+
for(size_t r=0; r<out.h; ++r) {
|
110 |
+
T* pout = (T*)(out.ptr + r*out.pitch);
|
111 |
+
uint8_t* pin = in.ptr + r*in.pitch;
|
112 |
+
const uint8_t* pin_end = in.ptr + (r+1)*in.pitch;
|
113 |
+
while(pin != pin_end) {
|
114 |
+
*(pout++) = *(pin++);
|
115 |
+
}
|
116 |
+
}
|
117 |
+
}
|
118 |
+
|
119 |
+
template<typename T>
|
120 |
+
void ConvertTo10bit(
|
121 |
+
Image<unsigned char>& out,
|
122 |
+
const Image<unsigned char>& in
|
123 |
+
) {
|
124 |
+
for(size_t r=0; r<out.h; ++r) {
|
125 |
+
uint8_t* pout = out.ptr + r*out.pitch;
|
126 |
+
T* pin = (T*)(in.ptr + r*in.pitch);
|
127 |
+
const T* pin_end = (T*)(in.ptr + (r+1)*in.pitch);
|
128 |
+
while(pin != pin_end) {
|
129 |
+
uint64_t val = (*(pin++) & 0x00000003FF);
|
130 |
+
val |= uint64_t(*(pin++) & 0x00000003FF) << 10;
|
131 |
+
val |= uint64_t(*(pin++) & 0x00000003FF) << 20;
|
132 |
+
val |= uint64_t(*(pin++) & 0x00000003FF) << 30;
|
133 |
+
*(pout++) = uint8_t( val & 0x00000000FF);
|
134 |
+
*(pout++) = uint8_t((val & 0x000000FF00) >> 8);
|
135 |
+
*(pout++) = uint8_t((val & 0x0000FF0000) >> 16);
|
136 |
+
*(pout++) = uint8_t((val & 0x00FF000000) >> 24);
|
137 |
+
*(pout++) = uint8_t((val & 0xFF00000000) >> 32);
|
138 |
+
}
|
139 |
+
}
|
140 |
+
}
|
141 |
+
|
142 |
+
template<typename T>
|
143 |
+
void ConvertTo12bit(
|
144 |
+
Image<unsigned char>& out,
|
145 |
+
const Image<unsigned char>& in
|
146 |
+
) {
|
147 |
+
for(size_t r=0; r<out.h; ++r) {
|
148 |
+
uint8_t* pout = out.ptr + r*out.pitch;
|
149 |
+
T* pin = (T*)(in.ptr + r*in.pitch);
|
150 |
+
const T* pin_end = (T*)(in.ptr + (r+1)*in.pitch);
|
151 |
+
while(pin != pin_end) {
|
152 |
+
uint32_t val = (*(pin++) & 0x00000FFF);
|
153 |
+
val |= uint32_t(*(pin++) & 0x00000FFF) << 12;
|
154 |
+
*(pout++) = uint8_t( val & 0x000000FF);
|
155 |
+
*(pout++) = uint8_t((val & 0x0000FF00) >> 8);
|
156 |
+
*(pout++) = uint8_t((val & 0x00FF0000) >> 16);
|
157 |
+
}
|
158 |
+
}
|
159 |
+
}
|
160 |
+
|
161 |
+
// Convert one grabbed frame from the child video's pixel format into this
// device's packed output format.
//
// image:  destination buffer laid out according to Streams().
// buffer: source buffer laid out according to videoin[0]->Streams().
//
// Currently only GRAY16LE input is supported, packed down to 8, 10 or 12
// bits per pixel; anything else throws VideoException.
void PackVideo::Process(unsigned char* image, const unsigned char* buffer)
{
    TSTART()  // no-op unless DEBUG timing macros are enabled
    for(size_t s=0; s<streams.size(); ++s) {
        // Views into the source and destination buffers for stream s.
        const Image<unsigned char> img_in = videoin[0]->Streams()[s].StreamImage(buffer);
        Image<unsigned char> img_out = Streams()[s].StreamImage(image);

        // Target bit depth decides which packing routine to use.
        const int bits_out = Streams()[s].PixFormat().bpp;

        if(videoin[0]->Streams()[s].PixFormat().format == "GRAY16LE") {
            if(bits_out == 8) {
                ConvertTo8bit<uint16_t>(img_out, img_in);
            }else if( bits_out == 10) {
                ConvertTo10bit<uint16_t>(img_out, img_in);
            }else if( bits_out == 12){
                ConvertTo12bit<uint16_t>(img_out, img_in);
            }else{
                throw pangolin::VideoException("Unsupported bitdepths.");
            }
        }else{
            throw pangolin::VideoException("Unsupported input pix format.");
        }
    }
    TGRABANDPRINT("Packing took ")
}
|
186 |
+
|
187 |
+
//! Implement VideoInput::GrabNext()
|
188 |
+
bool PackVideo::GrabNext( unsigned char* image, bool wait )
|
189 |
+
{
|
190 |
+
if(videoin[0]->GrabNext(buffer,wait)) {
|
191 |
+
Process(image,buffer);
|
192 |
+
return true;
|
193 |
+
}else{
|
194 |
+
return false;
|
195 |
+
}
|
196 |
+
}
|
197 |
+
|
198 |
+
//! Implement VideoInput::GrabNewest()
|
199 |
+
bool PackVideo::GrabNewest( unsigned char* image, bool wait )
|
200 |
+
{
|
201 |
+
if(videoin[0]->GrabNewest(buffer,wait)) {
|
202 |
+
Process(image,buffer);
|
203 |
+
return true;
|
204 |
+
}else{
|
205 |
+
return false;
|
206 |
+
}
|
207 |
+
}
|
208 |
+
|
209 |
+
// Expose the wrapped child device(s) (always a single element here) so
// callers can walk the filter chain.
std::vector<VideoInterface*>& PackVideo::InputStreams()
{
    return videoin;
}
|
213 |
+
|
214 |
+
unsigned int PackVideo::AvailableFrames() const
|
215 |
+
{
|
216 |
+
BufferAwareVideoInterface* vpi = dynamic_cast<BufferAwareVideoInterface*>(videoin[0]);
|
217 |
+
if(!vpi)
|
218 |
+
{
|
219 |
+
pango_print_warn("Pack: child interface is not buffer aware.");
|
220 |
+
return 0;
|
221 |
+
}
|
222 |
+
else
|
223 |
+
{
|
224 |
+
return vpi->AvailableFrames();
|
225 |
+
}
|
226 |
+
}
|
227 |
+
|
228 |
+
// Ask the child device to discard n buffered frames. Returns false (with a
// warning) when the child does not implement BufferAwareVideoInterface.
bool PackVideo::DropNFrames(uint32_t n)
{
    BufferAwareVideoInterface* child = dynamic_cast<BufferAwareVideoInterface*>(videoin[0]);
    if(child)
    {
        return child->DropNFrames(n);
    }
    pango_print_warn("Pack: child interface is not buffer aware.");
    return false;
}
|
241 |
+
|
242 |
+
// Register the "pack:" URI scheme, which wraps another video source and
// packs its pixels into a denser format (e.g. GRAY16LE -> 10/12-bit).
PANGOLIN_REGISTER_FACTORY(PackVideo)
{
    struct PackVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"pack",10}};
        }
        const char* Description() const override
        {
            return "Packs a video from a given format.";
        }
        ParamSet Params() const override
        {
            return {{
                {"fmt","GRAY16LE","Pixel format of the video to unpack. See help for pixel formats for all possible values."}
            }};
        }

        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            ParamReader reader(Params(),uri);
            // The URL part of the URI names the child video to wrap.
            std::unique_ptr<VideoInterface> subvid = pangolin::OpenVideo(uri.url);
            const std::string fmt = reader.Get<std::string>("fmt");
            return std::unique_ptr<VideoInterface>(
                new PackVideo(subvid, PixelFormatFromString(fmt) )
            );
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<PackVideoFactory>());
}
|
272 |
+
|
273 |
+
}
|
274 |
+
|
275 |
+
#undef TSTART
|
276 |
+
#undef TGRABANDPRINT
|
277 |
+
#undef DBGPRINT
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/pango.cpp
ADDED
@@ -0,0 +1,255 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/factory/factory_registry.h>
|
29 |
+
#include <pangolin/log/playback_session.h>
|
30 |
+
#include <pangolin/utils/file_extension.h>
|
31 |
+
#include <pangolin/utils/file_utils.h>
|
32 |
+
#include <pangolin/utils/signal_slot.h>
|
33 |
+
#include <pangolin/video/drivers/pango.h>
|
34 |
+
|
35 |
+
#include <functional>
|
36 |
+
|
37 |
+
namespace pangolin
|
38 |
+
{
|
39 |
+
|
40 |
+
const std::string pango_video_type = "raw_video";
|
41 |
+
|
42 |
+
// Open a .pango log for playback of its raw_video source.
//
// NOTE: the member-initializer order matters — _reader depends on
// _playback_session, and _src_id (FindPacketStreamSource) depends on
// _reader being open.
PangoVideo::PangoVideo(const std::string& filename, std::shared_ptr<PlaybackSession> playback_session)
    : _filename(filename),
    _playback_session(playback_session),
    _reader(_playback_session->Open(filename)),
    _event_promise(_playback_session->Time()),
    _src_id(FindPacketStreamSource()),
    _source(nullptr)
{
    PANGO_ENSURE(_src_id != -1, "No appropriate video streams found in log.");

    _source = &_reader->Sources()[_src_id];
    SetupStreams(*_source);

    // Make sure we time-seek with other playback devices
    // (lambda captures 'this' state by reference — safe because the
    // connection is owned by this object via session_seek).
    session_seek = _playback_session->Time().OnSeek.connect(
        [&](SyncTime::TimePoint t){
            _event_promise.Cancel();
            _reader->Seek(_src_id, t);
            _event_promise.WaitAndRenew(_source->NextPacketTime());
        }
    );

    // Prime the promise so the first GrabNext is released at the right time.
    _event_promise.WaitAndRenew(_source->NextPacketTime());
}
|
66 |
+
|
67 |
+
// Members (reader, signal connection) clean up via their own destructors.
PangoVideo::~PangoVideo()
{
}
|
70 |
+
|
71 |
+
// Total size in bytes of one decoded frame buffer (all streams).
size_t PangoVideo::SizeBytes() const
{
    return _size_bytes;
}
|
75 |
+
|
76 |
+
// Per-stream layout descriptions, populated by SetupStreams().
const std::vector<StreamInfo>& PangoVideo::Streams() const
{
    return _streams;
}
|
80 |
+
|
81 |
+
// Intentionally a no-op: playback begins as soon as the log is opened.
void PangoVideo::Start()
{

}
|
85 |
+
|
86 |
+
// Intentionally a no-op: there is no capture pipeline to halt for a log file.
void PangoVideo::Stop()
{

}
|
90 |
+
|
91 |
+
// Read and decode the next frame from the log into 'image'.
// 'wait' is ignored: pacing is handled by _event_promise instead.
// Returns false on any failure (including end-of-log) — the catch-all is
// deliberate so playback simply stops rather than propagating reader errors.
bool PangoVideo::GrabNext(unsigned char* image, bool /*wait*/)
{
    try
    {
        Packet fi = _reader->NextFrame(_src_id);
        _frame_properties = fi.meta;

        if(_fixed_size) {
            // Uncompressed fixed-layout frame: one contiguous read.
            fi.Stream().read(reinterpret_cast<char*>(image), _size_bytes);
        }else{
            // Variable-size frame: decode / copy each stream individually.
            for(size_t s=0; s < _streams.size(); ++s) {
                StreamInfo& si = _streams[s];
                pangolin::Image<unsigned char> dst = si.StreamImage(image);

                if(stream_decoder[s]) {
                    // Compressed stream: decode, then copy row by row.
                    pangolin::TypedImage img = stream_decoder[s](fi.Stream());
                    PANGO_ENSURE(img.IsValid());

                    // TODO: We can avoid this copy by decoding directly into img
                    for(size_t row =0; row < dst.h; ++row) {
                        std::memcpy(dst.RowPtr(row), img.RowPtr(row), si.RowBytes());
                    }
                }else{
                    // Raw stream: read each row straight into the destination.
                    for(size_t row =0; row < dst.h; ++row) {
                        fi.Stream().read((char*)dst.RowPtr(row), si.RowBytes());
                    }
                }
            }
        }

        // Block until the next packet's scheduled playback time.
        _event_promise.WaitAndRenew(_source->NextPacketTime());
        return true;
    }
    catch(...)
    {
        // Clear per-frame metadata so stale properties are not reported.
        _frame_properties = picojson::value();
        return false;
    }
}
|
130 |
+
|
131 |
+
// For log playback "newest" is the same as "next" — no frames are dropped.
bool PangoVideo::GrabNewest( unsigned char* image, bool wait )
{
    return GrabNext(image, wait);
}
|
135 |
+
|
136 |
+
// Id of the most recently grabbed frame (next packet id minus one).
// NOTE(review): the int cast means that before any frame has been read
// (next_packet_id == 0) this returns -1 converted to a huge size_t —
// presumably callers only use it after a successful grab; verify.
size_t PangoVideo::GetCurrentFrameId() const
{
    return (int)(_reader->Sources()[_src_id].next_packet_id) - 1;
}
|
140 |
+
|
141 |
+
// Total number of frames recorded for this source (size of its seek index).
size_t PangoVideo::GetTotalFrames() const
{
    return _source->index.size();
}
|
145 |
+
|
146 |
+
// Seek playback to 'next_frame_id'. The seek is performed through the shared
// playback clock so all devices in the session jump together; the actual
// reader repositioning happens in the OnSeek handler wired up in the ctor.
// Out-of-range requests are ignored and the current position is returned.
size_t PangoVideo::Seek(size_t next_frame_id)
{
    // Get time for seek
    if(next_frame_id < _source->index.size()) {
        const int64_t capture_time = _source->index[next_frame_id].capture_time;
        _playback_session->Time().Seek(SyncTime::TimePoint(std::chrono::microseconds(capture_time)));
        return next_frame_id;
    }else{
        return _source->next_packet_id;
    }
}
|
157 |
+
|
158 |
+
// URI of the device that originally recorded this source (from the log header).
std::string PangoVideo::GetSourceUri()
{
    return _source_uri;
}
|
162 |
+
|
163 |
+
int PangoVideo::FindPacketStreamSource()
|
164 |
+
{
|
165 |
+
for(const auto& src : _reader->Sources())
|
166 |
+
{
|
167 |
+
if (!src.driver.compare(pango_video_type))
|
168 |
+
{
|
169 |
+
return static_cast<int>(src.id);
|
170 |
+
}
|
171 |
+
}
|
172 |
+
|
173 |
+
return -1;
|
174 |
+
}
|
175 |
+
|
176 |
+
// Parse the source's JSON header and populate _streams / stream_decoder /
// _size_bytes. Each entry of info["streams"] describes one image stream
// (encoding, width, height, pitch, offset); a "decoded" key marks a
// compressed stream whose on-disk encoding differs from its pixel format.
void PangoVideo::SetupStreams(const PacketStreamSource& src)
{
    // Read sources header
    _fixed_size = src.data_size_bytes != 0;  // 0 => per-frame variable size
    _size_bytes = src.data_size_bytes;
    _source_uri = src.uri;

    _device_properties = src.info["device"];
    const picojson::value& json_streams = src.info["streams"];
    const size_t num_streams = json_streams.size();

    for (size_t i = 0; i < num_streams; ++i)
    {
        const picojson::value& json_stream = json_streams[i];

        std::string encoding = json_stream["encoding"].get<std::string>();

        // Check if the stream is compressed
        if(json_stream.contains("decoded")) {
            // "encoding" is the compressed container; "decoded" is the pixel
            // format produced by the matching decoder.
            const std::string compressed_encoding = encoding;
            encoding = json_stream["decoded"].get<std::string>();
            const PixelFormat decoded_fmt = PixelFormatFromString(encoding);
            stream_decoder.push_back(StreamEncoderFactory::I().GetDecoder(compressed_encoding, decoded_fmt));
        }else{
            // Raw stream — no decoder needed.
            stream_decoder.push_back(nullptr);
        }

        PixelFormat fmt = PixelFormatFromString(encoding);

        // Optional per-channel bit depth override (0 when absent).
        fmt.channel_bit_depth = json_stream.get_value<int64_t>("channel_bit_depth", 0);

        // "offset" is a byte offset into the frame buffer, stored as a
        // pointer-typed field by StreamInfo convention.
        StreamInfo si(
            fmt,
            json_stream["width"].get<int64_t>(),
            json_stream["height"].get<int64_t>(),
            json_stream["pitch"].get<int64_t>(),
            reinterpret_cast<unsigned char*>(json_stream["offset"].get<int64_t>())
        );

        if(!_fixed_size) {
            // Variable-size logs: total frame size is the sum of stream sizes.
            _size_bytes += si.SizeBytes();
        }

        _streams.push_back(si);
    }
}
|
222 |
+
|
223 |
+
// Register the "pango:" scheme (and act as a low-precedence "file:" handler
// for files detected as Pango containers).
PANGOLIN_REGISTER_FACTORY(PangoVideo)
{
    struct PangoVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"pango",0}, {"file",5}};
        }
        const char* Description() const override
        {
            return "Plays Pango video container format.";
        }
        ParamSet Params() const override
        {
            return {{
                {"OrderedPlayback","false","Whether the playback respects the order of every data as they were recorded. Important for simulated playback."}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            const std::string path = PathExpand(uri.url);

            ParamReader reader(Params(),uri);

            // Accept explicit pango: URIs, or file: URIs whose content sniffs
            // as a Pango container; otherwise decline (empty pointer) so the
            // registry can try the next factory.
            if( !uri.scheme.compare("pango") || FileType(uri.url) == ImageFileTypePango ) {
                return std::unique_ptr<VideoInterface>(new PangoVideo(path.c_str(), PlaybackSession::ChooseFromParams(reader)));
            }
            return std::unique_ptr<VideoInterface>();
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<PangoVideoFactory>());
}
|
254 |
+
|
255 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/pango_video_output.cpp
ADDED
@@ -0,0 +1,319 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2014 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/factory/factory_registry.h>
|
29 |
+
#include <pangolin/utils/file_utils.h>
|
30 |
+
#include <pangolin/utils/memstreambuf.h>
|
31 |
+
#include <pangolin/utils/picojson.h>
|
32 |
+
#include <pangolin/utils/sigstate.h>
|
33 |
+
#include <pangolin/utils/timer.h>
|
34 |
+
#include <pangolin/video/drivers/pango_video_output.h>
|
35 |
+
#include <pangolin/video/iostream_operators.h>
|
36 |
+
#include <pangolin/video/video_interface.h>
|
37 |
+
#include <set>
|
38 |
+
#include <future>
|
39 |
+
|
40 |
+
#ifndef _WIN_
|
41 |
+
# include <unistd.h>
|
42 |
+
#endif
|
43 |
+
|
44 |
+
namespace pangolin
|
45 |
+
{
|
46 |
+
|
47 |
+
const std::string pango_video_type = "raw_video";
|
48 |
+
|
49 |
+
// Signal handler: record that SIGPIPE fired so WriteStreams can detect a
// vanished pipe reader instead of the process being killed.
void SigPipeHandler(int sig)
{
    SigState::I().sig_callbacks.at(sig).value = true;
}
|
53 |
+
|
54 |
+
// Open a Pango log writer for 'filename'.
//
// buffer_size_bytes:   write-buffer size handed to the PacketStreamWriter.
// stream_encoder_uris: optional per-stream compression, keyed by stream index.
//
// Regular files are opened immediately; named pipes defer opening until a
// reader appears (see WriteStreams) and install a SIGPIPE handler so a
// disappearing reader is survivable.
PangoVideoOutput::PangoVideoOutput(const std::string& filename, size_t buffer_size_bytes, const std::map<size_t, std::string> &stream_encoder_uris)
    : filename(filename),
    packetstream_buffer_size_bytes(buffer_size_bytes),
    packetstreamsrcid(-1),
    total_frame_size(0),
    is_pipe(pangolin::IsPipe(filename)),
    fixed_size(true),
    stream_encoder_uris(stream_encoder_uris)
{
    if(!is_pipe)
    {
        packetstream.Open(filename, packetstream_buffer_size_bytes);
    }
    else
    {
        RegisterNewSigCallback(&SigPipeHandler, (void*)this, SIGPIPE);
    }

    // Instantiate encoders
}
|
74 |
+
|
75 |
+
// The PacketStreamWriter member flushes and closes itself on destruction.
PangoVideoOutput::~PangoVideoOutput()
{
}
|
78 |
+
|
79 |
+
// Stream layouts as configured by SetStreams() (empty before that call).
const std::vector<StreamInfo>& PangoVideoOutput::Streams() const
{
    return streams;
}
|
83 |
+
|
84 |
+
// True when the output target is a named pipe rather than a regular file.
bool PangoVideoOutput::IsPipe() const
{
    return is_pipe;
}
|
88 |
+
|
89 |
+
// Declare the stream layout for all subsequent WriteStreams calls and write
// the corresponding JSON header / PacketStreamSource into the log.
// May only be called once; a second call throws.
//
// st:         stream layouts (offsets into the shared frame buffer).
// uri:        URI of the originating device, stored in the header.
// properties: device metadata stored under the "device" key.
void PangoVideoOutput::SetStreams(const std::vector<StreamInfo>& st, const std::string& uri, const picojson::value& properties)
{
    // Reject layouts where two streams alias the same buffer offset.
    std::set<unsigned char*> unique_ptrs;
    for (size_t i = 0; i < st.size(); ++i)
    {
        unique_ptrs.insert(st[i].Offset());
    }

    if (unique_ptrs.size() < st.size())
        throw std::invalid_argument("Each image must have unique offset into buffer.");

    if (packetstreamsrcid == -1)
    {
        input_uri = uri;
        streams = st;
        device_properties = properties;

        picojson::value json_header(picojson::object_type, false);
        picojson::value& json_streams = json_header["streams"];
        json_header["device"] = device_properties;

        stream_encoders.resize(streams.size());

        // Frames stay fixed-size unless at least one stream is compressed.
        fixed_size = true;

        total_frame_size = 0;
        for (unsigned int i = 0; i < streams.size(); ++i)
        {
            StreamInfo& si = streams[i];
            // Frame size is the furthest extent any stream reaches.
            total_frame_size = std::max(total_frame_size, (size_t) si.Offset() + si.SizeBytes());

            picojson::value& json_stream = json_streams.push_back();

            std::string encoder_name = si.PixFormat().format;
            if(stream_encoder_uris.find(i) != stream_encoder_uris.end() && !stream_encoder_uris[i].empty() ) {
                // instantiate encoder and write it's name to the stream properties;
                // "decoded" records the uncompressed pixel format for playback.
                json_stream["decoded"] = si.PixFormat().format;
                encoder_name = stream_encoder_uris[i];
                stream_encoders[i] = StreamEncoderFactory::I().GetEncoder(encoder_name, si.PixFormat());
                fixed_size = false;
            }

            json_stream["channel_bit_depth"] = si.PixFormat().channel_bit_depth;
            json_stream["encoding"] = encoder_name;
            json_stream["width"] = si.Width();
            json_stream["height"] = si.Height();
            json_stream["pitch"] = si.Pitch();
            json_stream["offset"] = (size_t) si.Offset();
        }

        PacketStreamSource pss;
        pss.driver = pango_video_type;
        pss.uri = input_uri;
        pss.info = json_header;
        // data_size_bytes == 0 signals variable-size packets to the reader.
        pss.data_size_bytes = fixed_size ? total_frame_size : 0;
        pss.data_definitions = "struct Frame{ uint8 stream_data[" + pangolin::Convert<std::string, size_t>::Do(total_frame_size) + "];};";

        packetstreamsrcid = (int)packetstream.AddSource(pss);
    } else {
        throw std::runtime_error("Unable to add new streams");
    }
}
|
151 |
+
|
152 |
+
// Write one frame to the log. 'data' is the shared frame buffer laid out per
// SetStreams(); 'frame_properties' is per-frame JSON metadata.
// Returns 0 (including the silent-drop case when a pipe has no reader).
int PangoVideoOutput::WriteStreams(const unsigned char* data, const picojson::value& frame_properties)
{
    // Timestamp the frame with the recorded reception time, or now.
    const int64_t host_reception_time_us = frame_properties.get_value(PANGO_HOST_RECEPTION_TIME_US, Time_us(TimeNow()));

#ifndef _WIN_
    if (is_pipe)
    {
        // If there is a reader waiting on the other side of the pipe, open
        // a file descriptor to the file and close it only after the file
        // has been opened by the PacketStreamWriter. This avoids the reader
        // from seeing EOF on its next read because all file descriptors on
        // the write side have been closed.
        //
        // When the stream is already open but the reader has disappeared,
        // opening a file descriptor will fail and errno will be ENXIO.
        int fd = WritablePipeFileDescriptor(filename);

        if (!packetstream.IsOpen())
        {
            if (fd != -1)
            {
                packetstream.Open(filename, packetstream_buffer_size_bytes);
                close(fd);
            }
        }
        else
        {
            if (fd != -1)
            {
                // There's a reader on the other side of the pipe.
                close(fd);
            }
            else
            {
                if (errno == ENXIO)
                {
                    // Reader vanished: close the stream and reset the
                    // SIGPIPE flag so writing can resume with a new reader.
                    packetstream.ForceClose();
                    SigState::I().sig_callbacks.at(SIGPIPE).value = false;

                    // This should be unnecessary since per the man page,
                    // data should be dropped from the buffer upon closing the
                    // writable file descriptors.
                    pangolin::FlushPipe(filename);
                }
            }
        }

        // No reader yet: drop the frame rather than block.
        if (!packetstream.IsOpen())
            return 0;
    }
#endif

    if(!fixed_size) {
        // TODO: Make this more efficient (without so many allocs and memcpy's)

        std::vector<memstreambuf> encoded_stream_data;

        // Create buffers for compressed data: the first will be reused for all the data later
        encoded_stream_data.emplace_back(total_frame_size);
        for(size_t i=1; i < streams.size(); ++i) {
            encoded_stream_data.emplace_back(streams[i].SizeBytes());
        }

        // lambda encodes frame data i to encoded_stream_data[i]
        auto encode_stream = [&](int i){
            encoded_stream_data[i].clear();
            std::ostream encode_stream(&encoded_stream_data[i]);

            const StreamInfo& si = streams[i];
            const Image<unsigned char> stream_image = si.StreamImage(data);

            if(stream_encoders[i]) {
                // Encode to buffer
                stream_encoders[i](encode_stream, stream_image);
            }else{
                // Uncompressed stream: raw copy (row-by-row if padded).
                if(stream_image.IsContiguous()) {
                    encode_stream.write((char*)stream_image.ptr, streams[i].SizeBytes());
                }else{
                    for(size_t row=0; row < stream_image.h; ++row) {
                        encode_stream.write((char*)stream_image.RowPtr(row), si.RowBytes());
                    }
                }
            }
            return true;
        };

        // Compress each stream (>0 in another thread)
        // (i is captured by value so each task encodes its own stream)
        std::vector<std::future<bool>> encode_finished;
        for(size_t i=1; i < streams.size(); ++i) {
            encode_finished.emplace_back(std::async(std::launch::async, [&,i](){
                return encode_stream(i);
            }));
        }
        // Encode stream 0 in this thread
        encode_stream(0);

        // Reuse our first compression stream for the rest of the data too.
        std::vector<uint8_t>& encoded = encoded_stream_data[0].buffer;

        // Wait on all threads to finish and copy into data packet
        for(size_t i=1; i < streams.size(); ++i) {
            encode_finished[i-1].get();
            encoded.insert(encoded.end(), encoded_stream_data[i].buffer.begin(), encoded_stream_data[i].buffer.end());
        }

        packetstream.WriteSourcePacket(packetstreamsrcid, reinterpret_cast<const char*>(encoded.data()), host_reception_time_us, encoded.size(), frame_properties);
    }else{
        // Fixed-size frame: write the buffer verbatim.
        packetstream.WriteSourcePacket(packetstreamsrcid, reinterpret_cast<const char*>(data), host_reception_time_us, total_frame_size, frame_properties);
    }

    return 0;
}
|
264 |
+
|
265 |
+
// Register "pango:" / "file:" output schemes for recording to a Pango
// container, with optional per-stream encoder selection via
// encoder / encoder1..encoder100 URI parameters.
PANGOLIN_REGISTER_FACTORY(PangoVideoOutput)
{
    struct PangoVideoFactory final : public TypedFactoryInterface<VideoOutputInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"pango",10}, {"file",10}};
        }
        const char* Description() const override
        {
            return "Output to a Pango video container.";
        }
        ParamSet Params() const override
        {
            return {{
                {"buffer_size_mb","100","Buffer size in MB"},
                {"unique_filename","","This is flag to create a unique file name in the case of file already exists."},
                {"encoder(\\d+)?"," ","encoder or encoderN, 1 <= N <= 100. The default values of encoderN are set to encoder"}
            }};
        }
        std::unique_ptr<VideoOutputInterface> Open(const Uri& uri) override {
            ParamReader reader(Params(), uri);

            const size_t mb = 1024*1024;
            const size_t buffer_size_bytes = reader.Get<size_t>("buffer_size_mb") * mb;
            std::string filename = uri.url;

            // Avoid clobbering an existing log if requested.
            if(reader.Contains("unique_filename")) {
                filename = MakeUniqueFilename(filename);
            }

            // Default encoder
            std::string default_encoder = "";

            if(reader.Contains("encoder")) {
                default_encoder = reader.Get<std::string>("encoder","");
            }

            // Encoders for each stream
            // (encoderN overrides the default for stream N-1)
            std::map<size_t, std::string> stream_encoder_uris;
            for(size_t i=0; i<100; ++i)
            {
                const std::string encoder_key = pangolin::FormatString("encoder%",i+1);
                stream_encoder_uris[i] = reader.Get<std::string>(encoder_key, default_encoder);
            }

            return std::unique_ptr<VideoOutputInterface>(
                new PangoVideoOutput(filename, buffer_size_bytes, stream_encoder_uris)
            );
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoOutputInterface>(std::make_shared<PangoVideoFactory>());
}
|
318 |
+
|
319 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/pleora.cpp
ADDED
@@ -0,0 +1,756 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2015 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/video/drivers/pleora.h>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/iostream_operators.h>
|
31 |
+
#include <thread>
|
32 |
+
|
33 |
+
#ifdef DEBUGPLEORA
|
34 |
+
#include <pangolin/utils/timer.h>
|
35 |
+
#define TSTART() pangolin::basetime start,last,now; start = pangolin::TimeNow(); last = start;
|
36 |
+
#define TGRABANDPRINT(...) now = pangolin::TimeNow(); fprintf(stderr," PLEORA: "); fprintf(stderr, __VA_ARGS__); fprintf(stderr, " %fms.\n",1000*pangolin::TimeDiff_s(last, now)); last = now;
|
37 |
+
#define DBGPRINT(...) fprintf(stderr," PLEORA: "); fprintf(stderr, __VA_ARGS__); fprintf(stderr,"\n");
|
38 |
+
#else
|
39 |
+
#define TSTART()
|
40 |
+
#define TGRABANDPRINT(...)
|
41 |
+
#define DBGPRINT(...)
|
42 |
+
#endif
|
43 |
+
|
44 |
+
namespace pangolin
|
45 |
+
{
|
46 |
+
|
47 |
+
inline void ThrowOnFailure(const PvResult& res)
|
48 |
+
{
|
49 |
+
if(res.IsFailure()) {
|
50 |
+
throw std::runtime_error("Failure: " + std::string(res.GetCodeString().GetAscii()) );
|
51 |
+
}
|
52 |
+
}
|
53 |
+
|
54 |
+
// Maps a C++ value type to the Pleora GenICam parameter wrapper class that
// carries it (PvGenBoolean / PvGenInteger / PvGenFloat / PvGenString).
// GetParam/SetParam below use this to choose the dynamic_cast target.
template<typename T>
struct PleoraParamTraits;

template<> struct PleoraParamTraits<bool> {
    typedef PvGenBoolean PvType;
};
template<> struct PleoraParamTraits<int64_t> {
    typedef PvGenInteger PvType;
};
template<> struct PleoraParamTraits<float> {
    typedef PvGenFloat PvType;
};
template<> struct PleoraParamTraits<std::string> {
    typedef PvGenString PvType;
};
69 |
+
|
70 |
+
template<typename T>
|
71 |
+
T GetParam(PvGenParameterArray* params, const char* name)
|
72 |
+
{
|
73 |
+
typedef typename PleoraParamTraits<T>::PvType PvType;
|
74 |
+
PvType* param = dynamic_cast<PvType*>( params->Get(name) );
|
75 |
+
if(!param) {
|
76 |
+
throw std::runtime_error("Incorrect type");
|
77 |
+
}
|
78 |
+
T ret;
|
79 |
+
PvResult res = param->GetValue(ret);
|
80 |
+
if(res.IsFailure()) {
|
81 |
+
throw std::runtime_error("Cannot get value: " + std::string(res.GetCodeString().GetAscii()) );
|
82 |
+
}
|
83 |
+
return ret;
|
84 |
+
}
|
85 |
+
|
86 |
+
template<typename T>
|
87 |
+
bool SetParam(PvGenParameterArray* params, const char* name, T val)
|
88 |
+
{
|
89 |
+
typedef typename PleoraParamTraits<T>::PvType PvType;
|
90 |
+
PvType* param = dynamic_cast<PvType*>( params->Get(name) );
|
91 |
+
if(!param) {
|
92 |
+
throw std::runtime_error("Unable to get parameter handle: " + std::string(name) );
|
93 |
+
}
|
94 |
+
|
95 |
+
if(!param->IsWritable()) {
|
96 |
+
throw std::runtime_error("Cannot set value for " + std::string(name) );
|
97 |
+
}
|
98 |
+
|
99 |
+
PvResult res = param->SetValue(val);
|
100 |
+
if(res.IsFailure()) {
|
101 |
+
throw std::runtime_error("Cannot set value: " + std::string(res.GetCodeString().GetAscii()) );
|
102 |
+
}
|
103 |
+
return true;
|
104 |
+
}
|
105 |
+
|
106 |
+
/// Enumerate all devices visible to the PvSystem and return the index-th
/// device that matches the (optional) model name and serial number filters.
/// Returns 0 when no such device exists. The returned pointer is owned by
/// aSystem.
inline const PvDeviceInfo* SelectDevice( PvSystem& aSystem, const char* model_name = 0, const char* serial_num = 0, size_t index = 0 )
{
    aSystem.Find();

    size_t num_matches = 0;
    for ( uint32_t i = 0; i < aSystem.GetInterfaceCount(); i++ ) {
        const PvInterface* iface = dynamic_cast<const PvInterface*>( aSystem.GetInterface( i ) );
        if ( !iface ) {
            continue;
        }
        for ( uint32_t j = 0; j < iface->GetDeviceCount(); j++ ) {
            const PvDeviceInfo* info = dynamic_cast<const PvDeviceInfo*>( iface->GetDeviceInfo( j ) );
            if ( !info || !info->IsConfigurationValid() ) {
                continue;
            }
            // Apply the optional filters; a non-zero strcmp means "no match".
            if ( model_name && strcmp(info->GetModelName().GetAscii(), model_name) != 0 ) {
                continue;
            }
            if ( serial_num && strcmp(info->GetSerialNumber().GetAscii(), serial_num) != 0 ) {
                continue;
            }
            if ( num_matches == index ) {
                return info;
            }
            ++num_matches;
        }
    }

    return 0;
}
133 |
+
|
134 |
+
/// Map a Pleora GenICam PixelFormat enum value onto Pangolin's PixelFormat.
/// Bayer formats are reported as plain grey (no debayering here).
/// Throws VideoException for formats this driver does not handle.
PixelFormat PleoraFormat(const PvGenEnum* pfmt)
{
    const std::string name = pfmt->ToString().GetAscii();

    if( name == "Mono8" || name == "BayerBG8" ) {
        return PixelFormatFromString("GRAY8");
    }
    if( name == "Mono10p" ) {
        return PixelFormatFromString("GRAY10");
    }
    if( name == "Mono12p" ) {
        return PixelFormatFromString("GRAY12");
    }
    if( name == "Mono10" || name == "Mono12" || name == "BayerBG12" ) {
        // Unpacked 10/12-bit samples arrive in 16-bit little-endian words.
        return PixelFormatFromString("GRAY16LE");
    }
    if( name == "RGB8" ) {
        return PixelFormatFromString("RGB24");
    }
    if( name == "BGR8" ) {
        return PixelFormatFromString("BGR24");
    }
    throw VideoException("Unknown Pleora pixel format", name);
}
157 |
+
|
158 |
+
/// Construct the driver from URI parameters.
/// Recognised options: "model", "sn" (serial), "idx" (device index among
/// matches), "size"/"pos"/"roi" (mapped onto the GenICam Width/Height/
/// OffsetX/OffsetY parameters). Any other option is forwarded verbatim to
/// the device as a GenICam parameter by SetDeviceParams().
PleoraVideo::PleoraVideo(const Params& p): size_bytes(0), lPvSystem(0), lDevice(0), lStream(0), lDeviceParams(0), lStart(0), lStop(0),
    lTemperatureCelcius(0), getTemp(false), lStreamParams(0), validGrabbedBuffers(0)
{
    std::string sn;
    std::string mn;
    int index = 0;
    // NOTE(review): buffer_count always stays at DEFAULT_BUFFER_COUNT; no
    // URI option ever overrides it — confirm whether a "buffers" parameter
    // was intended here.
    size_t buffer_count = PleoraVideo::DEFAULT_BUFFER_COUNT;
    Params device_params;

    // Split the incoming options into driver-level selections (model/sn/idx)
    // and GenICam device parameters.
    for(Params::ParamMap::const_iterator it = p.params.begin(); it != p.params.end(); it++) {
        if(it->first == "model"){
            mn = it->second;
        } else if(it->first == "sn"){
            sn = it->second;
        } else if(it->first == "idx"){
            index = p.Get<int>("idx", 0);
        } else if(it->first == "size") {
            const ImageDim dim = p.Get<ImageDim>("size", ImageDim(0,0) );
            device_params.Set("Width"  , dim.x);
            device_params.Set("Height" , dim.y);
        } else if(it->first == "pos") {
            const ImageDim pos = p.Get<ImageDim>("pos", ImageDim(0,0) );
            device_params.Set("OffsetX"  , pos.x);
            device_params.Set("OffsetY" , pos.y);
        } else if(it->first == "roi") {
            const ImageRoi roi = p.Get<ImageRoi>("roi", ImageRoi(0,0,0,0) );
            device_params.Set("Width"  , roi.w);
            device_params.Set("Height" , roi.h);
            device_params.Set("OffsetX", roi.x);
            device_params.Set("OffsetY", roi.y);
        } else {
            // Unrecognised options are assumed to be native GenICam names.
            device_params.Set(it->first, it->second);
        }
    }

    // Order matters: connect, configure, open stream, then describe the
    // resulting format to Pangolin and allocate acquisition buffers.
    InitDevice(mn.empty() ? 0 : mn.c_str(), sn.empty() ? 0 : sn.c_str(), index);
    SetDeviceParams(device_params);
    InitStream();

    InitPangoStreams();
    InitPangoDeviceProperties();
    InitBuffers(buffer_count);
}
201 |
+
|
202 |
+
/// Tear down in reverse order of construction: stop acquisition first,
/// then release buffers, the stream, and finally the device / PvSystem.
PleoraVideo::~PleoraVideo()
{
    Stop();
    DeinitBuffers();
    DeinitStream();
    DeinitDevice();
}
209 |
+
|
210 |
+
std::string PleoraVideo::GetParameter(const std::string& name) {
|
211 |
+
PvGenParameter* par = lDeviceParams->Get(PvString(name.c_str()));
|
212 |
+
if(par) {
|
213 |
+
PvString ret = par->ToString();
|
214 |
+
return std::string(ret.GetAscii());
|
215 |
+
} else {
|
216 |
+
pango_print_error("Parameter %s not recognized\n", name.c_str());
|
217 |
+
return "";
|
218 |
+
}
|
219 |
+
}
|
220 |
+
|
221 |
+
void PleoraVideo::SetParameter(const std::string& name, const std::string& value) {
|
222 |
+
PvGenParameter* par = lDeviceParams->Get(PvString(name.c_str()));
|
223 |
+
if(par) {
|
224 |
+
PvResult r = par->FromString(PvString(value.c_str()));
|
225 |
+
if(!r.IsOK()){
|
226 |
+
pango_print_error("Error setting parameter %s to:%s Reason:%s\n", name.c_str(), value.c_str(), r.GetDescription().GetAscii());
|
227 |
+
} else {
|
228 |
+
pango_print_info("Setting parameter %s to:%s\n", name.c_str(), value.c_str());
|
229 |
+
}
|
230 |
+
} else {
|
231 |
+
pango_print_error("Parameter %s not recognized\n", name.c_str());
|
232 |
+
}
|
233 |
+
}
|
234 |
+
|
235 |
+
void PleoraVideo::InitDevice(
|
236 |
+
const char* model_name, const char* serial_num, size_t index
|
237 |
+
) {
|
238 |
+
lPvSystem = new PvSystem();
|
239 |
+
if ( !lPvSystem ) {
|
240 |
+
throw pangolin::VideoException("Pleora: Unable to create PvSystem");
|
241 |
+
}
|
242 |
+
|
243 |
+
lDeviceInfo = SelectDevice(*lPvSystem, model_name, serial_num, index);
|
244 |
+
if ( !lDeviceInfo ) {
|
245 |
+
delete lPvSystem;
|
246 |
+
throw pangolin::VideoException("Pleora: Unable to select device");
|
247 |
+
}
|
248 |
+
|
249 |
+
PvResult lResult;
|
250 |
+
lDevice = PvDevice::CreateAndConnect( lDeviceInfo, &lResult );
|
251 |
+
if ( !lDevice ) {
|
252 |
+
delete lPvSystem;
|
253 |
+
throw pangolin::VideoException("Pleora: Unable to connect to device", lResult.GetDescription().GetAscii() );
|
254 |
+
}
|
255 |
+
|
256 |
+
lDeviceParams = lDevice->GetParameters();
|
257 |
+
}
|
258 |
+
|
259 |
+
|
260 |
+
void PleoraVideo::DeinitDevice()
|
261 |
+
{
|
262 |
+
if(lDevice) {
|
263 |
+
lDevice->Disconnect();
|
264 |
+
PvDevice::Free( lDevice );
|
265 |
+
lDevice = 0;
|
266 |
+
}
|
267 |
+
|
268 |
+
delete lPvSystem;
|
269 |
+
lPvSystem = 0;
|
270 |
+
}
|
271 |
+
|
272 |
+
void PleoraVideo::InitStream()
|
273 |
+
{
|
274 |
+
// Setup Stream
|
275 |
+
PvResult lResult;
|
276 |
+
lStream = PvStream::CreateAndOpen( lDeviceInfo->GetConnectionID(), &lResult );
|
277 |
+
if ( !lStream ) {
|
278 |
+
DeinitDevice();
|
279 |
+
throw pangolin::VideoException("Pleora: Unable to open stream", lResult.GetDescription().GetAscii() );
|
280 |
+
}
|
281 |
+
lStreamParams = lStream->GetParameters();
|
282 |
+
}
|
283 |
+
|
284 |
+
void PleoraVideo::DeinitStream()
|
285 |
+
{
|
286 |
+
if(lStream) {
|
287 |
+
lStream->Close();
|
288 |
+
PvStream::Free( lStream );
|
289 |
+
lStream = 0;
|
290 |
+
}
|
291 |
+
}
|
292 |
+
|
293 |
+
/// Apply user-supplied GenICam parameters to the device and cache handles
/// to the properties the driver manipulates at runtime.
/// A value of "Execute" marks the option as a GenICam command; the command
/// is executed and polled (1s interval, up to 20 attempts) until done.
/// BUGFIXES: `done` was previously read uninitialised when IsDone() did not
/// write it; the runtime_error was caught by value instead of const ref.
void PleoraVideo::SetDeviceParams(Params& p) {

    lStart = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStart" ) );
    lStop = dynamic_cast<PvGenCommand*>( lDeviceParams->Get( "AcquisitionStop" ) );

    for(Params::ParamMap::iterator it = p.params.begin(); it != p.params.end(); it++) {
        if(it->first == "get_temperature"){
            // Driver-level option, not a device parameter.
            getTemp = p.Get<bool>("get_temperature",false);
        } else {
            if (it->second == "Execute") {
                // This is a command, deal with it accordingly.
                PvGenCommand* cmd = dynamic_cast<PvGenCommand*>(lDeviceParams->Get(it->first.c_str()));
                if(cmd) {
                    PvResult r = cmd->Execute();
                    if(!r.IsOK()){
                        pango_print_error("Error executing command %s Reason:%s\n", it->first.c_str(), r.GetDescription().GetAscii());
                    } else {
                        pango_print_info("Executed Command %s\n", it->first.c_str());
                    }
                    // Poll until the command reports completion or we time out.
                    bool done = false;   // BUGFIX: explicit init; IsDone may fail without writing it
                    int attempts = 20;
                    do {
                        cmd->IsDone(done);
                        std::this_thread::sleep_for(std::chrono::milliseconds(1000));
                        attempts--;
                    } while(!done && (attempts > 0));
                    if(attempts == 0) {
                        pango_print_error("Timeout while waiting for command %s done\n", it->first.c_str());
                    }
                } else {
                    pango_print_error("Command %s not recognized\n", it->first.c_str());
                }
            } else {
                try {
                    PvGenParameter* par = lDeviceParams->Get(PvString(it->first.c_str()));
                    if(par) {
                        PvResult r = par->FromString(PvString(it->second.c_str()));
                        if(!r.IsOK()){
                            pango_print_error("Error setting parameter %s to:%s Reason:%s\n", it->first.c_str(), it->second.c_str(), r.GetDescription().GetAscii());
                        } else {
                            pango_print_info("Setting parameter %s to:%s\n", it->first.c_str(), it->second.c_str());
                        }
                    } else {
                        pango_print_error("Parameter %s not recognized\n", it->first.c_str());
                    }
                } catch(const std::runtime_error& e) {   // BUGFIX: catch by const reference
                    pango_print_error("Set parameter %s: %s\n", it->first.c_str(), e.what());
                }
            }
        }
    }

    // Get Handles to properties we'll be using.
    lAnalogGain = lDeviceParams->GetInteger("AnalogGain");
    lGamma = lDeviceParams->GetFloat("Gamma");
    lAnalogBlackLevel = lDeviceParams->GetInteger("AnalogBlackLevel");
    lExposure = lDeviceParams->GetFloat("ExposureTime");
    lAquisitionMode = lDeviceParams->GetEnum("AcquisitionMode");
    lTriggerSource = lDeviceParams->GetEnum("TriggerSource");
    lTriggerMode = lDeviceParams->GetEnum("TriggerMode");

    if(getTemp) {
        lTemperatureCelcius = lDeviceParams->GetFloat("DeviceTemperatureCelsius");
        pango_print_warn("Warning: get_temperature might add a blocking call taking several ms to each frame read.");
    }

}
360 |
+
|
361 |
+
void PleoraVideo::InitBuffers(size_t buffer_count)
|
362 |
+
{
|
363 |
+
// Reading payload size from device
|
364 |
+
const uint32_t lSize = lDevice->GetPayloadSize();
|
365 |
+
|
366 |
+
// Use buffer_count or the maximum number of buffers, whichever is smaller
|
367 |
+
const uint32_t lBufferCount = ( lStream->GetQueuedBufferMaximum() < buffer_count ) ?
|
368 |
+
lStream->GetQueuedBufferMaximum() :
|
369 |
+
buffer_count;
|
370 |
+
|
371 |
+
// Allocate buffers and queue
|
372 |
+
for( uint32_t i = 0; i < lBufferCount; i++ ) {
|
373 |
+
PvBuffer *lBuffer = new PvBuffer;
|
374 |
+
lBuffer->Alloc( static_cast<uint32_t>( lSize ) );
|
375 |
+
lBufferList.push_back( lBuffer );
|
376 |
+
}
|
377 |
+
}
|
378 |
+
|
379 |
+
void PleoraVideo::DeinitBuffers()
|
380 |
+
{
|
381 |
+
// Free buffers
|
382 |
+
for( BufferList::iterator lIt = lBufferList.begin(); lIt != lBufferList.end(); lIt++ ) {
|
383 |
+
delete *lIt;
|
384 |
+
}
|
385 |
+
lBufferList.clear();
|
386 |
+
}
|
387 |
+
|
388 |
+
/// Describe the camera output to Pangolin: a single stream whose format is
/// derived from the device's PixelFormat enum.
void PleoraVideo::InitPangoStreams()
{
    // Get actual width, height and payload size
    const int w = DeviceParam<int64_t>("Width");
    const int h = DeviceParam<int64_t>("Height");
    const uint32_t lSize = lDevice->GetPayloadSize();

    // Setup pangolin for stream
    PvGenEnum* lpixfmt = dynamic_cast<PvGenEnum*>( lDeviceParams->Get("PixelFormat") );
    const PixelFormat fmt = PleoraFormat(lpixfmt);
    // Pitch (w*bpp)/8 assumes rows are tightly packed with no padding —
    // presumably true for the formats PleoraFormat accepts; verify for
    // sub-byte packed formats (Mono10p/Mono12p).
    streams.push_back(StreamInfo(fmt, w, h, (w*fmt.bpp)/8));
    size_bytes = lSize;
}
401 |
+
|
402 |
+
/// Publish camera identity and the full (readable) GenICam parameter set
/// through Pangolin's device_properties JSON map.
void PleoraVideo::InitPangoDeviceProperties()
{
    // Store camera details in device properties
    device_properties["SerialNumber"] = std::string(lDeviceInfo->GetSerialNumber().GetAscii());
    device_properties["VendorName"] = std::string(lDeviceInfo->GetVendorName().GetAscii());
    device_properties["ModelName"] = std::string(lDeviceInfo->GetModelName().GetAscii());
    device_properties["ManufacturerInfo"] = std::string(lDeviceInfo->GetManufacturerInfo().GetAscii());
    device_properties["Version"] = std::string(lDeviceInfo->GetVersion().GetAscii());
    device_properties["DisplayID"] = std::string(lDeviceInfo->GetDisplayID().GetAscii());
    device_properties["UniqueID"] = std::string(lDeviceInfo->GetUniqueID().GetAscii());
    device_properties["ConnectionID"] = std::string(lDeviceInfo->GetConnectionID().GetAscii());

    // Snapshot every readable device parameter as a string under "properties".
    picojson::value props(picojson::object_type, true);
    for(size_t i=0; i < lDeviceParams->GetCount(); ++i) {
        PvGenParameter* p = (*lDeviceParams)[i];
        if(p->IsReadable()) {
            props[p->GetName().GetAscii()] = p->ToString().GetAscii();
        }
    }

    device_properties["properties"] = props;
}
424 |
+
|
425 |
+
unsigned int PleoraVideo::AvailableFrames() const
|
426 |
+
{
|
427 |
+
return validGrabbedBuffers;
|
428 |
+
}
|
429 |
+
|
430 |
+
bool PleoraVideo::DropNFrames(uint32_t n)
|
431 |
+
{
|
432 |
+
if(n > validGrabbedBuffers) return false;
|
433 |
+
|
434 |
+
while(n > 0) {
|
435 |
+
lStream->QueueBuffer(lGrabbedBuffList.front().buff);
|
436 |
+
lGrabbedBuffList.pop_front();
|
437 |
+
--validGrabbedBuffers;
|
438 |
+
--n;
|
439 |
+
DBGPRINT("DropNFrames: removed 1 frame from the list and requeued it.")
|
440 |
+
}
|
441 |
+
|
442 |
+
return true;
|
443 |
+
}
|
444 |
+
|
445 |
+
/// Begin acquisition: queue every allocated buffer, enable streaming and
/// issue the GenICam AcquisitionStart command. The stream's queued-buffer
/// count doubles as the "already started" flag, so repeated calls are no-ops.
void PleoraVideo::Start()
{
    if(lStream->GetQueuedBufferCount() == 0) {
        // Queue all buffers in the stream
        for( BufferList::iterator lIt = lBufferList.begin(); lIt != lBufferList.end(); lIt++ ) {
            lStream->QueueBuffer( *lIt );
        }
        lDevice->StreamEnable();
        lStart->Execute();
    } else {
        // // It isn't an error to repeatedly start
        // pango_print_warn("PleoraVideo: Already started.\n");
    }
}

/// Stop acquisition: issue AcquisitionStop, disable streaming, then abort
/// and drain every queued buffer. As in Start(), the queued-buffer count is
/// used to detect whether acquisition is running, making repeat calls no-ops.
void PleoraVideo::Stop()
{
    // stop grab thread
    if(lStream->GetQueuedBufferCount() > 0) {
        lStop->Execute();
        lDevice->StreamDisable();

        // Abort all buffers from the stream and dequeue
        lStream->AbortQueuedBuffers();
        while ( lStream->GetQueuedBufferCount() > 0 ) {
            PvBuffer *lBuffer = NULL;
            PvResult lOperationResult;
            lStream->RetrieveBuffer( &lBuffer, &lOperationResult );
        }
    } else {
        // // It isn't an error to repeatedly stop
        // pango_print_warn("PleoraVideo: Already stopped.\n");
    }
}
479 |
+
|
480 |
+
/// Bytes in one complete frame (the device payload size cached by
/// InitPangoStreams).
size_t PleoraVideo::SizeBytes() const
{
    return size_bytes;
}

/// Metadata for the single stream this driver exposes.
const std::vector<StreamInfo>& PleoraVideo::Streams() const
{
    return streams;
}
489 |
+
|
490 |
+
/// Copy an image payload out of a retrieved PvBuffer into `image` and record
/// per-frame metadata (timestamps, optionally sensor temperature) in
/// frame_properties. Returns false for non-image payloads. The TSTART/
/// TGRABANDPRINT macros are timing instrumentation compiled in only when
/// DEBUGPLEORA is defined.
bool PleoraVideo::ParseBuffer(PvBuffer* lBuffer, unsigned char* image)
{
    TSTART()
    if ( lBuffer->GetPayloadType() == PvPayloadTypeImage ) {
        PvImage *lImage = lBuffer->GetImage();
        TGRABANDPRINT("GetImage took ")
        // Copy exactly one frame; `image` must be at least size_bytes long.
        std::memcpy(image, lImage->GetDataPointer(), size_bytes);
        TGRABANDPRINT("memcpy took ")
        // Required frame properties
        frame_properties[PANGO_CAPTURE_TIME_US] = picojson::value(lBuffer->GetTimestamp());
        frame_properties[PANGO_HOST_RECEPTION_TIME_US] = picojson::value(lBuffer->GetReceptionTime());
        TGRABANDPRINT("Frame properties took ")

        // Optional frame properties
        // lTemperatureCelcius is only non-null when the get_temperature
        // option was set; reading it may block for several ms.
        if(lTemperatureCelcius != 0) {
            double val;
            PvResult lResult = lTemperatureCelcius->GetValue(val);
            if(lResult.IsSuccess()) {
                frame_properties[PANGO_SENSOR_TEMPERATURE_C] = picojson::value(val);
            } else {
                pango_print_error("DeviceTemperatureCelsius %f fail\n", val);
            }
        }
        TGRABANDPRINT("GetTemperature took ")
        return true;
    } else {
        return false;
    }

}
520 |
+
|
521 |
+
/// Grab the oldest available frame into `image`. When `wait` is true and no
/// frame is already queued locally, blocks for up to 1000ms. Returns false
/// if no frame became available or the buffer's operation result was bad.
/// The consumed buffer is always requeued with the stream.
bool PleoraVideo::GrabNext( unsigned char* image, bool wait)
{
    const uint32_t timeout = wait ? 1000 : 0;
    bool good = false;
    TSTART()
    DBGPRINT("GrabNext no thread:")

    // Only apply the timeout when we hold no frames yet; otherwise just poll.
    RetriveAllAvailableBuffers((validGrabbedBuffers==0) ? timeout : 0);
    TGRABANDPRINT("Retriving all available buffers (valid frames in queue=%d, queue size=%ld) took ",validGrabbedBuffers ,lGrabbedBuffList.size())

    if(validGrabbedBuffers == 0) return false;

    // Retrieve next buffer from list and parse it
    GrabbedBufferList::iterator front = lGrabbedBuffList.begin();
    if ( front->res.IsOK() ) {
        good = ParseBuffer(front->buff, image);
    }
    TGRABANDPRINT("Parsing buffer took ")

    lStream->QueueBuffer(front->buff);
    TGRABANDPRINT("\tPLEORA:QueueBuffer: ")

    // Remove used buffer from list.
    lGrabbedBuffList.pop_front();
    --validGrabbedBuffers;

    return good;
}
549 |
+
|
550 |
+
|
551 |
+
bool PleoraVideo::GrabNewest( unsigned char* image, bool wait )
|
552 |
+
{
|
553 |
+
const uint32_t timeout = wait ? 0xFFFFFFFF : 0;
|
554 |
+
bool good = false;
|
555 |
+
|
556 |
+
TSTART()
|
557 |
+
DBGPRINT("GrabNewest no thread:")
|
558 |
+
RetriveAllAvailableBuffers((validGrabbedBuffers==0) ? timeout : 0);
|
559 |
+
TGRABANDPRINT("Retriving all available buffers (valid frames in queue=%d, queue size=%ld) took ",validGrabbedBuffers ,lGrabbedBuffList.size())
|
560 |
+
|
561 |
+
if(validGrabbedBuffers == 0) {
|
562 |
+
DBGPRINT("No valid buffers, returning.")
|
563 |
+
return false;
|
564 |
+
}
|
565 |
+
if(validGrabbedBuffers > 1) DropNFrames(validGrabbedBuffers-1);
|
566 |
+
TGRABANDPRINT("Dropping %d frames took ", (validGrabbedBuffers-1))
|
567 |
+
|
568 |
+
// Retrieve next buffer from list and parse it
|
569 |
+
GrabbedBufferList::iterator front = lGrabbedBuffList.begin();
|
570 |
+
if ( front->res.IsOK() ) {
|
571 |
+
good = ParseBuffer(front->buff, image);
|
572 |
+
}
|
573 |
+
TGRABANDPRINT("Parsing buffer took ")
|
574 |
+
|
575 |
+
lStream->QueueBuffer(front->buff);
|
576 |
+
TGRABANDPRINT("Requeueing buffer took ")
|
577 |
+
|
578 |
+
// Remove used buffer from list.
|
579 |
+
lGrabbedBuffList.pop_front();
|
580 |
+
--validGrabbedBuffers;
|
581 |
+
|
582 |
+
return good;
|
583 |
+
}
|
584 |
+
|
585 |
+
void PleoraVideo::RetriveAllAvailableBuffers(uint32_t timeout){
|
586 |
+
PvBuffer *lBuffer = NULL;
|
587 |
+
PvResult lOperationResult;
|
588 |
+
PvResult lResult;
|
589 |
+
TSTART()
|
590 |
+
do {
|
591 |
+
lResult = lStream->RetrieveBuffer( &lBuffer, &lOperationResult, timeout);
|
592 |
+
if ( !lResult.IsOK() ) {
|
593 |
+
if(lResult && !(lResult.GetCode() == PvResult::Code::TIMEOUT)) {
|
594 |
+
pango_print_warn("Pleora error: %s,\n'%s'\n", lResult.GetCodeString().GetAscii(), lResult.GetDescription().GetAscii() );
|
595 |
+
}
|
596 |
+
return;
|
597 |
+
} else if( !lOperationResult.IsOK() ) {
|
598 |
+
pango_print_warn("Pleora error %s,\n'%s'\n", lOperationResult.GetCodeString().GetAscii(), lResult.GetDescription().GetAscii() );
|
599 |
+
lStream->QueueBuffer( lBuffer );
|
600 |
+
return;
|
601 |
+
}
|
602 |
+
lGrabbedBuffList.push_back(GrabbedBuffer(lBuffer,lOperationResult,true));
|
603 |
+
++validGrabbedBuffers;
|
604 |
+
TGRABANDPRINT("Attempt retrieving buffer (timeout=%d validbuffer=%d) took ", timeout, validGrabbedBuffers)
|
605 |
+
timeout = 0;
|
606 |
+
} while (lResult.IsOK());
|
607 |
+
}
|
608 |
+
|
609 |
+
int64_t PleoraVideo::GetGain()
|
610 |
+
{
|
611 |
+
int64_t val;
|
612 |
+
if(lAnalogGain) {
|
613 |
+
ThrowOnFailure( lAnalogGain->GetValue(val) );
|
614 |
+
}
|
615 |
+
return val;
|
616 |
+
}
|
617 |
+
|
618 |
+
void PleoraVideo::SetGain(int64_t val)
|
619 |
+
{
|
620 |
+
if(val >= 0 && lAnalogGain && lAnalogGain->IsWritable()) {
|
621 |
+
ThrowOnFailure( lAnalogGain->SetValue(val) );
|
622 |
+
frame_properties[PANGO_ANALOG_GAIN] = picojson::value(val);
|
623 |
+
}
|
624 |
+
}
|
625 |
+
|
626 |
+
int64_t PleoraVideo::GetAnalogBlackLevel()
|
627 |
+
{
|
628 |
+
int64_t val;
|
629 |
+
if(lAnalogBlackLevel) {
|
630 |
+
ThrowOnFailure( lAnalogBlackLevel->GetValue(val) );
|
631 |
+
}
|
632 |
+
return val;
|
633 |
+
}
|
634 |
+
|
635 |
+
void PleoraVideo::SetAnalogBlackLevel(int64_t val)
|
636 |
+
{
|
637 |
+
if(val >= 0 && lAnalogBlackLevel&& lAnalogBlackLevel->IsWritable()) {
|
638 |
+
ThrowOnFailure( lAnalogBlackLevel->SetValue(val) );
|
639 |
+
frame_properties[PANGO_ANALOG_BLACK_LEVEL] = picojson::value(val);
|
640 |
+
}
|
641 |
+
}
|
642 |
+
|
643 |
+
double PleoraVideo::GetExposure()
|
644 |
+
{
|
645 |
+
double val;
|
646 |
+
if( lExposure ) {
|
647 |
+
ThrowOnFailure( lExposure->GetValue(val));
|
648 |
+
}
|
649 |
+
return val;
|
650 |
+
}
|
651 |
+
|
652 |
+
void PleoraVideo::SetExposure(double val)
|
653 |
+
{
|
654 |
+
if(val > 0 && lExposure && lExposure->IsWritable() ) {
|
655 |
+
ThrowOnFailure( lExposure->SetValue(val) );
|
656 |
+
frame_properties[PANGO_EXPOSURE_US] = picojson::value(val);
|
657 |
+
}
|
658 |
+
}
|
659 |
+
|
660 |
+
|
661 |
+
double PleoraVideo::GetGamma()
|
662 |
+
{
|
663 |
+
double val;
|
664 |
+
if(lGamma) {
|
665 |
+
ThrowOnFailure(lGamma->GetValue(val));
|
666 |
+
}
|
667 |
+
return val;
|
668 |
+
}
|
669 |
+
|
670 |
+
void PleoraVideo::SetGamma(double val)
|
671 |
+
{
|
672 |
+
if(val > 0 && lGamma && lGamma->IsWritable() ) {
|
673 |
+
ThrowOnFailure( lGamma->SetValue(val) );
|
674 |
+
frame_properties[PANGO_GAMMA] = picojson::value(val);
|
675 |
+
}
|
676 |
+
}
|
677 |
+
|
678 |
+
//use 0,0,1 for line0 hardware trigger.
|
679 |
+
//use 2,252,0 for software continuous
|
680 |
+
//use 0,0,1 for line0 hardware trigger.
//use 2,252,0 for software continuous
/// Configure the trigger mode. Values are GenICam enum entry values for the
/// TriggerSource and AcquisitionMode enums; both are validated against the
/// camera's enum tables before anything is written. Does nothing when any
/// of the three handles is missing or read-only.
void PleoraVideo::SetupTrigger(bool triggerActive, int64_t triggerSource, int64_t acquisitionMode)
{
    if(lAquisitionMode && lTriggerSource && lTriggerMode &&
       lAquisitionMode->IsWritable() && lTriggerSource->IsWritable() && lTriggerMode->IsWritable() ) {
        // Check input is valid.
        const PvGenEnumEntry* entry_src;
        const PvGenEnumEntry* entry_acq;
        lTriggerSource->GetEntryByValue(triggerSource, &entry_src);
        lAquisitionMode->GetEntryByValue(acquisitionMode, &entry_acq);

        if(entry_src && entry_acq) {
            // TriggerMode is a boolean-like enum: 1 = On, 0 = Off.
            ThrowOnFailure(lTriggerMode->SetValue(triggerActive ? 1 : 0));
            if(triggerActive) {
                pango_print_debug("Pleora: external trigger active\n");
                ThrowOnFailure(lTriggerSource->SetValue(triggerSource));
                ThrowOnFailure(lAquisitionMode->SetValue(acquisitionMode));
            }
        }else{
            pango_print_error("Bad values for trigger options.");
        }
    }
}
702 |
+
|
703 |
+
/// Read a typed GenICam parameter from the device parameter array.
/// Throws std::runtime_error on type mismatch or read failure (see GetParam).
template<typename T>
T PleoraVideo::DeviceParam(const char* name)
{
    return GetParam<T>(lDeviceParams, name);
}

/// Write a typed GenICam parameter on the device parameter array.
/// Throws std::runtime_error on failure (see SetParam).
template<typename T>
bool PleoraVideo::SetDeviceParam(const char* name, T val)
{
    return SetParam<T>(lDeviceParams, name, val);
}

/// Read a typed GenICam parameter from the stream parameter array.
template<typename T>
T PleoraVideo::StreamParam(const char* name)
{
    return GetParam<T>(lStreamParams, name);
}

/// Write a typed GenICam parameter on the stream parameter array.
template<typename T>
bool PleoraVideo::SetStreamParam(const char* name, T val)
{
    return SetParam<T>(lStreamParams, name, val);
}
726 |
+
|
727 |
+
// Register this driver with Pangolin's video factory under the "pleora" and
// "u3v" URI schemes. All URI options are forwarded to the camera as native
// GenICam parameters (see PleoraVideo's constructor).
PANGOLIN_REGISTER_FACTORY(PleoraVideo)
{
    struct PleoraVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"pleora",10}, {"u3v",10}};
        }
        const char* Description() const override
        {
            return "Uses Pleora EBusSDK to open u3v camera.";
        }
        ParamSet Params() const override
        {
            return {{
                {"*","Enumerates arguments dynamically from camera. Use native u3v properties."}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            return std::unique_ptr<VideoInterface>(new PleoraVideo(uri));
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<PleoraVideoFactory>());
}
751 |
+
|
752 |
+
}
|
753 |
+
|
754 |
+
#undef TSTART
|
755 |
+
#undef TGRABANDPRINT
|
756 |
+
#undef DBGPRINT
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/realsense.cpp
ADDED
@@ -0,0 +1,115 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#include <librealsense/rs.hpp>
|
2 |
+
#include <pangolin/video/drivers/realsense.h>
|
3 |
+
#include <pangolin/factory/factory_registry.h>
|
4 |
+
|
5 |
+
namespace pangolin {
|
6 |
+
|
7 |
+
/// Open every RealSense device on the system and expose, per device, a
/// depth stream (Z16 as GRAY16LE) followed by a colour stream (RGB24).
/// Stream offsets index into one concatenated frame buffer of sizeBytes.
RealSenseVideo::RealSenseVideo(ImageDim dim, int fps)
  : dim_(dim), fps_(fps) {
  ctx_ = new rs::context();
  sizeBytes = 0;
  for (int32_t i=0; i<ctx_->get_device_count(); ++i) {
    devs_.push_back(ctx_->get_device(i));

    devs_[i]->enable_stream(rs::stream::depth, dim_.x, dim_.y, rs::format::z16, fps_);
    // NOTE(review): the depth stream's buffer offset is hard-coded to 0 for
    // every device; for a second device this overlaps the first device's
    // depth data while its colour offset (below) accumulates — confirm
    // intended behaviour with more than one camera attached.
    StreamInfo streamD(PixelFormatFromString("GRAY16LE"), dim_.x, dim_.y, dim_.x*2, 0);
    streams.push_back(streamD);

    sizeBytes += streamD.SizeBytes();
    devs_[i]->enable_stream(rs::stream::color, dim_.x, dim_.y, rs::format::rgb8, fps_);
    StreamInfo streamRGB(PixelFormatFromString("RGB24"), dim_.x, dim_.y, dim_.x*3, (uint8_t*)0+sizeBytes);
    streams.push_back(streamRGB);
    sizeBytes += streamRGB.SizeBytes();

    devs_[i]->start();
  }
  // Live source: report an effectively unbounded frame count.
  total_frames = std::numeric_limits<int>::max();
}
28 |
+
|
29 |
+
/// Release the librealsense context (which owns the device handles).
RealSenseVideo::~RealSenseVideo() {
  delete ctx_;
}

/// Restart streaming on every device and reset the frame counter.
void RealSenseVideo::Start() {
  for (int32_t i=0; i<ctx_->get_device_count(); ++i) {
    devs_[i]->stop();
    devs_[i]->start();
  }
  current_frame_index = 0;
}

/// Stop streaming on every device.
void RealSenseVideo::Stop() {
  for (int32_t i=0; i<ctx_->get_device_count(); ++i) {
    devs_[i]->stop();
  }
}
46 |
+
|
47 |
+
/// Total bytes for one frame: the sum of all per-device depth+colour streams.
size_t RealSenseVideo::SizeBytes() const {
  return sizeBytes;
}

/// Stream metadata: two entries (depth, colour) per attached device.
const std::vector<StreamInfo>& RealSenseVideo::Streams() const {
  return streams;
}
54 |
+
|
55 |
+
/// Copy one depth frame and one colour frame from every device into `image`,
/// packed in stream order. When `wait` is true, blocks on wait_for_frames();
/// otherwise copies whatever data the driver currently holds.
/// NOTE(review): current_frame_index is never incremented here, so
/// GetCurrentFrameId() always reports the value set by Start() — confirm
/// whether the counter was meant to advance per grab.
bool RealSenseVideo::GrabNext(unsigned char* image, bool wait) {

  unsigned char* out_img = image;
  for (int32_t i=0; i<ctx_->get_device_count(); ++i) {
    if (wait) {
      devs_[i]->wait_for_frames();
    }
    // streams[i*2] is device i's depth stream, streams[i*2+1] its colour.
    memcpy(out_img, devs_[i]->get_frame_data(rs::stream::depth), streams[i*2].SizeBytes());
    out_img += streams[i*2].SizeBytes();
    memcpy(out_img, devs_[i]->get_frame_data(rs::stream::color), streams[i*2+1].SizeBytes());
    out_img += streams[i*2+1].SizeBytes();
  }
  return true;
}
69 |
+
|
70 |
+
/// Live source: the newest frame is simply the next one.
bool RealSenseVideo::GrabNewest(unsigned char* image, bool wait) {
  return GrabNext(image, wait);
}

/// Index of the current frame (reset to 0 by Start()).
size_t RealSenseVideo::GetCurrentFrameId() const {
  return current_frame_index;
}

/// Effectively unbounded for a live source (see constructor).
size_t RealSenseVideo::GetTotalFrames() const {
  return total_frames;
}

/// Seeking is not supported on a live source; returns (size_t)-1.
size_t RealSenseVideo::Seek(size_t frameid) {
  // TODO
  return -1;
}
86 |
+
|
87 |
+
// Register the librealsense (v1 API) driver under the "realsense1" and
// "realsense" URI schemes. Note "realsense" carries precedence 20 so a
// competing driver registered at a lower value would win that scheme.
PANGOLIN_REGISTER_FACTORY(RealSenseVideo)
{
    struct RealSenseVideoFactory : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"realsense1",10}, {"realsense",20}};
        }
        const char* Description() const override
        {
            return "Stream from RealSense devices.";
        }
        ParamSet Params() const override
        {
            return {{
                {"size","640x480","Image dimension"},
                {"fps","30","Frames per second"}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            const ImageDim dim = uri.Get<ImageDim>("size", ImageDim(640,480));
            const unsigned int fps = uri.Get<unsigned int>("fps", 30);
            return std::unique_ptr<VideoInterface>( new RealSenseVideo(dim, fps) );
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<RealSenseVideoFactory>());
}
114 |
+
|
115 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/realsense2.cpp
ADDED
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#include <librealsense2/rs.hpp>
#include <pangolin/video/drivers/realsense2.h>
#include <pangolin/factory/factory_registry.h>

namespace pangolin {

// Opens the default RealSense device via the rs2 pipeline API, exposing a
// 16bit depth stream followed by an 8bit RGB stream, both at dim x fps.
RealSense2Video::RealSense2Video(ImageDim dim, int fps)
    : dim_(dim), fps_(fps) {

    sizeBytes = 0;
    current_frame_index = 0;

    // Create RealSense pipeline, encapsulating the actual device and sensors.
    pipe = new rs2::pipeline();

    // Configure the pipeline.
    cfg = new rs2::config();

    { // config depth
        cfg->enable_stream(RS2_STREAM_DEPTH, dim_.x, dim_.y, RS2_FORMAT_Z16, fps_);
        // Depth samples are 16bit little-endian, exposed as GRAY16LE at offset 0.
        StreamInfo streamD(PixelFormatFromString("GRAY16LE"), dim_.x, dim_.y, dim_.x*2, 0);
        streams.push_back(streamD);
        sizeBytes += streamD.SizeBytes();
    }

    { // config color
        cfg->enable_stream(RS2_STREAM_COLOR, dim_.x, dim_.y, RS2_FORMAT_RGB8, fps_);
        // The stream's offset within the frame buffer is the running byte total,
        // stored as a fake pointer per the StreamInfo convention.
        StreamInfo streamRGB(PixelFormatFromString("RGB24"), dim_.x, dim_.y, dim_.x*3, reinterpret_cast<uint8_t*>(sizeBytes));
        streams.push_back(streamRGB);
        sizeBytes += streamRGB.SizeBytes();
    }

    // Start streaming now so the depth scale can be queried and the device
    // is ready for the first Grab even if Start() is never called explicitly.
    pipe->start(*cfg);
    rs2::pipeline_profile profile = pipe->get_active_profile();
    auto sensor = profile.get_device().first<rs2::depth_sensor>();
    auto scale = sensor.get_depth_scale();
    std::cout << "Depth scale is: " << scale << std::endl;

    // Live stream: report an effectively unbounded number of frames.
    total_frames = std::numeric_limits<int>::max();
}

RealSense2Video::~RealSense2Video() {
    delete pipe;
    pipe = nullptr;

    delete cfg;
    cfg = nullptr;
}

void RealSense2Video::Start() {
    // The constructor already starts the pipeline, and librealsense throws if
    // start() is invoked on a running pipeline; make Start() idempotent so
    // callers following the usual Start()/Stop() protocol do not crash.
    try {
        pipe->start(*cfg);
    } catch(const rs2::error&) {
        // Pipeline is already streaming - nothing to do.
    }
    current_frame_index = 0;
}

void RealSense2Video::Stop() {
    pipe->stop();
}

size_t RealSense2Video::SizeBytes() const {
    return sizeBytes;
}

const std::vector<StreamInfo>& RealSense2Video::Streams() const {
    return streams;
}

// Blocks for the next coherent frameset and copies depth then colour into
// 'image'. The 'wait' flag is ignored: rs2::pipeline::wait_for_frames always blocks.
bool RealSense2Video::GrabNext(unsigned char* image, bool /*wait*/) {

    unsigned char* out_img = image;

    rs2::frameset data = pipe->wait_for_frames(); // Wait for next set of frames from the camera
    rs2::frame depth = data.get_depth_frame();    // Get the depth data
    rs2::frame color = data.get_color_frame();    // Get the color data

    memcpy(out_img, depth.get_data(), streams[0].SizeBytes());
    out_img += streams[0].SizeBytes();

    memcpy(out_img, color.get_data(), streams[1].SizeBytes());
    out_img += streams[1].SizeBytes();

    // Advance the frame counter so GetCurrentFrameId() reflects progress
    // (previously it was never updated).
    ++current_frame_index;

    return true;
}

bool RealSense2Video::GrabNewest(unsigned char* image, bool wait) {
    return GrabNext(image, wait);
}

size_t RealSense2Video::GetCurrentFrameId() const {
    return current_frame_index;
}

size_t RealSense2Video::GetTotalFrames() const {
    return total_frames;
}

// Seeking is not supported for a live camera stream.
size_t RealSense2Video::Seek(size_t /*frameid*/) {
    // TODO
    return -1;
}

PANGOLIN_REGISTER_FACTORY(RealSense2Video)
{
    struct RealSense2VideoFactory : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"realsense2",10}, {"realsense",10}};
        }
        const char* Description() const override
        {
            return "Stream from RealSense devices.";
        }
        ParamSet Params() const override
        {
            return {{
                {"size","640x480","Image dimension"},
                {"fps","30","Frames per second"}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            const ImageDim dim = uri.Get<ImageDim>("size", ImageDim(640,480));
            const unsigned int fps = uri.Get<unsigned int>("fps", 30);
            return std::unique_ptr<VideoInterface>( new RealSense2Video(dim, fps) );
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<RealSense2VideoFactory>());
}

}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/shared_memory.cpp
ADDED
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#include <pangolin/factory/factory_registry.h>
#include <pangolin/video/drivers/shared_memory.h>
#include <pangolin/video/iostream_operators.h>

using namespace std;

namespace pangolin
{

// Video source backed by a POSIX shared memory buffer that an external
// producer fills with raw frames; an optional condition variable signals
// when a new frame is available.
SharedMemoryVideo::SharedMemoryVideo(size_t w, size_t h, std::string pix_fmt,
    const std::shared_ptr<SharedMemoryBufferInterface>& shared_memory,
    const std::shared_ptr<ConditionVariableInterface>& buffer_full) :
    _fmt(PixelFormatFromString(pix_fmt)),
    _frame_size(w*h*_fmt.bpp/8),
    _shared_memory(shared_memory),
    _buffer_full(buffer_full)
{
    // Single tightly-packed stream at offset 0 of the shared buffer.
    const size_t pitch = w * _fmt.bpp/8;
    const StreamInfo stream(_fmt, w, h, pitch, 0);
    _streams.push_back(stream);
}

SharedMemoryVideo::~SharedMemoryVideo()
{
}

// The external producer owns the buffer lifecycle; nothing to start here.
void SharedMemoryVideo::Start()
{
}

void SharedMemoryVideo::Stop()
{
}

size_t SharedMemoryVideo::SizeBytes() const
{
    return _frame_size;
}

const std::vector<StreamInfo>& SharedMemoryVideo::Streams() const
{
    return _streams;
}

bool SharedMemoryVideo::GrabNext(unsigned char* image, bool wait)
{
    // If a condition variable exists, synchronise with the producer.
    if(_buffer_full) {
        timespec ts;
        clock_gettime(CLOCK_REALTIME, &ts);

        if (wait) {
            _buffer_full->wait();
        } else if (!_buffer_full->wait(ts)) {
            // Deadline is "now", so this is a non-blocking poll: give up
            // immediately if no new frame has been signalled.
            return false;
        }
    }

    // Copy the frame out while holding the shared-memory lock so the
    // producer cannot write into it mid-copy.
    _shared_memory->lock();
    memcpy(image, _shared_memory->ptr(), _frame_size);
    _shared_memory->unlock();

    return true;
}

bool SharedMemoryVideo::GrabNewest(unsigned char* image, bool wait)
{
    return GrabNext(image,wait);
}

PANGOLIN_REGISTER_FACTORY(SharedMemoryVideo)
{
    struct SharedMemoryVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"shmem",10}};
        }
        const char* Description() const override
        {
            return "Stream from posix shared memory";
        }
        ParamSet Params() const override
        {
            return {{
                {"fmt","RGB24","Pixel format: see pixel format help for all possible values"},
                {"size","640x480","Image dimension"}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            const ImageDim dim = uri.Get<ImageDim>("size", ImageDim(0, 0));
            const std::string sfmt = uri.Get<std::string>("fmt", "GRAY8");
            // Validates the format string: PixelFormatFromString throws on
            // unknown formats.
            const PixelFormat fmt = PixelFormatFromString(sfmt);
            (void)fmt;
            const std::string shmem_name = std::string("/") + uri.url;
            std::shared_ptr<SharedMemoryBufferInterface> shmem_buffer =
                open_named_shared_memory_buffer(shmem_name, true);
            if (dim.x == 0 || dim.y == 0 || !shmem_buffer) {
                throw VideoException("invalid shared memory parameters");
            }

            const std::string cond_name = shmem_name + "_cond";
            std::shared_ptr<ConditionVariableInterface> buffer_full =
                open_named_condition_variable(cond_name);

            // Pass the format *string*: the constructor parameter is
            // std::string, not PixelFormat (previously the PixelFormat
            // object itself was passed).
            return std::unique_ptr<VideoInterface>(
                new SharedMemoryVideo(dim.x, dim.y, sfmt, shmem_buffer, buffer_full)
            );
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<SharedMemoryVideoFactory>());
}

}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/shift.cpp
ADDED
@@ -0,0 +1,253 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
 * http://github.com/stevenlovegrove/Pangolin
 *
 * Copyright (c) 2014 Steven Lovegrove
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use,
 * copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

#include <pangolin/video/drivers/shift.h>
#include <pangolin/factory/factory_registry.h>
#include <pangolin/video/iostream_operators.h>
#include <pangolin/video/video.h>

namespace pangolin
{

// Video filter which right-shifts (and optionally masks) 16bit-per-channel
// pixel values down to 8bit per channel. Streams without a configured shift
// are copied through unchanged.
ShiftVideo::ShiftVideo(std::unique_ptr<VideoInterface>& src_,
                       const std::map<size_t, int>& shift_right_bits,
                       const std::map<size_t, uint32_t>& masks)
    : src(std::move(src_)), size_bytes(0), shift_right_bits(shift_right_bits), masks(masks)
{
    if(!src) {
        throw VideoException("ShiftVideo: VideoInterface in must not be null");
    }

    videoin.push_back(src.get());

    // Only 16bit-per-channel input formats can be shifted down to 8bit.
    formats_supported.insert("GRAY16LE");
    formats_supported.insert("RGB48");
    formats_supported.insert("BGR48");
    formats_supported.insert("RGBA64");
    formats_supported.insert("BGRA64");

    for(size_t s=0; s< src->Streams().size(); ++s) {
        const size_t w = src->Streams()[s].Width();
        const size_t h = src->Streams()[s].Height();

        auto i = shift_right_bits.find(s);

        if(i != shift_right_bits.end() && i->second != 0)
        {
            if(formats_supported.count(src->Streams()[s].PixFormat().format) == 0)
            {
                throw VideoException("ShiftVideo: input format is not compatible for shifting.");
            }

            // The formats_supported guard above guarantees exactly one of the
            // branches below assigns out_fmt.
            PixelFormat out_fmt;

            if(src->Streams()[s].PixFormat().format == "GRAY16LE")
            {
                out_fmt = PixelFormatFromString("GRAY8");
            }
            else if(src->Streams()[s].PixFormat().format == "RGB48")
            {
                out_fmt = PixelFormatFromString("RGB24");
            }
            else if(src->Streams()[s].PixFormat().format == "BGR48")
            {
                out_fmt = PixelFormatFromString("BGR24");
            }
            else if(src->Streams()[s].PixFormat().format == "RGBA64")
            {
                out_fmt = PixelFormatFromString("RGBA32");
            }
            else if(src->Streams()[s].PixFormat().format == "BGRA64")
            {
                out_fmt = PixelFormatFromString("BGRA32");
            }

            // Offset within the output frame buffer is the running byte total,
            // stored as a fake pointer per the StreamInfo convention.
            streams.push_back(pangolin::StreamInfo(out_fmt, w, h, w*out_fmt.bpp / 8, reinterpret_cast<uint8_t*>(size_bytes) ));
        }
        else
        {
            // No shift configured for this stream: pass it through unchanged.
            streams.push_back(pangolin::StreamInfo(src->Streams()[s].PixFormat(), w, h, w*src->Streams()[s].PixFormat().bpp / 8, reinterpret_cast<uint8_t*>(size_bytes) ));
        }

        size_bytes += streams.back().SizeBytes();
    }

    // Intermediate buffer to receive the (larger) unshifted input frame.
    buffer.reset(new uint8_t[src->SizeBytes()]);
}

ShiftVideo::~ShiftVideo()
{
}

//! Implement VideoInput::Start()
void ShiftVideo::Start()
{
    videoin[0]->Start();
}

//! Implement VideoInput::Stop()
void ShiftVideo::Stop()
{
    videoin[0]->Stop();
}

//! Implement VideoInput::SizeBytes()
size_t ShiftVideo::SizeBytes() const
{
    return size_bytes;
}

//! Implement VideoInput::Streams()
const std::vector<StreamInfo>& ShiftVideo::Streams() const
{
    return streams;
}

// Per-pixel conversion: clamp each 16bit sample to the sensor's bit depth,
// shift it right, apply the mask, and store the low 8 bits.
void DoShift16to8(
    Image<uint8_t>& out,
    const Image<uint8_t>& in,
    const int shift_right_bits,
    const uint32_t mask,
    const uint16_t maxValBitDepth
) {
    for(size_t r = 0; r < out.h; ++r)
    {
        uint8_t* pout = (uint8_t*)(out.ptr + r*out.pitch);
        uint16_t* pin = (uint16_t*)(in.ptr + r*in.pitch);
        const uint16_t* pin_end = (uint16_t*)(in.ptr + (r+1)*in.pitch);
        while(pin != pin_end) {
            *(pout++) = (std::min(*(pin++), maxValBitDepth) >> shift_right_bits) & mask;
        }
    }
}

// Convert one whole frame: shifted streams go through DoShift16to8,
// unconfigured streams are row-copied verbatim.
void ShiftVideo::Process(uint8_t* buffer_out, const uint8_t* buffer_in)
{
    for(size_t s=0; s<streams.size(); ++s) {
        const Image<uint8_t> img_in  = videoin[0]->Streams()[s].StreamImage(buffer_in);
        Image<uint8_t> img_out = Streams()[s].StreamImage(buffer_out);
        const size_t bytes_per_pixel = Streams()[s].PixFormat().bpp / 8;

        auto i = shift_right_bits.find(s);

        if(i != shift_right_bits.end() && i->second != 0)
        {
            auto m = masks.find(s);
            // Compute the saturation value (2^depth - 1) with exact integer
            // arithmetic instead of std::pow, which returns floating point
            // and silently narrows when converted to uint16_t.
            const uint16_t max_val = static_cast<uint16_t>(
                (1u << videoin[0]->Streams()[s].PixFormat().channel_bit_depth) - 1u);
            DoShift16to8(img_out, img_in, i->second,
                         (m == masks.end() ? 0xffff : m->second),
                         max_val);
        }
        else
        {
            // Straight copy.
            if( img_out.w != img_in.w || img_out.h != img_in.h ) {
                throw std::runtime_error("ShiftVideo: Incompatible image sizes");
            }

            for(size_t y=0; y < img_out.h; ++y) {
                std::memcpy(img_out.RowPtr((int)y), img_in.RowPtr((int)y), bytes_per_pixel * img_in.w);
            }
        }
    }
}

//! Implement VideoInput::GrabNext()
bool ShiftVideo::GrabNext( uint8_t* image, bool wait )
{
    if(videoin[0]->GrabNext(buffer.get(),wait)) {
        Process(image, buffer.get());
        return true;
    }else{
        return false;
    }
}

//! Implement VideoInput::GrabNewest()
bool ShiftVideo::GrabNewest( uint8_t* image, bool wait )
{
    if(videoin[0]->GrabNewest(buffer.get(),wait)) {
        Process(image, buffer.get());
        return true;
    }else{
        return false;
    }
}

std::vector<VideoInterface*>& ShiftVideo::InputStreams()
{
    return videoin;
}

PANGOLIN_REGISTER_FACTORY(ShiftVideo)
{
    struct ShiftVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"shift",10}};
        }
        const char* Description() const override
        {
            return "Video Filter: bitwise shift pixel values.";
        }
        ParamSet Params() const override
        {
            return {{
                {"shift\\d+","0","shiftN, N:[1,streams]. Right shift pixel values."},
                {"mask\\d+","65535","maskN, N:[1,streams]. Bitwise pixel mask (after shift)"},
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            std::map<size_t, int> shift_right_bits;
            std::map<size_t, uint32_t> masks;

            ParamReader reader(Params(), uri);

            // Collect per-stream shiftN / maskN options (1-based in the URI,
            // 0-based in the maps).
            for(size_t i=0; i<100; ++i)
            {
                const std::string shift_key = pangolin::FormatString("shift%",i+1);
                const std::string mask_key = pangolin::FormatString("mask%",i+1);

                if(reader.Contains(shift_key))
                {
                    shift_right_bits[i] = reader.Get<int>(shift_key);
                }

                if(reader.Contains(mask_key))
                {
                    masks[i] = reader.Get<int>(mask_key);
                }
            }

            std::unique_ptr<VideoInterface> subvid = pangolin::OpenVideo(uri.url);
            return std::unique_ptr<VideoInterface>(
                new ShiftVideo(subvid, shift_right_bits, masks)
            );
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<ShiftVideoFactory>());
}

}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/split.cpp
ADDED
@@ -0,0 +1,180 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
 * http://github.com/stevenlovegrove/Pangolin
 *
 * Copyright (c) 2013 Steven Lovegrove
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use,
 * copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

#include <pangolin/video/drivers/split.h>
#include <pangolin/factory/factory_registry.h>
#include <pangolin/video/iostream_operators.h>
#include <pangolin/video/video.h>

#include <exception>

namespace pangolin
{

// Re-interprets the raw frame buffer of an underlying video source as a new
// set of streams (regions of interest, raw memory views, or re-ordered
// streams). The pixel data itself is forwarded untouched.
SplitVideo::SplitVideo(std::unique_ptr<VideoInterface> &src_, const std::vector<StreamInfo>& streams)
    : src(std::move(src_)), streams(streams)
{
    videoin.push_back(src.get());

    // Warn (once) if any requested stream reaches beyond the input buffer.
    for(const StreamInfo& si : streams) {
        if( (size_t)si.Offset() + si.SizeBytes() > src->SizeBytes() ) {
            pango_print_warn("VideoSplitter: stream extends past end of input.\n");
            break;
        }
    }
}

SplitVideo::~SplitVideo()
{
}

// The frame buffer is shared with the input, so its size is unchanged.
size_t SplitVideo::SizeBytes() const
{
    return videoin[0]->SizeBytes();
}

const std::vector<StreamInfo>& SplitVideo::Streams() const
{
    return streams;
}

void SplitVideo::Start()
{
    videoin[0]->Start();
}

void SplitVideo::Stop()
{
    videoin[0]->Stop();
}

// Grabbing is delegated entirely to the input; only the stream metadata differs.
bool SplitVideo::GrabNext( unsigned char* image, bool wait )
{
    return videoin[0]->GrabNext(image, wait);
}

bool SplitVideo::GrabNewest( unsigned char* image, bool wait )
{
    return videoin[0]->GrabNewest(image, wait);
}

std::vector<VideoInterface*>& SplitVideo::InputStreams()
{
    return videoin;
}

PANGOLIN_REGISTER_FACTORY(SplitVideo)
{
    struct SplitVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"split",10}};
        }
        const char* Description() const override
        {
            return "Transforms a set of video streams into a new set by providing region-of-interest, raw memory layout, or stream order specification.";
        }
        ParamSet Params() const override
        {
            return {{
                {"roi\\d+","X+Y+WxH","Region of Interest as WidthxHeight"},
                {"mem\\d+","width,height,pitch,pixelformat*","By default dynamically set from the first stream"},
                {"stream\\d+","0","Integer"}
            }};
        }

        std::unique_ptr<VideoInterface> Open(const Uri& uri) override {
            std::vector<StreamInfo> out_streams;

            std::unique_ptr<VideoInterface> input = pangolin::OpenVideo(uri.url);
            if(input->Streams().empty()) {
                throw VideoException("VideoSplitter input must have at least one stream");
            }

            ParamReader param_reader(Params(), uri);

            // Consume roiN / memN / streamN options in order until the first
            // index with no option at all.
            while(true) {
                const std::string idx = std::to_string(out_streams.size() + 1);
                const std::string roi_key    = "roi" + idx;
                const std::string mem_key    = "mem" + idx;
                const std::string stream_key = "stream" + idx;

                if(uri.Contains(roi_key)) {
                    // Sub-rectangle of the first input stream.
                    const StreamInfo& base = input->Streams()[0];
                    const ImageRoi& roi = param_reader.Get<ImageRoi>(roi_key, ImageRoi());
                    if(roi.w == 0 || roi.h == 0) {
                        throw VideoException("split: empty ROI.");
                    }
                    const size_t byte_offset = roi.y * base.Pitch() + base.PixFormat().bpp * roi.x / 8;
                    out_streams.push_back( StreamInfo( base.PixFormat(), roi.w, roi.h, base.Pitch(), reinterpret_cast<unsigned char*>(byte_offset) ) );
                }else if(uri.Contains(mem_key)) {
                    // Explicit raw memory layout.
                    const StreamInfo& info = param_reader.Get(mem_key, input->Streams()[0]);
                    out_streams.push_back(info);
                }else if(uri.Contains(stream_key)) {
                    // Forward an existing input stream (1-based in the URI).
                    const size_t src_stream = param_reader.Get<size_t>(stream_key) - 1;
                    if(src_stream >= input->Streams().size()) {
                        throw VideoException("split: requesting source stream which does not exist.");
                    }
                    out_streams.push_back(input->Streams()[src_stream]);
                }else{
                    break;
                }
            }

            // No options given: split the first stream in half along its
            // longer dimension.
            if(out_streams.empty()) {
                const StreamInfo& base = input->Streams()[0];
                const size_t subw = base.Width();
                const size_t subh = base.Height();

                ImageRoi region_a, region_b;

                if(subw > subh) {
                    // split horizontally
                    region_a = ImageRoi(0,0, subw/2, subh );
                    region_b = ImageRoi(subw/2,0, subw/2, subh );
                }else{
                    // split vertically
                    region_a = ImageRoi(0,0, subw, subh/2 );
                    region_b = ImageRoi(0,subh/2, subw, subh/2 );
                }

                const size_t offset_a = region_a.y * base.Pitch() + base.PixFormat().bpp * region_a.x / 8;
                const size_t offset_b = region_b.y * base.Pitch() + base.PixFormat().bpp * region_b.x / 8;
                out_streams.push_back( StreamInfo( base.PixFormat(), region_a.w, region_a.h, base.Pitch(), (unsigned char*)(offset_a) ) );
                out_streams.push_back( StreamInfo( base.PixFormat(), region_b.w, region_b.h, base.Pitch(), (unsigned char*)(offset_b) ) );
            }

            return std::unique_ptr<VideoInterface>( new SplitVideo(input, out_streams) );
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<SplitVideoFactory>());
}

}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/teli.cpp
ADDED
@@ -0,0 +1,563 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2015 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <XmlFeatures.h>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/drivers/teli.h>
|
31 |
+
#include <pangolin/video/iostream_operators.h>
|
32 |
+
|
33 |
+
namespace pangolin
|
34 |
+
{
|
35 |
+
|
36 |
+
// Represet lifetime of Teli SDK. Destructed by static deinitialisation.
|
37 |
+
class TeliSystem
|
38 |
+
{
|
39 |
+
public:
|
40 |
+
static TeliSystem& Instance() {
|
41 |
+
static TeliSystem sys;
|
42 |
+
return sys;
|
43 |
+
}
|
44 |
+
|
45 |
+
private:
|
46 |
+
TeliSystem()
|
47 |
+
{
|
48 |
+
Teli::CAM_API_STATUS uiStatus = Teli::Sys_Initialize();
|
49 |
+
if (uiStatus != Teli::CAM_API_STS_SUCCESS && uiStatus != Teli::CAM_API_STS_ALREADY_INITIALIZED) {
|
50 |
+
throw pangolin::VideoException(FormatString("Unable to initialise TeliSDK (%).", uiStatus));
|
51 |
+
}
|
52 |
+
}
|
53 |
+
|
54 |
+
~TeliSystem()
|
55 |
+
{
|
56 |
+
Teli::CAM_API_STATUS uiStatus = Teli::Sys_Terminate();
|
57 |
+
if (uiStatus != Teli::CAM_API_STS_SUCCESS) {
|
58 |
+
pango_print_warn("TeliSDK: Error uninitialising.");
|
59 |
+
}
|
60 |
+
}
|
61 |
+
};
|
62 |
+
|
63 |
+
std::string GetNodeValStr(Teli::CAM_HANDLE cam, Teli::CAM_NODE_HANDLE node, std::string node_str)
|
64 |
+
{
|
65 |
+
Teli::TC_NODE_TYPE node_type;
|
66 |
+
Teli::CAM_API_STATUS st = Teli::Nd_GetType(cam, node, &node_type);
|
67 |
+
if(st != Teli::CAM_API_STS_SUCCESS) {
|
68 |
+
throw std::runtime_error("TeliSDK: Unable to get Teli node type.");
|
69 |
+
}
|
70 |
+
|
71 |
+
Teli::CAM_API_STATUS status;
|
72 |
+
|
73 |
+
switch(node_type) {
|
74 |
+
case Teli::TC_NODE_TYPE_INTEGER:
|
75 |
+
{
|
76 |
+
int64_t val;
|
77 |
+
status = Teli::Nd_GetIntValue(cam, node, &val);
|
78 |
+
if(status == Teli::CAM_API_STS_SUCCESS) {
|
79 |
+
return pangolin::Convert<std::string, int64_t>::Do(val);
|
80 |
+
}
|
81 |
+
break;
|
82 |
+
}
|
83 |
+
case Teli::TC_NODE_TYPE_BOOLEAN:
|
84 |
+
{
|
85 |
+
bool8_t val;
|
86 |
+
status = Teli::Nd_GetBoolValue(cam, node, &val);
|
87 |
+
if(status == Teli::CAM_API_STS_SUCCESS) {
|
88 |
+
return pangolin::Convert<std::string, bool8_t>::Do(val);
|
89 |
+
}
|
90 |
+
break;
|
91 |
+
}
|
92 |
+
case Teli::TC_NODE_TYPE_FLOAT:
|
93 |
+
{
|
94 |
+
float64_t val;
|
95 |
+
status = Teli::Nd_GetFloatValue(cam, node, &val);
|
96 |
+
if(status == Teli::CAM_API_STS_SUCCESS) {
|
97 |
+
return pangolin::Convert<std::string, float64_t>::Do(val);
|
98 |
+
}
|
99 |
+
break;
|
100 |
+
}
|
101 |
+
case Teli::TC_NODE_TYPE_STRING:
|
102 |
+
{
|
103 |
+
uint32_t buffer_size = 10*1024;
|
104 |
+
char* buffer = new char[buffer_size];
|
105 |
+
status = Teli::Nd_GetStrValue(cam, node, buffer, &buffer_size);
|
106 |
+
std::string val(buffer);
|
107 |
+
delete[] buffer;
|
108 |
+
if(status == Teli::CAM_API_STS_SUCCESS) {
|
109 |
+
return val;
|
110 |
+
}
|
111 |
+
break;
|
112 |
+
}
|
113 |
+
case Teli::TC_NODE_TYPE_ENUMERATION:
|
114 |
+
{
|
115 |
+
uint32_t buffer_size = 10*1024;
|
116 |
+
char* buffer = new char[buffer_size];
|
117 |
+
status = Teli::Nd_GetEnumStrValue(cam, node, buffer, &buffer_size);
|
118 |
+
std::string val(buffer);
|
119 |
+
if(status == Teli::CAM_API_STS_SUCCESS) {
|
120 |
+
return val;
|
121 |
+
}
|
122 |
+
break;
|
123 |
+
}
|
124 |
+
case Teli::TC_NODE_TYPE_COMMAND:
|
125 |
+
case Teli::TC_NODE_TYPE_REGISTER:
|
126 |
+
case Teli::TC_NODE_TYPE_CATEGORY:
|
127 |
+
case Teli::TC_NODE_TYPE_ENUM_ENTRY:
|
128 |
+
case Teli::TC_NODE_TYPE_PORT:
|
129 |
+
default:
|
130 |
+
throw VideoException(FormatString("TeliSDK: Unsupported node_type: %", node_type));
|
131 |
+
}
|
132 |
+
|
133 |
+
if(status != Teli::CAM_API_STS_SUCCESS) {
|
134 |
+
Teli::CAM_GENICAM_ERR_MSG psErrMsg;
|
135 |
+
Teli::Misc_GetLastGenICamError(&psErrMsg);
|
136 |
+
throw VideoException("TeliSDK: Unable to get Teli parameter, " + node_str, psErrMsg.pszDescription);
|
137 |
+
}else{
|
138 |
+
throw VideoException("TeliSDK: Unable to get Teli parameter, " + node_str);
|
139 |
+
}
|
140 |
+
}
|
141 |
+
|
142 |
+
/// Write a string-encoded value to a GenICam node, converting to the node's
/// native type. For command nodes the value is ignored: the command is
/// executed and polled for completion.
/// @param cam      open camera handle
/// @param node     node handle previously obtained via Nd_GetNode
/// @param node_str node name, used for error reporting
/// @param val_str  value to write, encoded as a string
/// @throws VideoException on unsupported node types or write failures
void TeliVideo::SetNodeValStr(Teli::CAM_HANDLE cam, Teli::CAM_NODE_HANDLE node, std::string node_str, std::string val_str)
{
    Teli::TC_NODE_TYPE node_type;
    Teli::CAM_API_STATUS st = Teli::Nd_GetType(cam, node, &node_type);
    if(st != Teli::CAM_API_STS_SUCCESS) {
        throw VideoException("TeliSDK: Unable to get Teli node type.");
    }

    Teli::CAM_API_STATUS status = Teli::CAM_API_STS_SUCCESS;

    switch(node_type) {
    case Teli::TC_NODE_TYPE_INTEGER:
    {
        const int64_t val = pangolin::Convert<int64_t, std::string>::Do(val_str);
        status = Teli::Nd_SetIntValue(cam, node, val);
        break;
    }
    case Teli::TC_NODE_TYPE_BOOLEAN:
    {
        const bool8_t val = pangolin::Convert<bool8_t, std::string>::Do(val_str);
        status = Teli::Nd_SetBoolValue(cam, node, val);
        break;
    }
    case Teli::TC_NODE_TYPE_FLOAT:
    {
        const float64_t val = pangolin::Convert<float64_t, std::string>::Do(val_str);
        status = Teli::Nd_SetFloatValue(cam, node, val);
        break;
    }
    case Teli::TC_NODE_TYPE_STRING:
    {
        status = Teli::Nd_SetStrValue(cam, node, val_str.c_str());
        break;
    }
    case Teli::TC_NODE_TYPE_ENUMERATION:
    {
        status = Teli::Nd_SetEnumStrValue(cam, node, val_str.c_str());
        break;
    }
    case Teli::TC_NODE_TYPE_COMMAND:
    {
        status = Teli::Nd_CmdExecute(cam, node, true);

        if (status != Teli::CAM_API_STS_SUCCESS) {
            pango_print_error("TeliVideo: Nd_CmdExecute returned error, %u", status);
            break;
        }

        // Poll until the camera reports the command as complete.
        bool done = false;
        for(int attempts=20; attempts > 0; --attempts) {
            status = Teli::Nd_GetCmdIsDone(cam, node, &done);
            if (status != Teli::CAM_API_STS_SUCCESS) {
                pango_print_error("TeliVideo: Nd_GetCmdIsDone returned error, %u", status);
                break;
            }
            if(done) break;
        }

        // BUGFIX: the original printed the timeout message unconditionally,
        // even when the command completed successfully.
        if(!done) {
            pango_print_error("Timeout while waiting for command %s done\n", node_str.c_str());
        }
        break;
    }
    case Teli::TC_NODE_TYPE_REGISTER:
    case Teli::TC_NODE_TYPE_CATEGORY:
    case Teli::TC_NODE_TYPE_ENUM_ENTRY:
    case Teli::TC_NODE_TYPE_PORT:
    default:
        // BUGFIX: the original wrote ("literal" + node_type), which is pointer
        // arithmetic on a string literal (undefined behaviour / garbage
        // message), not concatenation. Format the value instead.
        throw VideoException(FormatString("TeliSDK: Unsupported node_type: %", node_type));
    }

    if(status != Teli::CAM_API_STS_SUCCESS) {
        Teli::CAM_GENICAM_ERR_MSG psErrMsg;
        Teli::Misc_GetLastGenICamError(&psErrMsg);
        throw VideoException("TeliSDK: Unable to set Teli parameter, " + node_str, psErrMsg.pszDescription);
    }
}
|
219 |
+
|
220 |
+
/// Open a Teli camera selected by URI parameters.
/// Recognised keys: "model", "sn", "idx" select the device; "size", "pos",
/// "roi" map to GenICam Width/Height/OffsetX/OffsetY; everything else is
/// forwarded verbatim as a device parameter.
/// @throws VideoException if enumeration or open fails
TeliVideo::TeliVideo(const Params& p)
    : cam(0), strm(0), hStrmCmpEvt(0), transfer_bandwidth_gbps(0), exposure_us(0)
{
    // Ensure the SDK is initialised for the lifetime of the process.
    TeliSystem::Instance();

    uint32_t num_cams = 0;
    if (Teli::Sys_GetNumOfCameras(&num_cams) != Teli::CAM_API_STS_SUCCESS) {
        throw pangolin::VideoException("Unable to enumerate TeliSDK cameras.");
    }
    if (num_cams == 0) {
        throw pangolin::VideoException("No TeliSDK Cameras available.");
    }

    // Device selectors (empty / 0 means "not specified").
    std::string sn;
    std::string mn;
    int cam_index = 0;

    // Parameters to forward to the device after it is opened.
    Params device_params;

    for (const auto& kv : p.params) {
        const std::string& key = kv.first;
        if (key == "model") {
            mn = kv.second;
        } else if (key == "sn") {
            sn = kv.second;
        } else if (key == "idx") {
            cam_index = p.Get<int>("idx", 0);
        } else if (key == "size") {
            const ImageDim dim = p.Get<ImageDim>("size", ImageDim(0,0) );
            device_params.Set("Width" , dim.x);
            device_params.Set("Height" , dim.y);
        } else if (key == "pos") {
            const ImageDim pos = p.Get<ImageDim>("pos", ImageDim(0,0) );
            device_params.Set("OffsetX" , pos.x);
            device_params.Set("OffsetY" , pos.y);
        } else if (key == "roi") {
            const ImageRoi roi = p.Get<ImageRoi>("roi", ImageRoi(0,0,0,0) );
            device_params.Set("Width" , roi.w);
            device_params.Set("Height" , roi.h);
            device_params.Set("OffsetX", roi.x);
            device_params.Set("OffsetY", roi.y);
        } else {
            device_params.Set(key, kv.second);
        }
    }

    Teli::CAM_API_STATUS uiStatus;
    if (sn.empty() && mn.empty()) {
        // No selector given: open by index.
        uiStatus = Teli::Cam_Open(cam_index, &cam, 0, true, 0);
    } else {
        // Open by serial number and/or model name.
        uiStatus = Teli::Cam_OpenFromInfo(
            (sn.empty() ? 0 : sn.c_str()),
            (mn.empty() ? 0 : mn.c_str()),
            0, &cam, 0, true, 0
        );
    }
    if (uiStatus != Teli::CAM_API_STS_SUCCESS) {
        throw pangolin::VideoException(FormatString("TeliSDK: Error opening camera, sn='%'", sn));
    }

    SetDeviceParams(device_params);
    Initialise();
}
|
281 |
+
|
282 |
+
bool TeliVideo::GetParameter(const std::string& name, std::string& result)
|
283 |
+
{
|
284 |
+
Teli::CAM_NODE_HANDLE node;
|
285 |
+
Teli::CAM_API_STATUS st = Teli::Nd_GetNode(cam, name.c_str(), &node);
|
286 |
+
if( st == Teli::CAM_API_STS_SUCCESS) {
|
287 |
+
result = GetNodeValStr(cam, node, name);
|
288 |
+
return true;
|
289 |
+
}else{
|
290 |
+
pango_print_warn("TeliSDK: Unable to get reference to node: %s", name.c_str());
|
291 |
+
return false;
|
292 |
+
}
|
293 |
+
}
|
294 |
+
|
295 |
+
bool TeliVideo::SetParameter(const std::string& name, const std::string& value)
|
296 |
+
{
|
297 |
+
Teli::CAM_NODE_HANDLE node;
|
298 |
+
Teli::CAM_API_STATUS st = Teli::Nd_GetNode(cam, name.c_str(), &node);
|
299 |
+
if( st == Teli::CAM_API_STS_SUCCESS) {
|
300 |
+
SetNodeValStr(cam, node, name, value);
|
301 |
+
return true;
|
302 |
+
}else{
|
303 |
+
pango_print_warn("TeliSDK: Unable to get reference to node: %s", name.c_str());
|
304 |
+
return false;
|
305 |
+
}
|
306 |
+
}
|
307 |
+
|
308 |
+
void TeliVideo::Initialise()
|
309 |
+
{
|
310 |
+
Teli::CAM_API_STATUS uiStatus = Teli::CAM_API_STS_SUCCESS;
|
311 |
+
|
312 |
+
// Create completion event object for stream.
|
313 |
+
#ifdef _WIN_
|
314 |
+
hStrmCmpEvt = CreateEvent(NULL, FALSE, FALSE, NULL);
|
315 |
+
if (hStrmCmpEvt == NULL)
|
316 |
+
throw pangolin::VideoException("TeliSDK: Error creating event.");
|
317 |
+
#endif
|
318 |
+
#ifdef _LINUX_
|
319 |
+
uiStatus = Teli::Sys_CreateSignal(&hStrmCmpEvt);
|
320 |
+
if (uiStatus != Teli::CAM_API_STS_SUCCESS)
|
321 |
+
throw pangolin::VideoException("TeliSDK: Error creating event.");
|
322 |
+
#endif
|
323 |
+
uint32_t uiPyldSize = 0;
|
324 |
+
uiStatus = Teli::Strm_OpenSimple(cam, &strm, &uiPyldSize, hStrmCmpEvt);
|
325 |
+
if (uiStatus != Teli::CAM_API_STS_SUCCESS)
|
326 |
+
throw pangolin::VideoException("TeliSDK: Error opening camera stream.");
|
327 |
+
|
328 |
+
// Read pixel format
|
329 |
+
PixelFormat pfmt;
|
330 |
+
Teli::CAM_PIXEL_FORMAT teli_fmt;
|
331 |
+
uiStatus = Teli::GetCamPixelFormat(cam, &teli_fmt);
|
332 |
+
if (uiStatus != Teli::CAM_API_STS_SUCCESS)
|
333 |
+
throw pangolin::VideoException("TeliSDK: Error calling GetCamPixelFormat.");
|
334 |
+
|
335 |
+
switch( teli_fmt) {
|
336 |
+
case Teli::PXL_FMT_Mono8:
|
337 |
+
case Teli::PXL_FMT_BayerGR8:
|
338 |
+
case Teli::PXL_FMT_BayerBG8:
|
339 |
+
pfmt = pangolin::PixelFormatFromString("GRAY8");
|
340 |
+
break;
|
341 |
+
case Teli::PXL_FMT_Mono10:
|
342 |
+
case Teli::PXL_FMT_Mono12:
|
343 |
+
case Teli::PXL_FMT_Mono16:
|
344 |
+
case Teli::PXL_FMT_BayerGR10:
|
345 |
+
case Teli::PXL_FMT_BayerGR12:
|
346 |
+
case Teli::PXL_FMT_BayerBG10:
|
347 |
+
case Teli::PXL_FMT_BayerBG12:
|
348 |
+
pfmt = pangolin::PixelFormatFromString("GRAY16LE");
|
349 |
+
break;
|
350 |
+
case Teli::PXL_FMT_RGB8:
|
351 |
+
pfmt = pangolin::PixelFormatFromString("RGB24");
|
352 |
+
break;
|
353 |
+
case Teli::PXL_FMT_BGR8:
|
354 |
+
pfmt = pangolin::PixelFormatFromString("BGR24");
|
355 |
+
break;
|
356 |
+
default:
|
357 |
+
throw std::runtime_error("TeliSDK: Unknown pixel format: " + ToString<int>(teli_fmt) );
|
358 |
+
}
|
359 |
+
|
360 |
+
size_bytes = 0;
|
361 |
+
|
362 |
+
// Use width and height reported by camera
|
363 |
+
uint32_t w = 0;
|
364 |
+
uint32_t h = 0;
|
365 |
+
if( Teli::GetCamWidth(cam, &w) != Teli::CAM_API_STS_SUCCESS || Teli::GetCamHeight(cam, &h) != Teli::CAM_API_STS_SUCCESS) {
|
366 |
+
throw pangolin::VideoException("TeliSDK: Unable to establish stream dimensions.");
|
367 |
+
}
|
368 |
+
|
369 |
+
const int n = 1;
|
370 |
+
for(size_t c=0; c < n; ++c) {
|
371 |
+
const StreamInfo stream_info(pfmt, w, h, (w*pfmt.bpp) / 8, 0);
|
372 |
+
streams.push_back(stream_info);
|
373 |
+
size_bytes += uiPyldSize;
|
374 |
+
}
|
375 |
+
|
376 |
+
InitPangoDeviceProperties();
|
377 |
+
}
|
378 |
+
|
379 |
+
void TeliVideo::InitPangoDeviceProperties()
|
380 |
+
{
|
381 |
+
|
382 |
+
Teli::CAM_INFO info;
|
383 |
+
Teli::Cam_GetInformation(cam, 0, &info);
|
384 |
+
|
385 |
+
// Store camera details in device properties
|
386 |
+
device_properties["SerialNumber"] = std::string(info.szSerialNumber);
|
387 |
+
device_properties["VendorName"] = std::string(info.szManufacturer);
|
388 |
+
device_properties["ModelName"] = std::string(info.szModelName);
|
389 |
+
device_properties["ManufacturerInfo"] = std::string(info.sU3vCamInfo.szManufacturerInfo);
|
390 |
+
device_properties["Version"] = std::string(info.sU3vCamInfo.szDeviceVersion);
|
391 |
+
device_properties[PANGO_HAS_TIMING_DATA] = true;
|
392 |
+
|
393 |
+
// TODO: Enumerate other settings.
|
394 |
+
}
|
395 |
+
|
396 |
+
void TeliVideo::SetDeviceParams(const Params& p)
|
397 |
+
{
|
398 |
+
for(Params::ParamMap::const_iterator it = p.params.begin(); it != p.params.end(); it++) {
|
399 |
+
if(it->first == "transfer_bandwidth_gbps") {
|
400 |
+
transfer_bandwidth_gbps = atof(it->second.c_str());
|
401 |
+
} else {
|
402 |
+
try{
|
403 |
+
if (it->second == "Execute") {
|
404 |
+
//
|
405 |
+
std::runtime_error("TeliSDK: Execution commands not yet supported.");
|
406 |
+
} else {
|
407 |
+
SetParameter(it->first, it->second);
|
408 |
+
}
|
409 |
+
}catch(std::exception& e) {
|
410 |
+
std::cerr << e.what() << std::endl;
|
411 |
+
}
|
412 |
+
}
|
413 |
+
}
|
414 |
+
}
|
415 |
+
|
416 |
+
/// Close the stream and the camera, warning (never throwing) on failure.
TeliVideo::~TeliVideo()
{
    if (Teli::Strm_Close(strm) != Teli::CAM_API_STS_SUCCESS) {
        pango_print_warn("TeliSDK: Error closing camera stream.");
    }

    if (Teli::Cam_Close(cam) != Teli::CAM_API_STS_SUCCESS) {
        pango_print_warn("TeliSDK: Error closing camera.");
    }
}
|
426 |
+
|
427 |
+
//! Implement VideoInput::Start()
|
428 |
+
void TeliVideo::Start()
|
429 |
+
{
|
430 |
+
Teli::CAM_API_STATUS uiStatus = Teli::Strm_Start(strm);
|
431 |
+
if (uiStatus != Teli::CAM_API_STS_SUCCESS)
|
432 |
+
throw pangolin::VideoException("TeliSDK: Error starting stream.");
|
433 |
+
}
|
434 |
+
|
435 |
+
//! Implement VideoInput::Stop()
|
436 |
+
void TeliVideo::Stop()
|
437 |
+
{
|
438 |
+
Teli::CAM_API_STATUS uiStatus = Teli::Strm_Stop(strm);
|
439 |
+
if (uiStatus != Teli::CAM_API_STS_SUCCESS)
|
440 |
+
throw pangolin::VideoException("TeliSDK: Error stopping stream.");
|
441 |
+
}
|
442 |
+
|
443 |
+
//! Implement VideoInput::SizeBytes()
|
444 |
+
size_t TeliVideo::SizeBytes() const
|
445 |
+
{
|
446 |
+
return size_bytes;
|
447 |
+
}
|
448 |
+
|
449 |
+
//! Implement VideoInput::Streams()
|
450 |
+
const std::vector<StreamInfo>& TeliVideo::Streams() const
|
451 |
+
{
|
452 |
+
return streams;
|
453 |
+
}
|
454 |
+
|
455 |
+
/// Estimate the mid-exposure capture time by subtracting half the exposure
/// and the estimated USB transfer time from the host reception timestamp.
/// Only active when the user supplied "transfer_bandwidth_gbps".
void TeliVideo::PopulateEstimatedCenterCaptureTime(basetime host_reception_time)
{
    if(transfer_bandwidth_gbps > 0.0) {
        // Gbps * 1e3 = Mbit/s; /8 = bytes per microsecond.
        // BUGFIX: the original truncated the divisor to int64_t, which both
        // lost precision and could become zero (division by zero) for
        // bandwidths below 8 Mbit/s. Use floating-point division instead.
        const double bytes_per_us = (transfer_bandwidth_gbps * 1E3) / 8.0;
        const double transfer_time_us = size_bytes / bytes_per_us;
        frame_properties[PANGO_ESTIMATED_CENTER_CAPTURE_TIME_US] = picojson::value(int64_t(pangolin::Time_us(host_reception_time) - (exposure_us/2.0) - transfer_time_us));
    }
}
|
462 |
+
|
463 |
+
//! Implement VideoInput::GrabNext()
//! Wait (up to 2s) for the stream-completion event, then copy the current
//! image into `image` and stamp frame timing metadata. Returns false on
//! timeout or read failure. Note: the opening brace of the success branch
//! lives inside the platform-specific #ifdef blocks below.
bool TeliVideo::GrabNext(unsigned char* image, bool /*wait*/)
{
#ifdef _WIN_
    unsigned int uiRet = WaitForSingleObject(hStrmCmpEvt, 2000);
    if (uiRet == WAIT_OBJECT_0) {
#endif
#ifdef _LINUX_
    unsigned int uiRet = Teli::Sys_WaitForSignal(hStrmCmpEvt, 2000);
    if (uiRet == Teli::CAM_API_STS_SUCCESS) {
#endif
        Teli::CAM_IMAGE_INFO sImageInfo;
        uint32_t uiPyldSize = (uint32_t)size_bytes;
        Teli::CAM_API_STATUS uiStatus = Teli::Strm_ReadCurrentImage(strm, image, &uiPyldSize, &sImageInfo);
        // Record timing metadata for this frame. The camera timestamp is in
        // nanoseconds; convert to microseconds for the PANGO convention.
        frame_properties[PANGO_EXPOSURE_US] = picojson::value(exposure_us);
        frame_properties[PANGO_CAPTURE_TIME_US] = picojson::value(sImageInfo.ullTimestamp/1000);
        basetime now = pangolin::TimeNow();
        frame_properties[PANGO_HOST_RECEPTION_TIME_US] = picojson::value(pangolin::Time_us(now));
        // Derived estimate; no-op unless transfer_bandwidth_gbps was set.
        PopulateEstimatedCenterCaptureTime(now);
        return (uiStatus == Teli::CAM_API_STS_SUCCESS);
    }

    // Event wait timed out: no frame available.
    return false;
}
|
486 |
+
|
487 |
+
//! Implement VideoInput::GrabNewest()
|
488 |
+
bool TeliVideo::GrabNewest(unsigned char* image, bool wait)
|
489 |
+
{
|
490 |
+
return GrabNext(image,wait);
|
491 |
+
}
|
492 |
+
|
493 |
+
//! Returns number of available frames
|
494 |
+
uint32_t TeliVideo::AvailableFrames() const
|
495 |
+
{
|
496 |
+
uint32_t puiCount = 0;
|
497 |
+
Teli::CAM_API_STATUS uiStatus = Teli::GetCamImageBufferFrameCount(cam, &puiCount);
|
498 |
+
if (uiStatus != Teli::CAM_API_STS_SUCCESS)
|
499 |
+
throw pangolin::VideoException("TeliSDK: Error reading frame buffer frame count.");
|
500 |
+
return puiCount;
|
501 |
+
}
|
502 |
+
|
503 |
+
//! Drops N frames in the queue starting from the oldest.
//! Returns false if fewer than n frames are available within the timeout.
//! Note: the opening brace of each drop iteration lives inside the
//! platform-specific #ifdef blocks below (same pattern as GrabNext).
bool TeliVideo::DropNFrames(uint32_t n)
{
    for (uint32_t i=0;i<n;++i) {
#ifdef _WIN_
        unsigned int uiRet = WaitForSingleObject(hStrmCmpEvt, 2000);
        if (uiRet == WAIT_OBJECT_0) {
#endif
#ifdef _LINUX_
        unsigned int uiRet = Teli::Sys_WaitForSignal(hStrmCmpEvt, 2000);
        if (uiRet == Teli::CAM_API_STS_SUCCESS) {
#endif
            // Read with a null destination and zero payload size to consume
            // (discard) the frame without copying pixel data.
            Teli::CAM_IMAGE_INFO sImageInfo;
            uint32_t uiPyldSize = 0 ;
            Teli::Strm_ReadCurrentImage(strm, 0, &uiPyldSize, &sImageInfo);
        } else {
            return false;
        }
    }
    return true;
}
|
525 |
+
|
526 |
+
//! Access JSON properties of device
|
527 |
+
const picojson::value& TeliVideo::DeviceProperties() const
|
528 |
+
{
|
529 |
+
return device_properties;
|
530 |
+
}
|
531 |
+
|
532 |
+
//! Access JSON properties of most recently captured frame
|
533 |
+
const picojson::value& TeliVideo::FrameProperties() const
|
534 |
+
{
|
535 |
+
return frame_properties;
|
536 |
+
}
|
537 |
+
|
538 |
+
// Register the Teli driver with Pangolin's video factory under the
// "teli" and "u3v" URI schemes.
PANGOLIN_REGISTER_FACTORY(TeliVideo)
{
    struct TeliVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        const char* Description() const override
        {
            return "Uses Toshiba TeliCam library to open u3v camera.";
        }
        std::map<std::string,Precedence> Schemes() const override
        {
            // Prefer this driver strongly for teli://, weakly for u3v://.
            return {{"teli",10}, {"u3v",5}};
        }
        ParamSet Params() const override
        {
            return {{
                {"*","Enumerates arguments dynamically from camera. Use native u3v properties."}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override
        {
            return std::unique_ptr<VideoInterface>(new TeliVideo(uri));
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<TeliVideoFactory>());
}
|
562 |
+
|
563 |
+
}
|
third-party/DPVO/Pangolin/components/pango_video/src/drivers/test.cpp
ADDED
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* This file is part of the Pangolin Project.
|
2 |
+
* http://github.com/stevenlovegrove/Pangolin
|
3 |
+
*
|
4 |
+
* Copyright (c) 2013 Steven Lovegrove
|
5 |
+
*
|
6 |
+
* Permission is hereby granted, free of charge, to any person
|
7 |
+
* obtaining a copy of this software and associated documentation
|
8 |
+
* files (the "Software"), to deal in the Software without
|
9 |
+
* restriction, including without limitation the rights to use,
|
10 |
+
* copy, modify, merge, publish, distribute, sublicense, and/or sell
|
11 |
+
* copies of the Software, and to permit persons to whom the
|
12 |
+
* Software is furnished to do so, subject to the following
|
13 |
+
* conditions:
|
14 |
+
*
|
15 |
+
* The above copyright notice and this permission notice shall be
|
16 |
+
* included in all copies or substantial portions of the Software.
|
17 |
+
*
|
18 |
+
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
19 |
+
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
20 |
+
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
21 |
+
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
22 |
+
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
23 |
+
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
24 |
+
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
25 |
+
* OTHER DEALINGS IN THE SOFTWARE.
|
26 |
+
*/
|
27 |
+
|
28 |
+
#include <pangolin/video/drivers/test.h>
|
29 |
+
#include <pangolin/factory/factory_registry.h>
|
30 |
+
#include <pangolin/video/iostream_operators.h>
|
31 |
+
|
32 |
+
namespace pangolin
|
33 |
+
{
|
34 |
+
|
35 |
+
// Fill `arr` with `size` pseudo-random bytes in [0, 255], drawn from the
// global rand() stream (deterministic for a given srand() seed).
void setRandomData(unsigned char * arr, size_t size){
    for(size_t idx = 0; idx < size; ++idx) {
        arr[idx] = static_cast<unsigned char>(rand() / (RAND_MAX / 255.0));
    }
}
|
41 |
+
|
42 |
+
/// Construct a synthetic video source with `n` identical streams of
/// dimension w x h in pixel format `pix_fmt`.
TestVideo::TestVideo(size_t w, size_t h, size_t n, std::string pix_fmt)
{
    const PixelFormat pfmt = PixelFormatFromString(pix_fmt);
    const size_t pitch = (w*pfmt.bpp)/8;

    size_bytes = 0;
    for(size_t s=0; s < n; ++s) {
        // All streams share format and geometry; offsets start at 0.
        streams.push_back(StreamInfo(pfmt, w, h, pitch, 0));
        size_bytes += h*pitch;
    }
}
|
54 |
+
|
55 |
+
// No resources to release: TestVideo owns only value-type members.
TestVideo::~TestVideo()
{
}
|
59 |
+
|
60 |
+
//! Implement VideoInput::Start()
|
61 |
+
void TestVideo::Start()
|
62 |
+
{
|
63 |
+
|
64 |
+
}
|
65 |
+
|
66 |
+
//! Implement VideoInput::Stop()
|
67 |
+
void TestVideo::Stop()
|
68 |
+
{
|
69 |
+
|
70 |
+
}
|
71 |
+
|
72 |
+
//! Implement VideoInput::SizeBytes()
|
73 |
+
size_t TestVideo::SizeBytes() const
|
74 |
+
{
|
75 |
+
return size_bytes;
|
76 |
+
}
|
77 |
+
|
78 |
+
//! Implement VideoInput::Streams()
|
79 |
+
const std::vector<StreamInfo>& TestVideo::Streams() const
|
80 |
+
{
|
81 |
+
return streams;
|
82 |
+
}
|
83 |
+
|
84 |
+
//! Implement VideoInput::GrabNext()
|
85 |
+
bool TestVideo::GrabNext( unsigned char* image, bool /*wait*/ )
|
86 |
+
{
|
87 |
+
setRandomData(image, size_bytes);
|
88 |
+
return true;
|
89 |
+
}
|
90 |
+
|
91 |
+
//! Implement VideoInput::GrabNewest()
|
92 |
+
bool TestVideo::GrabNewest( unsigned char* image, bool wait )
|
93 |
+
{
|
94 |
+
return GrabNext(image,wait);
|
95 |
+
}
|
96 |
+
|
97 |
+
// Register the synthetic noise source with Pangolin's video factory under
// the "test" URI scheme.
PANGOLIN_REGISTER_FACTORY(TestVideo)
{
    struct TestVideoFactory final : public TypedFactoryInterface<VideoInterface> {
        const char* Description() const override
        {
            return "A test video feed with pixel-wise white noise.";
        }
        std::map<std::string,Precedence> Schemes() const override
        {
            return {{"test",10}};
        }
        ParamSet Params() const override
        {
            return {{
                {"size","640x480","Image dimension"},
                {"n","1","Number of streams"},
                {"fmt","RGB24","Pixel format: see pixel format help for all possible values"}
            }};
        }
        std::unique_ptr<VideoInterface> Open(const Uri& uri) override
        {
            // Parse URI options against the declared parameter set.
            ParamReader reader(Params(), uri);
            const ImageDim dim = reader.Get<ImageDim>("size");
            const int num_streams = reader.Get<int>("n");
            const std::string fmt = reader.Get<std::string>("fmt");
            return std::unique_ptr<VideoInterface>(new TestVideo(dim.x, dim.y, num_streams, fmt));
        }
    };

    return FactoryRegistry::I()->RegisterFactory<VideoInterface>(std::make_shared<TestVideoFactory>());
}
|
127 |
+
|
128 |
+
}
|