Merge pull request #5 from peterbraden/master

Merge latest master changes from main node-opencv repo
This commit is contained in:
Edgar O Silva 2014-10-21 13:34:43 -05:00
commit cf11a2100a
65 changed files with 4372 additions and 1900 deletions

1
.gitignore vendored
View File

@ -6,3 +6,4 @@ npm-debug.log
out*.jpg
out*.png
examples/*.avi
examples/tmp/*

View File

@ -1,8 +1,7 @@
language: node_js
node_js:
- "0.6"
- "0.8"
- "0.10"
- "0.11"
before_install:
- sudo apt-get update

73
CHANGELOG.md Normal file
View File

@ -0,0 +1,73 @@
# Changelog
### 1.0.0
Ok, let's do a proper semver release :)
The big news in this release is that thanks to some amazing work by
@kaosat-dev, node-opencv now works with node 0.11.
There's also some general tidying up, including the examples by marcbachmann
Thanks all!
#### 0.7.0
Matrix constructors, and contour access from @oskardahlberg and @emallson.
#### 0.6.0
Many updates from the community, thank you to all.
Especially: @oskardahlberg, @salmanulhaq, @jcao75, @psayre23, @jhludwig
, @coolblade, @ytham, @morganrallen and anyone I inadvertantly missed.
#### 0.5.0
Lots more opencv functions added, and bugfixes from a large number of
contributors. Thanks to all of them!
- Allow args for `HoughLinesP` by @clkao in #112
- `matchTemplate` and `minMaxLoc` by @ytham in #108
- updated `blockingWaitKey` by @hybridgroup in #98
#### 0.0.13 -> 0.4.0
( missing description... )
#### 0.0.13
- V Early support for face recognition - API is _likely_ to change. Have fun!
- *API Change*: VideoCapture.read now calls callback(err, im) instead of callback(im)
#### 0.0.12
- Matrix clone()
- NamedWindow Support
#### 0.0.11
- Bug Fixes
- ImageStream becomes ImageDataStream, and new ImageStream allows multiple images to be
streamed as matrices, for example, with an object detection stream.
- @ryansouza improved documentation
- Correcting matrix constructor (thanks @gluxon)
- @Michael Smith expanded Contours functionality.
Thanks all!
#### 0.0.10
- Bug Fixes
- @Contra added code that allows thickness and color args for ellipse
- Camshift Support
- @jtlebi added bindings for erode, gaussianBlur, arcLength, approxPolyDP, isConvex, cornerCount
- @gluxon added bindings for inRange
Thanks everyone!
#### 0.0.9
- toBuffer can now take a callback and be run async (re #21)

View File

@ -3,13 +3,17 @@
[![Build Status](https://secure.travis-ci.org/peterbraden/node-opencv.png)](http://travis-ci.org/peterbraden/node-opencv)
[OpenCV](http://opencv.willowgarage.com/wiki/) bindings for Node.js. OpenCV is the defacto computer vision library - by interfacing with it natively in node, we get powerful real time vision in js.
[OpenCV](http://opencv.willowgarage.com/wiki/) bindings for Node.js. OpenCV is
the defacto computer vision library - by interfacing with it natively in node,
we get powerful real time vision in js.
People are using node-opencv to fly control quadrocoptors, detect faces from webcam images and annotate video streams. If you're using it for something cool, I'd love to hear about it!
People are using node-opencv to fly control quadrocoptors, detect faces from
webcam images and annotate video streams. If you're using it for something
cool, I'd love to hear about it!
## Install
You'll need OpenCV 2.3.1 installed.
You'll need OpenCV 2.3.1 or newer installed before installing node-opencv.
Then:
@ -17,18 +21,12 @@ Then:
$ npm install opencv
```
Or to build the repo:
```bash
$ node-gyp rebuild
```
## Examples
### Face Detection
```javascript
cv.readImage("./examples/test.jpg", function(err, im){
cv.readImage("./examples/files/mona.png", function(err, im){
im.detectObject(cv.FACE_CASCADE, {}, function(err, faces){
for (var i=0;i<faces.length; i++){
var x = faces[i]
@ -62,11 +60,11 @@ new Matrix(height, width)
Or you can use opencv to read in image files. Supported formats are in the OpenCV docs, but jpgs etc are supported.
```javascript
cv.readImage(filename, function(mat){
cv.readImage(filename, function(err, mat){
...
})
cv.readImage(buffer, function(mat){
cv.readImage(buffer, function(err, mat){
...
})
```
@ -80,7 +78,7 @@ s.on('load', function(matrix){
...
})
fs.createReadStream('./examples/test.jpg').pipe(s);
fs.createReadStream('./examples/files/mona.png').pipe(s);
```
If however, you have a series of images, and you wish to stream them into a
@ -202,41 +200,6 @@ contours.convexHull(index, clockwise);
```
## MIT License
The library is distributed under the MIT License - if for some reason that
The library is distributed under the MIT License - if for some reason that
doesn't work for you please get in touch.
## Changelog
#### 0.0.13
- V Early support for face recognition - API is _likely_ to change. Have fun!
- *API Change*: VideoCapture.read now calls callback(err, im) instead of callback(im)
#### 0.0.12
- Matrix clone()
- NamedWindow Support
#### 0.0.11
- Bug Fixes
- ImageStream becomes ImageDataStream, and new ImageStream allows multiple images to be
streamed as matrices, for example, with an object detection stream.
- @ryansouza improved documentation
- Correcting matrix constructor (thanks @gluxon)
- @Michael Smith expanded Contours functionality.
Thanks all!
#### 0.0.10
- Bug Fixes
- @Contra added code that allows thickness and color args for ellipse
- Camshift Support
- @jtlebi added bindings for erode, gaussianBlur, arcLength, approxPolyDP, isConvex, cornerCount
- @gluxon added bindings for inRange
Thanks everyone!
#### 0.0.9
- toBuffer can now take a callback and be run async (re #21)

View File

@ -1,7 +1,7 @@
{
"targets": [{
"targets": [{
"target_name": "opencv"
, "sources": [
, "sources": [
"src/init.cc"
, "src/Matrix.cc"
, "src/OpenCV.cc"
@ -12,16 +12,19 @@
, "src/CamShift.cc"
, "src/HighGUI.cc"
, "src/FaceRecognizer.cc"
, "src/BackgroundSubtractor.cc"
, "src/Constants.cc"
]
, 'libraries': [
'<!@(pkg-config --libs opencv)'
]
# For windows
,'include_dirs': [
'<!@(pkg-config --cflags opencv)'
'<!@(pkg-config --cflags opencv)',
"<!(node -e \"require('nan')\")"
]
, 'cflags': [
'<!@(pkg-config --cflags "opencv >= 2.3.1" )'
, '-Wall'
@ -41,9 +44,8 @@
, "GCC_ENABLE_CPP_RTTI": "YES"
, "GCC_ENABLE_CPP_EXCEPTIONS": "YES"
}
}]
}]
]
}]
}

View File

@ -0,0 +1,839 @@
<?xml version="1.0"?>
<opencv_storage>
<cascade>
<stageType>BOOST</stageType>
<featureType>HOG</featureType>
<height>24</height>
<width>48</width>
<stageParams>
<boostType>GAB</boostType>
<minHitRate>9.9500000476837158e-01</minHitRate>
<maxFalseAlarm>5.0000000000000000e-01</maxFalseAlarm>
<weightTrimRate>9.4999999999999996e-01</weightTrimRate>
<maxDepth>1</maxDepth>
<maxWeakCount>100</maxWeakCount></stageParams>
<featureParams>
<maxCatCount>0</maxCatCount>
<featSize>36</featSize></featureParams>
<stageNum>14</stageNum>
<stages>
<!-- stage 0 -->
<_>
<maxWeakCount>4</maxWeakCount>
<stageThreshold>-6.6961961984634399e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 37 5.0542093813419342e-03</internalNodes>
<leafValues>
6.3475179672241211e-01 -8.2110089063644409e-01</leafValues></_>
<_>
<internalNodes>
0 -1 19 2.6041852310299873e-02</internalNodes>
<leafValues>
-6.0500168800354004e-01 5.0097995996475220e-01</leafValues></_>
<_>
<internalNodes>
0 -1 30 6.0196466743946075e-02</internalNodes>
<leafValues>
-7.4185353517532349e-01 4.1114318370819092e-01</leafValues></_>
<_>
<internalNodes>
0 -1 10 2.0617619156837463e-02</internalNodes>
<leafValues>
-7.6064187288284302e-01 4.1562452912330627e-01</leafValues></_></weakClassifiers></_>
<!-- stage 1 -->
<_>
<maxWeakCount>6</maxWeakCount>
<stageThreshold>-1.0635967254638672e+00</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 24 4.8876302316784859e-03</internalNodes>
<leafValues>
5.0000000000000000e-01 -7.3762375116348267e-01</leafValues></_>
<_>
<internalNodes>
0 -1 32 2.2325046360492706e-02</internalNodes>
<leafValues>
-5.8662104606628418e-01 5.1117855310440063e-01</leafValues></_>
<_>
<internalNodes>
0 -1 45 1.8713673576712608e-02</internalNodes>
<leafValues>
-6.7774200439453125e-01 4.1451421380043030e-01</leafValues></_>
<_>
<internalNodes>
0 -1 47 4.2779751121997833e-02</internalNodes>
<leafValues>
-7.6153320074081421e-01 4.1026860475540161e-01</leafValues></_>
<_>
<internalNodes>
0 -1 79 1.3636458665132523e-02</internalNodes>
<leafValues>
-8.3057796955108643e-01 2.6945105195045471e-01</leafValues></_>
<_>
<internalNodes>
0 -1 22 1.0419502854347229e-02</internalNodes>
<leafValues>
3.3267253637313843e-01 -7.9588419198989868e-01</leafValues></_></weakClassifiers></_>
<!-- stage 2 -->
<_>
<maxWeakCount>5</maxWeakCount>
<stageThreshold>-1.3316284418106079e+00</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 59 1.1601861566305161e-02</internalNodes>
<leafValues>
-7.2571426630020142e-01 3.9076924324035645e-01</leafValues></_>
<_>
<internalNodes>
0 -1 28 2.6228143833577633e-03</internalNodes>
<leafValues>
5.1864373683929443e-01 -6.6300022602081299e-01</leafValues></_>
<_>
<internalNodes>
0 -1 53 9.1179274022579193e-03</internalNodes>
<leafValues>
-7.6964265108108521e-01 3.8903066515922546e-01</leafValues></_>
<_>
<internalNodes>
0 -1 58 1.0239388793706894e-02</internalNodes>
<leafValues>
3.6964818835258484e-01 -6.5922862291336060e-01</leafValues></_>
<_>
<internalNodes>
0 -1 49 2.0771797746419907e-02</internalNodes>
<leafValues>
-7.8919953107833862e-01 3.4207329154014587e-01</leafValues></_></weakClassifiers></_>
<!-- stage 3 -->
<_>
<maxWeakCount>7</maxWeakCount>
<stageThreshold>-8.5314482450485229e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 23 3.9043962024152279e-03</internalNodes>
<leafValues>
4.2080000042915344e-01 -7.0133334398269653e-01</leafValues></_>
<_>
<internalNodes>
0 -1 50 9.8744053393602371e-03</internalNodes>
<leafValues>
-7.6291352510452271e-01 3.2687053084373474e-01</leafValues></_>
<_>
<internalNodes>
0 -1 55 1.4459470286965370e-02</internalNodes>
<leafValues>
-7.3181456327438354e-01 2.9510185122489929e-01</leafValues></_>
<_>
<internalNodes>
0 -1 84 1.2070775032043457e-02</internalNodes>
<leafValues>
3.2441005110740662e-01 -7.8221690654754639e-01</leafValues></_>
<_>
<internalNodes>
0 -1 67 2.5153608992695808e-02</internalNodes>
<leafValues>
-6.4789843559265137e-01 3.1182798743247986e-01</leafValues></_>
<_>
<internalNodes>
0 -1 82 8.3147712051868439e-02</internalNodes>
<leafValues>
-5.5435341596603394e-01 3.7672561407089233e-01</leafValues></_>
<_>
<internalNodes>
0 -1 54 1.2301506474614143e-02</internalNodes>
<leafValues>
-6.5210336446762085e-01 3.4679582715034485e-01</leafValues></_></weakClassifiers></_>
<!-- stage 4 -->
<_>
<maxWeakCount>7</maxWeakCount>
<stageThreshold>-7.3280197381973267e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 26 4.6639265492558479e-03</internalNodes>
<leafValues>
4.7098976373672485e-01 -6.6666668653488159e-01</leafValues></_>
<_>
<internalNodes>
0 -1 18 7.7744415029883385e-03</internalNodes>
<leafValues>
-7.8850072622299194e-01 2.7177917957305908e-01</leafValues></_>
<_>
<internalNodes>
0 -1 78 9.7649656236171722e-03</internalNodes>
<leafValues>
3.8612514734268188e-01 -6.1002421379089355e-01</leafValues></_>
<_>
<internalNodes>
0 -1 1 1.7446843907237053e-02</internalNodes>
<leafValues>
-6.2320548295974731e-01 3.5820624232292175e-01</leafValues></_>
<_>
<internalNodes>
0 -1 80 1.2544549070298672e-02</internalNodes>
<leafValues>
-8.2759410142898560e-01 2.6734635233879089e-01</leafValues></_>
<_>
<internalNodes>
0 -1 15 9.5381084829568863e-03</internalNodes>
<leafValues>
4.5518133044242859e-01 -4.7254380583763123e-01</leafValues></_>
<_>
<internalNodes>
0 -1 29 1.1022552847862244e-02</internalNodes>
<leafValues>
-8.0517464876174927e-01 2.5165572762489319e-01</leafValues></_></weakClassifiers></_>
<!-- stage 5 -->
<_>
<maxWeakCount>7</maxWeakCount>
<stageThreshold>-8.0065369606018066e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 36 8.9248679578304291e-03</internalNodes>
<leafValues>
4.1610738635063171e-01 -6.1386138200759888e-01</leafValues></_>
<_>
<internalNodes>
0 -1 66 4.9243606626987457e-02</internalNodes>
<leafValues>
-7.3897874355316162e-01 2.3975111544132233e-01</leafValues></_>
<_>
<internalNodes>
0 -1 4 8.1836590543389320e-03</internalNodes>
<leafValues>
-7.9024726152420044e-01 3.0607378482818604e-01</leafValues></_>
<_>
<internalNodes>
0 -1 64 3.9731532335281372e-02</internalNodes>
<leafValues>
-6.2868875265121460e-01 3.5599696636199951e-01</leafValues></_>
<_>
<internalNodes>
0 -1 69 2.1768644452095032e-02</internalNodes>
<leafValues>
-5.3039550781250000e-01 4.0542030334472656e-01</leafValues></_>
<_>
<internalNodes>
0 -1 42 2.3743376135826111e-02</internalNodes>
<leafValues>
-6.6907733678817749e-01 2.6277667284011841e-01</leafValues></_>
<_>
<internalNodes>
0 -1 73 1.5905532985925674e-02</internalNodes>
<leafValues>
2.3004263639450073e-01 -7.6509678363800049e-01</leafValues></_></weakClassifiers></_>
<!-- stage 6 -->
<_>
<maxWeakCount>9</maxWeakCount>
<stageThreshold>-9.5710253715515137e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 31 2.3976407945156097e-02</internalNodes>
<leafValues>
-6.0215055942535400e-01 3.5668790340423584e-01</leafValues></_>
<_>
<internalNodes>
0 -1 33 1.6243303194642067e-02</internalNodes>
<leafValues>
-5.5735617876052856e-01 3.9698266983032227e-01</leafValues></_>
<_>
<internalNodes>
0 -1 39 6.5313324332237244e-02</internalNodes>
<leafValues>
-6.1343312263488770e-01 3.7502941489219666e-01</leafValues></_>
<_>
<internalNodes>
0 -1 56 2.1734256297349930e-02</internalNodes>
<leafValues>
-5.6393921375274658e-01 3.5127151012420654e-01</leafValues></_>
<_>
<internalNodes>
0 -1 25 1.0944951325654984e-02</internalNodes>
<leafValues>
3.5995838046073914e-01 -6.1829131841659546e-01</leafValues></_>
<_>
<internalNodes>
0 -1 40 2.0281352102756500e-02</internalNodes>
<leafValues>
-5.7755672931671143e-01 4.1146075725555420e-01</leafValues></_>
<_>
<internalNodes>
0 -1 71 1.3257162645459175e-02</internalNodes>
<leafValues>
-7.9115587472915649e-01 2.1226151287555695e-01</leafValues></_>
<_>
<internalNodes>
0 -1 70 4.7885462641716003e-02</internalNodes>
<leafValues>
-7.0310407876968384e-01 3.1437802314758301e-01</leafValues></_>
<_>
<internalNodes>
0 -1 85 1.0180527344346046e-02</internalNodes>
<leafValues>
-8.3038038015365601e-01 2.1416822075843811e-01</leafValues></_></weakClassifiers></_>
<!-- stage 7 -->
<_>
<maxWeakCount>7</maxWeakCount>
<stageThreshold>-5.7961404323577881e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 9 3.2557632774114609e-02</internalNodes>
<leafValues>
-3.9629006385803223e-01 5.7739555835723877e-01</leafValues></_>
<_>
<internalNodes>
0 -1 38 3.5968523006886244e-03</internalNodes>
<leafValues>
3.1778690218925476e-01 -6.2161976099014282e-01</leafValues></_>
<_>
<internalNodes>
0 -1 52 1.3301933184266090e-02</internalNodes>
<leafValues>
-5.6837254762649536e-01 3.1374710798263550e-01</leafValues></_>
<_>
<internalNodes>
0 -1 57 1.3667806982994080e-02</internalNodes>
<leafValues>
-6.9410246610641479e-01 2.5042265653610229e-01</leafValues></_>
<_>
<internalNodes>
0 -1 21 1.0717843659222126e-02</internalNodes>
<leafValues>
2.2533524036407471e-01 -7.2063952684402466e-01</leafValues></_>
<_>
<internalNodes>
0 -1 65 1.9350808113813400e-02</internalNodes>
<leafValues>
3.0546805262565613e-01 -6.1556744575500488e-01</leafValues></_>
<_>
<internalNodes>
0 -1 62 1.7843421548604965e-02</internalNodes>
<leafValues>
-6.5155881643295288e-01 3.2118973135948181e-01</leafValues></_></weakClassifiers></_>
<!-- stage 8 -->
<_>
<maxWeakCount>8</maxWeakCount>
<stageThreshold>-1.0275948047637939e+00</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 7 2.4886079132556915e-02</internalNodes>
<leafValues>
-6.1690139770507812e-01 3.3953487873077393e-01</leafValues></_>
<_>
<internalNodes>
0 -1 35 3.4813559614121914e-03</internalNodes>
<leafValues>
3.5839259624481201e-01 -6.2441271543502808e-01</leafValues></_>
<_>
<internalNodes>
0 -1 6 9.2840641736984253e-03</internalNodes>
<leafValues>
-7.2705483436584473e-01 2.9669672250747681e-01</leafValues></_>
<_>
<internalNodes>
0 -1 76 4.8370264470577240e-02</internalNodes>
<leafValues>
-4.1198855638504028e-01 4.3474736809730530e-01</leafValues></_>
<_>
<internalNodes>
0 -1 20 2.2166669368743896e-02</internalNodes>
<leafValues>
-4.2162674665451050e-01 4.0039113163948059e-01</leafValues></_>
<_>
<internalNodes>
0 -1 0 6.6159442067146301e-02</internalNodes>
<leafValues>
-3.5341855883598328e-01 5.1975446939468384e-01</leafValues></_>
<_>
<internalNodes>
0 -1 14 1.5775233507156372e-02</internalNodes>
<leafValues>
2.5096359848976135e-01 -8.2702255249023438e-01</leafValues></_>
<_>
<internalNodes>
0 -1 60 1.1368561536073685e-02</internalNodes>
<leafValues>
-9.0493923425674438e-01 1.5262487530708313e-01</leafValues></_></weakClassifiers></_>
<!-- stage 9 -->
<_>
<maxWeakCount>5</maxWeakCount>
<stageThreshold>-1.1457120180130005e+00</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 76 3.3531658351421356e-02</internalNodes>
<leafValues>
-8.0352646112442017e-01 5.2902156114578247e-01</leafValues></_>
<_>
<internalNodes>
0 -1 5 9.4961840659379959e-03</internalNodes>
<leafValues>
-7.6558345556259155e-01 2.8800898790359497e-01</leafValues></_>
<_>
<internalNodes>
0 -1 16 1.5307674184441566e-02</internalNodes>
<leafValues>
4.3669199943542480e-01 -5.6426107883453369e-01</leafValues></_>
<_>
<internalNodes>
0 -1 44 1.9379984587430954e-02</internalNodes>
<leafValues>
-4.5677116513252258e-01 4.8609861731529236e-01</leafValues></_>
<_>
<internalNodes>
0 -1 75 4.0394026786088943e-02</internalNodes>
<leafValues>
-5.8213305473327637e-01 4.4347706437110901e-01</leafValues></_></weakClassifiers></_>
<!-- stage 10 -->
<_>
<maxWeakCount>9</maxWeakCount>
<stageThreshold>-1.3891929388046265e+00</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 74 1.3090066611766815e-02</internalNodes>
<leafValues>
3.1437125802040100e-01 -6.3253009319305420e-01</leafValues></_>
<_>
<internalNodes>
0 -1 44 2.2685792297124863e-02</internalNodes>
<leafValues>
-3.5965347290039062e-01 4.7721466422080994e-01</leafValues></_>
<_>
<internalNodes>
0 -1 17 1.6330339014530182e-02</internalNodes>
<leafValues>
3.0792072415351868e-01 -5.7770532369613647e-01</leafValues></_>
<_>
<internalNodes>
0 -1 38 9.5306994626298547e-04</internalNodes>
<leafValues>
4.5918419957160950e-01 -3.5668218135833740e-01</leafValues></_>
<_>
<internalNodes>
0 -1 41 1.0749800130724907e-02</internalNodes>
<leafValues>
-7.5108170509338379e-01 2.5033050775527954e-01</leafValues></_>
<_>
<internalNodes>
0 -1 51 4.5300077646970749e-02</internalNodes>
<leafValues>
-5.5595809221267700e-01 2.8857883810997009e-01</leafValues></_>
<_>
<internalNodes>
0 -1 9 1.2305306270718575e-02</internalNodes>
<leafValues>
-7.3887377977371216e-01 2.4087795615196228e-01</leafValues></_>
<_>
<internalNodes>
0 -1 57 2.1951880306005478e-02</internalNodes>
<leafValues>
-4.7762107849121094e-01 3.1966149806976318e-01</leafValues></_>
<_>
<internalNodes>
0 -1 83 2.0090535283088684e-02</internalNodes>
<leafValues>
1.9412286579608917e-01 -8.5962796211242676e-01</leafValues></_></weakClassifiers></_>
<!-- stage 11 -->
<_>
<maxWeakCount>9</maxWeakCount>
<stageThreshold>-1.2574299573898315e+00</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 72 1.2667892500758171e-02</internalNodes>
<leafValues>
3.4039333462715149e-01 -6.6371679306030273e-01</leafValues></_>
<_>
<internalNodes>
0 -1 12 8.2983868196606636e-03</internalNodes>
<leafValues>
-6.6516041755676270e-01 4.2625319957733154e-01</leafValues></_>
<_>
<internalNodes>
0 -1 34 1.3944265665486455e-03</internalNodes>
<leafValues>
4.3100255727767944e-01 -4.3553835153579712e-01</leafValues></_>
<_>
<internalNodes>
0 -1 48 2.9196422547101974e-02</internalNodes>
<leafValues>
-5.5650693178176880e-01 3.0238664150238037e-01</leafValues></_>
<_>
<internalNodes>
0 -1 63 1.6110163182020187e-02</internalNodes>
<leafValues>
-5.7638877630233765e-01 2.9655921459197998e-01</leafValues></_>
<_>
<internalNodes>
0 -1 3 4.4288985431194305e-02</internalNodes>
<leafValues>
-5.2310836315155029e-01 3.4848624467849731e-01</leafValues></_>
<_>
<internalNodes>
0 -1 73 1.0944083333015442e-02</internalNodes>
<leafValues>
2.5988364219665527e-01 -6.1724573373794556e-01</leafValues></_>
<_>
<internalNodes>
0 -1 8 1.2161436490714550e-02</internalNodes>
<leafValues>
-6.1962151527404785e-01 3.4090581536293030e-01</leafValues></_>
<_>
<internalNodes>
0 -1 32 3.9139650762081146e-02</internalNodes>
<leafValues>
-3.0264016985893250e-01 6.4403891563415527e-01</leafValues></_></weakClassifiers></_>
<!-- stage 12 -->
<_>
<maxWeakCount>8</maxWeakCount>
<stageThreshold>-6.0144728422164917e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 72 1.3080243021249771e-02</internalNodes>
<leafValues>
2.8611898422241211e-01 -6.8707484006881714e-01</leafValues></_>
<_>
<internalNodes>
0 -1 79 1.3372734189033508e-02</internalNodes>
<leafValues>
-7.6833885908126831e-01 2.4285763502120972e-01</leafValues></_>
<_>
<internalNodes>
0 -1 68 2.1684931591153145e-02</internalNodes>
<leafValues>
-6.8435341119766235e-01 2.6952907443046570e-01</leafValues></_>
<_>
<internalNodes>
0 -1 46 6.5650418400764465e-02</internalNodes>
<leafValues>
-5.7296985387802124e-01 3.9135009050369263e-01</leafValues></_>
<_>
<internalNodes>
0 -1 13 1.3176317326724529e-02</internalNodes>
<leafValues>
-7.9447650909423828e-01 2.7378407120704651e-01</leafValues></_>
<_>
<internalNodes>
0 -1 3 4.3580539524555206e-02</internalNodes>
<leafValues>
-6.1350268125534058e-01 2.7570602297782898e-01</leafValues></_>
<_>
<internalNodes>
0 -1 76 4.4955946505069733e-02</internalNodes>
<leafValues>
-5.3824591636657715e-01 3.4698137640953064e-01</leafValues></_>
<_>
<internalNodes>
0 -1 53 1.0510908439755440e-02</internalNodes>
<leafValues>
-7.1225810050964355e-01 2.8148773312568665e-01</leafValues></_></weakClassifiers></_>
<!-- stage 13 -->
<_>
<maxWeakCount>7</maxWeakCount>
<stageThreshold>-5.1197630167007446e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 81 8.1387851387262344e-03</internalNodes>
<leafValues>
4.1029641032218933e-01 -7.3259055614471436e-01</leafValues></_>
<_>
<internalNodes>
0 -1 11 1.9650684669613838e-02</internalNodes>
<leafValues>
-4.6334064006805420e-01 5.3854602575302124e-01</leafValues></_>
<_>
<internalNodes>
0 -1 77 1.5638254582881927e-02</internalNodes>
<leafValues>
4.1182518005371094e-01 -5.5902385711669922e-01</leafValues></_>
<_>
<internalNodes>
0 -1 43 2.2062283009290695e-02</internalNodes>
<leafValues>
-4.7116351127624512e-01 6.0200870037078857e-01</leafValues></_>
<_>
<internalNodes>
0 -1 61 4.1543480008840561e-02</internalNodes>
<leafValues>
-7.1258103847503662e-01 3.5103169083595276e-01</leafValues></_>
<_>
<internalNodes>
0 -1 2 9.6121616661548615e-03</internalNodes>
<leafValues>
-7.9785227775573730e-01 2.3375311493873596e-01</leafValues></_>
<_>
<internalNodes>
0 -1 27 5.6492481380701065e-03</internalNodes>
<leafValues>
2.9058167338371277e-01 -8.4337824583053589e-01</leafValues></_></weakClassifiers></_></stages>
<features>
<_>
<rect>
0 0 8 8 30</rect></_>
<_>
<rect>
0 4 8 8 33</rect></_>
<_>
<rect>
0 4 8 8 34</rect></_>
<_>
<rect>
0 8 8 8 12</rect></_>
<_>
<rect>
0 8 8 8 28</rect></_>
<_>
<rect>
0 8 8 8 34</rect></_>
<_>
<rect>
0 8 8 8 35</rect></_>
<_>
<rect>
4 0 8 8 20</rect></_>
<_>
<rect>
4 4 8 8 18</rect></_>
<_>
<rect>
4 4 8 8 19</rect></_>
<_>
<rect>
4 4 8 8 20</rect></_>
<_>
<rect>
4 4 8 8 33</rect></_>
<_>
<rect>
4 4 8 8 34</rect></_>
<_>
<rect>
8 4 8 8 23</rect></_>
<_>
<rect>
8 4 8 8 28</rect></_>
<_>
<rect>
8 4 8 8 29</rect></_>
<_>
<rect>
8 4 8 8 32</rect></_>
<_>
<rect>
8 8 8 8 7</rect></_>
<_>
<rect>
8 8 8 8 25</rect></_>
<_>
<rect>
12 4 8 8 2</rect></_>
<_>
<rect>
12 4 8 8 24</rect></_>
<_>
<rect>
12 4 8 8 33</rect></_>
<_>
<rect>
12 4 8 8 34</rect></_>
<_>
<rect>
12 8 8 8 27</rect></_>
<_>
<rect>
12 8 8 8 28</rect></_>
<_>
<rect>
12 8 8 8 32</rect></_>
<_>
<rect>
12 8 8 8 33</rect></_>
<_>
<rect>
12 8 8 8 34</rect></_>
<_>
<rect>
12 8 8 8 35</rect></_>
<_>
<rect>
16 4 8 8 1</rect></_>
<_>
<rect>
16 8 8 8 22</rect></_>
<_>
<rect>
20 4 8 8 14</rect></_>
<_>
<rect>
20 4 8 8 15</rect></_>
<_>
<rect>
20 4 8 8 28</rect></_>
<_>
<rect>
20 8 8 8 18</rect></_>
<_>
<rect>
20 8 8 8 19</rect></_>
<_>
<rect>
20 8 8 8 20</rect></_>
<_>
<rect>
20 8 8 8 25</rect></_>
<_>
<rect>
20 8 8 8 26</rect></_>
<_>
<rect>
20 8 8 8 31</rect></_>
<_>
<rect>
24 4 8 8 29</rect></_>
<_>
<rect>
24 8 8 8 35</rect></_>
<_>
<rect>
28 0 8 8 32</rect></_>
<_>
<rect>
28 4 8 8 12</rect></_>
<_>
<rect>
28 4 8 8 19</rect></_>
<_>
<rect>
28 4 8 8 33</rect></_>
<_>
<rect>
28 8 8 8 22</rect></_>
<_>
<rect>
32 0 8 8 22</rect></_>
<_>
<rect>
32 0 8 8 23</rect></_>
<_>
<rect>
32 8 8 8 5</rect></_>
<_>
<rect>
32 8 8 8 19</rect></_>
<_>
<rect>
32 8 8 8 22</rect></_>
<_>
<rect>
32 8 8 8 24</rect></_>
<_>
<rect>
32 8 8 8 25</rect></_>
<_>
<rect>
32 8 8 8 26</rect></_>
<_>
<rect>
0 4 16 8 19</rect></_>
<_>
<rect>
0 4 16 8 23</rect></_>
<_>
<rect>
0 4 16 8 24</rect></_>
<_>
<rect>
0 4 16 8 35</rect></_>
<_>
<rect>
0 8 16 8 24</rect></_>
<_>
<rect>
4 0 16 8 19</rect></_>
<_>
<rect>
4 0 16 8 21</rect></_>
<_>
<rect>
4 4 16 8 15</rect></_>
<_>
<rect>
4 4 16 8 24</rect></_>
<_>
<rect>
4 8 16 8 3</rect></_>
<_>
<rect>
4 8 16 8 8</rect></_>
<_>
<rect>
4 8 16 8 22</rect></_>
<_>
<rect>
8 4 16 8 2</rect></_>
<_>
<rect>
8 4 16 8 14</rect></_>
<_>
<rect>
8 4 16 8 15</rect></_>
<_>
<rect>
12 0 16 8 31</rect></_>
<_>
<rect>
12 4 16 8 1</rect></_>
<_>
<rect>
12 4 16 8 18</rect></_>
<_>
<rect>
12 4 16 8 19</rect></_>
<_>
<rect>
12 8 16 8 20</rect></_>
<_>
<rect>
12 8 16 8 21</rect></_>
<_>
<rect>
12 8 16 8 30</rect></_>
<_>
<rect>
16 4 16 8 23</rect></_>
<_>
<rect>
16 4 16 8 26</rect></_>
<_>
<rect>
16 4 16 8 28</rect></_>
<_>
<rect>
16 4 16 8 33</rect></_>
<_>
<rect>
16 8 16 8 18</rect></_>
<_>
<rect>
16 8 16 8 22</rect></_>
<_>
<rect>
16 8 16 8 23</rect></_>
<_>
<rect>
16 8 16 8 24</rect></_>
<_>
<rect>
16 8 16 8 28</rect></_></features></cascade>
</opencv_storage>

View File

@ -0,0 +1,865 @@
<?xml version="1.0"?>
<opencv_storage>
<cascade>
<stageType>BOOST</stageType>
<featureType>LBP</featureType>
<height>20</height>
<width>20</width>
<stageParams>
<boostType>GAB</boostType>
<minHitRate>9.9500000476837158e-01</minHitRate>
<maxFalseAlarm>5.0000000000000000e-01</maxFalseAlarm>
<weightTrimRate>9.4999999999999996e-01</weightTrimRate>
<maxDepth>1</maxDepth>
<maxWeakCount>100</maxWeakCount></stageParams>
<featureParams>
<maxCatCount>256</maxCatCount>
<featSize>1</featSize></featureParams>
<stageNum>17</stageNum>
<stages>
<!-- stage 0 -->
<_>
<maxWeakCount>4</maxWeakCount>
<stageThreshold>-3.8216483592987061e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 14 -116131361 -805273592 285212754 -665676544
-1979514361 402858256 1210101859 2053261651</internalNodes>
<leafValues>
-8.1863981485366821e-01 3.8631346821784973e-01</leafValues></_>
<_>
<internalNodes>
0 -1 77 -839909889 -1967144961 2147479531 -17825793
-1090521089 -1409417217 -35 -1996497413</internalNodes>
<leafValues>
-5.6318879127502441e-01 7.0905172824859619e-01</leafValues></_>
<_>
<internalNodes>
0 -1 28 -493945890 -113131518 -2134802368 -1021980672
-1047822198 -98459648 -1273282546 -1681264630</internalNodes>
<leafValues>
-6.4377105236053467e-01 6.2214618921279907e-01</leafValues></_>
<_>
<internalNodes>
0 -1 11 -150800566 -921452477 1077940434 -1374649776
-670917501 -1901983744 1678475683 1441215394</internalNodes>
<leafValues>
-7.8556549549102783e-01 4.3848153948783875e-01</leafValues></_></weakClassifiers></_>
<!-- stage 1 -->
<_>
<maxWeakCount>5</maxWeakCount>
<stageThreshold>-1.0401203632354736e+00</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 20 -1814899305 -262926229 285511897 -552550389
1376793451 -654311424 -2113738673 2130855459</internalNodes>
<leafValues>
-6.8602150678634644e-01 4.3896102905273438e-01</leafValues></_>
<_>
<internalNodes>
0 -1 22 -11077378 -275885994 1090601128 -148762616
1678571944 -552075258 -375205670 -11384614</internalNodes>
<leafValues>
-7.2594994306564331e-01 4.2457693815231323e-01</leafValues></_>
<_>
<internalNodes>
0 -1 63 -778579808 -787259340 -2147167744 -49217507
184658072 -771751735 1006796936 -796667983</internalNodes>
<leafValues>
-6.1250990629196167e-01 5.0073426961898804e-01</leafValues></_>
<_>
<internalNodes>
0 -1 56 1343396308 84934933 285217813 -822083563 25212093
-1433894798 -2088760165 -2012493765</internalNodes>
<leafValues>
-5.7877659797668457e-01 5.7871204614639282e-01</leafValues></_>
<_>
<internalNodes>
0 -1 50 1443108372 830492876 268963869 822554689 -2113371735
2023768224 -1610608245 226517145</internalNodes>
<leafValues>
-7.4487793445587158e-01 5.0465714931488037e-01</leafValues></_></weakClassifiers></_>
<!-- stage 2 -->
<_>
<maxWeakCount>5</maxWeakCount>
<stageThreshold>-1.1187639236450195e+00</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 18 -801385737 1431375955 -803072557 -585891518
-938332102 -914226869 1216446578 -14524093</internalNodes>
<leafValues>
-6.2183237075805664e-01 5.0148367881774902e-01</leafValues></_>
<_>
<internalNodes>
0 -1 54 -1045843523 -2074918719 119263 -614772479 16949948
-1735815038 -796254024 212832684</internalNodes>
<leafValues>
-6.8186712265014648e-01 4.5836842060089111e-01</leafValues></_>
<_>
<internalNodes>
0 -1 30 -503273014 -1325367148 285380632 -1382547450
-2105409330 -419395567 212048061 -811710238</internalNodes>
<leafValues>
-6.9231766462326050e-01 4.7207185626029968e-01</leafValues></_>
<_>
<internalNodes>
0 -1 53 -939081456 -619118569 16777619 1258917905
-2012729320 -1693941624 -1992294244 -1900328664</internalNodes>
<leafValues>
-6.1702716350555420e-01 5.8441513776779175e-01</leafValues></_>
<_>
<internalNodes>
0 -1 43 119542092 -2008006383 16797787 1569772500 1208162414
-1709629312 1233316041 713306272</internalNodes>
<leafValues>
-9.0641558170318604e-01 3.2989087700843811e-01</leafValues></_></weakClassifiers></_>
<!-- stage 3 -->
<_>
<maxWeakCount>4</maxWeakCount>
<stageThreshold>-5.5834686756134033e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 19 -115422529 -82283305 537940377 -142208699 436215879
-1207304190 -1928636341 -10004461</internalNodes>
<leafValues>
-6.1693549156188965e-01 4.4067797064781189e-01</leafValues></_>
<_>
<internalNodes>
0 -1 64 -566238831 1502331091 16842964 1158217893 1092340101
-1978466175 1082328064 1635980099</internalNodes>
<leafValues>
-5.9507393836975098e-01 4.5505648851394653e-01</leafValues></_>
<_>
<internalNodes>
0 -1 8 -767634337 -506457974 -1860960253 1342337154
1673595022 413204523 -486506330 554718159</internalNodes>
<leafValues>
-7.5239545106887817e-01 3.6372080445289612e-01</leafValues></_>
<_>
<internalNodes>
0 -1 40 355890012 402718800 103507 855786333 570589235
-360546174 310387350 958677152</internalNodes>
<leafValues>
-7.0168584585189819e-01 4.2190608382225037e-01</leafValues></_></weakClassifiers></_>
<!-- stage 4 -->
<_>
<maxWeakCount>5</maxWeakCount>
<stageThreshold>-6.1581146717071533e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 67 -1001262241 643023193 1352730491 -1047150320
176196319 -2013069272 -1994798887 -926186061</internalNodes>
<leafValues>
-6.0077518224716187e-01 4.7904190421104431e-01</leafValues></_>
<_>
<internalNodes>
0 -1 0 -487592006 1341027884 -285266219 -671356948
1333132790 -589827 -1207974302 -402696201</internalNodes>
<leafValues>
-4.6866020560264587e-01 6.2427258491516113e-01</leafValues></_>
<_>
<internalNodes>
0 -1 33 1079139793 1880653969 -2139074401 1595087059
142608443 -442498942 188772473 982724683</internalNodes>
<leafValues>
-5.7951289415359497e-01 5.2355396747589111e-01</leafValues></_>
<_>
<internalNodes>
0 -1 3 -1070682378 1140850688 1082195970 -400486334
-494874482 -2146861053 539173123 -1865158426</internalNodes>
<leafValues>
-6.7111265659332275e-01 4.4606295228004456e-01</leafValues></_>
<_>
<internalNodes>
0 -1 17 -16783725 1069498363 -1082671369 -806396033
2138007300 -98305 -1082245273 -47743681</internalNodes>
<leafValues>
-4.7863450646400452e-01 7.2622120380401611e-01</leafValues></_></weakClassifiers></_>
<!-- stage 5 -->
<_>
<maxWeakCount>4</maxWeakCount>
<stageThreshold>-5.1642441749572754e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 18 -414526753 -27421077 268456409 1964331602
-2012657653 -478019583 -501225108 -66885085</internalNodes>
<leafValues>
-5.8139532804489136e-01 4.4910180568695068e-01</leafValues></_>
<_>
<internalNodes>
0 -1 41 -97002076 -741792725 1078001810 -1852112868
-401895415 -1610579672 673808462 -1626689015</internalNodes>
<leafValues>
-6.1895817518234253e-01 4.3450984358787537e-01</leafValues></_>
<_>
<internalNodes>
0 -1 72 -1928006696 140677458 66775 353360263 554289561
-1433862008 310629005 216648157</internalNodes>
<leafValues>
-6.7537730932235718e-01 4.2255818843841553e-01</leafValues></_>
<_>
<internalNodes>
0 -1 10 1360560366 823148953 289476876 1401946114 -938426319
-1207435128 136950963 1697159755</internalNodes>
<leafValues>
-7.2465872764587402e-01 4.1688033938407898e-01</leafValues></_></weakClassifiers></_>
<!-- stage 6 -->
<_>
<maxWeakCount>7</maxWeakCount>
<stageThreshold>-1.0788922309875488e+00</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 47 -2130469803 -788528887 -2122231499 222887945
168225019 -776863336 436734124 -1178894070</internalNodes>
<leafValues>
-5.6179773807525635e-01 4.7468355298042297e-01</leafValues></_>
<_>
<internalNodes>
0 -1 71 -1046941288 671318097 -922598905 88462273 688423656
-1734115072 -1381670715 215957569</internalNodes>
<leafValues>
-8.2765579223632812e-01 3.3159419894218445e-01</leafValues></_>
<_>
<internalNodes>
0 -1 26 1255952851 -44383792 16777501 256004113 1879189731
-2004287464 -2071902069 1795293714</internalNodes>
<leafValues>
-6.5990495681762695e-01 4.4058448076248169e-01</leafValues></_>
<_>
<internalNodes>
0 -1 12 -1091387990 -1073823817 -1079050572 -281362691
-145395849 2146856831 -9142440 -143395017</internalNodes>
<leafValues>
-5.2639400959014893e-01 5.7729768753051758e-01</leafValues></_>
<_>
<internalNodes>
0 -1 58 -2130004908 -1556086763 -2128043987 -1190981367
-1073151943 -1207400277 -2013255713 227147784</internalNodes>
<leafValues>
-6.4592343568801880e-01 4.6778699755668640e-01</leafValues></_>
<_>
<internalNodes>
0 -1 39 1477067097 -485490669 17106193 1428160784 318865418
-1625079799 -2119692225 -1549612113</internalNodes>
<leafValues>
-5.3414630889892578e-01 5.7999175786972046e-01</leafValues></_>
<_>
<internalNodes>
0 -1 45 1073952 709906434 17109151 -1283366911 -1073604456
-1995833344 -1694351200 371998762</internalNodes>
<leafValues>
-7.3603397607803345e-01 4.6273016929626465e-01</leafValues></_></weakClassifiers></_>
<!-- stage 7 -->
<_>
<maxWeakCount>5</maxWeakCount>
<stageThreshold>-9.4195914268493652e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 61 -1635265121 -2041479155 -2130624037 -720519167
-1564371814 -1577058263 -912152389 -1997823555</internalNodes>
<leafValues>
-5.6640625000000000e-01 4.1420117020606995e-01</leafValues></_>
<_>
<internalNodes>
0 -1 79 -515652849 -687271848 -922708976 268624009
-1040170976 268517386 -2146647714 -1237353902</internalNodes>
<leafValues>
-5.0302296876907349e-01 5.0813484191894531e-01</leafValues></_>
<_>
<internalNodes>
0 -1 25 -917442339 2035454149 25165845 -582594538 -491763659
-226357214 138412104 1061311058</internalNodes>
<leafValues>
-6.4046996831893921e-01 4.2238986492156982e-01</leafValues></_>
<_>
<internalNodes>
0 -1 62 -792330738 454877192 17252368 -1805975487 -805189368
-1837105152 -2088564586 2123875655</internalNodes>
<leafValues>
-5.5271726846694946e-01 4.8939716815948486e-01</leafValues></_>
<_>
<internalNodes>
0 -1 13 -926365360 -855342846 536907893 243286047 536870947
1581451272 1107316902 1313335431</internalNodes>
<leafValues>
-7.2280997037887573e-01 4.1264382004737854e-01</leafValues></_></weakClassifiers></_>
<!-- stage 8 -->
<_>
<maxWeakCount>6</maxWeakCount>
<stageThreshold>-5.5679398775100708e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 46 -519314635 1560543488 1074924461 -250462203
-1975418119 -1837072246 -1862080120 -1901377268</internalNodes>
<leafValues>
-5.5813956260681152e-01 4.1317364573478699e-01</leafValues></_>
<_>
<internalNodes>
0 -1 16 -997006133 2098733135 1090585820 -987217918
-796646602 1744898062 -910110577 1862664523</internalNodes>
<leafValues>
-6.1264091730117798e-01 4.1155117750167847e-01</leafValues></_>
<_>
<internalNodes>
0 -1 70 -584843516 633627713 2179480 -1006225911 -234860504
-937312184 -1434400588 -1601449815</internalNodes>
<leafValues>
-6.0614407062530518e-01 4.5268186926841736e-01</leafValues></_>
<_>
<internalNodes>
0 -1 36 -942141238 -788160445 570491930 -774811598
-208595988 8978464 142774506 2047508490</internalNodes>
<leafValues>
-6.6957730054855347e-01 4.4044500589370728e-01</leafValues></_>
<_>
<internalNodes>
0 -1 60 974549209 -1488288383 1073812741 84480391 50563224
-668761984 722004873 -2002255636</internalNodes>
<leafValues>
-6.7046916484832764e-01 4.2434301972389221e-01</leafValues></_>
<_>
<internalNodes>
0 -1 35 -17498493 -169017362 -16809985 2063072816 2147319807
-25231362 2104983486 -453544089</internalNodes>
<leafValues>
-4.9415424466133118e-01 6.6655534505844116e-01</leafValues></_></weakClassifiers></_>
<!-- stage 9 -->
<_>
<maxWeakCount>6</maxWeakCount>
<stageThreshold>-5.1655203104019165e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 77 -817370721 5636119 -788446887 -976189373 -1069489939
-2138274806 -1975459815 -858818325</internalNodes>
<leafValues>
-5.7312250137329102e-01 4.0697672963142395e-01</leafValues></_>
<_>
<internalNodes>
0 -1 23 1076011771 -132595565 17191057 -2061893628 168511598
-914358248 1661608910 164761498</internalNodes>
<leafValues>
-6.8805932998657227e-01 3.4988537430763245e-01</leafValues></_>
<_>
<internalNodes>
0 -1 73 -544473564 -1300002480 -2146433770 -2105485786
-1073058812 -1457979374 -905862656 1984387410</internalNodes>
<leafValues>
-7.3495119810104370e-01 3.2354596257209778e-01</leafValues></_>
<_>
<internalNodes>
0 -1 74 -670971031 -671084544 -2147036911 -871886589
-938831204 705921025 1795246624 1918518199</internalNodes>
<leafValues>
-5.4816365242004395e-01 4.7995322942733765e-01</leafValues></_>
<_>
<internalNodes>
0 -1 1 -308559471 -872234358 1099120704 1091625104
1493182595 187801600 1378747543 -496800669</internalNodes>
<leafValues>
-5.2902102470397949e-01 5.1137113571166992e-01</leafValues></_>
<_>
<internalNodes>
0 -1 66 926891901 1040039932 -3 2147483005 -4194307 -1049601
536281087 -2130756491</internalNodes>
<leafValues>
-4.8939517140388489e-01 6.0654014348983765e-01</leafValues></_></weakClassifiers></_>
<!-- stage 10 -->
<_>
<maxWeakCount>5</maxWeakCount>
<stageThreshold>-8.1084722280502319e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 48 1360060701 -851439284 285278341 -1861156847 47259
-125681527 561528 -1433723806</internalNodes>
<leafValues>
-5.7746475934982300e-01 3.8810199499130249e-01</leafValues></_>
<_>
<internalNodes>
0 -1 6 -503135818 -988675821 -2147417341 -485866624
134824522 -1735327740 -1056898514 -1170102209</internalNodes>
<leafValues>
-5.0149130821228027e-01 5.1512545347213745e-01</leafValues></_>
<_>
<internalNodes>
0 -1 49 1107449621 -1825533678 -1878929129 17875089
1778483889 -1877868381 -1476351281 -1996482224</internalNodes>
<leafValues>
-5.5987912416458130e-01 5.0142657756805420e-01</leafValues></_>
<_>
<internalNodes>
0 -1 9 -2083342797 1485963264 268550165 855638017 -771702720
-2013265919 4530450 1329807895</internalNodes>
<leafValues>
-6.4084523916244507e-01 4.4509848952293396e-01</leafValues></_>
<_>
<internalNodes>
0 -1 65 352542737 -654245859 138414407 -855486377 -636940126
-1582792422 268566954 -880652000</internalNodes>
<leafValues>
-5.5803930759429932e-01 5.6015783548355103e-01</leafValues></_></weakClassifiers></_>
<!-- stage 11 -->
<_>
<maxWeakCount>5</maxWeakCount>
<stageThreshold>-6.2533426284790039e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 32 -133533401 -784244669 -780140527 1359056914
-1031797493 -1543486464 -1073553021 -92123109</internalNodes>
<leafValues>
-5.8724200725555420e-01 5.1419556140899658e-01</leafValues></_>
<_>
<internalNodes>
0 -1 7 -269496593 -1426917394 -135630917 -1433633417
-625516682 -1644855554 -134217737 -1032325633</internalNodes>
<leafValues>
-4.5971798896789551e-01 6.3643723726272583e-01</leafValues></_>
<_>
<internalNodes>
0 -1 21 554158987 1648525588 1610827922 -1059942392
214040720 436240385 1124667635 -995261309</internalNodes>
<leafValues>
-5.2691739797592163e-01 5.5342370271682739e-01</leafValues></_>
<_>
<internalNodes>
0 -1 78 -610786687 -1014431744 335544322 -1739571200
1082245124 1074511880 1745675016 -1035567035</internalNodes>
<leafValues>
-6.7040818929672241e-01 4.6022328734397888e-01</leafValues></_>
<_>
<internalNodes>
0 -1 24 -753871985 -2130296827 131074 -1021931517 32896
734101512 -2142764925 -1738530053</internalNodes>
<leafValues>
-5.7864147424697876e-01 5.4643619060516357e-01</leafValues></_></weakClassifiers></_>
<!-- stage 12 -->
<_>
<maxWeakCount>5</maxWeakCount>
<stageThreshold>-6.7065376043319702e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 51 -2129670127 -2088419071 1361086495 -1526660645
-654077751 -1658814454 -1744824308 -1895058812</internalNodes>
<leafValues>
-6.1866128444671631e-01 4.3417367339134216e-01</leafValues></_>
<_>
<internalNodes>
0 -1 76 1862270947 83886063 -18 1341124607 -1073741841
-1392508933 -268435527 -989872129</internalNodes>
<leafValues>
-5.2253365516662598e-01 6.0079723596572876e-01</leafValues></_>
<_>
<internalNodes>
0 -1 44 -787841000 -209616639 -788363672 -1775730420
-1342160402 -1165983576 671160800 -1474788180</internalNodes>
<leafValues>
-5.8633673191070557e-01 5.2033215761184692e-01</leafValues></_>
<_>
<internalNodes>
0 -1 37 569663985 742691024 285442513 487589005 -1851670144
-89505782 -1988090481 -1077935924</internalNodes>
<leafValues>
-5.3621900081634521e-01 6.3959217071533203e-01</leafValues></_>
<_>
<internalNodes>
0 -1 15 -184914305 771751919 -65876 -1363247362 1475698037
-1610645644 -34292354 -331874723</internalNodes>
<leafValues>
-5.6640690565109253e-01 6.1255306005477905e-01</leafValues></_></weakClassifiers></_>
<!-- stage 13 -->
<_>
<maxWeakCount>4</maxWeakCount>
<stageThreshold>-5.2602511644363403e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 29 -2095943401 1426227530 -1870589951 -1806692085
-2113887494 -1020919808 -1844871064 -355597174</internalNodes>
<leafValues>
-6.1396306753158569e-01 4.1046831011772156e-01</leafValues></_>
<_>
<internalNodes>
0 -1 18 -900564753 -528338747 268489916 -746535674 692210514
949731328 -259469820 1930916179</internalNodes>
<leafValues>
-7.3841679096221924e-01 3.8619107007980347e-01</leafValues></_>
<_>
<internalNodes>
0 -1 34 -2070226876 -931119082 33689667 282689 1250523520
679608458 1242079947 1150372112</internalNodes>
<leafValues>
-7.8880369663238525e-01 3.2611399888992310e-01</leafValues></_>
<_>
<internalNodes>
0 -1 4 -860356945 -831983891 -1627439388 -1666056287 -524418
-1376468995 1727455091 -1999276891</internalNodes>
<leafValues>
-5.1788991689682007e-01 5.9072709083557129e-01</leafValues></_></weakClassifiers></_>
<!-- stage 14 -->
<_>
<maxWeakCount>6</maxWeakCount>
<stageThreshold>-1.3436260223388672e+00</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 55 1363263926 -2078834663 17109203 956678215
-1878942767 -2013167616 -1408630563 -1930720795</internalNodes>
<leafValues>
-6.0593217611312866e-01 3.5978835821151733e-01</leafValues></_>
<_>
<internalNodes>
0 -1 42 29500340 1361281361 1073840257 -1190440896
-1602133519 -1738211184 -1031729089 428649352</internalNodes>
<leafValues>
-5.8324354887008667e-01 4.1990754008293152e-01</leafValues></_>
<_>
<internalNodes>
0 -1 15 -1877288625 -1845297117 1073832201 -1022328170
-1072672747 67633314 1744863748 -1370234849</internalNodes>
<leafValues>
-5.6197988986968994e-01 4.9538531899452209e-01</leafValues></_>
<_>
<internalNodes>
0 -1 57 -1997237800 290455689 33043 -83755007 671617187
-771653632 -1308622815 -1643898648</internalNodes>
<leafValues>
-6.1528784036636353e-01 4.8066803812980652e-01</leafValues></_>
<_>
<internalNodes>
0 -1 52 1042336445 930008956 1073737691 -36869 1073446911 -1
1064793489 -1884555331</internalNodes>
<leafValues>
-5.5574280023574829e-01 5.8720117807388306e-01</leafValues></_>
<_>
<internalNodes>
0 -1 59 307288180 1862631681 134230053 1666482434
-1979547580 486572553 -2122301300 275678211</internalNodes>
<leafValues>
-6.6033959388732910e-01 4.7883984446525574e-01</leafValues></_></weakClassifiers></_>
<!-- stage 15 -->
<_>
<maxWeakCount>5</maxWeakCount>
<stageThreshold>-5.6767666339874268e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 27 970512799 1073873004 -2147467111 -586149724
2080385096 -1601929063 -2109698803 1932544002</internalNodes>
<leafValues>
-7.4157303571701050e-01 2.3076923191547394e-01</leafValues></_>
<_>
<internalNodes>
0 -1 68 -1826168427 1157845249 69640 -707641323 -1878601112
-2147434480 8526616 -1919192087</internalNodes>
<leafValues>
-4.8303791880607605e-01 5.7106935977935791e-01</leafValues></_>
<_>
<internalNodes>
0 -1 2 -286332165 -856231938 -1359075465 2008874994
936271867 1341521919 1291798515 -638068998</internalNodes>
<leafValues>
-4.5425757765769958e-01 6.0736602544784546e-01</leafValues></_>
<_>
<internalNodes>
0 -1 69 -889718955 1445904375 -16846850 -1318191105
-59300018 1702756215 -151076884 -253759518</internalNodes>
<leafValues>
-4.5242094993591309e-01 6.6423928737640381e-01</leafValues></_>
<_>
<internalNodes>
0 -1 5 -269299977 -83886081 -81921 -202115082 -196612
1996488703 -134922377 -831371521</internalNodes>
<leafValues>
-4.6283668279647827e-01 6.4084291458129883e-01</leafValues></_></weakClassifiers></_>
<!-- stage 16 -->
<_>
<maxWeakCount>5</maxWeakCount>
<stageThreshold>-8.0613672733306885e-01</stageThreshold>
<weakClassifiers>
<_>
<internalNodes>
0 -1 75 -518941801 -2063396849 285216791 -2147155968
-1593732973 -401981304 16950161 -1446266867</internalNodes>
<leafValues>
-5.5018585920333862e-01 4.6794870495796204e-01</leafValues></_>
<_>
<internalNodes>
0 -1 80 -180264429 -2130640895 -805306367 -2147385344
-2142601200 1073741826 1090600968 1215529111</internalNodes>
<leafValues>
-5.0185394287109375e-01 5.4967546463012695e-01</leafValues></_>
<_>
<internalNodes>
0 -1 38 43288548 -776699902 16842967 -719847421 -1870483439
-1609883646 -1988099424 277618810</internalNodes>
<leafValues>
-6.9379532337188721e-01 4.0550073981285095e-01</leafValues></_>
<_>
<internalNodes>
0 -1 31 1094774863 -1806810620 86000023 -2145775593
-804632430 -1195196288 -2129655117 -1441791665</internalNodes>
<leafValues>
-5.4782897233963013e-01 6.0082912445068359e-01</leafValues></_>
<_>
<internalNodes>
0 -1 79 -535637683 -658423808 524290 814219784 134471684
-2097152000 -1073487842 1917251584</internalNodes>
<leafValues>
-6.2990331649780273e-01 5.0844103097915649e-01</leafValues></_></weakClassifiers></_></stages>
<features>
<_>
<rect>
0 0 2 3</rect></_>
<_>
<rect>
0 1 2 2</rect></_>
<_>
<rect>
0 3 2 2</rect></_>
<_>
<rect>
0 3 3 4</rect></_>
<_>
<rect>
0 4 1 2</rect></_>
<_>
<rect>
0 4 4 5</rect></_>
<_>
<rect>
0 5 2 2</rect></_>
<_>
<rect>
0 8 1 1</rect></_>
<_>
<rect>
0 8 3 4</rect></_>
<_>
<rect>
0 9 1 3</rect></_>
<_>
<rect>
0 11 6 1</rect></_>
<_>
<rect>
0 11 6 3</rect></_>
<_>
<rect>
0 12 1 2</rect></_>
<_>
<rect>
0 12 2 2</rect></_>
<_>
<rect>
0 14 3 2</rect></_>
<_>
<rect>
0 16 1 1</rect></_>
<_>
<rect>
0 16 3 1</rect></_>
<_>
<rect>
0 17 1 1</rect></_>
<_>
<rect>
0 17 2 1</rect></_>
<_>
<rect>
0 17 3 1</rect></_>
<_>
<rect>
0 17 4 1</rect></_>
<_>
<rect>
1 1 2 2</rect></_>
<_>
<rect>
1 5 5 5</rect></_>
<_>
<rect>
1 9 4 2</rect></_>
<_>
<rect>
1 10 2 3</rect></_>
<_>
<rect>
1 15 2 1</rect></_>
<_>
<rect>
1 16 4 1</rect></_>
<_>
<rect>
1 17 4 1</rect></_>
<_>
<rect>
2 6 6 4</rect></_>
<_>
<rect>
2 9 3 2</rect></_>
<_>
<rect>
2 10 4 1</rect></_>
<_>
<rect>
2 16 2 1</rect></_>
<_>
<rect>
2 17 1 1</rect></_>
<_>
<rect>
2 17 3 1</rect></_>
<_>
<rect>
3 0 3 2</rect></_>
<_>
<rect>
3 1 2 1</rect></_>
<_>
<rect>
3 10 5 2</rect></_>
<_>
<rect>
4 8 4 1</rect></_>
<_>
<rect>
4 9 4 1</rect></_>
<_>
<rect>
4 13 3 1</rect></_>
<_>
<rect>
4 17 4 1</rect></_>
<_>
<rect>
5 3 5 4</rect></_>
<_>
<rect>
5 9 4 1</rect></_>
<_>
<rect>
6 17 2 1</rect></_>
<_>
<rect>
7 0 3 1</rect></_>
<_>
<rect>
7 7 3 1</rect></_>
<_>
<rect>
7 10 4 1</rect></_>
<_>
<rect>
7 12 4 1</rect></_>
<_>
<rect>
7 13 3 1</rect></_>
<_>
<rect>
7 15 3 1</rect></_>
<_>
<rect>
8 10 2 1</rect></_>
<_>
<rect>
8 12 3 1</rect></_>
<_>
<rect>
8 13 3 1</rect></_>
<_>
<rect>
8 14 3 1</rect></_>
<_>
<rect>
8 14 4 2</rect></_>
<_>
<rect>
8 16 4 1</rect></_>
<_>
<rect>
8 17 4 1</rect></_>
<_>
<rect>
9 0 1 1</rect></_>
<_>
<rect>
9 15 3 1</rect></_>
<_>
<rect>
10 0 3 2</rect></_>
<_>
<rect>
10 16 2 1</rect></_>
<_>
<rect>
10 17 3 1</rect></_>
<_>
<rect>
11 0 3 3</rect></_>
<_>
<rect>
11 5 3 3</rect></_>
<_>
<rect>
11 9 3 2</rect></_>
<_>
<rect>
11 10 2 1</rect></_>
<_>
<rect>
11 15 2 1</rect></_>
<_>
<rect>
11 17 3 1</rect></_>
<_>
<rect>
12 14 1 1</rect></_>
<_>
<rect>
13 0 2 1</rect></_>
<_>
<rect>
13 3 2 3</rect></_>
<_>
<rect>
13 13 2 2</rect></_>
<_>
<rect>
13 17 2 1</rect></_>
<_>
<rect>
14 2 2 2</rect></_>
<_>
<rect>
14 8 2 2</rect></_>
<_>
<rect>
14 15 2 1</rect></_>
<_>
<rect>
14 16 2 1</rect></_>
<_>
<rect>
14 17 2 1</rect></_>
<_>
<rect>
15 2 1 1</rect></_>
<_>
<rect>
17 5 1 1</rect></_>
<_>
<rect>
17 7 1 4</rect></_></features></cascade>
</opencv_storage>

View File

@ -1,20 +0,0 @@
var cv = require('../lib/opencv');
var COLOR = [0, 255, 0]; //default red
var thickness = 2; // default 1
cv.readImage('./mona.png', function(err, im) {
im.detectObject('../data/haarcascade_frontalface_alt2.xml', {}, function(err, faces) {
for(var k = 0; k < faces.length; k++) {
face = faces[k];
im.rectangle([face.x, face.y], [face.x + face.width, face.y + face.height], COLOR, 2);
}
im.save('/tmp/salida.png');
});
});

View File

@ -1,16 +1,14 @@
var cv = require('../lib/opencv');
cv.readImage("./files/mona.png", function(err, orig) {
if (err) throw err;
cv.readImage("./mona.png", function(err, orig) {
cv.readImage("./over_text.png", function(err, over_text) {
var result = new cv.Matrix(orig.width(), orig.height());
result.addWeighted(orig, 0.7, over_text, 0.9);
result.save("/tmp/weighted.png");
});
cv.readImage("./files/over_text.png", function(err, over_text) {
if (err) throw err;
var result = new cv.Matrix(orig.width(), orig.height());
result.addWeighted(orig, 0.7, over_text, 0.9);
result.save("./tmp/weighted.png");
console.log('Image saved to ./tmp/weighted.png');
});
});

View File

@ -1,13 +1,12 @@
var cv = require('../lib/opencv');
var camera = new cv.VideoCapture(0);
var window = new cv.NamedWindow('Video', 0)
setInterval(function() {
camera.read(function(err, im) {
im.save('/tmp/cam.png');
});
}, 1000);
camera.read(function(err, im) {
if (err) throw err;
window.show(im);
window.blockingWaitKey(0, 50);
});
}, 20);

18
examples/car-detection.js Normal file
View File

@ -0,0 +1,18 @@
var cv = require('../lib/opencv');
cv.readImage("./files/car1.jpg", function(err, im){
if (err) throw err;
if (im.width() < 1 || im.height() < 1) throw new Error('Image has no size');
im.detectObject("../data/hogcascade_cars_sideview.xml", {}, function(err, cars){
if (err) throw err;
for (var i=0; i < cars.length; i++){
var x = cars[i];
im.rectangle([x.x, x.y], [x.width, x.height]);
}
im.save('./tmp/car-detection.jpg');
console.log('Image saved to ./tmp/car-detection.jpg');
});
});

14
examples/color-filter.js Normal file
View File

@ -0,0 +1,14 @@
var cv = require('../lib/opencv');
// (B)lue, (G)reen, (R)ed
var lower_threshold = [46, 57, 83];
var upper_threshold = [80, 96, 115];
cv.readImage('./files/coin1.jpg', function(err, im) {
if (err) throw err;
if (im.width() < 1 || im.height() < 1) throw new Error('Image has no size');
im.inRange(lower_threshold, upper_threshold);
im.save('./tmp/coin_detected.jpg');
console.log('Image saved to ./tmp/coin_detected.jpg');
});

View File

@ -1,12 +0,0 @@
var cv = require('../lib/opencv');
// (B)lue, (G)reen, (R)ed
var lower_threshold = [46, 57, 83];
var upper_threshold = [80, 96, 115];
cv.readImage('./coin1.jpg', function(err, im) {
im.inRange(lower_threshold, upper_threshold);
im.save('./coin_detected.jpg');
});

View File

@ -5,37 +5,41 @@ var highThresh = 100;
var nIters = 2;
var maxArea = 2500;
var GREEN = [0, 255, 0]; //B, G, R
var WHITE = [255, 255, 255]; //B, G, R
var RED = [0, 0, 255]; //B, G, R
var GREEN = [0, 255, 0]; // B, G, R
var WHITE = [255, 255, 255]; // B, G, R
var RED = [0, 0, 255]; // B, G, R
cv.readImage('./stuff.png', function(err, im) {
cv.readImage('./files/stuff.png', function(err, im) {
if (err) throw err;
var width = im.width();
var height = im.height();
if (width < 1 || height < 1) throw new Error('Image has no size');
var big = new cv.Matrix(im.width(), im.height());
var all = new cv.Matrix(im.width(), im.height());
var big = new cv.Matrix(height, width);
var all = new cv.Matrix(height, width);
im.convertGrayscale();
im_canny = im.copy();
im.convertGrayscale();
im_canny = im.copy();
im_canny.canny(lowThresh, highThresh);
im_canny.dilate(nIters);
im_canny.canny(lowThresh, highThresh);
im_canny.dilate(nIters);
contours = im_canny.findContours();
contours = im_canny.findContours();
for(i = 0; i < contours.size(); i++) {
if(contours.area(i) > maxArea) {
var moments = contours.moments(i);
var cgx = Math.round(moments.m10/moments.m00);
var cgy = Math.round(moments.m01/moments.m00);
big.drawContour(contours, i, GREEN);
big.line([cgx - 5, cgy], [cgx + 5, cgy], RED);
big.line([cgx, cgy - 5], [cgx, cgy + 5], RED);
}
}
for(i = 0; i < contours.size(); i++) {
if(contours.area(i) > maxArea) {
var moments = contours.moments(i);
var cgx = Math.round(moments.m10 / moments.m00);
var cgy = Math.round(moments.m01 / moments.m00);
big.drawContour(contours, i, GREEN);
big.line([cgx - 5, cgy], [cgx + 5, cgy], RED);
big.line([cgx, cgy - 5], [cgx, cgy + 5], RED);
}
}
all.drawAllContours(contours, WHITE);
all.drawAllContours(contours, WHITE);
big.save('./big.png');
all.save('./all.png');
big.save('./tmp/big.png');
all.save('./tmp/all.png');
console.log('Image saved to ./tmp/big.png && ./tmp/all.png');
});

21
examples/convert-image.js Executable file
View File

@ -0,0 +1,21 @@
var cv = require('../lib/opencv');
cv.readImage('./files/mona.png', function(err, im) {
if (err) throw err;
if (im.width() < 1 || im.height() < 1) throw new Error('Image has no size');
img_hsv = im.copy();
img_gray = im.copy();
img_hsv.convertHSVscale();
img_gray.convertGrayscale();
im.save('./tmp/nor.png');
img_hsv.save('./tmp/hsv.png');
img_gray.save('./tmp/gray.png');
img_crop = im.crop(50,50,250,250);
img_crop.save('./tmp/crop.png');
console.log('Image saved to ./tmp/{crop|nor|hsv|gray}.png');
});

View File

@ -1,18 +0,0 @@
var cv = require('../lib/opencv');
cv.readImage("./mona.png", function(err, im) {
img_hsv = im.copy();
img_gray = im.copy();
img_hsv.convertHSVscale();
img_gray.convertGrayscale();
im.save("/tmp/nor.png");
img_hsv.save("/tmp/hsv.png");
img_gray.save("/tmp/gray.png");
console.log("Guardado");
});

View File

@ -1,9 +1,4 @@
#!/usr/bin/env node
//
// Detects triangles and quadrilaterals
//
var cv = require('../lib/opencv');
var lowThresh = 0;
@ -11,42 +6,46 @@ var highThresh = 100;
var nIters = 2;
var minArea = 2000;
var BLUE = [0, 255, 0]; //B, G, R
var RED = [0, 0, 255]; //B, G, R
var GREEN = [0, 255, 0]; //B, G, R
var WHITE = [255, 255, 255]; //B, G, R
var BLUE = [0, 255, 0]; // B, G, R
var RED = [0, 0, 255]; // B, G, R
var GREEN = [0, 255, 0]; // B, G, R
var WHITE = [255, 255, 255]; // B, G, R
cv.readImage('./shapes.jpg', function(err, im) {
cv.readImage('./files/shapes.jpg', function(err, im) {
if (err) throw err;
var out = new cv.Matrix(im.height(), im.width());
width = im.width()
height = im.height()
if (width < 1 || height < 1) throw new Error('Image has no size');
im.convertGrayscale();
im_canny = im.copy();
var out = new cv.Matrix(height, width);
im.convertGrayscale();
im_canny = im.copy();
im_canny.canny(lowThresh, highThresh);
im_canny.dilate(nIters);
im_canny.canny(lowThresh, highThresh);
im_canny.dilate(nIters);
contours = im_canny.findContours();
contours = im_canny.findContours();
for (i = 0; i < contours.size(); i++) {
for(i = 0; i < contours.size(); i++) {
if (contours.area(i) < minArea) continue;
if(contours.area(i) < minArea) continue;
var arcLength = contours.arcLength(i, true);
contours.approxPolyDP(i, 0.01 * arcLength, true);
var arcLength = contours.arcLength(i, true);
contours.approxPolyDP(i, 0.01 * arcLength, true);
switch(contours.cornerCount(i)) {
case 3:
out.drawContour(contours, i, GREEN);
break;
case 4:
out.drawContour(contours, i, RED);
break;
default:
out.drawContour(contours, i, WHITE);
}
}
switch(contours.cornerCount(i)) {
case 3:
out.drawContour(contours, i, GREEN);
break;
case 4:
out.drawContour(contours, i, RED);
break;
default:
out.drawContour(contours, i, WHITE);
}
}
out.save('./out.png');
out.save('./tmp/detect-shapes.png');
console.log('Image saved to ./tmp/detect-shapes.png');
});

View File

@ -0,0 +1,22 @@
var cv = require('../lib/opencv');
var COLOR = [0, 255, 0]; // default red
var thickness = 2; // default 1
cv.readImage('./files/mona.png', function(err, im) {
if (err) throw err;
if (im.width() < 1 || im.height() < 1) throw new Error('Image has no size');
im.detectObject('../data/haarcascade_frontalface_alt2.xml', {}, function(err, faces) {
if (err) throw err;
for (var i = 0; i < faces.length; i++) {
face = faces[i];
im.rectangle([face.x, face.y], [face.x + face.width, face.y + face.height], COLOR, 2);
}
im.save('./tmp/face-detection-rectangle.png');
console.log('Image saved to ./tmp/face-detection-rectangle.png');
});
});

18
examples/face-detection.js Executable file
View File

@ -0,0 +1,18 @@
// Detect faces in an image and draw an ellipse around each one found.
// Writes the annotated image to ./tmp/face-detection.png.
var cv = require('../lib/opencv');

cv.readImage("./files/mona.png", function(err, im){
  if (err) throw err;
  if (im.width() < 1 || im.height() < 1) throw new Error('Image has no size');

  im.detectObject("../data/haarcascade_frontalface_alt.xml", {}, function(err, faces){
    if (err) throw err;
    // One ellipse per detection, centered on the face bounding box.
    faces.forEach(function(face){
      im.ellipse(face.x + face.width / 2, face.y + face.height / 2, face.width / 2, face.height / 2);
    });
    im.save('./tmp/face-detection.png');
    console.log('Image saved to ./tmp/face-detection.png');
  });
});

View File

@ -1,46 +1,30 @@
/*
// Face recognition proxy
var http = require('http'),
request = require('request'),
cv = require('../lib/opencv');
Face recognition proxy
http.createServer(function(req, resp){
var url = req.url.slice(1);
request({uri:url, encoding:'binary'}, function(err, r, body){
if (err) return resp.end(err.stack);
if (!/image\//.test(r.headers['content-type'])) return resp.end('Not an image');
*/
cv.readImage(new Buffer(body, 'binary'), function(err, im){
if (err) return resp.end(err.stack);
if (im.width() < 1 || im.height() < 1) return resp.end('Image has no size');
var http = require('http')
, request = require('request')
, cv = require('../lib/opencv')
, face_cascade = new cv.CascadeClassifier("./data/haarcascade_frontalface_alt.xml")
http.createServer(function(req, resp){
var url = req.url.slice(1);
console.log(url);
if (url.indexOf('http') != 0){
return request({uri:'http://google.com'}).pipe(resp)
}
im.detectObject('../data/haarcascade_frontalface_alt.xml', {}, function(err, faces) {
if (err) return resp.end(err.stack);
// TODO make sure image
if (url.indexOf(".jpg", url.length - 4) !== -1 ||
url.indexOf(".png", url.length - 4) !== -1){
request({uri:url, encoding:'binary'}, function(err, r, body){
if (err) throw err;
cv.readImage(new Buffer(body, 'binary'), function(err, im){
im.faceDetect(im, {}, function(err, faces){
for (var i=0;i<faces.length; i++){
var x = faces[i]
im.ellipse(x.x + x.width/2, x.y + x.height/2, x.width/2, x.height/2);
}
for (var i = 0; i < faces.length; i++){
var face = faces[i];
im.ellipse(face.x + face.width / 2, face.y + face.height / 2, face.width / 2, face.height / 2);
}
//console.log(faces);
resp.writeHead(200, {'Content-Type': 'image/jpeg'});
resp.end(im.toBuffer());
});
});
})
} else {
request({uri:url || 'http://google.com'}).pipe(resp)
}
}).listen(1901)
resp.writeHead(200, {'Content-Type': 'image/jpeg'});
resp.end(im.toBuffer());
});
});
});
}).listen(3000, function(){ console.log('Listening on http://localhost:3000'); })

View File

@ -1,18 +0,0 @@
var cv = require('../lib/opencv')
, assert = require('assert')
, fs =require('fs')
//console.log(cv.version)
cv.readImage("./mona.png", function(err, im){
im.detectObject("./haarcascade_frontalface_alt.xml", {}, function(err, faces){
for (var i=0;i<faces.length; i++){
var x = faces[i];
im.ellipse(x.x + x.width/2, x.y + x.height/2, x.width/2, x.height/2);
}
im.save('./out.png');
});
});

BIN
examples/files/car1.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 270 KiB

BIN
examples/files/car2.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 992 KiB

View File

Before

Width:  |  Height:  |  Size: 9.4 KiB

After

Width:  |  Height:  |  Size: 9.4 KiB

View File

Before

Width:  |  Height:  |  Size: 13 KiB

After

Width:  |  Height:  |  Size: 13 KiB

View File

Before

Width:  |  Height:  |  Size: 518 KiB

After

Width:  |  Height:  |  Size: 518 KiB

View File

Before

Width:  |  Height:  |  Size: 5.4 KiB

After

Width:  |  Height:  |  Size: 5.4 KiB

View File

Before

Width:  |  Height:  |  Size: 111 KiB

After

Width:  |  Height:  |  Size: 111 KiB

View File

Before

Width:  |  Height:  |  Size: 116 KiB

After

Width:  |  Height:  |  Size: 116 KiB

View File

Before

Width:  |  Height:  |  Size: 186 KiB

After

Width:  |  Height:  |  Size: 186 KiB

View File

@ -2,19 +2,21 @@ var path = require('path'),
cv = require('../lib/opencv');
// When opening a file, the full path must be passed to opencv
var vid = new cv.VideoCapture(path.join(__dirname, "motion.mov"));
var vid = new cv.VideoCapture(path.join(__dirname, 'files', 'motion.mov'));
vid.read(function(err, mat){
var track = new cv.TrackedObject(mat, [420, 110, 490, 170], {channel: "value"});
if (err) throw err;
var track = new cv.TrackedObject(mat, [420, 110, 490, 170], {channel: 'value'});
var x = 0;
var iter = function(){
vid.read(function(err, m2){
x++;
var rec = track.track(m2)
console.log(">>", x, ":" , rec)
console.log('>>', x, ':' , rec)
if (x % 10 == 0){
m2.rectangle([rec[0], rec[1]], [rec[2], rec[3]])
// m2.save('./out-motiontrack-' + x + '.jpg')
// m2.save('./out-motiontrack-' + x + '.jpg')
}
if (x<100)
iter();
@ -22,4 +24,3 @@ vid.read(function(err, mat){
}
iter();
})

View File

@ -1,9 +1,4 @@
#!/usr/bin/env node
//
// Finds quadrilaterals and fills them with an X
//
var cv = require('../lib/opencv');
var lowThresh = 0;
@ -17,39 +12,41 @@ var RED = [0, 0, 255]; //B, G, R
var GREEN = [0, 255, 0]; //B, G, R
var WHITE = [255, 255, 255]; //B, G, R
cv.readImage('./files/quads.jpg', function(err, im) {
if (err) throw err;
if (im.width() < 1 || im.height() < 1) throw new Error('Image has no size');
cv.readImage('./quads.jpg', function(err, im) {
var out = im.copy();
var out = im.copy();
im.convertGrayscale();
im_canny = im.copy();
im.convertGrayscale();
im_canny = im.copy();
im_canny.canny(lowThresh, highThresh);
im_canny.dilate(nIters);
im_canny.canny(lowThresh, highThresh);
im_canny.dilate(nIters);
contours = im_canny.findContours();
contours = im_canny.findContours();
for (i = 0; i < contours.size(); i++) {
for(i = 0; i < contours.size(); i++) {
var area = contours.area(i);
if (area < minArea || area > maxArea) continue;
var area = contours.area(i);
if(area < minArea || area > maxArea) continue;
var arcLength = contours.arcLength(i, true);
contours.approxPolyDP(i, 0.01 * arcLength, true);
var arcLength = contours.arcLength(i, true);
contours.approxPolyDP(i, 0.01 * arcLength, true);
if (contours.cornerCount(i) != 4) continue;
if(contours.cornerCount(i) != 4) continue;
var points = [
contours.point(i, 0),
contours.point(i, 1),
contours.point(i, 2),
contours.point(i, 3)
]
var points = [
contours.point(i, 0),
contours.point(i, 1),
contours.point(i, 2),
contours.point(i, 3)
]
out.line([points[0].x,points[0].y], [points[2].x, points[2].y], RED);
out.line([points[1].x,points[1].y], [points[3].x, points[3].y], RED);
}
out.line([points[0].x,points[0].y], [points[2].x, points[2].y], RED);
out.line([points[1].x,points[1].y], [points[3].x, points[3].y], RED);
}
out.save('./out.png');
out.save('./tmp/quad-crosses.png');
console.log('Image saved to ./tmp/quad-crosses.png');
});

View File

@ -1,27 +1,20 @@
var cv = require('../lib/opencv');
cv.readImage("./mona.png", function(err, im) {
salt(im, 3000);
im.save("/tmp/salt.png");
cv.readImage("./files/mona.png", function(err, im) {
salt(im, 1000);
im.save("./tmp/salt.png");
console.log('Image saved to ./tmp/salt.png');
});
function salt(img, n) {
var channels;
if ((channels = img.channels()) != 3) return console.log('Image has only %s Channel. It\'s not possible to salt this image.', channels)
if (img.channels() == 1) {
console.log("1 Canales");
} else if (img.channels() == 3) {
for(k = 0; k < n; k ++) {
i = Math.random() * img.width();
j = Math.random() * img.height();
img.set(j, i, 255);
}
}
var width = img.width();
var height = img.height();
for(var i = 0; i < n; i ++) {
x = Math.random() * width;
y = Math.random() * height;
img.set(y, x, 255);
}
}

View File

@ -1,31 +1,24 @@
var cv = require('../lib/opencv')
var cv = require('../lib/opencv');
var vid = new cv.VideoCapture(0);
var vid = new cv.VideoCapture(0)
vid.read(function(err, im){
if (err) throw err;
im.detectObject(cv.FACE_CASCADE, {}, function(err, faces){
if (err) throw err;
if (!faces.length) return console.log("No Faces");
var snap = function(){
vid.read(function(err, im){
im.detectObject(cv.FACE_CASCADE, {}, function(err, faces){
if (!faces){
console.log("No Faces")
return;
}
var face = faces[0]
, ims = im.size()
var im2 = im.roi(face.x, face.y, face.width, face.height)
/*
im.adjustROI(
-face.y
, (face.y + face.height) - ims[0]
, -face.x
, (face.x + face.width) - ims[1])
*/
im2.save('out.jpg')
})
});
}
snap()
var face = faces[0];
var ims = im.size();
var im2 = im.roi(face.x, face.y, face.width, face.height)
/*
im.adjustROI(
-face.y
, (face.y + face.height) - ims[0]
, -face.x
, (face.x + face.width) - ims[1])
*/
im2.save('./tmp/take-face-pics.jpg')
console.log('Image saved to ./tmp/take-face-pics.jpg');
})
});

0
examples/tmp/.gitkeep Normal file
View File

16
examples/warp-image.js Normal file
View File

@ -0,0 +1,16 @@
// Apply a perspective warp to an image: the source quad is the full image,
// the destination quad skews the top-right and bottom-left corners inward.
// Writes the warped image to ./tmp/warp-image.png.
var cv = require('../lib/opencv');

// Read from ./files/ like the other examples (the original pointed at
// ./mona.png, which does not match the repository layout).
cv.readImage("./files/mona.png", function(err, im) {
  if (err) throw err;
  var width = im.width();
  var height = im.height();
  if (width < 1 || height < 1) throw new Error('Image has no size');

  // Quads are flat [x0,y0, x1,y1, x2,y2, x3,y3] corner lists.
  var srcArray = [0, 0, width, 0, width, height, 0, height];
  var dstArray = [0, 0, width * 0.9, height * 0.1, width, height, width * 0.2, height * 0.8];

  var xfrmMat = im.getPerspectiveTransform(srcArray, dstArray);
  // White ([255,255,255]) fills the area outside the warped quad.
  im.warpPerspective(xfrmMat, width, height, [255, 255, 255]);

  // Save where the log message says we do (the original saved to
  // ./warp-image.png while logging ./tmp/warp-image.png).
  im.save("./tmp/warp-image.png");
  console.log('Image saved to ./tmp/warp-image.png');
});

View File

@ -1,10 +1 @@
module.exports = require('../build/Release/opencv.node');
try {
module.exports = require('../build/Release/opencv.node');
} catch (e) { try {
module.exports = require('../build/default/opencv.node');
} catch (e) {
throw e;
}}

View File

@ -1,157 +1,153 @@
var Stream = require('stream').Stream
, Buffers = require('buffers')
, util = require('util')
, path = require('path')
, path = require('path');
var bindings = require('./bindings')
var cv = module.exports = require('./bindings');
var cv = module.exports = {};
cv.__proto__ = bindings;
/*
# Matrix #
The matrix is one of opencv's most core datatypes.
*/
var Matrix = cv.Matrix
, VideoCapture = cv.VideoCapture
, ImageStream
, ImageDataStream
, ObjectDetectionStream
, VideoStream;
var matrix = cv.Matrix.prototype;
Matrix.prototype.detectObject = function(classifier, opts, cb){
var face_cascade;
opts = opts || {};
cv._detectObjectClassifiers = cv._detectObjectClassifiers || {};
matrix.detectObject = function(classifier, opts, cb){
opts = opts || {}
cv._detectObjectClassifiers = cv._detectObjectClassifiers || {}
if (cv._detectObjectClassifiers[classifier]){
var face_cascade = cv._detectObjectClassifiers[classifier];
} else{
var face_cascade = new cv.CascadeClassifier(classifier);
if (!(face_cascade = cv._detectObjectClassifiers[classifier])){
face_cascade = new cv.CascadeClassifier(classifier);
cv._detectObjectClassifiers[classifier] = face_cascade;
}
face_cascade.detectMultiScale(this, cb, opts.scale, opts.neighbors
, opts.min && opts.min[0], opts.min && opts.min[1]);
}
matrix.inspect = function(){
var size = this.size() ? (this.size()[0] + 'x' + this.size()[1]) : '';
return "[Matrix " + size + " ]";
}
cv.ImageDataStream = function(){
this.data = Buffers([])
this.writable = true
}
util.inherits(cv.ImageDataStream, Stream);
var imagedatastream = cv.ImageDataStream.prototype;
imagedatastream.write = function(buf){
this.data.push(buf)
return true;
face_cascade.detectMultiScale(this, cb, opts.scale, opts.neighbors
, opts.min && opts.min[0], opts.min && opts.min[1]);
}
imagedatastream.end = function(b){
var self = this;
if (b)
imagestream.write.call(this,b);
var buf = this.data.toBuffer();
cv.readImage(buf, function(err, im){
self.emit('load', im);
});
Matrix.prototype.inspect = function(){
var size = (this.size()||[]).join('x');
return "[ Matrix " + size + " ]";
}
ImageStream = cv.ImageStream = function(){
this.writable = true;
}
util.inherits(ImageStream, Stream);
cv.ImageStream = function(){
this.writable = true
ImageStream.prototype.write = function(buf){
var self = this;
cv.readImage(buf, function(err, matrix){
if (err) return self.emit('error', err);
self.emit('data', matrix);
});
}
util.inherits(cv.ImageStream, Stream);
var imagestream = cv.ImageStream.prototype;
imagestream.write = function(buf){
var self = this;
cv.readImage(buf, function(err, matrix){
self.emit('data', matrix);
});
ImageDataStream = cv.ImageDataStream = function(){
this.data = Buffers([]);
this.writable = true;
}
util.inherits(ImageDataStream, Stream);
ImageDataStream.prototype.write = function(buf){
this.data.push(buf);
return true;
}
// Object detect stream
cv.ObjectDetectionStream = function(cascade, opts){
ImageDataStream.prototype.end = function(b){
var self = this;
if (b) ImageStream.prototype.write.call(this, b);
var buf = this.data.toBuffer();
cv.readImage(buf, function(err, im){
if (err) return self.emit('error', err);
self.emit('load', im);
});
}
ObjectDetectionStream = cv.ObjectDetectionStream = function(cascade, opts){
this.classifier = new cv.CascadeClassifier(cascade);
this.opts = opts || {}
this.opts = opts || {};
this.readable = true;
this.writable = true;
}
util.inherits(ObjectDetectionStream, Stream);
util.inherits(cv.ObjectDetectionStream, Stream);
var ods = cv.ObjectDetectionStream.prototype;
ods.write = function(m){
ObjectDetectionStream.prototype.write = function(m){
var self = this;
this.classifier.detectMultiScale(m,
function(e, objs){
if (e) { throw e }
self.emit('data', objs, m);
}
, this.opts.scale, this.opts.neighbors
, this.opts.min && this.opts.min[0], this.opts.min && this.opts.min[1]);
this.classifier.detectMultiScale(m, function(err, objs){
if (err) return self.emit('error', err);
self.emit('data', objs, m);
}
, this.opts.scale
, this.opts.neighbors
, this.opts.min && this.opts.min[0]
, this.opts.min && this.opts.min[1]);
}
// == Video Stream ==
cv.VideoStream = function(src){
if (src instanceof cv.VideoCapture){
this.video = src
} else {
this.video = new cv.VideoCapture(src);
}
VideoStream = cv.VideoStream = function(src){
if (!(src instanceof VideoCapture)) src = new VideoCapture(src);
this.video = src;
this.readable = true;
this.paused = false;
}
util.inherits(VideoStream, Stream);
util.inherits(cv.VideoStream, Stream);
var videostream = cv.VideoStream.prototype;
cv.VideoCapture.prototype.toStream = function(){
return new cv.VideoStream(this);
}
videostream.read = function(){
VideoStream.prototype.read = function(){
var self = this;
var frame = function(){
self.video.read(function(err, mat){
self.emit('data', mat)
if (!self.paused){
process.nextTick(frame)
}
if (err) return self.emit('error', err);
self.emit('data', mat);
if (!self.paused) process.nextTick(frame);
})
}
frame();
}
videostream.pause = function(){
this.paused = true
VideoStream.prototype.pause = function(){
this.paused = true;
}
videostream.resume = function(){
this.paused = false
this.read()
VideoStream.prototype.resume = function(){
this.paused = false;
this.read();
}
VideoCapture.prototype.toStream = function(){
return new VideoStream(this);
}
// Provide cascade data for faces etc.
cv.FACE_CASCADE = path.resolve(__dirname, '../data/haarcascade_frontalface_alt.xml')
var CASCADES = {
FACE_CASCADE: 'haarcascade_frontalface_alt.xml'
, EYE_CASCADE: 'haarcascade_eye.xml'
, EYEGLASSES_CASCADE: 'haarcascade_eye_tree_eyeglasses.xml'
, FULLBODY_CASCADE: 'haarcascade_fullbody.xml'
, CAR_SIDE_CASCADE: 'hogcascade_cars_sideview.xml'
}
Object.keys(CASCADES).forEach(function(k){
cv[k] = path.resolve(__dirname, '../data', CASCADES[k])
})

View File

@ -3,19 +3,17 @@
"description": "Node Bindings to OpenCV",
"author": "Peter Braden <peterbraden@peterbraden.co.uk>",
"dependencies": {
"buffers": "0.1.1"
"buffers": "0.1.1",
"nan": "^1.3.0"
},
"version": "0.4.0",
"version": "1.0.0",
"devDependencies": {
"vows": "*"
},
"engines": {
"node": ">=0.4.1"
"tape": "^3.0.0"
},
"license": "MIT",
"scripts": {
"preinstall": "node-gyp clean rebuild",
"test": "vows test/unit.js"
"build": "node-gyp build",
"test": "node test/unit.js"
},
"keywords": [
"opencv",
@ -27,5 +25,8 @@
"repository": {
"type": "git",
"url": "https://github.com/peterbraden/node-opencv.git"
},
"engines": {
"node": ">=0.10"
}
}

View File

@ -10,7 +10,7 @@ for (var i = 1; i< 41; i++){
cv.readImage("/Users/peterbraden/Downloads/orl_faces/s6/10.pgm", function(e, im){
var facerec = cv.FaceRecognizer.createEigenFaceRecognizer();
var facerec = cv.FaceRecognizer.createEigenFaceRecognizer();
//facerec.trainSync(trainingData);
facerec.loadSync("/Users/peterbraden/Desktop/ORL")
@ -20,9 +20,7 @@ cv.readImage("/Users/peterbraden/Downloads/orl_faces/s6/10.pgm", function(e, im)
*/
cv.readImage("examples/mona.png", function(e, mat){
cv.readImage("./examples/files/mona.png", function(e, mat){
var th = mat.threshold(200, 200, "Threshold to Zero Inverted");
th.save('out.png')
th.save('./examples/tmp/out.png')
})

138
src/BackgroundSubtractor.cc Normal file
View File

@ -0,0 +1,138 @@
// BackgroundSubtractor.cc — node-opencv binding for OpenCV background
// subtraction.  Exposes a JS "BackgroundSubtractor" class with a
// createMOG() factory and an applyMOG(imageOrBuffer, cb) method that
// yields a foreground-mask Matrix.
#include "BackgroundSubtractor.h"
#include "Matrix.h"
#include <iostream>
#include <nan.h>
// Background segmentation is only compiled against OpenCV >= 2.4.
#if CV_MAJOR_VERSION >= 2 && CV_MINOR_VERSION >=4
Persistent<FunctionTemplate> BackgroundSubtractorWrap::constructor;
// Register the "BackgroundSubtractor" constructor and its methods on the
// module exports object (`target`).
void
BackgroundSubtractorWrap::Init(Handle<Object> target) {
NanScope();
// Constructor
Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(BackgroundSubtractorWrap::New);
NanAssignPersistent(constructor, ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(NanNew("BackgroundSubtractor"));
// createMOG is installed on the constructor itself (class-level factory);
// applyMOG is an instance (prototype) method.
NODE_SET_METHOD(ctor, "createMOG", CreateMOG);
NODE_SET_PROTOTYPE_METHOD(ctor, "applyMOG", ApplyMOG);
target->Set(NanNew("BackgroundSubtractor"), ctor->GetFunction());
};
// JS constructor: `new BackgroundSubtractor()`.  Throws when called without
// `new`.  Wraps an empty cv::Ptr — no algorithm is attached here.
NAN_METHOD(BackgroundSubtractorWrap::New) {
NanScope();
if (args.This()->InternalFieldCount() == 0)
JSTHROW_TYPE("Cannot Instantiate without new")
//Create MOG by default
cv::Ptr<cv::BackgroundSubtractor> bg;
BackgroundSubtractorWrap *pt = new BackgroundSubtractorWrap(bg);
pt->Wrap(args.This());
NanReturnValue(args.This());
}
// Factory: BackgroundSubtractor.createMOG([history, nmixtures,
// backgroundRatio, noiseSigma]) -> wrapped instance.
// NOTE(review): the parsed parameters and their defaults below are never
// passed to an actual cv::BackgroundSubtractorMOG — the returned instance
// wraps an empty cv::Ptr.  Also, args are only parsed when Length() > 1
// (a single argument is silently ignored).  Confirm intended behavior.
NAN_METHOD(BackgroundSubtractorWrap::CreateMOG) {
NanScope();
int history = 200;
int nmixtures = 5;
double backgroundRatio = 0.7;
double noiseSigma = 0;
if(args.Length() > 1){
INT_FROM_ARGS(history, 0)
INT_FROM_ARGS(nmixtures, 1)
DOUBLE_FROM_ARGS(backgroundRatio, 2)
DOUBLE_FROM_ARGS(noiseSigma, 3)
}
Local<Object> n = NanNew(BackgroundSubtractorWrap::constructor)->GetFunction()->NewInstance();
cv::Ptr<cv::BackgroundSubtractor> bg;
BackgroundSubtractorWrap *pt = new BackgroundSubtractorWrap(bg);
pt->Wrap(n);
NanReturnValue( n );
};
//Fetch foreground mask
// applyMOG(imageOrBuffer, cb): feeds one frame to the subtractor and calls
// cb(err, fgMask) with a Matrix holding the resulting foreground mask.
// Accepts either an encoded-image Buffer or a Matrix instance.
NAN_METHOD(BackgroundSubtractorWrap::ApplyMOG) {
SETUP_FUNCTION(BackgroundSubtractorWrap)
REQ_FUN_ARG(1, cb);
Local<Value> argv[2];
// No input image: report the error through the callback, node-style.
if(args.Length() == 0){
argv[0] = NanNew("Input image missing");
argv[1] = NanNull();
cb->Call(NanGetCurrentContext()->Global(), 2, argv);
NanReturnUndefined();
}
try{
// Result Matrix that will receive the foreground mask.
Local<Object> fgMask = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *img = ObjectWrap::Unwrap<Matrix>(fgMask);
cv::Mat mat;
if(Buffer::HasInstance(args[0])){
// Buffer input: wrap the raw bytes and decode via imdecode.
// NOTE(review): `mbuf` is heap-allocated and never deleted (the
// release() call is commented out) — this leaks per call; confirm.
uint8_t *buf = (uint8_t *) Buffer::Data(args[0]->ToObject());
unsigned len = Buffer::Length(args[0]->ToObject());
cv::Mat *mbuf = new cv::Mat(len, 1, CV_64FC1, buf);
mat = cv::imdecode(*mbuf, -1);
//mbuf->release();
}
else{
// Matrix input: clone so the caller's pixel data is not modified.
Matrix *_img = ObjectWrap::Unwrap<Matrix>(args[0]->ToObject());
mat = (_img->mat).clone();
}
if (mat.empty()){
return NanThrowTypeError("Error loading file");
}
cv::Mat _fgMask;
// operator() updates the background model and writes the mask.
self->subtractor->operator()(mat, _fgMask);
img->mat = _fgMask;
mat.release();
argv[0] = NanNull();
argv[1] = fgMask;
// Invoke the JS callback; propagate any JS exception it throws.
TryCatch try_catch;
cb->Call(NanGetCurrentContext()->Global(), 2, argv);
if (try_catch.HasCaught()) {
FatalException(try_catch);
}
NanReturnUndefined();
}
catch( cv::Exception& e ){
// Surface OpenCV errors as JS exceptions.
const char* err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
};
// Store the (possibly empty) algorithm handle.
BackgroundSubtractorWrap::BackgroundSubtractorWrap(cv::Ptr<cv::BackgroundSubtractor> _subtractor){
subtractor = _subtractor;
};
#endif

View File

@ -0,0 +1,21 @@
// BackgroundSubtractor.h — declaration of the node wrapper around
// cv::BackgroundSubtractor (implementation in BackgroundSubtractor.cc).
// NOTE(review): no include guard / #pragma once is visible here — confirm
// the header cannot be included twice in one translation unit.
#include "OpenCV.h"
// Background segmentation requires OpenCV >= 2.4.
#if CV_MAJOR_VERSION >= 2 && CV_MINOR_VERSION >=4
#include <opencv2/video/background_segm.hpp>
class BackgroundSubtractorWrap: public node::ObjectWrap {
public:
// Underlying OpenCV algorithm handle (may be an empty cv::Ptr).
cv::Ptr<cv::BackgroundSubtractor> subtractor;
// JS constructor template, installed on the exports object by Init().
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
// JS `new BackgroundSubtractor()` entry point.
static NAN_METHOD(New);
BackgroundSubtractorWrap(cv::Ptr<cv::BackgroundSubtractor> bg);
// Class-level factory exposed as BackgroundSubtractor.createMOG().
static NAN_METHOD(CreateMOG);
// Instance method: compute a foreground mask for one frame.
static NAN_METHOD(ApplyMOG);
};
#endif

View File

@ -12,24 +12,26 @@ Persistent<FunctionTemplate> TrackedObject::constructor;
void
TrackedObject::Init(Handle<Object> target) {
HandleScope scope;
NanScope();
// Constructor
constructor = Persistent<FunctionTemplate>::New(FunctionTemplate::New(TrackedObject::New));
constructor->InstanceTemplate()->SetInternalFieldCount(1);
constructor->SetClassName(String::NewSymbol("TrackedObject"));
Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(TrackedObject::New);
NanAssignPersistent(constructor, ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(NanNew("TrackedObject"));
// Prototype
//Local<ObjectTemplate> proto = constructor->PrototypeTemplate();
NODE_SET_PROTOTYPE_METHOD(constructor, "track", Track);
target->Set(String::NewSymbol("TrackedObject"), constructor->GetFunction());
NODE_SET_PROTOTYPE_METHOD(ctor, "track", Track);
target->Set(NanNew("TrackedObject"), ctor->GetFunction());
};
Handle<Value>
TrackedObject::New(const Arguments &args) {
HandleScope scope;
NAN_METHOD(TrackedObject::New) {
NanScope();
if (args.This()->InternalFieldCount() == 0){
JSTHROW_TYPE("Cannot Instantiate without new")
@ -53,8 +55,8 @@ TrackedObject::New(const Arguments &args) {
if (args[2]->IsObject()){
Local<Object> opts = args[2]->ToObject();
if (opts->Get(String::New("channel"))->IsString()){
v8::String::Utf8Value c(opts->Get(String::New("channel"))->ToString());
if (opts->Get(NanNew("channel"))->IsString()){
v8::String::Utf8Value c(opts->Get(NanNew("channel"))->ToString());
std::string cc = std::string(*c);
if (cc == "hue" || cc == "h"){
@ -75,7 +77,7 @@ TrackedObject::New(const Arguments &args) {
to->Wrap(args.This());
return args.This();
NanReturnValue(args.This());
}
@ -120,13 +122,12 @@ TrackedObject::TrackedObject(cv::Mat image, cv::Rect rect, int chan){
Handle<Value>
TrackedObject::Track(const v8::Arguments& args){
NAN_METHOD(TrackedObject::Track){
SETUP_FUNCTION(TrackedObject)
if (args.Length() != 1){
v8::ThrowException(v8::Exception::TypeError(v8::String::New("track takes an image param")));
return Undefined();
NanThrowTypeError("track takes an image param");
NanReturnUndefined();
}
@ -137,7 +138,7 @@ TrackedObject::Track(const v8::Arguments& args){
self->prev_rect.y <0 ||
self->prev_rect.width <= 1 ||
self->prev_rect.height <= 1){
return v8::ThrowException(v8::Exception::TypeError(v8::String::New("OPENCV ERROR: prev rectangle is illogical")));
return NanThrowTypeError("OPENCV ERROR: prev rectangle is illogical");
}
update_chann_image(self, im->mat);
@ -167,25 +168,24 @@ TrackedObject::Track(const v8::Arguments& args){
self->prev_rect = backup_prev_rect;
}
v8::Local<v8::Array> arr = v8::Array::New(4);
v8::Local<v8::Array> arr = NanNew<Array>(4);
arr->Set(0, Number::New(bounds.x));
arr->Set(1, Number::New(bounds.y));
arr->Set(2, Number::New(bounds.x + bounds.width));
arr->Set(3, Number::New(bounds.y + bounds.height));
arr->Set(0, NanNew<Number>(bounds.x));
arr->Set(1, NanNew<Number>(bounds.y));
arr->Set(2, NanNew<Number>(bounds.x + bounds.width));
arr->Set(3, NanNew<Number>(bounds.y + bounds.height));
/*
cv::Point2f pts[4];
r.points(pts);
for (int i=0; i<8; i+=2){
arr->Set(i, Number::New(pts[i].x));
arr->Set(i+1, Number::New(pts[i].y));
arr->Set(i, NanNew<Number>(pts[i].x));
arr->Set(i+1, NanNew<Number>(pts[i].y));
}
*/
return scope.Close(arr);
NanReturnValue(arr);
}

View File

@ -14,7 +14,7 @@ class TrackedObject: public node::ObjectWrap {
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
static Handle<Value> New(const Arguments &args);
static NAN_METHOD(New);
TrackedObject(cv::Mat image, cv::Rect rect, int channel);

View File

@ -1,81 +1,123 @@
#include "CascadeClassifierWrap.h"
#include "OpenCV.h"
#include "Matrix.h"
#include <nan.h>
void AsyncDetectMultiScale(uv_work_t *req);
void AfterAsyncDetectMultiScale(uv_work_t *req);
Persistent<FunctionTemplate> CascadeClassifierWrap::constructor;
void
CascadeClassifierWrap::Init(Handle<Object> target) {
HandleScope scope;
NanScope();
// Constructor
constructor = Persistent<FunctionTemplate>::New(FunctionTemplate::New(CascadeClassifierWrap::New));
constructor->InstanceTemplate()->SetInternalFieldCount(1);
constructor->SetClassName(String::NewSymbol("CascadeClassifier"));
Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(CascadeClassifierWrap::New);
NanAssignPersistent(constructor, ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(NanNew("CascadeClassifier"));
// Prototype
//Local<ObjectTemplate> proto = constructor->PrototypeTemplate();
NODE_SET_PROTOTYPE_METHOD(constructor, "detectMultiScale", DetectMultiScale);
NODE_SET_PROTOTYPE_METHOD(ctor, "detectMultiScale", DetectMultiScale);
target->Set(String::NewSymbol("CascadeClassifier"), constructor->GetFunction());
target->Set(NanNew("CascadeClassifier"), ctor->GetFunction());
};
Handle<Value>
CascadeClassifierWrap::New(const Arguments &args) {
HandleScope scope;
NAN_METHOD(CascadeClassifierWrap::New) {
NanScope();
if (args.This()->InternalFieldCount() == 0)
return v8::ThrowException(v8::Exception::TypeError(v8::String::New("Cannot Instantiate without new")));
NanThrowTypeError("Cannot instantiate without new");
CascadeClassifierWrap *pt = new CascadeClassifierWrap(*args[0]);
pt->Wrap(args.This());
return args.This();
NanReturnValue( args.This() );
}
CascadeClassifierWrap::CascadeClassifierWrap(v8::Value* fileName){
std::string filename;
filename = std::string(*v8::String::AsciiValue(fileName->ToString()));
filename = std::string(*NanAsciiString(fileName->ToString()));
if (!cc.load(filename.c_str())){
v8::ThrowException(v8::Exception::TypeError(v8::String::New("Error loading file")));
NanThrowTypeError("Error loading file");
}
}
struct classifier_baton_t {
CascadeClassifierWrap *cc;
Persistent<Function> cb;
Matrix *im;
double scale;
int neighbors;
int minw;
int minh;
int sleep_for;
std::vector<cv::Rect> res;
class AsyncDetectMultiScale : public NanAsyncWorker {
public:
AsyncDetectMultiScale(NanCallback *callback, CascadeClassifierWrap *cc, Matrix* im, double scale, int neighbors, int minw, int minh, int sleep_for) : NanAsyncWorker(callback), cc(cc), im(im), scale(scale), neighbors(neighbors), minw(minw), minh(minh), sleep_for(sleep_for) {}
~AsyncDetectMultiScale() {}
uv_work_t request;
void Execute () {
std::vector<cv::Rect> objects;
cv::Mat gray;
if(this->im->mat.channels() != 1)
cvtColor(this->im->mat, gray, CV_BGR2GRAY);
equalizeHist( gray, gray);
this->cc->cc.detectMultiScale(gray, objects, this->scale, this->neighbors, 0 | CV_HAAR_SCALE_IMAGE, cv::Size(this->minw, this->minh));
res = objects;
}
void HandleOKCallback () {
NanScope();
// this->matrix->Unref();
v8::Local<v8::Array> arr = NanNew<v8::Array>(this->res.size());
for(unsigned int i = 0; i < this->res.size(); i++ ){
v8::Local<v8::Object> x = NanNew<v8::Object>();
x->Set(NanNew("x"), NanNew<Number>(this->res[i].x));
x->Set(NanNew("y"), NanNew<Number>(this->res[i].y));
x->Set(NanNew("width"), NanNew<Number>(this->res[i].width));
x->Set(NanNew("height"), NanNew<Number>(this->res[i].height));
arr->Set(i, x);
}
//argv[1] = arr;
Local<Value> argv[] = {
NanNull()
, arr
};
TryCatch try_catch;
callback->Call(2, argv);
if (try_catch.HasCaught()) {
FatalException(try_catch);
}
}
private:
CascadeClassifierWrap *cc;
Matrix* im;
double scale;
int neighbors;
int minw;
int minh;
int sleep_for;
std::vector<cv::Rect> res;
};
Handle<Value>
CascadeClassifierWrap::DetectMultiScale(const v8::Arguments& args){
HandleScope scope;
NAN_METHOD(CascadeClassifierWrap::DetectMultiScale){
NanScope();
CascadeClassifierWrap *self = ObjectWrap::Unwrap<CascadeClassifierWrap>(args.This());
if (args.Length() < 2){
v8::ThrowException(v8::Exception::TypeError(v8::String::New("detectMultiScale takes at least 2 args")));
NanThrowTypeError("detectMultiScale takes at least 2 args");
}
Matrix *im = ObjectWrap::Unwrap<Matrix>(args[0]->ToObject());
@ -97,87 +139,9 @@ CascadeClassifierWrap::DetectMultiScale(const v8::Arguments& args){
}
classifier_baton_t *baton = new classifier_baton_t();
baton->cc = self;
baton->cb = Persistent<Function>::New(cb);
baton->im = im;
baton->scale = scale;
baton->neighbors = neighbors;
baton->minw = minw;
baton->minh = minh;
baton->sleep_for = 1;
baton->request.data = baton;
// self->Ref();
// eio_custom(EIO_DetectMultiScale, EIO_PRI_DEFAULT, EIO_AfterDetectMultiScale, baton);
// ev_ref(EV_DEFAULT_UC);
uv_queue_work(uv_default_loop(), &baton->request, AsyncDetectMultiScale, (uv_after_work_cb)AfterAsyncDetectMultiScale);
return Undefined();
NanCallback *callback = new NanCallback(cb.As<Function>());
NanAsyncQueueWorker( new AsyncDetectMultiScale(callback, self, im, scale, neighbors, minw, minh, 1) );
NanReturnUndefined();
}
void AsyncDetectMultiScale(uv_work_t *req) {
classifier_baton_t *baton = static_cast<classifier_baton_t *>(req->data);
// sleep(baton->sleep_for);
std::vector<cv::Rect> objects;
cv::Mat gray;
if(baton->im->mat.channels() != 1)
cvtColor(baton->im->mat, gray, CV_BGR2GRAY);
equalizeHist( gray, gray);
baton->cc->cc.detectMultiScale(gray, objects, baton->scale, baton->neighbors, 0 | CV_HAAR_SCALE_IMAGE, cv::Size(baton->minw, baton->minh));
baton->res = objects;
}
void AfterAsyncDetectMultiScale(uv_work_t *req) {
HandleScope scope;
classifier_baton_t *baton = static_cast<classifier_baton_t *>(req->data);
// ev_unref(EV_DEFAULT_UC);
// baton->cc->Unref();
Local<Value> argv[2];
argv[0] = Local<Value>::New(Null());
v8::Local<v8::Array> arr = v8::Array::New(baton->res.size());
for(unsigned int i = 0; i < baton->res.size(); i++ ){
v8::Local<v8::Object> x = v8::Object::New();
x->Set(v8::String::New("x"), v8::Number::New(baton->res[i].x));
x->Set(v8::String::New("y"), v8::Number::New(baton->res[i].y));
x->Set(v8::String::New("width"), v8::Number::New(baton->res[i].width));
x->Set(v8::String::New("height"), v8::Number::New(baton->res[i].height));
arr->Set(i, x);
}
argv[1] = arr;
TryCatch try_catch;
baton->cb->Call(Context::GetCurrent()->Global(), 2, argv);
if (try_catch.HasCaught()) {
FatalException(try_catch);
}
baton->cb.Dispose();
delete baton;
// return 0;
}

View File

@ -6,13 +6,14 @@ class CascadeClassifierWrap: public node::ObjectWrap {
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
static Handle<Value> New(const Arguments &args);
static NAN_METHOD(New);
CascadeClassifierWrap(v8::Value* fileName);
//static Handle<Value> LoadHaarClassifierCascade(const v8::Arguments&);
//static Handle<Value> LoadHaarClassifierCascade(const v8::Arguments&);
static NAN_METHOD(DetectMultiScale);
static Handle<Value> DetectMultiScale(const v8::Arguments&);
static void EIO_DetectMultiScale(uv_work_t *req);
static int EIO_AfterDetectMultiScale(uv_work_t *req);

60
src/Constants.cc Normal file
View File

@ -0,0 +1,60 @@
// Constants.cc — exports OpenCV's pixel depth/type constants (CV_8U,
// CV_32FC3, ...) to JavaScript as integer properties of an object attached
// to the module exports as `Constants`.
#include "OpenCV.h"
#include "Constants.h"
// CONST(C) sets property "C" on `obj` to the integer value of the macro C.
#define CONST(C) \
obj->Set(NanNew<String>(#C), NanNew<Integer>(C));
void
Constants::Init(Handle<Object> target) {
// NOTE(review): `inner` pins `obj` in a Persistent handle that is never
// read or released afterwards — it looks unnecessary (target->Set below
// already keeps the object alive); confirm intent.
Persistent<Object> inner;
Local<Object> obj = NanNew<Object>();
NanAssignPersistent(inner, obj);
CONST(CV_8U);
CONST(CV_8S);
CONST(CV_16U);
CONST(CV_16S);
CONST(CV_32S);
CONST(CV_32F);
CONST(CV_64F);
CONST(CV_USRTYPE1);
CONST(CV_8UC1);
CONST(CV_8UC2);
CONST(CV_8UC3);
CONST(CV_8UC4);
CONST(CV_8SC1);
CONST(CV_8SC2);
CONST(CV_8SC3);
CONST(CV_8SC4);
CONST(CV_16UC1);
CONST(CV_16UC2);
CONST(CV_16UC3);
CONST(CV_16UC4);
CONST(CV_16SC1);
CONST(CV_16SC2);
CONST(CV_16SC3);
CONST(CV_16SC4);
CONST(CV_32SC1);
CONST(CV_32SC2);
CONST(CV_32SC3);
CONST(CV_32SC4);
CONST(CV_32FC1);
CONST(CV_32FC2);
CONST(CV_32FC3);
CONST(CV_32FC4);
CONST(CV_64FC1);
CONST(CV_64FC2);
CONST(CV_64FC3);
CONST(CV_64FC4);
// Expose the populated object as exports.Constants.
target->Set(NanNew("Constants"), obj);
}
// Keep the helper macro local to this translation unit.
#undef CONST

6
src/Constants.h Normal file
View File

@ -0,0 +1,6 @@
// Constants.h — declaration for the OpenCV constant-export helper
// (implementation in Constants.cc).
// NOTE(review): no include guard / #pragma once is visible here — confirm.
#include "OpenCV.h"
class Constants: public node::ObjectWrap {
public:
// Attach the `Constants` object (CV_* integer values) to `target`.
static void Init(Handle<Object> target);
};

View File

@ -1,5 +1,6 @@
#include "Contours.h"
#include "OpenCV.h"
#include <nan.h>
#include <iostream>
@ -8,58 +9,54 @@ v8::Persistent<FunctionTemplate> Contour::constructor;
void
Contour::Init(Handle<Object> target) {
HandleScope scope;
NanScope();
//Class
v8::Local<v8::FunctionTemplate> m = v8::FunctionTemplate::New(New);
m->SetClassName(v8::String::NewSymbol("Contours"));
// Constructor
constructor = Persistent<FunctionTemplate>::New(m);
constructor->InstanceTemplate()->SetInternalFieldCount(1);
constructor->SetClassName(String::NewSymbol("Contours"));
//Class/contructor
Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(Contour::New);
NanAssignPersistent(constructor, ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(NanNew("Contours"));
// Prototype
//Local<ObjectTemplate> proto = constructor->PrototypeTemplate();
NODE_SET_PROTOTYPE_METHOD(constructor, "point", Point);
NODE_SET_PROTOTYPE_METHOD(constructor, "size", Size);
NODE_SET_PROTOTYPE_METHOD(constructor, "cornerCount", CornerCount);
NODE_SET_PROTOTYPE_METHOD(constructor, "area", Area);
NODE_SET_PROTOTYPE_METHOD(constructor, "arcLength", ArcLength);
NODE_SET_PROTOTYPE_METHOD(constructor, "approxPolyDP", ApproxPolyDP);
NODE_SET_PROTOTYPE_METHOD(constructor, "convexHull", ConvexHull);
NODE_SET_PROTOTYPE_METHOD(constructor, "boundingRect", BoundingRect);
NODE_SET_PROTOTYPE_METHOD(constructor, "minAreaRect", BoundingRect);
NODE_SET_PROTOTYPE_METHOD(constructor, "isConvex", IsConvex);
NODE_SET_PROTOTYPE_METHOD(constructor, "moments", Moments);
target->Set(String::NewSymbol("Contours"), m->GetFunction());
NODE_SET_PROTOTYPE_METHOD(ctor, "point", Point);
NODE_SET_PROTOTYPE_METHOD(ctor, "size", Size);
NODE_SET_PROTOTYPE_METHOD(ctor, "cornerCount", CornerCount);
NODE_SET_PROTOTYPE_METHOD(ctor, "area", Area);
NODE_SET_PROTOTYPE_METHOD(ctor, "arcLength", ArcLength);
NODE_SET_PROTOTYPE_METHOD(ctor, "approxPolyDP", ApproxPolyDP);
NODE_SET_PROTOTYPE_METHOD(ctor, "convexHull", ConvexHull);
NODE_SET_PROTOTYPE_METHOD(ctor, "boundingRect", BoundingRect);
NODE_SET_PROTOTYPE_METHOD(ctor, "minAreaRect", MinAreaRect);
NODE_SET_PROTOTYPE_METHOD(ctor, "isConvex", IsConvex);
NODE_SET_PROTOTYPE_METHOD(ctor, "moments", Moments);
NODE_SET_PROTOTYPE_METHOD(ctor, "hierarchy", Hierarchy);
NODE_SET_PROTOTYPE_METHOD(ctor, "serialize", Serialize);
NODE_SET_PROTOTYPE_METHOD(ctor, "deserialize", Deserialize);
target->Set(NanNew("Contours"), ctor->GetFunction());
};
Handle<Value>
Contour::New(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::New) {
NanScope();
if (args.This()->InternalFieldCount() == 0)
return v8::ThrowException(v8::Exception::TypeError(v8::String::New("Cannot instantiate without new")));
NanThrowTypeError("Cannot instantiate without new");
Contour *contours;
contours = new Contour;
contours->Wrap(args.Holder());
return scope.Close(args.Holder());
NanReturnValue(args.Holder());
}
Contour::Contour(): ObjectWrap() {
}
Handle<Value>
Contour::Point(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::Point) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
int pos = args[0]->NumberValue();
@ -67,62 +64,80 @@ Contour::Point(const Arguments &args) {
cv::Point point = self->contours[pos][index];
Local<Object> data = Object::New();
data->Set(String::NewSymbol("x"), Number::New(point.x));
data->Set(String::NewSymbol("y"), Number::New(point.y));
Local<Object> data = NanNew<Object>();
data->Set(NanNew("x"), NanNew<Number>(point.x));
data->Set(NanNew("y"), NanNew<Number>(point.y));
return scope.Close(data);
NanReturnValue(data);
}
NAN_METHOD(Contour::Points) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
int pos = args[0]->NumberValue();
vector<cv::Point> points = self->contours[pos];
Local<Array> data = NanNew<Array>(points.size());
for (std::vector<int>::size_type i = 0; i != points.size(); i++) {
Local<Object> point_data = NanNew<Object>();
point_data->Set(NanNew<String>("x"), NanNew<Number>(points[i].x));
point_data->Set(NanNew<String>("y"), NanNew<Number>(points[i].y));
data->Set(i, point_data);
}
NanReturnValue(data);
}
// FIXME: this sould better be called "Length" as ``Contours`` is an Array like structure
// also, this would allow to use ``Size`` for the function returning the number of corners
// in the contour for better consistency with OpenCV.
Handle<Value>
Contour::Size(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::Size) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
return scope.Close(Number::New(self->contours.size()));
NanReturnValue(NanNew<Number>(self->contours.size()));
}
Handle<Value>
Contour::CornerCount(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::CornerCount) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
int pos = args[0]->NumberValue();
return scope.Close(Number::New(self->contours[pos].size()));
NanReturnValue(NanNew<Number>(self->contours[pos].size()));
}
Handle<Value>
Contour::Area(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::Area) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
int pos = args[0]->NumberValue();
//return scope.Close(Number::New(contourArea(self->contours)));
return scope.Close(Number::New(contourArea(cv::Mat(self->contours[pos]))));
//NanReturnValue(NanNew<Number>(contourArea(self->contours)));
NanReturnValue(NanNew<Number>(contourArea(cv::Mat(self->contours[pos]))));
}
Handle<Value>
Contour::ArcLength(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::ArcLength) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
int pos = args[0]->NumberValue();
bool isClosed = args[1]->BooleanValue();
return scope.Close(Number::New(arcLength(cv::Mat(self->contours[pos]), isClosed)));
NanReturnValue(NanNew<Number>(arcLength(cv::Mat(self->contours[pos]), isClosed)));
}
Handle<Value>
Contour::ApproxPolyDP(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::ApproxPolyDP) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
int pos = args[0]->NumberValue();
@ -133,13 +148,12 @@ Contour::ApproxPolyDP(const Arguments &args) {
approxPolyDP(cv::Mat(self->contours[pos]), approxed, epsilon, isClosed);
approxed.copyTo(self->contours[pos]);
return scope.Close(v8::Null());
NanReturnNull();
}
Handle<Value>
Contour::ConvexHull(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::ConvexHull) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
@ -150,81 +164,77 @@ Contour::ConvexHull(const Arguments &args) {
cv::convexHull(cv::Mat(self->contours[pos]), hull, clockwise);
hull.copyTo(self->contours[pos]);
return scope.Close(v8::Null());
NanReturnNull();
}
Handle<Value>
Contour::BoundingRect(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::BoundingRect) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
int pos = args[0]->NumberValue();
cv::Rect bounding = cv::boundingRect(cv::Mat(self->contours[pos]));
Local<Object> rect = Object::New();
Local<Object> rect = NanNew<Object>();
rect->Set(String::NewSymbol("x"), Number::New(bounding.x));
rect->Set(String::NewSymbol("y"), Number::New(bounding.y));
rect->Set(String::NewSymbol("width"), Number::New(bounding.width));
rect->Set(String::NewSymbol("height"), Number::New(bounding.height));
rect->Set(NanNew("x"), NanNew<Number>(bounding.x));
rect->Set(NanNew("y"), NanNew<Number>(bounding.y));
rect->Set(NanNew("width"), NanNew<Number>(bounding.width));
rect->Set(NanNew("height"), NanNew<Number>(bounding.height));
return scope.Close(rect);
NanReturnValue(rect);
}
Handle<Value>
Contour::MinAreaRect(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::MinAreaRect) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
int pos = args[0]->NumberValue();
cv::RotatedRect minimum = cv::minAreaRect(cv::Mat(self->contours[pos]));
Local<Object> rect = Object::New();
rect->Set(String::NewSymbol("angle"), Number::New(minimum.angle));
Local<Object> rect = NanNew<Object>();
rect->Set(NanNew("angle"), NanNew<Number>(minimum.angle));
Local<Object> size = Object::New();
size->Set(String::NewSymbol("height"), Number::New(minimum.size.height));
size->Set(String::NewSymbol("width"), Number::New(minimum.size.width));
rect->Set(String::NewSymbol("size"), size);
Local<Object> size = NanNew<Object>();
size->Set(NanNew("height"), NanNew<Number>(minimum.size.height));
size->Set(NanNew("width"), NanNew<Number>(minimum.size.width));
rect->Set(NanNew("size"), size);
Local<Object> center = Object::New();
center->Set(String::NewSymbol("x"), Number::New(minimum.center.x));
center->Set(String::NewSymbol("y"), Number::New(minimum.center.y));
Local<Object> center = NanNew<Object>();
center->Set(NanNew("x"), NanNew<Number>(minimum.center.x));
center->Set(NanNew("y"), NanNew<Number>(minimum.center.y));
v8::Local<v8::Array> points = v8::Array::New(4);
v8::Local<v8::Array> points = NanNew<Array>(4);
cv::Point2f rect_points[4];
minimum.points(rect_points);
for (unsigned int i=0; i<4; i++){
Local<Object> point = Object::New();
point->Set(String::NewSymbol("x"), Number::New(rect_points[i].x));
point->Set(String::NewSymbol("y"), Number::New(rect_points[i].y));
Local<Object> point = NanNew<Object>();
point->Set(NanNew("x"), NanNew<Number>(rect_points[i].x));
point->Set(NanNew("y"), NanNew<Number>(rect_points[i].y));
points->Set(i, point);
}
rect->Set(String::NewSymbol("points"), points);
rect->Set(NanNew("points"), points);
return scope.Close(rect);
NanReturnValue(rect);
}
Handle<Value>
Contour::IsConvex(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::IsConvex) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
int pos = args[0]->NumberValue();
return scope.Close(Boolean::New(isContourConvex(cv::Mat(self->contours[pos]))));
NanReturnValue(NanNew<Boolean>(isContourConvex(cv::Mat(self->contours[pos]))));
}
Handle<Value>
Contour::Moments(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Contour::Moments) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
int pos = args[0]->NumberValue();
@ -232,13 +242,116 @@ Contour::Moments(const Arguments &args) {
/// Get the moments
cv::Moments mu = moments( self->contours[pos], false );
Local<Object> res = Object::New();
Local<Object> res = NanNew<Object>();
res->Set(String::NewSymbol("m00"), Number::New(mu.m00));
res->Set(String::NewSymbol("m10"), Number::New(mu.m10));
res->Set(String::NewSymbol("m01"), Number::New(mu.m01));
res->Set(String::NewSymbol("m11"), Number::New(mu.m11));
res->Set(NanNew("m00"), NanNew<Number>(mu.m00));
res->Set(NanNew("m10"), NanNew<Number>(mu.m10));
res->Set(NanNew("m01"), NanNew<Number>(mu.m01));
res->Set(NanNew("m11"), NanNew<Number>(mu.m11));
return scope.Close(res);
NanReturnValue(res);
}
NAN_METHOD(Contour::Hierarchy) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
int pos = args[0]->IntegerValue();
cv::Vec4i hierarchy = self->hierarchy[pos];
Local<Array> res = NanNew<Array>(4);
res->Set(0, NanNew<Number>(hierarchy[0]));
res->Set(1, NanNew<Number>(hierarchy[1]));
res->Set(2, NanNew<Number>(hierarchy[2]));
res->Set(3, NanNew<Number>(hierarchy[3]));
NanReturnValue(res);
}
NAN_METHOD(Contour::Serialize) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
Local<Array> contours_data = NanNew<Array>(self->contours.size());
for (std::vector<int>::size_type i = 0; i != self->contours.size(); i++) {
vector<cv::Point> points = self->contours[i];
Local<Array> contour_data = NanNew<Array>(points.size());
for (std::vector<int>::size_type j = 0; j != points.size(); j++) {
Local<Array> point_data = NanNew<Array>(2);
point_data->Set(0, NanNew<Number>(points[j].x));
point_data->Set(1, NanNew<Number>(points[j].y));
contour_data->Set(j, point_data);
}
contours_data->Set(i, contour_data);
}
Local<Array> hierarchy_data = NanNew<Array>(self->hierarchy.size());
for (std::vector<int>::size_type i = 0; i != self->hierarchy.size(); i++) {
Local<Array> contour_data = NanNew<Array>(4);
contour_data->Set(0, NanNew<Number>(self->hierarchy[i][0]));
contour_data->Set(1, NanNew<Number>(self->hierarchy[i][1]));
contour_data->Set(2, NanNew<Number>(self->hierarchy[i][2]));
contour_data->Set(3, NanNew<Number>(self->hierarchy[i][3]));
hierarchy_data->Set(i, contour_data);
}
Local<Object> data = NanNew<Object>();
data->Set(NanNew<String>("contours"), contours_data);
data->Set(NanNew<String>("hierarchy"), hierarchy_data);
NanReturnValue(data);
}
NAN_METHOD(Contour::Deserialize) {
NanScope();
Contour *self = ObjectWrap::Unwrap<Contour>(args.This());
Handle<Object> data = Handle<Object>::Cast(args[0]);
Handle<Array> contours_data = Handle<Array>::Cast(data->Get(NanNew<String>("contours")));
Handle<Array> hierarchy_data = Handle<Array>::Cast(data->Get(NanNew<String>("hierarchy")));
vector<vector<cv::Point> > contours_res;
int contours_length = contours_data->Length();
for (int i = 0; i < contours_length; i++) {
Handle<Array> contour_data = Handle<Array>::Cast(contours_data->Get(i));
vector<cv::Point> points;
int contour_length = contour_data->Length();
for (int j = 0; j < contour_length; j++) {
Handle<Array> point_data = Handle<Array>::Cast(contour_data->Get(j));
int x = point_data->Get(0)->IntegerValue();
int y = point_data->Get(1)->IntegerValue();
points.push_back(cv::Point(x, y));
}
contours_res.push_back(points);
}
vector<cv::Vec4i> hierarchy_res;
int hierarchy_length = hierarchy_data->Length();
for (int i = 0; i < hierarchy_length; i++) {
Handle<Array> contour_data = Handle<Array>::Cast(hierarchy_data->Get(i));
int a = contour_data->Get(0)->IntegerValue();
int b = contour_data->Get(1)->IntegerValue();
int c = contour_data->Get(2)->IntegerValue();
int d = contour_data->Get(3)->IntegerValue();
hierarchy_res.push_back(cv::Vec4i(a, b, c, d));
}
self->contours = contours_res;
self->hierarchy = hierarchy_res;
NanReturnNull();
}

View File

@ -7,23 +7,28 @@ class Contour: public node::ObjectWrap {
cv::Mat mat;
vector<vector<cv::Point> > contours;
static Persistent<FunctionTemplate> constructor;
vector<cv::Vec4i> hierarchy;
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
static Handle<Value> New(const Arguments &args);
static NAN_METHOD(New);
Contour();
//JSFUNC(Size)
static Handle<Value> Point(const v8::Arguments&);
static Handle<Value> Size(const v8::Arguments&);
static Handle<Value> CornerCount(const v8::Arguments&);
static Handle<Value> Area(const v8::Arguments&);
static Handle<Value> ArcLength(const v8::Arguments&);
static Handle<Value> ApproxPolyDP(const v8::Arguments&);
static Handle<Value> ConvexHull(const v8::Arguments&);
static Handle<Value> BoundingRect(const v8::Arguments&);
static Handle<Value> MinAreaRect(const v8::Arguments&);
static Handle<Value> IsConvex(const v8::Arguments&);
static Handle<Value> Moments(const v8::Arguments&);
JSFUNC(Point)
JSFUNC(Points)
JSFUNC(Size)
JSFUNC(CornerCount)
JSFUNC(Area)
JSFUNC(ArcLength)
JSFUNC(ApproxPolyDP)
JSFUNC(ConvexHull)
JSFUNC(BoundingRect)
JSFUNC(MinAreaRect)
JSFUNC(IsConvex)
JSFUNC(Moments)
JSFUNC(Hierarchy)
JSFUNC(Serialize)
JSFUNC(Deserialize)
};

View File

@ -4,6 +4,7 @@
#if CV_MAJOR_VERSION >= 2 && CV_MINOR_VERSION >=4
#include "Matrix.h"
#include <nan.h>
#define EIGEN 0
#define LBPH 1
@ -13,7 +14,7 @@
cv::Mat fromMatrixOrFilename(Local<Value> v){
cv::Mat im;
if (v->IsString()){
std::string filename = std::string(*v8::String::AsciiValue(v->ToString()));
std::string filename = std::string(*NanAsciiString(v->ToString()));
im = cv::imread(filename);
//std::cout<< im.size();
} else {
@ -31,31 +32,31 @@ Persistent<FunctionTemplate> FaceRecognizerWrap::constructor;
void
FaceRecognizerWrap::Init(Handle<Object> target) {
HandleScope scope;
NanScope();
// Constructor
constructor = Persistent<FunctionTemplate>::New(FunctionTemplate::New(FaceRecognizerWrap::New));
constructor->InstanceTemplate()->SetInternalFieldCount(1);
constructor->SetClassName(String::NewSymbol("FaceRecognizer"));
Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(FaceRecognizerWrap::New);
NanAssignPersistent(constructor, ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(NanNew("FaceRecognizer"));
NODE_SET_METHOD(constructor, "createLBPHFaceRecognizer", CreateLBPH);
NODE_SET_METHOD(constructor, "createEigenFaceRecognizer", CreateEigen);
NODE_SET_METHOD(constructor, "createFisherFaceRecognizer", CreateFisher);
NODE_SET_METHOD(ctor, "createLBPHFaceRecognizer", CreateLBPH);
NODE_SET_METHOD(ctor, "createEigenFaceRecognizer", CreateEigen);
NODE_SET_METHOD(ctor, "createFisherFaceRecognizer", CreateFisher);
NODE_SET_PROTOTYPE_METHOD(constructor, "trainSync", TrainSync);
NODE_SET_PROTOTYPE_METHOD(constructor, "updateSync", UpdateSync);
NODE_SET_PROTOTYPE_METHOD(constructor, "predictSync", PredictSync);
NODE_SET_PROTOTYPE_METHOD(constructor, "saveSync", SaveSync);
NODE_SET_PROTOTYPE_METHOD(constructor, "loadSync", LoadSync);
NODE_SET_PROTOTYPE_METHOD(ctor, "trainSync", TrainSync);
NODE_SET_PROTOTYPE_METHOD(ctor, "updateSync", UpdateSync);
NODE_SET_PROTOTYPE_METHOD(ctor, "predictSync", PredictSync);
NODE_SET_PROTOTYPE_METHOD(ctor, "saveSync", SaveSync);
NODE_SET_PROTOTYPE_METHOD(ctor, "loadSync", LoadSync);
NODE_SET_PROTOTYPE_METHOD(constructor, "getMat", GetMat);
NODE_SET_PROTOTYPE_METHOD(ctor, "getMat", GetMat);
target->Set(String::NewSymbol("FaceRecognizer"), constructor->GetFunction());
target->Set(NanNew("FaceRecognizer"), ctor->GetFunction());
};
Handle<Value>
FaceRecognizerWrap::New(const Arguments &args) {
HandleScope scope;
NAN_METHOD(FaceRecognizerWrap::New) {
NanScope();
if (args.This()->InternalFieldCount() == 0)
JSTHROW_TYPE("Cannot Instantiate without new")
@ -65,12 +66,11 @@ FaceRecognizerWrap::New(const Arguments &args) {
FaceRecognizerWrap *pt = new FaceRecognizerWrap(f, LBPH);
pt->Wrap(args.This());
return args.This();
NanReturnValue(args.This());
}
Handle<Value>
FaceRecognizerWrap::CreateLBPH(const Arguments &args) {
HandleScope scope;
NAN_METHOD(FaceRecognizerWrap::CreateLBPH) {
NanScope();
int radius = 1;
int neighbors = 8;
@ -84,7 +84,7 @@ FaceRecognizerWrap::CreateLBPH(const Arguments &args) {
INT_FROM_ARGS(grid_y, 3)
DOUBLE_FROM_ARGS(threshold, 4)
Local<Object> n = FaceRecognizerWrap::constructor->GetFunction()->NewInstance();
Local<Object> n = NanNew(FaceRecognizerWrap::constructor)->GetFunction()->NewInstance();
cv::Ptr<cv::FaceRecognizer> f = cv::createLBPHFaceRecognizer(
radius, neighbors, grid_x, grid_y, threshold
@ -92,12 +92,11 @@ FaceRecognizerWrap::CreateLBPH(const Arguments &args) {
FaceRecognizerWrap *pt = new FaceRecognizerWrap(f, LBPH);
pt->Wrap(n);
return n;
NanReturnValue( n );
}
Handle<Value>
FaceRecognizerWrap::CreateEigen(const Arguments &args) {
HandleScope scope;
NAN_METHOD(FaceRecognizerWrap::CreateEigen) {
NanScope();
int components = 0;
double threshold = DBL_MAX;
@ -105,7 +104,7 @@ FaceRecognizerWrap::CreateEigen(const Arguments &args) {
INT_FROM_ARGS(components, 0)
DOUBLE_FROM_ARGS(threshold, 1)
Local<Object> n = FaceRecognizerWrap::constructor->GetFunction()->NewInstance();
Local<Object> n = NanNew(FaceRecognizerWrap::constructor)->GetFunction()->NewInstance();
cv::Ptr<cv::FaceRecognizer> f = cv::createEigenFaceRecognizer(
components, threshold
@ -113,12 +112,11 @@ FaceRecognizerWrap::CreateEigen(const Arguments &args) {
FaceRecognizerWrap *pt = new FaceRecognizerWrap(f, EIGEN);
pt->Wrap(n);
return n;
NanReturnValue( n );
}
Handle<Value>
FaceRecognizerWrap::CreateFisher(const Arguments &args) {
HandleScope scope;
NAN_METHOD(FaceRecognizerWrap::CreateFisher) {
NanScope();
int components = 0;
double threshold = DBL_MAX;
@ -126,7 +124,7 @@ FaceRecognizerWrap::CreateFisher(const Arguments &args) {
INT_FROM_ARGS(components, 0)
DOUBLE_FROM_ARGS(threshold, 1)
Local<Object> n = FaceRecognizerWrap::constructor->GetFunction()->NewInstance();
Local<Object> n = NanNew(FaceRecognizerWrap::constructor)->GetFunction()->NewInstance();
cv::Ptr<cv::FaceRecognizer> f = cv::createFisherFaceRecognizer(
components, threshold
@ -134,7 +132,7 @@ FaceRecognizerWrap::CreateFisher(const Arguments &args) {
FaceRecognizerWrap *pt = new FaceRecognizerWrap(f, FISHER);
pt->Wrap(n);
return n;
NanReturnValue( n );
}
@ -144,15 +142,20 @@ FaceRecognizerWrap::FaceRecognizerWrap(cv::Ptr<cv::FaceRecognizer> f, int type){
}
Handle<Value> UnwrapTrainingData(const Arguments& args, cv::vector<cv::Mat>* images, cv::vector<int>* labels){
Handle<Value> UnwrapTrainingData(_NAN_METHOD_ARGS_TYPE args, cv::vector<cv::Mat>* images, cv::vector<int>* labels){
if (args.Length() < 1 || !args[0]->IsArray()){
JSTHROW("FaceRecognizer.train takes a list of [<int> label, image] tuples")
}
// Iterate through [[label, image], ...] etc, and add matrix / label to vectors
const Local<Array> tuples = v8::Array::Cast(*args[0]);
//const
//Local<Array> tuples = v8::Array::Cast(*args[0]);
const Local<Array> tuples = Local<Array>::Cast(args[0]);
const uint32_t length = tuples->Length();
for (uint32_t i=0 ; i<length ; ++i){
const Local<Value> val = tuples->Get(i);
@ -161,7 +164,7 @@ Handle<Value> UnwrapTrainingData(const Arguments& args, cv::vector<cv::Mat>* ima
JSTHROW("train takes a list of [label, image] tuples")
}
Local<Array> valarr = v8::Array::Cast(*val);
Local<Array> valarr = Local<Array>::Cast(val);
if (valarr->Length() != 2 || !valarr->Get(0)->IsInt32()){
JSTHROW("train takes a list of [label, image] tuples")
@ -174,11 +177,11 @@ Handle<Value> UnwrapTrainingData(const Arguments& args, cv::vector<cv::Mat>* ima
labels->push_back(label);
images->push_back(im);
}
return v8::Undefined();
return NanUndefined();
}
Handle<Value>
FaceRecognizerWrap::TrainSync(const Arguments& args){
NAN_METHOD(FaceRecognizerWrap::TrainSync){
SETUP_FUNCTION(FaceRecognizerWrap)
cv::vector<cv::Mat> images;
@ -186,16 +189,15 @@ FaceRecognizerWrap::TrainSync(const Arguments& args){
Handle<Value> exception = UnwrapTrainingData(args, &images, &labels);
if (!exception->IsUndefined()){
return exception;
NanReturnValue(exception);//FIXME: not too sure about returning exceptions like this
}
self->rec->train(images, labels);
return scope.Close(v8::Undefined());
NanReturnUndefined();
}
Handle<Value>
FaceRecognizerWrap::UpdateSync(const Arguments& args){
NAN_METHOD(FaceRecognizerWrap::UpdateSync){
SETUP_FUNCTION(FaceRecognizerWrap)
@ -212,17 +214,16 @@ FaceRecognizerWrap::UpdateSync(const Arguments& args){
Handle<Value> exception = UnwrapTrainingData(args, &images, &labels);
if (!exception->IsUndefined()){
return exception;
JSTHROW( exception );
}
self->rec->update(images, labels);
return scope.Close(v8::Undefined());
NanReturnUndefined();
}
Handle<Value>
FaceRecognizerWrap::PredictSync(const Arguments& args){
NAN_METHOD(FaceRecognizerWrap::PredictSync){
SETUP_FUNCTION(FaceRecognizerWrap)
cv::Mat im = fromMatrixOrFilename(args[0]);//TODO CHECK!
@ -233,50 +234,47 @@ FaceRecognizerWrap::PredictSync(const Arguments& args){
double confidence = 0.0;
self->rec->predict(im, predictedLabel, confidence);
v8::Local<v8::Object> res = v8::Object::New();
res->Set(v8::String::New("id"), v8::Number::New(predictedLabel));
res->Set(v8::String::New("confidence"), v8::Number::New(confidence));
v8::Local<v8::Object> res = NanNew<Object>();
res->Set(NanNew("id"), NanNew<Number>(predictedLabel));
res->Set(NanNew("confidence"), NanNew<Number>(confidence));
return scope.Close(res);
NanReturnValue(res);
}
Handle<Value>
FaceRecognizerWrap::SaveSync(const Arguments& args){
NAN_METHOD(FaceRecognizerWrap::SaveSync){
SETUP_FUNCTION(FaceRecognizerWrap)
if (!args[0]->IsString()){
JSTHROW("Save takes a filename")
}
std::string filename = std::string(*v8::String::AsciiValue(args[0]->ToString()));
std::string filename = std::string(*NanAsciiString(args[0]->ToString()));
self->rec->save(filename);
return v8::Undefined();
NanReturnUndefined();
}
Handle<Value>
FaceRecognizerWrap::LoadSync(const Arguments& args){
NAN_METHOD(FaceRecognizerWrap::LoadSync){
SETUP_FUNCTION(FaceRecognizerWrap)
if (!args[0]->IsString()){
JSTHROW("Load takes a filename")
}
std::string filename = std::string(*v8::String::AsciiValue(args[0]->ToString()));
std::string filename = std::string(*NanAsciiString(args[0]->ToString()));
self->rec->load(filename);
return v8::Undefined();
NanReturnUndefined();
}
Handle<Value>
FaceRecognizerWrap::GetMat(const Arguments& args){
NAN_METHOD(FaceRecognizerWrap::GetMat){
SETUP_FUNCTION(FaceRecognizerWrap)
if (!args[0]->IsString()){
JSTHROW("getMat takes a key")
}
std::string key = std::string(*v8::String::AsciiValue(args[0]->ToString()));
std::string key = std::string(*NanAsciiString(args[0]->ToString()));
cv::Mat m = self->rec->getMat(key);
Local<Object> im = Matrix::constructor->GetFunction()->NewInstance();
Local<Object> im = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *img = ObjectWrap::Unwrap<Matrix>(im);
img->mat = m;
return im;
NanReturnValue( im );
}

View File

@ -11,7 +11,7 @@ class FaceRecognizerWrap: public node::ObjectWrap {
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
static Handle<Value> New(const Arguments &args);
static NAN_METHOD(New);
FaceRecognizerWrap(cv::Ptr<cv::FaceRecognizer> f, int type);

View File

@ -7,26 +7,25 @@ Persistent<FunctionTemplate> NamedWindow::constructor;
void
NamedWindow::Init(Handle<Object> target) {
HandleScope scope;
NanScope();
// Constructor
constructor = Persistent<FunctionTemplate>::New(FunctionTemplate::New(NamedWindow::New));
constructor->InstanceTemplate()->SetInternalFieldCount(1);
constructor->SetClassName(String::NewSymbol("NamedWindow"));
Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(NamedWindow::New);
NanAssignPersistent(constructor, ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(NanNew("NamedWindow"));
// Prototype
//Local<ObjectTemplate> proto = constructor->PrototypeTemplate();
NODE_SET_PROTOTYPE_METHOD(constructor, "show", Show);
NODE_SET_PROTOTYPE_METHOD(constructor, "destroy", Destroy);
NODE_SET_PROTOTYPE_METHOD(constructor, "blockingWaitKey", BlockingWaitKey);
target->Set(String::NewSymbol("NamedWindow"), constructor->GetFunction());
NODE_SET_PROTOTYPE_METHOD(ctor, "show", Show);
NODE_SET_PROTOTYPE_METHOD(ctor, "destroy", Destroy);
NODE_SET_PROTOTYPE_METHOD(ctor, "blockingWaitKey", BlockingWaitKey);
target->Set(NanNew("NamedWindow"), ctor->GetFunction());
};
Handle<Value>
NamedWindow::New(const Arguments &args) {
HandleScope scope;
NAN_METHOD(NamedWindow::New) {
NanScope();
if (args.This()->InternalFieldCount() == 0){
JSTHROW_TYPE("Cannot Instantiate without new")
@ -34,13 +33,13 @@ NamedWindow::New(const Arguments &args) {
NamedWindow* win;
if (args.Length() == 1){
win = new NamedWindow(std::string(*v8::String::AsciiValue(args[0]->ToString())), 0);
} else if (args.Length() == 2){
win = new NamedWindow(std::string(*v8::String::AsciiValue(args[0]->ToString())), 0);
win = new NamedWindow(std::string(*NanAsciiString(args[0]->ToString())), 0);
} else { //if (args.Length() == 2){
win = new NamedWindow(std::string(*NanAsciiString(args[0]->ToString())), 0);
}
win->Wrap(args.Holder());
return scope.Close(args.Holder());
NanReturnValue(args.Holder());
}
@ -51,26 +50,23 @@ NamedWindow::NamedWindow(const std::string& name, int f){
}
Handle<Value>
NamedWindow::Show(const v8::Arguments& args){
NAN_METHOD(NamedWindow::Show){
SETUP_FUNCTION(NamedWindow)
Matrix *im = ObjectWrap::Unwrap<Matrix>(args[0]->ToObject());
cv::imshow(self->winname, im->mat);
return scope.Close(args.Holder());
NanReturnValue(args.Holder());
}
Handle<Value>
NamedWindow::Destroy(const v8::Arguments& args){
NAN_METHOD(NamedWindow::Destroy){
SETUP_FUNCTION(NamedWindow)
cv::destroyWindow(self->winname);
return scope.Close(args.Holder());
NanReturnValue(args.Holder());
}
Handle<Value>
NamedWindow::BlockingWaitKey(const v8::Arguments& args){
HandleScope scope;
NAN_METHOD(NamedWindow::BlockingWaitKey){
NanScope();
//SETUP_FUNCTION(NamedWindow)
int time = 0;
@ -84,5 +80,5 @@ NamedWindow::BlockingWaitKey(const v8::Arguments& args){
int res = cv::waitKey(time);
return scope.Close(Number::New(res));
NanReturnValue(NanNew<Number>(res));
}

View File

@ -6,10 +6,9 @@ class NamedWindow: public node::ObjectWrap {
std::string winname;
int flags;
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
static Handle<Value> New(const Arguments &args);
static NAN_METHOD(New);
NamedWindow(const std::string& winname, int flags);

File diff suppressed because it is too large Load Diff

View File

@ -3,17 +3,19 @@
class Matrix: public node::ObjectWrap {
public:
cv::Mat mat;
cv::Mat mat;
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
static Handle<Value> New(const Arguments &args);
static NAN_METHOD(New);
Matrix();
Matrix(cv::Mat other, cv::Rect roi);
Matrix(int rows, int cols);
Matrix(int rows, int cols, int typ);
Matrix(int rows, int cols, int type);
static double DblGet(cv::Mat mat, int i, int j);
JSFUNC(Zeros) // factory
JSFUNC(Ones) // factory
JSFUNC(Eye) // factory
JSFUNC(Get) // at
@ -48,6 +50,8 @@ class Matrix: public node::ObjectWrap {
JSFUNC(ConvertGrayscale)
JSFUNC(ConvertHSVscale)
JSFUNC(GaussianBlur)
JSFUNC(MedianBlur)
JSFUNC(BilateralFilter)
JSFUNC(Copy)
JSFUNC(Flip)
JSFUNC(ROI)
@ -55,6 +59,8 @@ class Matrix: public node::ObjectWrap {
JSFUNC(AbsDiff)
JSFUNC(AddWeighted)
JSFUNC(BitwiseXor)
JSFUNC(BitwiseNot)
JSFUNC(BitwiseAnd)
JSFUNC(CountNonZero)
//JSFUNC(Split)
JSFUNC(Canny)
@ -69,12 +75,15 @@ class Matrix: public node::ObjectWrap {
JSFUNC(GoodFeaturesToTrack)
JSFUNC(HoughLinesP)
JSFUNC(Crop)
JSFUNC(inRange)
JSFUNC(LocateROI)
JSFUNC(AdjustROI)
JSFUNC(Threshold)
JSFUNC(AdaptiveThreshold)
JSFUNC(MeanStdDev)
JSFUNC(CopyTo)
@ -84,14 +93,28 @@ class Matrix: public node::ObjectWrap {
JSFUNC(EqualizeHist)
JSFUNC(Pixel)
JSFUNC(FloodFill)
JSFUNC(MatchTemplate)
JSFUNC(MinMaxLoc)
JSFUNC(PushBack)
JSFUNC(PutText)
JSFUNC(GetPerspectiveTransform)
JSFUNC(WarpPerspective)
JSFUNC(CopyWithMask)
JSFUNC(SetWithMask)
JSFUNC(MeanWithMask)
JSFUNC(Shift)
/*
static Handle<Value> Val(const Arguments& args);
static Handle<Value> RowRange(const Arguments& args);
static Handle<Value> ColRange(const Arguments& args);
static Handle<Value> Diag(const Arguments& args);
static Handle<Value> Clone(const Arguments& args);
static Handle<Value> CopyTo(const Arguments& args);
static Handle<Value> ConvertTo(const Arguments& args);
static Handle<Value> Val(const Arguments& args);
static Handle<Value> RowRange(const Arguments& args);
static Handle<Value> ColRange(const Arguments& args);
static Handle<Value> Diag(const Arguments& args);
static Handle<Value> Clone(const Arguments& args);
static Handle<Value> CopyTo(const Arguments& args);
static Handle<Value> ConvertTo(const Arguments& args);
static Handle<Value> AssignTo(const Arguments& args);
static Handle<Value> SetTo(const Arguments& args);
static Handle<Value> Reshape(const Arguments& args);
@ -111,9 +134,9 @@ class Matrix: public node::ObjectWrap {
static Handle<Value> Depth(const Arguments& args);
static Handle<Value> Channels(const Arguments& args);
static Handle<Value> StepOne(const Arguments& args);
static Handle<Value> GetPerspectiveTransform(const Arguments& args);
static Handle<Value> WarpPerspective(const Arguments& args);
*/
};

View File

@ -1,30 +1,29 @@
#include "OpenCV.h"
#include "Matrix.h"
#include <nan.h>
void
OpenCV::Init(Handle<Object> target) {
HandleScope scope;
NanScope();
// Version string.
char out [21];
int n = sprintf(out, "%i.%i", CV_MAJOR_VERSION, CV_MINOR_VERSION);
target->Set(String::NewSymbol("version"), String::New(out, n));
target->Set(NanNew<String>("version"), NanNew<String>(out, n));
NODE_SET_METHOD(target, "readImage", ReadImage);
}
Handle<Value>
OpenCV::ReadImage(const Arguments &args) {
HandleScope scope;
NAN_METHOD(OpenCV::ReadImage) {
NanEscapableScope();
try{
Local<Object> im_h = Matrix::constructor->GetFunction()->NewInstance();
Local<Object> im_h = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *img = ObjectWrap::Unwrap<Matrix>(im_h);
cv::Mat mat;
REQ_FUN_ARG(1, cb);
@ -38,7 +37,7 @@ OpenCV::ReadImage(const Arguments &args) {
} else if (args[0]->IsString()) {
std::string filename = std::string(*v8::String::AsciiValue(args[0]->ToString()));
std::string filename = std::string(*NanAsciiString(args[0]->ToString()));
mat = cv::imread(filename);
} else if (Buffer::HasInstance(args[0])){
@ -49,7 +48,7 @@ OpenCV::ReadImage(const Arguments &args) {
mat = cv::imdecode(*mbuf, -1);
if (mat.empty()){
return v8::ThrowException(v8::Exception::TypeError(v8::String::New("Error loading file")));
NanThrowTypeError("Error loading file");
}
}
@ -57,23 +56,22 @@ OpenCV::ReadImage(const Arguments &args) {
Local<Value> argv[2];
argv[0] = Local<Value>::New(Null());
argv[0] = NanNull();
argv[1] = im_h;
TryCatch try_catch;
cb->Call(Context::GetCurrent()->Global(), 2, argv);
cb->Call(NanGetCurrentContext()->Global(), 2, argv);
if (try_catch.HasCaught()) {
FatalException(try_catch);
}
return Undefined();
NanReturnUndefined();
} catch( cv::Exception& e ){
const char* err_msg = e.what();
return v8::ThrowException(v8::Exception::Error(v8::String::New(err_msg)));
NanThrowError(err_msg);
NanReturnUndefined();
}
};

View File

@ -6,32 +6,33 @@
#include <node_object_wrap.h>
#include <node_version.h>
#include <node_buffer.h>
#include <cv.h>
#include <highgui.h>
#include <opencv/cv.h>
#include <opencv/highgui.h>
#include <string.h>
#include <nan.h>
using namespace v8;
using namespace node;
#define REQ_FUN_ARG(I, VAR) \
if (args.Length() <= (I) || !args[I]->IsFunction()) \
return v8::ThrowException(v8::Exception::TypeError( \
String::New("Argument " #I " must be a function"))); \
return NanThrowTypeError("Argument " #I " must be a function"); \
Local<Function> VAR = Local<Function>::Cast(args[I]);
#define SETUP_FUNCTION(TYP) \
HandleScope scope; \
NanScope(); \
TYP *self = ObjectWrap::Unwrap<TYP>(args.This());
#define JSFUNC(NAME) \
static Handle<Value> NAME(const Arguments& args);
static NAN_METHOD(NAME);
#define JSTHROW_TYPE(ERR) \
return v8::ThrowException(v8::Exception::TypeError(v8::String::New(ERR)));
NanThrowTypeError( ERR );
#define JSTHROW(ERR) \
return v8::ThrowException(v8::Exception::Error(v8::String::New(ERR)));
NanThrowError( ERR );
#define INT_FROM_ARGS(NAME, IND) \
@ -48,10 +49,10 @@ class OpenCV: public node::ObjectWrap{
public:
static void Init(Handle<Object> target);
static Handle<Value> ReadImage(const v8::Arguments&);
static NAN_METHOD(ReadImage);
};
#endif

View File

@ -6,65 +6,63 @@ v8::Persistent<FunctionTemplate> Point::constructor;
void
Point::Init(Handle<Object> target) {
HandleScope scope;
NanScope();
// Constructor
constructor = Persistent<FunctionTemplate>::New(FunctionTemplate::New(Point::New));
constructor->InstanceTemplate()->SetInternalFieldCount(1);
constructor->SetClassName(String::NewSymbol("Point"));
// Prototype
Local<ObjectTemplate> proto = constructor->PrototypeTemplate();
proto->SetAccessor(String::NewSymbol("x"), GetX, RaiseImmutable);
proto->SetAccessor(String::NewSymbol("y"), GetY, RaiseImmutable);
Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(Point::New);
NanAssignPersistent(constructor, ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(NanNew("Point"));
NODE_SET_PROTOTYPE_METHOD(constructor, "dot", Dot);
// Prototype
Local<ObjectTemplate> proto = ctor->PrototypeTemplate();
proto->SetAccessor(NanNew("x"), GetX, RaiseImmutable);
proto->SetAccessor(NanNew("y"), GetY, RaiseImmutable);
NODE_SET_PROTOTYPE_METHOD(ctor, "dot", Dot);
target->Set(String::NewSymbol("Point"), constructor->GetFunction());
target->Set(NanNew("Point"), ctor->GetFunction());
};
Handle<Value>
Point::New(const Arguments &args) {
HandleScope scope;
NAN_METHOD(Point::New) {
NanScope();
if (args.This()->InternalFieldCount() == 0)
return v8::ThrowException(v8::Exception::TypeError(v8::String::New("Cannot Instantiate without new")));
return NanThrowTypeError("Cannot Instantiate without new");
double x = 0, y = 0;
if (args[0]->IsNumber()) x = args[0]->NumberValue();
if (args[1]->IsNumber()) y = args[1]->NumberValue();
Point *pt = new Point(x, y);
pt->Wrap(args.This());
return args.This();
NanReturnValue(args.This());
}
Handle<Value>
Point::GetX(Local<String> prop, const AccessorInfo &info) {
HandleScope scope;
Point *pt = ObjectWrap::Unwrap<Point>(info.This());
return scope.Close(Number::New(pt->point.x));
NAN_GETTER(Point::GetX){
NanScope();
Point *pt = ObjectWrap::Unwrap<Point>(args.This());
NanReturnValue(NanNew<Number>(pt->point.x));
}
Handle<Value>
Point::GetY(Local<String> prop, const AccessorInfo &info) {
HandleScope scope;
Point *pt = ObjectWrap::Unwrap<Point>(info.This());
return scope.Close(Number::New(pt->point.y));
NAN_GETTER(Point::GetY){
NanScope();
Point *pt = ObjectWrap::Unwrap<Point>(args.This());
NanReturnValue(NanNew<Number>(pt->point.y));
}
void
Point::RaiseImmutable(Local<String> property, Local<Value> value, const AccessorInfo& info) {
v8::ThrowException(v8::Exception::TypeError(v8::String::New("Point is immutable")));
}
Handle<Value>
Point::Dot(const v8::Arguments& args){
HandleScope scope;
NAN_SETTER(Point::RaiseImmutable){
NanThrowTypeError("Point is immutable");
}
NAN_METHOD(Point::Dot){
NanScope();
Point *p1 = ObjectWrap::Unwrap<Point>(args.This());
Point *p2 = ObjectWrap::Unwrap<Point>(args[0]->ToObject());
// Since V 2.3 Native Dot no longer supported
return scope.Close(Number::New(p1->point.x * p2->point.x + p1->point.y * p2->point.y));
NanReturnValue(NanNew<Number>(p1->point.x * p2->point.x + p1->point.y * p2->point.y));
}

View File

@ -7,13 +7,13 @@ class Point: public node::ObjectWrap {
CvPoint2D32f point;
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
static Handle<Value> New(const Arguments &args);
static NAN_METHOD(New);
Point(double x, double y);
static Handle<Value> GetX(Local<String> prop, const AccessorInfo &info);
static Handle<Value> GetY(Local<String> prop, const AccessorInfo &info);
static void RaiseImmutable(Local<String> property, Local<Value> value, const AccessorInfo& info);
static Handle<Value> Dot(const v8::Arguments&);
static NAN_GETTER(GetX);
static NAN_GETTER(GetY);
static NAN_SETTER(RaiseImmutable);
static NAN_METHOD(Dot);
};

View File

@ -2,9 +2,9 @@
#include "Matrix.h"
#include "OpenCV.h"
#include <iostream>
using namespace std;
void AsyncRead(uv_work_t *req);
void AfterAsyncRead(uv_work_t *req);
v8::Persistent<FunctionTemplate> VideoCaptureWrap::constructor;
@ -20,27 +20,32 @@ struct videocapture_baton {
void
VideoCaptureWrap::Init(Handle<Object> target) {
HandleScope scope;
NanScope();
// Constructor
constructor = Persistent<FunctionTemplate>::New(FunctionTemplate::New(VideoCaptureWrap::New));
constructor->InstanceTemplate()->SetInternalFieldCount(1);
constructor->SetClassName(String::NewSymbol("VideoCapture"));
// Prototype
//Class
Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(VideoCaptureWrap::New);
NanAssignPersistent(constructor, ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(NanNew("VideoCapture"));
// Prototype
//Local<ObjectTemplate> proto = constructor->PrototypeTemplate();
NODE_SET_PROTOTYPE_METHOD(constructor, "read", Read);
NODE_SET_PROTOTYPE_METHOD(ctor, "read", Read);
NODE_SET_PROTOTYPE_METHOD(ctor, "setWidth", SetWidth);
NODE_SET_PROTOTYPE_METHOD(ctor, "setHeight", SetHeight);
NODE_SET_PROTOTYPE_METHOD(ctor, "setPosition", SetPosition);
NODE_SET_PROTOTYPE_METHOD(ctor, "close", Close);
NODE_SET_PROTOTYPE_METHOD(ctor, "ReadSync", ReadSync);
target->Set(String::NewSymbol("VideoCapture"), constructor->GetFunction());
target->Set(NanNew("VideoCapture"), ctor->GetFunction());
};
Handle<Value>
VideoCaptureWrap::New(const Arguments &args) {
HandleScope scope;
NAN_METHOD(VideoCaptureWrap::New) {
NanScope();
if (args.This()->InternalFieldCount() == 0)
return v8::ThrowException(v8::Exception::TypeError(v8::String::New("Cannot Instantiate without new")));
return NanThrowTypeError("Cannot Instantiate without new");
VideoCaptureWrap *v;
@ -48,82 +53,160 @@ VideoCaptureWrap::New(const Arguments &args) {
v = new VideoCaptureWrap(args[0]->NumberValue());
} else {
//TODO - assumes that we have string, verify
v = new VideoCaptureWrap(std::string(*v8::String::AsciiValue(args[0]->ToString())));
v = new VideoCaptureWrap(std::string(*NanAsciiString(args[0]->ToString())));
}
v->Wrap(args.This());
return args.This();
NanReturnValue(args.This());
}
VideoCaptureWrap::VideoCaptureWrap(int device){
HandleScope scope;
NanScope();
cap.open(device);
if(!cap.isOpened()){
v8::ThrowException(v8::Exception::Error(String::New("Camera could not be opened")));
NanThrowError("Camera could not be opened");
}
}
VideoCaptureWrap::VideoCaptureWrap(const std::string& filename){
HandleScope scope;
NanScope();
cap.open(filename);
// TODO! At the moment this only takes a full path - do relative too.
if(!cap.isOpened()){
v8::ThrowException(v8::Exception::Error(String::New("Video file could not be opened (opencv reqs. non relative paths)")));
NanThrowError("Video file could not be opened (opencv reqs. non relative paths)");
}
}
NAN_METHOD(VideoCaptureWrap::SetWidth){
Handle<Value>
VideoCaptureWrap::Read(const Arguments &args) {
NanScope();
VideoCaptureWrap *v = ObjectWrap::Unwrap<VideoCaptureWrap>(args.This());
HandleScope scope;
if(args.Length() != 1)
NanReturnUndefined();
int w = args[0]->IntegerValue();
if(v->cap.isOpened())
v->cap.set(CV_CAP_PROP_FRAME_WIDTH, w);
NanReturnUndefined();
}
NAN_METHOD(VideoCaptureWrap::SetHeight){
NanScope();
VideoCaptureWrap *v = ObjectWrap::Unwrap<VideoCaptureWrap>(args.This());
if(args.Length() != 1)
NanReturnUndefined();
int h = args[0]->IntegerValue();
v->cap.set(CV_CAP_PROP_FRAME_HEIGHT, h);
NanReturnUndefined();
}
NAN_METHOD(VideoCaptureWrap::SetPosition){
NanScope();
VideoCaptureWrap *v = ObjectWrap::Unwrap<VideoCaptureWrap>(args.This());
if(args.Length() != 1)
NanReturnUndefined();
int pos = args[0]->IntegerValue();
v->cap.set(CV_CAP_PROP_POS_FRAMES, pos);
NanReturnUndefined();
}
NAN_METHOD(VideoCaptureWrap::Close){
NanScope();
VideoCaptureWrap *v = ObjectWrap::Unwrap<VideoCaptureWrap>(args.This());
v->cap.release();
NanReturnUndefined();
}
class AsyncVCWorker : public NanAsyncWorker {
public:
AsyncVCWorker(NanCallback *callback, VideoCaptureWrap* vc, Matrix* matrix)
: NanAsyncWorker(callback), vc(vc), matrix(matrix) {}
~AsyncVCWorker() {}
// Executed inside the worker-thread.
// It is not safe to access V8, or V8 data structures
// here, so everything we need for input and output
// should go on `this`.
void Execute () {
this->vc->cap.read(matrix->mat);
}
// Executed when the async work is complete
// this function will be run inside the main event loop
// so it is safe to use V8 again
void HandleOKCallback () {
NanScope();
Local<Object> im_to_return= NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *img = ObjectWrap::Unwrap<Matrix>(im_to_return);
cv::Mat mat;
mat = this->matrix->mat;
img->mat = mat;
Local<Value> argv[] = {
NanNull()
, im_to_return
};
TryCatch try_catch;
callback->Call(2, argv);
if (try_catch.HasCaught()) {
FatalException(try_catch);
}
}
private:
VideoCaptureWrap *vc;
Matrix* matrix;
};
NAN_METHOD(VideoCaptureWrap::Read) {
NanScope();
VideoCaptureWrap *v = ObjectWrap::Unwrap<VideoCaptureWrap>(args.This());
REQ_FUN_ARG(0, cb);
videocapture_baton *baton = new videocapture_baton();
baton->vc = v;
baton->cb = Persistent<Function>::New(cb);
baton->im = new Matrix();
baton->request.data = baton;
uv_queue_work(uv_default_loop(), &baton->request, AsyncRead, (uv_after_work_cb)AfterAsyncRead);
return Undefined();
}
void AsyncRead(uv_work_t *req) {
videocapture_baton *baton = static_cast<videocapture_baton *>(req->data);
baton->vc->cap.read(baton->im->mat);
NanCallback *callback = new NanCallback(cb.As<Function>());
NanAsyncQueueWorker(new AsyncVCWorker(callback, v, new Matrix()));
NanReturnUndefined();
}
void AfterAsyncRead(uv_work_t *req) {
NAN_METHOD(VideoCaptureWrap::ReadSync) {
HandleScope scope;
NanScope();
VideoCaptureWrap *v = ObjectWrap::Unwrap<VideoCaptureWrap>(args.This());
videocapture_baton *baton = static_cast<videocapture_baton *>(req->data);
Local<Object> im_to_return= NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *img = ObjectWrap::Unwrap<Matrix>(im_to_return);
Local<Object> im_to_return= Matrix::constructor->GetFunction()->NewInstance();
Matrix *img = ObjectWrap::Unwrap<Matrix>(im_to_return);
cv::Mat mat;
mat = baton->im->mat;
img->mat = mat;
Local<Value> argv[2];
argv[0] = Local<Value>::New(Null());
argv[1] = im_to_return;
baton->cb->Call(Context::GetCurrent()->Global(), 2, argv);
baton->cb.Dispose();
delete baton;
v->cap.read(img->mat);
NanReturnValue(im_to_return);
}

View File

@ -6,15 +6,24 @@ class VideoCaptureWrap: public node::ObjectWrap {
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
static Handle<Value> New(const Arguments &args);
static NAN_METHOD(New);
VideoCaptureWrap(const std::string& filename);
VideoCaptureWrap(int device);
static Handle<Value> Read(const v8::Arguments&);
static NAN_METHOD(Read);
static NAN_METHOD(ReadSync);
//(Optional) For setting width and height of the input video stream
static NAN_METHOD(SetWidth);
static NAN_METHOD(SetHeight);
// to set frame position
static NAN_METHOD(SetPosition);
static Handle<Value> GetFrameAt(const v8::Arguments&);
static NAN_METHOD(GetFrameAt);
//close the stream
static NAN_METHOD(Close);
};

View File

@ -1,4 +1,5 @@
#include "OpenCV.h"
#include "Point.h"
#include "Matrix.h"
#include "CascadeClassifierWrap.h"
@ -7,12 +8,14 @@
#include "CamShift.h"
#include "HighGUI.h"
#include "FaceRecognizer.h"
#include "Constants.h"
extern "C" void
init(Handle<Object> target) {
HandleScope scope;
NanScope();
OpenCV::Init(target);
Point::Init(target);
Matrix::Init(target);
CascadeClassifierWrap::Init(target);
@ -20,6 +23,8 @@ init(Handle<Object> target) {
Contour::Init(target);
TrackedObject::Init(target);
NamedWindow::Init(target);
Constants::Init(target);
#if CV_MAJOR_VERSION >= 2 && CV_MINOR_VERSION >=4
FaceRecognizerWrap::Init(target);

View File

@ -1,356 +1,277 @@
var vows = require('vows')
, assert = require('assert')
, fs = require('fs');
assertDeepSimilar = function(res, exp){
for (var i = 0; i < res.length; i++){
// res[i] = Math.round(res[i]/100)*100;
}
assert.deepEqual(res, exp)
}
assertWithinRange = function(res, exp, range){
assert.ok((res - exp) < range || (res - exp) > -range, "Not within range:" + res + " (" + exp + "+- " + range + ")")
}
assertWithinRanges = function(res, exp, range){
for (var i =0; i<res.length; i++){
assertWithinRange(res[i], exp[i], range);
}
}
vows.describe('Smoke Tests OpenCV').addBatch({
"Importing": {
topic: require('../lib/opencv')
, "returns obj": function(topic){
assert.ok(!!topic)
}
, '.version' : function(topic){
assert.ok(!!topic.version)
}
, '.Point imports': function(topic){
assert.ok(!!topic.Point)
}
, '.Matrix imports': function(topic){
assert.ok(!!topic.Matrix)
}
, 'importing library multiple times is ok' : function(){
var cv1 = require('../lib/opencv')
, cv2 = require('../lib/opencv')
cv1.readImage('./examples/mona.png', function(){});
cv2.readImage('./examples/mona.png', function(){});
}
}
, "Point" : {
topic : require('../lib/opencv')
, 'constructor' : function(cv){
assert.ok(!!new cv.Point(1, 2))
assert.throws(function () { cv.Point(1, 2)}, TypeError); // cannot call without new
}
, 'accessors' : function(cv){
assert.equal(new cv.Point(1, 2).x, 1)
assert.equal(new cv.Point(1, 2).y, 2)
assert.equal(Math.round(new cv.Point(1.1, 2).x * 100), 110)
assert.equal(Math.round(new cv.Point(1.2, 2.75).y *100), 275)
assert.throws(function () {new cv.Point(1.1, 2).x = 5}, Error); // Points are immutable
assert.throws(function () {new cv.Point(1.1, 2).y = 5}, Error); // Points are immutable
}
var fs = require('fs')
, test = require('tape')
, cv = null
, '.dot': function(cv){
var p1 = new cv.Point(3, 6)
, p2 = new cv.Point(5, 7)
assert.ok(p1.dot);
assert.equal(p1.dot(p2), 57);
}
, '.inside' : function(){}
}
test("Smoke tests / Can Import", function(t){
cv = require('../lib/opencv')
t.ok(cv, "imported fine")
t.ok(cv.version, "version is there:" + cv.version)
t.ok(cv.Point, "point is there")
t.ok(cv.Matrix, "matrix is there")
t.end()
})
, "Matrix": {
topic : require('../lib/opencv')
, "constructor" : function(cv){
assert.ok(cv.Matrix);
assert.ok(new cv.Matrix);
assert.ok(new cv.Matrix(1,2));
}
, "set/row" : function(cv){
var mat = new cv.Matrix(1, 2);
mat.set(0,0,3)
mat.set(0,1,5000)
assert.deepEqual(mat.row(0), [3,5000]);
}
, "get/set" : function(cv){
var mat = new cv.Matrix(1,2);
assert.equal(mat.set(0,0,3), undefined);
assert.equal(mat.get(0,0), 3);
}
, ".width" : function(cv){
var mat = new cv.Matrix(6,7);
assert.equal(mat.width(), 7);
}
, ".height" : function(cv){
var mat = new cv.Matrix(6,7);
assert.equal(mat.height(), 6);
}
, ".size" : function(cv){
var mat = new cv.Matrix(6,7);
assert.deepEqual(mat.size(), [6, 7]);
}
, "resize" : function(cv){
var mat = new cv.Matrix(6,7);
assert.equal(mat.width(), 7);
mat.resize(8,9);
assert.equal(mat.width(), 8);
}
, 'row' : function(cv){
var mat = new cv.Matrix.Eye(4,4)
assertDeepSimilar(mat.row(1), [0,1,0,0])
assertDeepSimilar(mat.row(2), [0,0,1,0])
}
, 'col' : function(cv){
var mat = new cv.Matrix.Eye(4,4);
assertDeepSimilar(mat.col(1), [0,1,0,0])
assertDeepSimilar(mat.col(2), [0,0,1,0])
}
, "empty": function(cv){
assert.equal(new cv.Matrix().empty(), true);
}
, "toBuffer": function(cv){
var buf = fs.readFileSync('./examples/mona.png')
cv.readImage(buf.slice(0), function(err, mat){
var buf0 = mat.toBuffer()
assert.ok(buf0);
//assert.equal(buf.toString('base64'), buf0.toString('base64'));
})
}
, "toBuffer Async": {
topic: function(cv){
var buf = fs.readFileSync('./examples/mona.png')
, cb = this.callback
cv.readImage(buf.slice(0), function(err, mat){
var buff = mat.toBuffer(function(){
cb.apply(this, arguments)
})
})
}
, 'gives a buffer' : function(e, res){
assert.ok(!e)
assert.ok(res);
assert.ok(res.length > 100);
}
}
, "detectObject": {
topic : function(){
var cv = require('../lib/opencv')
, cb = this.callback
cv.readImage("./examples/mona.png", function(err, im){
im.detectObject(cv.FACE_CASCADE, {}, cb)
})
}
, "finds face": function(err, faces){
assert.isNull(err);
assert.isArray(faces);
assert.equal(faces.length, 1)
}
}
, ".absDiff and .countNonZero" : function(cv) {
cv.readImage("./examples/mona.png", function(err, im) {
cv.readImage("./examples/mona.png", function(err, im2){
assert.ok(im);
assert.ok(im2);
var diff = new cv.Matrix(im.width(), im.height());
diff.absDiff(im, im2);
diff.convertGrayscale();
assert.equal(diff.countNonZero(), 0);
});
test('importing library multiple times is ok', function(t){
var cv1 = require('../lib/opencv')
, cv2 = require('../lib/opencv')
cv1.readImage('./examples/files/mona.png', function(err, im){
t.error(err)
cv2.readImage('./examples/files/mona.png', function(err, im){
t.error(err)
t.end();
});
}
});
})
, ".bitwiseXor" : function(cv) {
var mat1 = new cv.Matrix(1,1);
mat1.set(0,0, 1);
var mat2 = new cv.Matrix(1,1);
mat2.set(0,0, 1);
test('Point', function(t){
var xored = new cv.Matrix(1,1);
xored.bitwiseXor(mat1, mat2);
t.ok(new cv.Point(1, 2))
t.throws(function () { cv.Point(1, 2)}, TypeError, "cannot call without new")
assert.equal(xored.get(0,0), 0);
}
t.equal(new cv.Point(1, 2).x, 1)
t.equal(new cv.Point(1, 2).y, 2)
t.equal(Math.round(new cv.Point(1.1, 2).x * 100), 110)
t.equal(Math.round(new cv.Point(1.2, 2.75).y *100), 275)
}
t.throws(function () {new cv.Point(1.1, 2).x = 5}, Error, "Points are immutable")
t.throws(function () {new cv.Point(1.1, 2).y = 5}, Error, "Points are immutable")
var p1 = new cv.Point(3, 6)
, p2 = new cv.Point(5, 7)
t.ok(p1.dot);
t.equal(p1.dot(p2), 57);
t.end()
})
test('Matrix constructor', function(assert){
assert.ok(cv.Matrix);
assert.ok(new cv.Matrix);
assert.ok(new cv.Matrix(1,2));
assert.end()
})
test('Matrix accessors', function(assert){
var mat = new cv.Matrix(1, 2);
mat.set(0,0,3)
mat.set(0,1,5000)
assert.deepEqual(mat.row(0), [3,5000]);
mat = new cv.Matrix(1,2);
assert.equal(mat.set(0,0,3), undefined);
assert.equal(mat.get(0,0), 3);
mat = new cv.Matrix(6,7);
assert.equal(mat.width(), 7);
mat = new cv.Matrix(6,7);
assert.equal(mat.height(), 6);
mat = new cv.Matrix(6,7);
assert.deepEqual(mat.size(), [6, 7]);
mat = new cv.Matrix(6,7);
assert.equal(mat.width(), 7);
mat.resize(8,9);
assert.equal(mat.width(), 8);
mat = new cv.Matrix.Eye(4,4)
assert.deepEqual(mat.row(1), [0,1,0,0])
assert.deepEqual(mat.row(2), [0,0,1,0])
mat = new cv.Matrix.Eye(4,4);
assert.deepEqual(mat.col(1), [0,1,0,0])
assert.deepEqual(mat.col(2), [0,0,1,0])
assert.equal(new cv.Matrix().empty(), true);
assert.end()
})
test("Matrix toBuffer", function(assert){
var buf = fs.readFileSync('./examples/files/mona.png')
cv.readImage(buf.slice(0), function(err, mat){
var buf0 = mat.toBuffer()
assert.ok(buf0);
assert.end()
})
})
, "Image" : {
topic : require('../lib/opencv')
test("Matrix toBuffer Async", function(assert){
var buf = fs.readFileSync('./examples/files/mona.png')
, ".readImage from file": function(cv){
cv.readImage("./examples/mona.png", function(err, im){
assert.ok(im);
assert.equal(im.width(), 500);
assert.equal(im.height(), 756)
assert.equal(im.empty(), false)
})
}
cv.readImage(buf.slice(0), function(err, mat){
mat.toBuffer(function(err, buff){
assert.error(err)
assert.ok(buf)
assert.ok(buf.length > 100)
, ".readImage from buffer" : function(cv){
cv.readImage(fs.readFileSync('./examples/mona.png'), function(err, im){
assert.ok(im);
assert.equal(im.width(), 500);
assert.equal(im.height(), 756)
assert.equal(im.empty(), false)
})
}
}
assert.end()
})
})
})
, "CascadeClassifier": {
topic : require('../lib/opencv')
test("detectObject", function(assert){
cv.readImage("./examples/files/mona.png", function(err, im){
im.detectObject(cv.FACE_CASCADE, {}, function(err, faces){
assert.error(err)
assert.ok(faces)
assert.equal(faces.length, 1)
assert.end()
})
})
})
, "constructor" : function(cv){
assert.ok(new cv.CascadeClassifier("./data/haarcascade_frontalface_alt.xml"))
}
test(".absDiff and .countNonZero", function(assert){
cv.readImage("./examples/files/mona.png", function(err, im) {
cv.readImage("./examples/files/mona.png", function(err, im2){
assert.ok(im);
assert.ok(im2);
, "face detection": {
topic : function(){
var cv = require('../lib/opencv')
, self = this
cv.readImage("./examples/mona.png", function(err, im){
cascade = new cv.CascadeClassifier("./data/haarcascade_frontalface_alt.xml");
cascade.detectMultiScale(im, self.callback)//, 1.1, 2, [30, 30]);
})
var diff = new cv.Matrix(im.width(), im.height());
diff.absDiff(im, im2);
}
, "finds face": function(err, faces){
assert.isNull(err);
assert.isArray(faces);
assert.equal(faces.length, 1)
}
}
}
diff.convertGrayscale();
assert.equal(diff.countNonZero(), 0);
assert.end()
});
});
})
, "ImageDataStream" : {
topic : require('../lib/opencv')
test(".bitwiseXor", function(assert){
var mat1 = new cv.Matrix(1,1);
mat1.set(0,0, 1);
, "pipe" : {
topic : function(cv){
var s = new cv.ImageDataStream()
, self = this
s.on('load', function(im){
assert.ok(im)
assert.equal(im.empty(), false);
self.callback()
})
fs.createReadStream('./examples/mona.png').pipe(s);
}
var mat2 = new cv.Matrix(1,1);
mat2.set(0,0, 1);
, "loaded" : function(im){
//assert.ok(im)
//assert.equal(im.empty(), false);
}
}
var xored = new cv.Matrix(1,1);
xored.bitwiseXor(mat1, mat2);
assert.equal(xored.get(0,0), 0);
assert.end()
})
}
, "ImageStream" :{
topic : require('../lib/opencv')
, "write" : {
topic: function(cv){
var s = new cv.ImageStream()
, im = fs.readFileSync('./examples/mona.png')
, self = this;
s.on('data', function(m){
self.callback(null, m)
})
s.write(im);
}
, "receives data" : function(mat){
assert.deepEqual(mat.size(), [756,500])
}
}
}
, "ObjectDetectionStream" :{
topic : require('../lib/opencv')
}
, "CamShift" : {
"Can Create and Track" : {
topic : function(){
var cv = require('../lib/opencv')
, self = this
cv.readImage('./examples/coin1.jpg', function(e, im){
cv.readImage('./examples/coin2.jpg', function(e, im2){
self.callback(im, im2, cv)
})
})
}
, "create TrackedObject" : function(im, im2, cv){
var tracked = new cv.TrackedObject(im, [420, 110, 490, 170]);
assert.ok(tracked);
}
, "use TrackedObject.track" : function(im, im2, cv){
var tracked = new cv.TrackedObject(im, [420, 110, 490, 170], {channel: 'v'});
assertWithinRanges(tracked.track(im2), [386, 112, 459, 166], 10);
}
}
}
test("Image read from file", function(assert){
cv.readImage("./examples/files/mona.png", function(err, im){
assert.ok(im);
assert.equal(im.width(), 500);
assert.equal(im.height(), 756)
assert.equal(im.empty(), false)
assert.end()
})
})
test("read Image from buffer", function(assert){
cv.readImage(fs.readFileSync('./examples/files/mona.png'), function(err, im){
assert.ok(im);
assert.equal(im.width(), 500);
assert.equal(im.height(), 756)
assert.equal(im.empty(), false)
assert.end()
})
})
test("Cascade Classifier", function(assert){
assert.ok(new cv.CascadeClassifier("./data/haarcascade_frontalface_alt.xml"), 'test constructor')
cv.readImage("./examples/files/mona.png", function(err, im){
cascade = new cv.CascadeClassifier("./data/haarcascade_frontalface_alt.xml");
cascade.detectMultiScale(im, function(err, faces){//, 1.1, 2, [30, 30]);
assert.error(err);
assert.equal(typeof faces, typeof []);
assert.equal(faces.length, 1)
assert.end()
})
})
})
test("ImageDataStream", function(assert){
var s = new cv.ImageDataStream()
s.on('load', function(im){
assert.ok(im)
assert.equal(im.empty(), false);
assert.end()
})
fs.createReadStream('./examples/files/mona.png').pipe(s);
})
test("ImageStream", function(assert){
var s = new cv.ImageStream()
, im = fs.readFileSync('./examples/files/mona.png')
s.on('data', function(mat){
assert.deepEqual(mat.size(), [756,500])
assert.end()
})
s.write(im);
})
test("CamShift", function(assert){
cv.readImage('./examples/files/coin1.jpg', function(e, im){
cv.readImage('./examples/files/coin2.jpg', function(e, im2){
var tracked = new cv.TrackedObject(im, [420, 110, 490, 170], {channel: 'v'});
assert.ok(tracked);
var res = tracked.track(im2)
assert.ok(res);
assert.ok(res[0] < 396)
assert.ok(res[0] > 376)
assert.ok(res[1] < 122)
assert.ok(res[1] > 102)
assert.ok(res[2] < 469)
assert.ok(res[2] > 449)
assert.ok(res[3] < 176)
assert.ok(res[3] > 156)
assert.end()
})
})
})
test("fonts", function(t) {
function rnd() {
return Math.round(Math.random() * 255);
};
cv.readImage('./examples/files/coin1.jpg', function(e, im){
var y = 0;
([
"HERSEY_SIMPLEX",
"HERSEY_PLAIN",
"HERSEY_DUPLEX",
"HERSEY_COMPLEX",
"HERSEY_TRIPLEX",
"HERSEY_COMPLEX_SMALL",
"HERSEY_SCRIPT_SIMPLEX",
"HERSEY_SCRIPT_COMPLEX",
"HERSEY_SCRIPT_SIMPLEX"
]).forEach(function(font) {
im.putText("Some text", 0, y += 20, font, [rnd(), rnd(), rnd()]);
});
t.ok(im, "image is ok")
//im.save("./examples/tmp/coin1-with-text.jpg");
t.end();
});
})
}).export(module);