Merge branch 'peterbraden-master'

This commit is contained in:
AVVS 2015-03-31 18:46:28 -07:00
commit d2a918bedc
38 changed files with 1921 additions and 184 deletions

View File

@ -1,10 +1,70 @@
language: node_js
node_js:
- "0.10"
- "0.11"
- '0.10'
- '0.11'
- '0.12'
compiler: clang
env:
global:
- secure: "kCAwXdfcMv4l4ok5lO89ANbKXXwWQpn5/3qupSbhmX2NDIGUxyXze/cs90u9gF5hcT7ebq27ZJPEtu7pRov8MadfQM9BAd4ZZrHcMHWSkA0Iz+nM+m0Urwach6jkp2Iuwx15L2NHSis7f5PUKJcEv4Gnqs8jrCJzHHS7m7dO0Xo="
- secure: "lBIk4BhdIkSmJtFUNp93UjDm445i9eF7nKA+oSiLRu+b9i/WeRLiKFI89tehexWeXBlyNhziBN16LrHmx3I86yZfEok9dBMA1JuzYBjrvpjRAflGcqaCFLV3axyyyNQRIh7Q+ziAlg0xg8HL752BpnfXO91g3jfDPjGxcvBb5xQ="
before_install:
# Fix a problem with apt-get failing later, see http://docs.travis-ci.com/user/installing-dependencies/#Installing-Ubuntu-packages
- sudo apt-get update -qq
- sudo add-apt-repository -y ppa:kubuntu-ppa/backports
- sudo apt-get update
- sudo apt-get install libcv-dev
- sudo apt-get install libopencv-dev
- sudo apt-get install libhighgui-dev
- sudo apt-get install --force-yes libcv-dev libcvaux-dev libhighgui-dev libopencv-dev
# get commit message
- COMMIT_MESSAGE=$(git show -s --format=%B $TRAVIS_COMMIT | tr -d '\n')
# put local node-pre-gyp on PATH
- export PATH=./node_modules/.bin/:$PATH
# install node-gyp globally so the native addon can be built
- npm install node-gyp -g
# install node-pre-gyp so it is available for packaging and publishing
- npm install node-pre-gyp
# install aws-sdk so it is available for publishing to S3
- npm install aws-sdk
# figure out if we should publish
- PUBLISH_BINARY=false
# if we are building a tag then publish
- if [[ $TRAVIS_BRANCH == `git describe --tags --always HEAD` ]]; then PUBLISH_BINARY=true; fi;
# or if we put [publish binary] in the commit message
- if test "${COMMIT_MESSAGE#*'[publish binary]'}" != "$COMMIT_MESSAGE"; then PUBLISH_BINARY=true; fi;
- platform=$(uname -s | sed "y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/")
install:
# ensure source install works
- npm install --build-from-source
# test our module
- npm test
- node lib/opencv.js
before_script:
- echo "Publishing native platform Binary Package? ->" $PUBLISH_BINARY
# if publishing, do it
- if [[ $PUBLISH_BINARY == true ]]; then node-pre-gyp package publish || true; fi;
# cleanup
- node-pre-gyp clean
- node-gyp clean
- sudo apt-get purge libcv-dev
- sudo apt-get purge libopencv-dev
- sudo apt-get purge libhighgui-dev
script:
# if publishing, test installing from remote
- INSTALL_RESULT=0
- if [[ $PUBLISH_BINARY == true ]]; then INSTALL_RESULT=$(npm install --fallback-to-build=false > /dev/null)$? || true; fi;
# if install returned non-zero (errored) then we first unpublish and then call false so Travis will bail at this line
- if [[ $INSTALL_RESULT != 0 ]]; then echo "returned $INSTALL_RESULT";node-pre-gyp unpublish;false; fi
# If successful we arrive here, so let's clean up
- node-pre-gyp clean
# Can't compile opencv 32-bit in a 64-bit env.
after_success:
# if success then query and display all published binaries
- node-pre-gyp info

View File

@ -1,6 +1,29 @@
# Changelog
### 3.0.0
You wait ages for a release, and 2 come along at once...
This one is awesome. It adds prebuilt binaries, so you don't need
to build opencv for the common platforms.
Many, many thanks to @edgarsilva for the awesome work here, and to
http://hybridgroup.com/ for hosting the binaries
### 2.0.0
- Support for node 0.12
- Camera Calibration Functions from @queuecumber
- Fix for Nan 1.5.1 from @IMGNRY
- More build fixes from @scanlime
- Matrix crop prototype from @dbpieter
- Many fixes from @madshall
Thanks to everyone that contributed!
### 1.0.0
Ok, let's do a proper semver release :)

34
Makefile Normal file
View File

@ -0,0 +1,34 @@
VERSION := $(shell node -e "console.log(require('./package.json').version)")
.PHONY: default release smoke
test:
npm test
smoke:
npm install --build-from-source
node smoke/smoketest.js
npm test
release:
@echo "Tagging release $(VERSION)"
@git tag -m "$(VERSION)" v$(VERSION)
@echo "Pushing tags to GitHub"
@git push --tags
@echo "Switching to osx-binaries branch"
@git checkout osx-binaries
@echo "Merging master into osx-binaries"
@git merge --no-ff --commit -m "Merge master into osx-binaries [publish binary]" master
@echo "Pushing osx-binaries"
@git push
@echo "Switching to master branch"
@git checkout master
@echo "Publishing to NPM"
@npm publish

View File

@ -22,6 +22,7 @@ $ npm install opencv
```
## Examples
Run the examples from the parent directory.
### Face Detection
@ -172,29 +173,29 @@ See [relevant source code](src/Contours.cc) and [examples](examples/)
```javascript
var contours = im.findContours;
# Count of contours in the Contours object
// Count of contours in the Contours object
contours.size();
# Count of corners(verticies) of contour `index`
// Count of corners (vertices) of contour `index`
contours.cornerCount(index);
# Access vertex data of contours
// Access vertex data of contours
for(var c = 0; c < contours.size(); ++c) {
console.log("Contour " + c);
for(var i = 0; i < contours.cornerCount(c); ++i) {
var point = contours.point(c, i);
console.log("(" + point.x + "," + point.y + ")");"
console.log("(" + point.x + "," + point.y + ")");
}
}
# Computations of contour `index`
// Computations of contour `index`
contours.area(index);
contours.arcLength(index, isClosed);
contours.boundingRect(index);
contours.minAreaRect(index);
contours.isConvex(index);
# Destructively alter contour `index`
// Destructively alter contour `index`
contours.approxPolyDP(index, epsilon, isClosed);
contours.convexHull(index, clockwise);
```
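For context, a minimal end-to-end sketch (the file path and the usual grayscale/Canny pre-processing are illustrative, and `findContours` is called here as a method):
```javascript
var cv = require('../lib/opencv');

cv.readImage('./files/mona.png', function(err, im) {
  if (err) throw err;
  // Contours are normally extracted from a binarised image.
  im.convertGrayscale();
  im.canny(5, 300);
  var contours = im.findContours();
  for (var c = 0; c < contours.size(); ++c) {
    console.log('Contour ' + c + ' area: ' + contours.area(c));
  }
});
```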

105
appveyor.yml Normal file
View File

@ -0,0 +1,105 @@
# environment variables
environment:
node_pre_gyp_accessKeyId:
secure: 3GHZvq0y83PQ8Qi7FOc5rKoULRQvd2OohhtGqRQLZB4=
node_pre_gyp_secretAccessKey:
secure: AiX8Bx+U8LKu5JZHb5CMth5xOHuWawkQW3il0OFWfV4kodPZnC8dLYIqb4XZeF7f
# use the unstable build worker image to build node-opencv
os: unstable
# to add several platforms to build matrix:
platform:
- x64
install:
- cmd: ECHO "INSTALL OPENCV:"
- cmd: choco install OpenCV
- cmd: ECHO "APPVEYOR_REPO_COMMIT_MESSAGE ->"
- cmd: ECHO %APPVEYOR_REPO_COMMIT_MESSAGE%
- cmd: SET COMMIT_MSG="%APPVEYOR_REPO_COMMIT_MESSAGE%"
- cmd: SET PUBLISH_BINARY=false
# Check whether the branch is the same as the latest tag; if so
# then we publish the binaries if everything else is successful.
- cmd: git describe --tags --always HEAD > _git_tag.tmp
- cmd: SET /p GIT_TAG=<_git_tag.tmp
- cmd: ECHO "LATEST LOCAL TAG:"
- cmd: ECHO %GIT_TAG%
- cmd: ECHO "APPVEYOR REPO BRANCH/TAG:"
- cmd: ECHO %APPVEYOR_REPO_BRANCH%
- cmd: DEL _git_tag.tmp
- cmd: IF x%APPVEYOR_REPO_BRANCH%==x%GIT_TAG% SET PUBLISH_BINARY=true
# Or look for commit message containing `[publish binary]`
- cmd: IF not x%COMMIT_MSG:[publish binary]=%==x%COMMIT_MSG% SET PUBLISH_BINARY=true
- cmd: ECHO "Env Var PUBLISH_BINARY:"
- cmd: ECHO %PUBLISH_BINARY%
- cmd: git clone https://github.com/marcelklehr/nodist.git c:\nodist 2>&1
- cmd: SET PATH=C:\nodist\bin;%PATH%
- cmd: SET NODIST_PREFIX=C:\nodist
before_build:
- cmd: SET ARCH=x64
- cmd: SET NODIST_X64=1
- cmd: call nodist update
- cmd: call nodist stable
- cmd: npm install -g node-gyp
- cmd: SET APP_PATH=%CD%
- cmd: IF EXIST C:\OpenCV* CD C:\OpenCV*
- cmd: SET OPENCV_ROOT_PATH=%CD%\opencv
- cmd: CD %APP_PATH%
- cmd: SET OPENCV_DIR=%OPENCV_ROOT_PATH%\build\%ARCH%\vc12\bin
- cmd: SET PATH=%cd%\node_modules\.bin\;C:\MinGW\bin;C:\GTK\bin;C:\msys\1.0\bin;%OPENCV_DIR%;%PATH%
- cmd: SET PKG_CONFIG_PATH=C:\GTK\lib\pkgconfig
- cmd: DIR %OPENCV_ROOT_PATH%\build\%ARCH%\vc12\bin
- cmd: DIR %OPENCV_ROOT_PATH%\build\%ARCH%\vc12\lib
# Here we need to copy the opencv.pc file from the repo into PKG_CONFIG_PATH
# the tricky part is to check for the vc12 folder and use that one
- cmd: copy .\utils\opencv_x64.pc C:\GTK\lib\pkgconfig\opencv.pc
# to run your custom scripts instead of automatic MSBuild
build_script:
- cmd: ECHO "BUILDING x64 binary package:"
- cmd: npm install --build-from-source --msvs_version=2013
- cmd: npm test
- cmd: node lib/opencv.js
- cmd: ECHO "PUBLISH x64 binary package:"
- cmd: npm install aws-sdk
- cmd: IF %PUBLISH_BINARY%==true (node-pre-gyp package publish 2>&1)
- cmd: node-pre-gyp clean
- cmd: node-gyp clean
- cmd: npm uninstall -g node-gyp
- cmd: rmdir /q /s node_modules
- cmd: DEL C:\GTK\lib\pkgconfig\opencv.pc
after_build:
- cmd: SET ARCH=x86
- cmd: SET OPENCV_DIR=%OPENCV_ROOT_PATH%\build\%ARCH%\vc12\bin
- cmd: SET PATH=%OPENCV_DIR%;%PATH%
- cmd: SET NODIST_X64=0
- cmd: call nodist update
- cmd: call nodist stable
- cmd: npm install -g node-gyp
- cmd: copy .\utils\opencv_x86.pc C:\GTK\lib\pkgconfig\opencv.pc
- cmd: ECHO "BUILDING x86 binary package:"
- cmd: npm install --build-from-source --msvs_version=2013
- cmd: npm test
- cmd: node lib/opencv.js
- cmd: ECHO "PUBLISH x86 binary package:"
- cmd: npm install aws-sdk
- cmd: IF %PUBLISH_BINARY%==true (node-pre-gyp package publish 2>&1)
- cmd: node-pre-gyp clean
- cmd: node-gyp clean
- cmd: rmdir /q /s node_modules
on_success:
# test that installing from the binary package works
- cmd: ECHO "ON SUCCESS:"
- cmd: ECHO "Try installing from binary:"
#- cmd: IF %PUBLISH_BINARY%==true npm install --fallback-to-build=false
- cmd: npm install --fallback-to-build=false
# Print Available Binaries
- cmd: node-pre-gyp info
test: OFF
deploy: OFF

View File

@ -1,51 +1,80 @@
{
"targets": [{
"target_name": "opencv"
, "sources": [
"src/init.cc"
, "src/Matrix.cc"
, "src/OpenCV.cc"
, "src/CascadeClassifierWrap.cc"
, "src/Contours.cc"
, "src/Point.cc"
, "src/VideoCaptureWrap.cc"
, "src/CamShift.cc"
, "src/HighGUI.cc"
, "src/FaceRecognizer.cc"
, "src/BackgroundSubtractor.cc"
, "src/Constants.cc"
]
, 'libraries': [
'<!@(pkg-config --libs opencv)'
]
"target_name": "opencv",
"sources": [
"src/init.cc",
"src/Matrix.cc",
"src/OpenCV.cc",
"src/CascadeClassifierWrap.cc",
"src/Contours.cc",
"src/Point.cc",
"src/VideoCaptureWrap.cc",
"src/CamShift.cc",
"src/HighGUI.cc",
"src/FaceRecognizer.cc",
"src/BackgroundSubtractor.cc",
"src/Constants.cc",
"src/Calib3D.cc",
"src/ImgProc.cc",
"src/Stereo.cc"
],
"libraries": [
"<!@(pkg-config --libs opencv)"
],
# For windows
,'include_dirs': [
'<!@(pkg-config --cflags opencv)',
"<!(node -e \"require('nan')\")"
]
, 'cflags': [
'<!@(pkg-config --cflags "opencv >= 2.3.1" )'
, '-Wall'
]
, 'cflags!' : [ '-fno-exceptions']
, 'cflags_cc!': [ '-fno-rtti', '-fno-exceptions']
, "conditions": [
['OS=="mac"', {
# cflags on OS X are stupid and have to be defined like this
'xcode_settings': {
'OTHER_CFLAGS': [
"-mmacosx-version-min=10.7",
"-std=c++11",
"-stdlib=libc++",
'<!@(pkg-config --cflags opencv)'
"include_dirs": [
"<!@(pkg-config --cflags opencv)",
"<!(node -e \"require('nan')\")"
],
"cflags!" : [ "-fno-exceptions"],
"cflags_cc!": [ "-fno-rtti", "-fno-exceptions"],
"conditions": [
[ "OS==\"linux\"", {
"cflags": [
"<!@(pkg-config --cflags \"opencv >= 2.3.1\" )",
"-Wall"
]
, "GCC_ENABLE_CPP_RTTI": "YES"
, "GCC_ENABLE_CPP_EXCEPTIONS": "YES"
}],
[ "OS==\"win\"", {
"cflags": [
"<!@(pkg-config --cflags \"opencv >= 2.4.9\" )",
"-Wall"
],
"msvs_settings": {
"VCCLCompilerTool": {
"ExceptionHandling": "2",
"DisableSpecificWarnings": [ "4530", "4506", "4244" ],
},
}
}],
[ # cflags on OS X are stupid and have to be defined like this
"OS==\"mac\"", {
"xcode_settings": {
"OTHER_CFLAGS": [
"-mmacosx-version-min=10.7",
"-std=c++11",
"-stdlib=libc++",
"<!@(pkg-config --cflags opencv)"
],
"GCC_ENABLE_CPP_RTTI": "YES",
"GCC_ENABLE_CPP_EXCEPTIONS": "YES"
}
}]
]
}]
},
{
"target_name": "action_after_build",
"type": "none",
"dependencies": [ "<(module_name)" ],
"copies": [
{
"files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
"destination": "<(module_path)"
}
]
}]
}

View File

@ -1,6 +1,6 @@
<?xml version="1.0"?>
<opencv_storage>
<cascade>
<output type_id="opencv-haar-classifier">
<stageType>BOOST</stageType>
<featureType>HOG</featureType>
<height>24</height>
@ -835,5 +835,7 @@
16 8 16 8 24</rect></_>
<_>
<rect>
16 8 16 8 28</rect></_></features></cascade>
16 8 16 8 28</rect></_></features>
</output>
</opencv_storage>

View File

@ -1,14 +1,14 @@
var cv = require('../lib/opencv');
cv.readImage("./files/mona.png", function(err, orig) {
cv.readImage("./examples/files/mona.png", function(err, orig) {
if (err) throw err;
cv.readImage("./files/over_text.png", function(err, over_text) {
cv.readImage("./examples/files/over_text.png", function(err, over_text) {
if (err) throw err;
var result = new cv.Matrix(orig.width(), orig.height());
result.addWeighted(orig, 0.7, over_text, 0.9);
result.save("./tmp/weighted.png");
console.log('Image saved to ./tmp/weighted.png');
result.save("./examples/tmp/weighted.png");
console.log('Image saved to ./examples/tmp/weighted.png');
});
});

View File

@ -1,12 +1,20 @@
var cv = require('../lib/opencv');
var camera = new cv.VideoCapture(0);
var window = new cv.NamedWindow('Video', 0)
setInterval(function() {
camera.read(function(err, im) {
if (err) throw err;
window.show(im);
window.blockingWaitKey(0, 50);
});
}, 20);
try {
var camera = new cv.VideoCapture(0);
var window = new cv.NamedWindow('Video', 0)
/*
setInterval(function() {
camera.read(function(err, im) {
if (err) throw err;
console.log(im.size())
if (im.size()[0] > 0 && im.size()[1] > 0){
window.show(im);
}
window.blockingWaitKey(0, 50);
});
}, 20);
*/
} catch (e){
console.log("Couldn't start camera:", e)
}

View File

@ -1,3 +1,6 @@
/* For some reason the cascade file is broken on linux :(
var cv = require('../lib/opencv');
cv.readImage("./files/car1.jpg", function(err, im){
@ -16,3 +19,4 @@ cv.readImage("./files/car1.jpg", function(err, im){
console.log('Image saved to ./tmp/car-detection.jpg');
});
});
*/

View File

@ -3,7 +3,7 @@ var http = require('http'),
request = require('request'),
cv = require('../lib/opencv');
http.createServer(function(req, resp){
var server = http.createServer(function(req, resp){
var url = req.url.slice(1);
request({uri:url, encoding:'binary'}, function(err, r, body){
if (err) return resp.end(err.stack);
@ -27,4 +27,7 @@ http.createServer(function(req, resp){
});
});
}).listen(3000, function(){ console.log('Listening on http://localhost:3000'); })
})
//server.listen(3000, function(){ console.log('Listening on http://localhost:3000'); })

View File

@ -1,7 +1,7 @@
var cv = require('../lib/opencv');
cv.readImage("./files/mona.png", function(err, im) {
salt(im, 1000);
salt(im, 100);
im.save("./tmp/salt.png");
console.log('Image saved to ./tmp/salt.png');
});

View File

@ -1,24 +1,32 @@
var cv = require('../lib/opencv');
var vid = new cv.VideoCapture(0);
try {
var vid = new cv.VideoCapture(0);
vid.read(function(err, im){
if (err) throw err;
im.detectObject(cv.FACE_CASCADE, {}, function(err, faces){
vid.read(function(err, im){
if (err) throw err;
if (!faces.length) return console.log("No Faces");
if (im.size()[0] > 0 && im.size()[1] > 0){
var face = faces[0];
var ims = im.size();
var im2 = im.roi(face.x, face.y, face.width, face.height)
/*
im.adjustROI(
-face.y
, (face.y + face.height) - ims[0]
, -face.x
, (face.x + face.width) - ims[1])
*/
im2.save('./tmp/take-face-pics.jpg')
console.log('Image saved to ./tmp/take-face-pics.jpg');
})
});
im.detectObject(cv.FACE_CASCADE, {}, function(err, faces){
if (err) throw err;
if (!faces.length) return console.log("No Faces");
var face = faces[0];
var ims = im.size();
var im2 = im.roi(face.x, face.y, face.width, face.height)
/*
im.adjustROI(
-face.y
, (face.y + face.height) - ims[0]
, -face.x
, (face.x + face.width) - ims[1])
*/
im2.save('./examples/tmp/take-face-pics.jpg')
console.log('Image saved to ./tmp/take-face-pics.jpg');
})
} else {
console.log("Camera didn't return image")
}
});
} catch (e){
console.log("Couldn't start camera", e)
}

View File

@ -1,6 +1,6 @@
var cv = require('../lib/opencv');
cv.readImage("./mona.png", function(err, im) {
cv.readImage("./files/mona.png", function(err, im) {
if (err) throw err;
var width = im.width();
@ -11,6 +11,6 @@ cv.readImage("./mona.png", function(err, im) {
var dstArray = [0, 0, width * 0.9, height * 0.1, width, height, width * 0.2, height * 0.8];
var xfrmMat = im.getPerspectiveTransform(srcArray, dstArray);
im.warpPerspective(xfrmMat, width, height, [255, 255, 255]);
im.save("./warp-image.png");
im.save("./tmp/warp-image.png");
console.log('Image saved to ./tmp/warp-image.png');
});

View File

@ -1 +1,7 @@
module.exports = require('../build/Release/opencv.node');
var binary = require('node-pre-gyp');
var path = require('path');
var binding_path = binary.find(path.resolve(path.join(__dirname,'../package.json')));
var binding = require(binding_path);
//module.exports = require('../build/Release/opencv.node');
module.exports = binding;

View File

@ -3,17 +3,23 @@
"description": "Node Bindings to OpenCV",
"author": "Peter Braden <peterbraden@peterbraden.co.uk>",
"dependencies": {
"node-pre-gyp": "0.5.31",
"buffers": "0.1.1",
"nan": "^1.3.0"
"nan": "1.4.3"
},
"version": "1.0.0",
"version": "3.0.1",
"devDependencies": {
"tape": "^3.0.0"
"tape": "^3.0.0",
"aws-sdk": "~2.0.21",
"glob": "^4.0.6",
"request": "^2.45.0"
},
"bundledDependencies":["node-pre-gyp"],
"license": "MIT",
"scripts": {
"build": "node-gyp build",
"test": "node test/unit.js"
"test": "node test/unit.js",
"install": "node-pre-gyp install --fallback-to-build"
},
"keywords": [
"opencv",
@ -28,5 +34,12 @@
},
"engines": {
"node": ">=0.10"
},
"binary": {
"module_name" : "opencv",
"module_path" : "./build/{module_name}/v{version}/{configuration}/{node_abi}-{platform}-{arch}/",
"remote_path" : "./{module_name}/v{version}/{configuration}/",
"package_name": "{node_abi}-{platform}-{arch}.tar.gz",
"host" : "https://node-opencv.s3.amazonaws.com"
}
}

71
publish-binaries.md Normal file
View File

@ -0,0 +1,71 @@
How to publish the precompiled binaries.
=========================================
## Setup for Linux, Windows and OSX
Every time a new tag for the latest release is pushed to GitHub, the continuous integration
builds in Travis CI and AppVeyor generate the binaries for each platform and architecture,
package them, and publish them to the S3 bucket.
This can be checked in the .travis.yml and appveyor.yml files. Within those files there are two
methods for publishing new binaries for each version: one is triggered when a `git tag` is detected; the other
by including the string `[publish binary]` in the commit message itself.
We also have an automated make task; we should always use this task to avoid forgetting any steps
(like merging into the `osx-binaries` branch).
The process for generating the binaries, publishing and releasing the npm module should be as follows:
1. Merge all changes and new features into master.
2. Bump up version of npm module in `package.json`.
3. execute make task: `make release`
This task will do the following for you:
1. Generate new tags based on package.json version number
2. Push tags to Github
3. Checkout into `osx-binaries` branch
4. Merge `master` into `osx-binaries`
5. Push `osx-binaries`
6. Checkout master
7. Finally it will run `npm publish`
With this we make sure the binaries for all platforms and architectures are generated each time
a new version is released.
## Configuring Travis, AppVeyor and GitHub to generate all of the binaries
Before we are able to run everything stated above, some steps need to be taken,
specifically so that the precompiled binaries can be published to AWS S3. The
correct keys need to be set up in the Travis and AppVeyor `.yml` files. This needs
to be done by the admin of the repo in the case of Travis, and by the owner of the account
in the case of AppVeyor.
### Setting up secure keys in Travis.
Setting up the keys in Travis is easy. If you have Ruby and RubyGems installed and working, install:
`gem install travis`
After the travis gem is installed, run the following command for each of the required keys:
`travis encrypt SOMEVAR=secretvalue`
Then substitute the new values for the old ones in the `.travis.yml` file. Detailed instructions can
be found here: http://docs.travis-ci.com/user/environment-variables/#Secure-Variables
### Setting up secure keys in AppVeyor
This is even easier than Travis: you do not need to install anything. Just go to your account,
click on `encrypt tool`, enter the values in the input field, and click encrypt. As with
Travis, we then need to substitute the newly generated values for the old ones.
Detailed instructions can be found here: http://www.appveyor.com/docs/build-configuration#secure-variables
### OSX binaries
Since Travis does not support per-OS config files, we need to create a new branch that contains
a slightly different version of the .travis.yml file to compile for OS X. The branch needs to be called
`osx-binaries` and be based off `master` once the precompiled-binaries PR has been merged in.

View File

@ -1,14 +0,0 @@
#!/bin/bash
if [ ! -f smoke/smoketest.js ]; then
echo "Please run smoke test from the top-level folder of the repository." >&2
exit 1
fi
node-gyp build && echo '-- Compiled OK --
' && node smoke/smoketest.js && echo '-- Smoke Done, running tests --
' && npm test # && echo '-- Tests Run, runnning examples --
#(building example data)
#' && ./examples/make-example-files.sh && node examples/motion-track.js

View File

@ -1,6 +1,6 @@
var cv = require('../lib/opencv')
var cv = require('../lib/opencv');
var trainingData = []
var trainingData = [];
/*
for (var i = 1; i< 41; i++){
for (var j = 1; j<10; j++){
@ -22,5 +22,5 @@ cv.readImage("/Users/peterbraden/Downloads/orl_faces/s6/10.pgm", function(e, im)
*/
cv.readImage("./examples/files/mona.png", function(e, mat){
var th = mat.threshold(200, 200, "Threshold to Zero Inverted");
th.save('./examples/tmp/out.png')
})
th.save('./examples/tmp/out.png');
});

View File

@ -21,7 +21,7 @@ BackgroundSubtractorWrap::Init(Handle<Object> target) {
NODE_SET_PROTOTYPE_METHOD(ctor, "applyMOG", ApplyMOG);
target->Set(NanNew("BackgroundSubtractor"), ctor->GetFunction());
};
NAN_METHOD(BackgroundSubtractorWrap::New) {
@ -42,17 +42,17 @@ NAN_METHOD(BackgroundSubtractorWrap::New) {
NAN_METHOD(BackgroundSubtractorWrap::CreateMOG) {
NanScope();
int history = 200;
int nmixtures = 5;
double backgroundRatio = 0.7;
double noiseSigma = 0;
if(args.Length() > 1){
INT_FROM_ARGS(history, 0)
INT_FROM_ARGS(nmixtures, 1)
DOUBLE_FROM_ARGS(backgroundRatio, 2)
DOUBLE_FROM_ARGS(noiseSigma, 3)
}
// int history = 200;
// int nmixtures = 5;
// double backgroundRatio = 0.7;
// double noiseSigma = 0;
//
// if(args.Length() > 1){
// INT_FROM_ARGS(history, 0)
// INT_FROM_ARGS(nmixtures, 1)
// DOUBLE_FROM_ARGS(backgroundRatio, 2)
// DOUBLE_FROM_ARGS(noiseSigma, 3)
// }
Local<Object> n = NanNew(BackgroundSubtractorWrap::constructor)->GetFunction()->NewInstance();
@ -83,13 +83,13 @@ NAN_METHOD(BackgroundSubtractorWrap::ApplyMOG) {
Local<Object> fgMask = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *img = ObjectWrap::Unwrap<Matrix>(fgMask);
cv::Mat mat;
if(Buffer::HasInstance(args[0])){
uint8_t *buf = (uint8_t *) Buffer::Data(args[0]->ToObject());
unsigned len = Buffer::Length(args[0]->ToObject());
unsigned len = Buffer::Length(args[0]->ToObject());
cv::Mat *mbuf = new cv::Mat(len, 1, CV_64FC1, buf);
mat = cv::imdecode(*mbuf, -1);
//mbuf->release();
@ -116,13 +116,13 @@ NAN_METHOD(BackgroundSubtractorWrap::ApplyMOG) {
TryCatch try_catch;
cb->Call(NanGetCurrentContext()->Global(), 2, argv);
if (try_catch.HasCaught()) {
FatalException(try_catch);
}
NanReturnUndefined();
}
}
catch( cv::Exception& e ){
const char* err_msg = e.what();
NanThrowError(err_msg);
@ -136,3 +136,4 @@ BackgroundSubtractorWrap::BackgroundSubtractorWrap(cv::Ptr<cv::BackgroundSubtrac
};
#endif

589
src/Calib3D.cc Normal file
View File

@ -0,0 +1,589 @@
#include "Calib3D.h"
#include "Matrix.h"
inline Local<Object> matrixFromMat(cv::Mat &input)
{
Local<Object> matrixWrap = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *matrix = ObjectWrap::Unwrap<Matrix>(matrixWrap);
matrix->mat = input;
return matrixWrap;
}
inline cv::Mat matFromMatrix(Handle<Value> matrix)
{
Matrix* m = ObjectWrap::Unwrap<Matrix>(matrix->ToObject());
return m->mat;
}
inline cv::Size sizeFromArray(Handle<Value> jsArray)
{
cv::Size patternSize;
if (jsArray->IsArray())
{
Local<Object> v8sz = jsArray->ToObject();
patternSize = cv::Size(v8sz->Get(0)->IntegerValue(), v8sz->Get(1)->IntegerValue());
}
else
{
JSTHROW_TYPE("Size is not a valid array");
}
return patternSize;
}
inline std::vector<cv::Point2f> points2fFromArray(Handle<Value> array)
{
std::vector<cv::Point2f> points;
if(array->IsArray())
{
Local<Array> pointsArray = Local<Array>::Cast(array->ToObject());
for(unsigned int i = 0; i < pointsArray->Length(); i++)
{
Local<Object> pt = pointsArray->Get(i)->ToObject();
points.push_back(cv::Point2f(pt->Get(NanNew<String>("x"))->ToNumber()->Value(),
pt->Get(NanNew<String>("y"))->ToNumber()->Value()));
}
}
else
{
JSTHROW_TYPE("Points not a valid array");
}
return points;
}
inline std::vector<cv::Point3f> points3fFromArray(Handle<Value> array)
{
std::vector<cv::Point3f> points;
if(array->IsArray()) {
Local<Array> pointsArray = Local<Array>::Cast(array->ToObject());
for(unsigned int i = 0; i < pointsArray->Length(); i++)
{
Local<Object> pt = pointsArray->Get(i)->ToObject();
points.push_back(cv::Point3f(pt->Get(NanNew<String>("x"))->ToNumber()->Value(),
pt->Get(NanNew<String>("y"))->ToNumber()->Value(),
pt->Get(NanNew<String>("z"))->ToNumber()->Value()));
}
}
else
{
JSTHROW_TYPE("Must pass array of object points for each frame")
}
return points;
}
inline std::vector<std::vector<cv::Point2f> > points2fFromArrayOfArrays(Handle<Value> array)
{
std::vector<std::vector<cv::Point2f> > points;
if(array->IsArray())
{
Local<Array> pointsArray = Local<Array>::Cast(array->ToObject());
for(unsigned int i = 0; i < pointsArray->Length(); i++)
{
points.push_back(points2fFromArray(pointsArray->Get(i)));
}
}
else
{
JSTHROW_TYPE("Must pass array of object points for each frame")
}
return points;
}
inline std::vector<std::vector<cv::Point3f> > points3fFromArrayOfArrays(Handle<Value> array)
{
std::vector<std::vector<cv::Point3f> > points;
if(array->IsArray())
{
Local<Array> pointsArray = Local<Array>::Cast(array->ToObject());
for(unsigned int i = 0; i < pointsArray->Length(); i++)
{
points.push_back(points3fFromArray(pointsArray->Get(i)));
}
}
else
{
JSTHROW_TYPE("Must pass array of object points for each frame")
}
return points;
}
void Calib3D::Init(Handle<Object> target)
{
Persistent<Object> inner;
Local<Object> obj = NanNew<Object>();
NanAssignPersistent(inner, obj);
NODE_SET_METHOD(obj, "findChessboardCorners", FindChessboardCorners);
NODE_SET_METHOD(obj, "drawChessboardCorners", DrawChessboardCorners);
NODE_SET_METHOD(obj, "calibrateCamera", CalibrateCamera);
NODE_SET_METHOD(obj, "solvePnP", SolvePnP);
NODE_SET_METHOD(obj, "getOptimalNewCameraMatrix", GetOptimalNewCameraMatrix);
NODE_SET_METHOD(obj, "stereoCalibrate", StereoCalibrate);
NODE_SET_METHOD(obj, "stereoRectify", StereoRectify);
NODE_SET_METHOD(obj, "computeCorrespondEpilines", ComputeCorrespondEpilines);
NODE_SET_METHOD(obj, "reprojectImageTo3d", ReprojectImageTo3D);
target->Set(NanNew("calib3d"), obj);
}
// cv::findChessboardCorners
NAN_METHOD(Calib3D::FindChessboardCorners)
{
NanEscapableScope();
try {
// Get the arguments from javascript
// Arg 0 is the image
cv::Mat mat = matFromMatrix(args[0]);
// Arg 1 is the pattern size
cv::Size patternSize = sizeFromArray(args[1]);
// Arg 2 would normally be the flags, ignoring this for now and using the default flags
// Find the corners
std::vector<cv::Point2f> corners;
bool found = cv::findChessboardCorners(mat, patternSize, corners);
// Make the return value
Local<Object> ret = NanNew<Object>();
ret->Set(NanNew<String>("found"), NanNew<Boolean>(found));
Local<Array> cornersArray = NanNew<Array>(corners.size());
for(unsigned int i = 0; i < corners.size(); i++)
{
Local<Object> point_data = NanNew<Object>();
point_data->Set(NanNew<String>("x"), NanNew<Number>(corners[i].x));
point_data->Set(NanNew<String>("y"), NanNew<Number>(corners[i].y));
cornersArray->Set(NanNew<Number>(i), point_data);
}
ret->Set(NanNew<String>("corners"), cornersArray);
NanReturnValue(ret);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
};
// cv::drawChessboardCorners
NAN_METHOD(Calib3D::DrawChessboardCorners)
{
NanEscapableScope();
try {
// Get the arguments
// Arg 0 is the image
cv::Mat mat = matFromMatrix(args[0]);
// Arg 1 is the pattern size
cv::Size patternSize = sizeFromArray(args[1]);
// Arg 2 is the corners array
std::vector<cv::Point2f> corners = points2fFromArray(args[2]);
// Arg 3, pattern found boolean
bool patternWasFound = args[3]->ToBoolean()->Value();
// Draw the corners
cv::drawChessboardCorners(mat, patternSize, corners, patternWasFound);
// Return the passed image, now with corners drawn on it
NanReturnValue(args[0]);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
}
// cv::calibrateCamera
NAN_METHOD(Calib3D::CalibrateCamera)
{
NanEscapableScope();
try {
// Get the arguments
// Arg 0, the array of object points, an array of arrays
std::vector<std::vector<cv::Point3f> > objectPoints = points3fFromArrayOfArrays(args[0]);
// Arg 1, the image points, another array of arrays
std::vector<std::vector<cv::Point2f> > imagePoints = points2fFromArrayOfArrays(args[1]);
// Arg 2, the image size
cv::Size imageSize = sizeFromArray(args[2]);
// Args 3, 4, input guesses for the camera matrix and distortion coefficients, skipping for now
cv::Mat K, dist;
// Arg 5, 6 flags and termination criteria, skipping for now
// Calibrate the camera
std::vector<cv::Mat> rvecs, tvecs;
double error = cv::calibrateCamera(objectPoints, imagePoints, imageSize, K, dist, rvecs, tvecs);
// make the return values
Local<Object> ret = NanNew<Object>();
// Reprojection error
ret->Set(NanNew<String>("reprojectionError"), NanNew<Number>(error));
// K
Local<Object> KMatrixWrap = matrixFromMat(K);
ret->Set(NanNew<String>("K"), KMatrixWrap);
// dist
Local<Object> distMatrixWrap = matrixFromMat(dist);
ret->Set(NanNew<String>("distortion"), distMatrixWrap);
// Per-frame R and t, skipping for now
// Return
NanReturnValue(ret);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
}
// cv::solvePnP
NAN_METHOD(Calib3D::SolvePnP)
{
NanEscapableScope();
try {
// Get the arguments
// Arg 0, the array of object points
std::vector<cv::Point3f> objectPoints = points3fFromArray(args[0]);
// Arg 1, the image points
std::vector<cv::Point2f> imagePoints = points2fFromArray(args[1]);
// Arg 2, the camera matrix
cv::Mat K = matFromMatrix(args[2]);
// Arg 3, the distortion coefficients
cv::Mat dist = matFromMatrix(args[3]);
// Arg 4, use extrinsic guess, skipped for now
// Arg 5, flags, skip for now
// solve for r and t
cv::Mat rvec, tvec;
cv::solvePnP(objectPoints, imagePoints, K, dist, rvec, tvec);
// make the return values
Local<Object> ret = NanNew<Object>();
// rvec
Local<Object> rMatrixWrap = matrixFromMat(rvec);
ret->Set(NanNew<String>("rvec"), rMatrixWrap);
// tvec
Local<Object> tMatrixWrap = matrixFromMat(tvec);
ret->Set(NanNew<String>("tvec"), tMatrixWrap);
// Return
NanReturnValue(ret);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
}
// cv::getOptimalNewCameraMatrix
NAN_METHOD(Calib3D::GetOptimalNewCameraMatrix)
{
NanEscapableScope();
try {
// Get the arguments
// Arg 0 is the original camera matrix
cv::Mat Kin = matFromMatrix(args[0]);
// Arg 1 is the distortion coefficients
cv::Mat dist = matFromMatrix(args[1]);
// Arg 2, the image size
cv::Size imageSize = sizeFromArray(args[2]);
// Arg 3 is the alpha free scaling parameter
double alpha = args[3]->ToNumber()->Value();
// Arg 4, the new image size
cv::Size newImageSize = sizeFromArray(args[4]);
// Arg 5, valid ROI, skip for now
// Arg 6, center principal point, skip for now
// Get the optimal new camera matrix
cv::Mat Kout = cv::getOptimalNewCameraMatrix(Kin, dist, imageSize, alpha, newImageSize);
// Wrap the output K
Local<Object> KMatrixWrap = matrixFromMat(Kout);
// Return the new K matrix
NanReturnValue(KMatrixWrap);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
}
// cv::stereoCalibrate
NAN_METHOD(Calib3D::StereoCalibrate)
{
NanEscapableScope();
try {
// Get the arguments
// Arg 0, the array of object points, an array of arrays
std::vector<std::vector<cv::Point3f> > objectPoints = points3fFromArrayOfArrays(args[0]);
// Arg 1, the image points1, another array of arrays
std::vector<std::vector<cv::Point2f> > imagePoints1 = points2fFromArrayOfArrays(args[1]);
// Arg 2, the image points2, another array of arrays =(
std::vector<std::vector<cv::Point2f> > imagePoints2 = points2fFromArrayOfArrays(args[2]);
// Arg 3 is the image size (follows the Python API, not the C++ API, since all following arguments are optional or outputs)
cv::Size imageSize = sizeFromArray(args[3]);
// Args 4, 5, 6, 7 are the camera matrices and distortion coefficients (optional, but must pass all 4 or none)
cv::Mat k1, d1, k2, d2;
if(args.Length() >= 8)
{
k1 = matFromMatrix(args[4]);
d1 = matFromMatrix(args[5]);
k2 = matFromMatrix(args[6]);
d2 = matFromMatrix(args[7]);
}
// Last argument is flags, skipping for now
// Output mats
cv::Mat R, t, E, F;
// Do the stereo calibration
cv::stereoCalibrate(objectPoints, imagePoints1, imagePoints2, k1, d1, k2, d2, imageSize, R, t, E, F);
// make the return value
Local<Object> ret = NanNew<Object>();
// Make the output arguments
// k1
Local<Object> K1MatrixWrap = matrixFromMat(k1);
// d1
Local<Object> d1MatrixWrap = matrixFromMat(d1);
// k2
Local<Object> K2MatrixWrap = matrixFromMat(k2);
// d2
Local<Object> d2MatrixWrap = matrixFromMat(d2);
// R
Local<Object> RMatrixWrap = matrixFromMat(R);
// t
Local<Object> tMatrixWrap = matrixFromMat(t);
// E
Local<Object> EMatrixWrap = matrixFromMat(E);
// F
Local<Object> FMatrixWrap = matrixFromMat(F);
// Add to return object
ret->Set(NanNew<String>("K1"), K1MatrixWrap);
ret->Set(NanNew<String>("distortion1"), d1MatrixWrap);
ret->Set(NanNew<String>("K2"), K2MatrixWrap);
ret->Set(NanNew<String>("distortion2"), d2MatrixWrap);
ret->Set(NanNew<String>("R"), RMatrixWrap);
ret->Set(NanNew<String>("t"), tMatrixWrap);
ret->Set(NanNew<String>("E"), EMatrixWrap);
ret->Set(NanNew<String>("F"), FMatrixWrap);
// Return
NanReturnValue(ret);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
}
// cv::stereoRectify
NAN_METHOD(Calib3D::StereoRectify)
{
NanEscapableScope();
try {
// Get the arguments
// Arg0, the first camera matrix
cv::Mat K1 = matFromMatrix(args[0]);
// Arg1, the first distortion coefficients
cv::Mat d1 = matFromMatrix(args[1]);
// Arg2, the second camera matrix
cv::Mat K2 = matFromMatrix(args[2]);
// Arg3, the second distortion coefficients
cv::Mat d2 = matFromMatrix(args[3]);
// Arg4, the image size
cv::Size imageSize = sizeFromArray(args[4]);
// Arg5, the inter-camera rotation matrix
cv::Mat R = matFromMatrix(args[5]);
// Arg6, the intercamera translation vector
cv::Mat t = matFromMatrix(args[6]);
// Arg8, flags, skipping for now
// Arg9, free scaling parameter, skipping for now
// Arg10, new image size, skipping for now to fix at original image size
// Make output matrices
cv::Mat R1, R2, P1, P2, Q;
// Do the stereo rectification
cv::stereoRectify(K1, d1, K2, d2, imageSize, R, t, R1, R2, P1, P2, Q);
// Make the return object
Local<Object> ret = NanNew<Object>();
ret->Set(NanNew<String>("R1"), matrixFromMat(R1));
ret->Set(NanNew<String>("R2"), matrixFromMat(R2));
ret->Set(NanNew<String>("P1"), matrixFromMat(P1));
ret->Set(NanNew<String>("P2"), matrixFromMat(P2));
ret->Set(NanNew<String>("Q"), matrixFromMat(Q));
// Return the rectification parameters
NanReturnValue(ret);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
}
// cv::computeCorrespondEpilines
NAN_METHOD(Calib3D::ComputeCorrespondEpilines)
{
NanEscapableScope();
try {
// Get the arguments
// Arg0, the image points
std::vector<cv::Point2f> points = points2fFromArray(args[0]);
// Arg1, the image index (1 or 2)
int whichImage = int(args[1]->ToNumber()->Value());
// Arg2, the fundamental matrix
cv::Mat F = matFromMatrix(args[2]);
// compute the lines
std::vector<cv::Vec3f> lines;
cv::computeCorrespondEpilines(points, whichImage, F, lines);
// Convert the lines to an array of objects (ax + by + c = 0)
Local<Array> linesArray = NanNew<Array>(lines.size());
for(unsigned int i = 0; i < lines.size(); i++)
{
Local<Object> line_data = NanNew<Object>();
line_data->Set(NanNew<String>("a"), NanNew<Number>(lines[i][0]));
line_data->Set(NanNew<String>("b"), NanNew<Number>(lines[i][1]));
line_data->Set(NanNew<String>("c"), NanNew<Number>(lines[i][2]));
linesArray->Set(NanNew<Number>(i), line_data);
}
// Return the lines
NanReturnValue(linesArray);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
}
// cv::reprojectImageTo3D
NAN_METHOD(Calib3D::ReprojectImageTo3D)
{
NanEscapableScope();
try {
// Get the arguments
// Arg0, the disparity image
cv::Mat disparity = matFromMatrix(args[0]);
// Arg1, the depth-to-disparity transformation Q
cv::Mat Q = matFromMatrix(args[1]);
// Arg 2, handle missing values, skipped for now
// Arg3, output bit depth, skipped for now
// Compute the depth image
cv::Mat depthImage;
cv::reprojectImageTo3D(disparity, depthImage, Q);
// Wrap the depth image
Local<Object> depthImageMatrix = matrixFromMat(depthImage);
NanReturnValue(depthImageMatrix);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
}
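
A hedged JavaScript usage sketch for the bindings registered above (assuming init.cc exposes them as `cv.calib3d`; the chessboard image paths and the 9x6 pattern size are illustrative, and `glob` is the devDependency added in this change):
```javascript
var cv = require('../lib/opencv');
var glob = require('glob');

var patternSize = [9, 6];   // inner corners per board row/column
var squareSize = 1.0;       // arbitrary units

// 3D object points for one view of the board (z = 0 plane).
var boardPoints = [];
for (var y = 0; y < patternSize[1]; y++) {
  for (var x = 0; x < patternSize[0]; x++) {
    boardPoints.push({ x: x * squareSize, y: y * squareSize, z: 0 });
  }
}

var objectPoints = [];
var imagePoints = [];
var files = glob.sync('./examples/files/chessboard*.png'); // hypothetical images
var remaining = files.length;

files.forEach(function(file) {
  cv.readImage(file, function(err, im) {
    if (err) throw err;
    var found = cv.calib3d.findChessboardCorners(im, patternSize);
    if (found.found) {
      objectPoints.push(boardPoints);
      imagePoints.push(found.corners);
    }
    if (--remaining === 0) {
      var calib = cv.calib3d.calibrateCamera(
        objectPoints, imagePoints, [im.width(), im.height()]);
      console.log('reprojection error:', calib.reprojectionError);
    }
  });
});
```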

31
src/Calib3D.h Normal file
View File

@ -0,0 +1,31 @@
#ifndef __NODE_CALIB3D_H
#define __NODE_CALIB3D_H
#include "OpenCV.h"
// Implementation of calib3d.hpp functions
class Calib3D: public node::ObjectWrap {
public:
static void Init(Handle<Object> target);
static NAN_METHOD(FindChessboardCorners);
static NAN_METHOD(DrawChessboardCorners);
static NAN_METHOD(CalibrateCamera);
static NAN_METHOD(SolvePnP);
static NAN_METHOD(GetOptimalNewCameraMatrix);
static NAN_METHOD(StereoCalibrate);
static NAN_METHOD(StereoRectify);
static NAN_METHOD(ComputeCorrespondEpilines);
static NAN_METHOD(ReprojectImageTo3D);
};
#endif

View File

@ -50,7 +50,7 @@ CascadeClassifierWrap::CascadeClassifierWrap(v8::Value* fileName){
class AsyncDetectMultiScale : public NanAsyncWorker {
public:
AsyncDetectMultiScale(NanCallback *callback, CascadeClassifierWrap *cc, Matrix* im, double scale, int neighbors, int minw, int minh, int sleep_for) : NanAsyncWorker(callback), cc(cc), im(im), scale(scale), neighbors(neighbors), minw(minw), minh(minh), sleep_for(sleep_for) {}
AsyncDetectMultiScale(NanCallback *callback, CascadeClassifierWrap *cc, Matrix* im, double scale, int neighbors, int minw, int minh) : NanAsyncWorker(callback), cc(cc), im(im), scale(scale), neighbors(neighbors), minw(minw), minh(minh) {}
~AsyncDetectMultiScale() {}
void Execute () {
@ -103,7 +103,6 @@ class AsyncDetectMultiScale : public NanAsyncWorker {
int neighbors;
int minw;
int minh;
int sleep_for;
std::vector<cv::Rect> res;
};
@ -141,7 +140,7 @@ NAN_METHOD(CascadeClassifierWrap::DetectMultiScale){
NanCallback *callback = new NanCallback(cb.As<Function>());
NanAsyncQueueWorker( new AsyncDetectMultiScale(callback, self, im, scale, neighbors, minw, minh, 1) );
NanAsyncQueueWorker( new AsyncDetectMultiScale(callback, self, im, scale, neighbors, minw, minh) );
NanReturnUndefined();
}

View File

@ -4,6 +4,9 @@
#define CONST(C) \
obj->Set(NanNew<String>(#C), NanNew<Integer>(C));
#define CONST_ENUM(C) \
obj->Set(NanNew<String>(#C), NanNew<Integer>((int)(cv::C)));
void
Constants::Init(Handle<Object> target) {
Persistent<Object> inner;
@ -54,6 +57,12 @@ Constants::Init(Handle<Object> target) {
CONST(CV_64FC3);
CONST(CV_64FC4);
CONST_ENUM(INTER_NEAREST);
CONST_ENUM(INTER_LINEAR);
CONST_ENUM(INTER_AREA);
CONST_ENUM(INTER_CUBIC);
CONST_ENUM(INTER_LANCZOS4);
target->Set(NanNew("Constants"), obj);
}
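
A brief hedged sketch of how the new interpolation constants are meant to be used: they end up on `cv.Constants`, and pair naturally with the `imgproc.remap` binding added later in this change (the `maps` object is assumed to come from `cv.imgproc.initUndistortRectifyMap`):
```javascript
var cv = require('../lib/opencv');

// maps.map1 / maps.map2 are the cv.Matrix outputs of
// cv.imgproc.initUndistortRectifyMap (see src/ImgProc.cc below).
function rectifyImage(im, maps) {
  return cv.imgproc.remap(im, maps.map1, maps.map2, cv.Constants.INTER_LINEAR);
}
```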

View File

@ -53,7 +53,13 @@ NamedWindow::NamedWindow(const std::string& name, int f){
NAN_METHOD(NamedWindow::Show){
SETUP_FUNCTION(NamedWindow)
Matrix *im = ObjectWrap::Unwrap<Matrix>(args[0]->ToObject());
cv::imshow(self->winname, im->mat);
try{
cv::imshow(self->winname, im->mat);
} catch(cv::Exception& e ){
const char* err_msg = e.what();
NanThrowError(err_msg);
}
NanReturnValue(args.Holder());
}

169
src/ImgProc.cc Normal file
View File

@ -0,0 +1,169 @@
#include "ImgProc.h"
#include "Matrix.h"
void ImgProc::Init(Handle<Object> target)
{
Persistent<Object> inner;
Local<Object> obj = NanNew<Object>();
NanAssignPersistent(inner, obj);
NODE_SET_METHOD(obj, "undistort", Undistort);
NODE_SET_METHOD(obj, "initUndistortRectifyMap", InitUndistortRectifyMap);
NODE_SET_METHOD(obj, "remap", Remap);
target->Set(NanNew("imgproc"), obj);
}
// cv::undistort
NAN_METHOD(ImgProc::Undistort)
{
NanEscapableScope();
try {
// Get the arguments
// Arg 0 is the image
Matrix* m0 = ObjectWrap::Unwrap<Matrix>(args[0]->ToObject());
cv::Mat inputImage = m0->mat;
// Arg 1 is the camera matrix
Matrix* m1 = ObjectWrap::Unwrap<Matrix>(args[1]->ToObject());
cv::Mat K = m1->mat;
// Arg 2 is the distortion coefficients
Matrix* m2 = ObjectWrap::Unwrap<Matrix>(args[2]->ToObject());
cv::Mat dist = m2->mat;
// Make an mat to hold the result image
cv::Mat outputImage;
// Undistort
cv::undistort(inputImage, outputImage, K, dist);
// Wrap the output image
Local<Object> outMatrixWrap = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *outMatrix = ObjectWrap::Unwrap<Matrix>(outMatrixWrap);
outMatrix->mat = outputImage;
// Return the output image
NanReturnValue(outMatrixWrap);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
}
// cv::initUndistortRectifyMap
NAN_METHOD(ImgProc::InitUndistortRectifyMap)
{
NanEscapableScope();
try {
// Arg 0 is the camera matrix
Matrix* m0 = ObjectWrap::Unwrap<Matrix>(args[0]->ToObject());
cv::Mat K = m0->mat;
// Arg 1 is the distortion coefficients
Matrix* m1 = ObjectWrap::Unwrap<Matrix>(args[1]->ToObject());
cv::Mat dist = m1->mat;
// Arg 2 is the rectification transformation
Matrix* m2 = ObjectWrap::Unwrap<Matrix>(args[2]->ToObject());
cv::Mat R = m2->mat;
// Arg 3 is the new camera matrix
Matrix* m3 = ObjectWrap::Unwrap<Matrix>(args[3]->ToObject());
cv::Mat newK = m3->mat;
// Arg 4 is the image size
cv::Size imageSize;
if (args[4]->IsArray()) {
Local<Object> v8sz = args[4]->ToObject();
imageSize = cv::Size(v8sz->Get(1)->IntegerValue(), v8sz->Get(0)->IntegerValue());
} else {
JSTHROW_TYPE("Must pass image size");
}
// Arg 5 is the type of the first output map
int m1type = args[5]->IntegerValue();
// Make matrices to hold the output maps
cv::Mat map1, map2;
// Compute the rectification map
cv::initUndistortRectifyMap(K, dist, R, newK, imageSize, m1type, map1, map2);
// Wrap the output maps
Local<Object> map1Wrap = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *map1Matrix = ObjectWrap::Unwrap<Matrix>(map1Wrap);
map1Matrix->mat = map1;
Local<Object> map2Wrap = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *map2Matrix = ObjectWrap::Unwrap<Matrix>(map2Wrap);
map2Matrix->mat = map2;
// Make a return object with the two maps
Local<Object> ret = NanNew<Object>();
ret->Set(NanNew<String>("map1"), map1Wrap);
ret->Set(NanNew<String>("map2"), map2Wrap);
// Return the maps
NanReturnValue(ret);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
}
// cv::remap
NAN_METHOD(ImgProc::Remap)
{
NanEscapableScope();
try {
// Get the arguments
// Arg 0 is the image
Matrix* m0 = ObjectWrap::Unwrap<Matrix>(args[0]->ToObject());
cv::Mat inputImage = m0->mat;
// Arg 1 is the first map
Matrix* m1 = ObjectWrap::Unwrap<Matrix>(args[1]->ToObject());
cv::Mat map1 = m1->mat;
// Arg 2 is the second map
Matrix* m2 = ObjectWrap::Unwrap<Matrix>(args[2]->ToObject());
cv::Mat map2 = m2->mat;
// Arg 3 is the interpolation mode
int interpolation = args[3]->IntegerValue();
// Args 4, 5 border settings, skipping for now
// Output image
cv::Mat outputImage;
// Remap
cv::remap(inputImage, outputImage, map1, map2, interpolation);
// Wrap the output image
Local<Object> outMatrixWrap = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *outMatrix = ObjectWrap::Unwrap<Matrix>(outMatrixWrap);
outMatrix->mat = outputImage;
// Return the image
NanReturnValue(outMatrixWrap);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
}
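
A hedged usage sketch for the `undistort` binding above (assuming init.cc exposes the module as `cv.imgproc`; `calib` is the object returned by the `cv.calib3d.calibrateCamera` sketch earlier, and the paths are illustrative):
```javascript
var cv = require('../lib/opencv');

function undistortImage(calib, inputPath, outputPath) {
  cv.readImage(inputPath, function(err, im) {
    if (err) throw err;
    var undistorted = cv.imgproc.undistort(im, calib.K, calib.distortion);
    undistorted.save(outputPath);
  });
}

// e.g. undistortImage(calib, './examples/files/chessboard1.png',
//                     './examples/tmp/undistorted.png');
```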

19
src/ImgProc.h Normal file
View File

@ -0,0 +1,19 @@
#ifndef __NODE_IMGPROC_H
#define __NODE_IMGPROC_H
#include "OpenCV.h"
// Implementation of imgproc.hpp functions
class ImgProc: public node::ObjectWrap {
public:
static void Init(Handle<Object> target);
static NAN_METHOD(Undistort);
static NAN_METHOD(InitUndistortRectifyMap);
static NAN_METHOD(Remap);
};
#endif

View File

@ -7,7 +7,7 @@ v8::Persistent<FunctionTemplate> Matrix::constructor;
cv::Scalar setColor(Local<Object> objColor);
cv::Point setPoint(Local<Object> objPoint);
cv::Rect* setRect(Local<Object> objRect);
cv::Rect* setRect(Local<Object> objRect, cv::Rect &result);
void
Matrix::Init(Handle<Object> target) {
@ -32,11 +32,13 @@ Matrix::Init(Handle<Object> target) {
NODE_SET_PROTOTYPE_METHOD(ctor, "height", Height);
NODE_SET_PROTOTYPE_METHOD(ctor, "size", Size);
NODE_SET_PROTOTYPE_METHOD(ctor, "clone", Clone);
NODE_SET_PROTOTYPE_METHOD(ctor, "crop", Crop);
NODE_SET_PROTOTYPE_METHOD(ctor, "toBuffer", ToBuffer);
NODE_SET_PROTOTYPE_METHOD(ctor, "toBufferAsync", ToBufferAsync);
NODE_SET_PROTOTYPE_METHOD(ctor, "ellipse", Ellipse);
NODE_SET_PROTOTYPE_METHOD(ctor, "rectangle", Rectangle);
NODE_SET_PROTOTYPE_METHOD(ctor, "line", Line);
NODE_SET_PROTOTYPE_METHOD(ctor, "fillPoly", FillPoly);
NODE_SET_PROTOTYPE_METHOD(ctor, "save", Save);
NODE_SET_PROTOTYPE_METHOD(ctor, "saveAsync", SaveAsync);
NODE_SET_PROTOTYPE_METHOD(ctor, "resize", Resize);
@ -68,6 +70,7 @@ Matrix::Init(Handle<Object> target) {
NODE_SET_PROTOTYPE_METHOD(ctor, "drawAllContours", DrawAllContours);
NODE_SET_PROTOTYPE_METHOD(ctor, "goodFeaturesToTrack", GoodFeaturesToTrack);
NODE_SET_PROTOTYPE_METHOD(ctor, "houghLinesP", HoughLinesP);
NODE_SET_PROTOTYPE_METHOD(ctor, "crop", Crop);
NODE_SET_PROTOTYPE_METHOD(ctor, "houghCircles", HoughCircles);
NODE_SET_PROTOTYPE_METHOD(ctor, "inRange", inRange);
NODE_SET_PROTOTYPE_METHOD(ctor, "adjustROI", AdjustROI);
@ -81,6 +84,7 @@ Matrix::Init(Handle<Object> target) {
NODE_SET_PROTOTYPE_METHOD(ctor, "equalizeHist", EqualizeHist);
NODE_SET_PROTOTYPE_METHOD(ctor, "floodFill", FloodFill);
NODE_SET_PROTOTYPE_METHOD(ctor, "matchTemplate", MatchTemplate);
NODE_SET_PROTOTYPE_METHOD(ctor, "templateMatches", TemplateMatches);
NODE_SET_PROTOTYPE_METHOD(ctor, "minMaxLoc", MinMaxLoc);
NODE_SET_PROTOTYPE_METHOD(ctor, "pushBack", PushBack);
NODE_SET_PROTOTYPE_METHOD(ctor, "putText", PutText);
@ -263,7 +267,7 @@ NAN_METHOD(Matrix::Set){
} else {
NanThrowTypeError( "Invalid number of arguments" );
}
NanReturnUndefined();
}
@ -280,9 +284,9 @@ NAN_METHOD(Matrix::Size){
NAN_METHOD(Matrix::Clone){
SETUP_FUNCTION(Matrix)
Local<Object> im_h = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *m = ObjectWrap::Unwrap<Matrix>(im_h);
m->mat = self->mat.clone();
@ -448,10 +452,10 @@ NAN_METHOD(Matrix::ToBuffer){
Local<Object> buf = NanNewBufferHandle(vec.size());
uchar* data = (uchar*) Buffer::Data(buf);
memcpy(data, &vec[0], vec.size());
v8::Local<v8::Object> globalObj = NanGetCurrentContext()->Global();
v8::Local<v8::Function> bufferConstructor = v8::Local<v8::Function>::Cast(globalObj->Get(NanNew<String>("Buffer")));
v8::Handle<v8::Value> constructorArgs[3] = {buf, NanNew<v8::Integer>(vec.size()), NanNew<v8::Integer>(0)};
v8::Handle<v8::Value> constructorArgs[3] = {buf, NanNew<v8::Integer>((unsigned)vec.size()), NanNew<v8::Integer>(0)};
v8::Local<v8::Object> actualBuffer = bufferConstructor->NewInstance(3, constructorArgs);
NanReturnValue(actualBuffer);
@ -467,24 +471,24 @@ class AsyncToBufferWorker : public NanAsyncWorker {
void Execute () {
std::vector<uchar> vec(0);
//std::vector<int> params(0);//CV_IMWRITE_JPEG_QUALITY 90
cv::imencode(ext, this->matrix->mat, vec, this->params);
res = vec;
}
void HandleOKCallback () {
NanScope();
Local<Object> buf = NanNewBufferHandle(res.size());
uchar* data = (uchar*) Buffer::Data(buf);
memcpy(data, &res[0], res.size());
v8::Local<v8::Object> globalObj = NanGetCurrentContext()->Global();
v8::Local<v8::Function> bufferConstructor = v8::Local<v8::Function>::Cast(globalObj->Get(NanNew<String>("Buffer")));
v8::Handle<v8::Value> constructorArgs[3] = {buf, NanNew<v8::Integer>(res.size()), NanNew<v8::Integer>(0)};
v8::Handle<v8::Value> constructorArgs[3] = {buf, NanNew<v8::Integer>((unsigned)res.size()), NanNew<v8::Integer>(0)};
v8::Local<v8::Object> actualBuffer = bufferConstructor->NewInstance(3, constructorArgs);
@ -492,13 +496,13 @@ class AsyncToBufferWorker : public NanAsyncWorker {
NanNull()
, actualBuffer
};
TryCatch try_catch;
callback->Call(2, argv);
if (try_catch.HasCaught()) {
FatalException(try_catch);
}
}
private:
@ -516,7 +520,7 @@ NAN_METHOD(Matrix::ToBufferAsync){
std::string ext = std::string(".jpg");
std::vector<int> params;
// See if the options argument is passed
if ((args.Length() > 1) && (args[1]->IsObject())) {
// Get this options argument
@ -536,14 +540,14 @@ NAN_METHOD(Matrix::ToBufferAsync){
int compression = options->Get(NanNew<String>("pngCompression"))->IntegerValue();
params.push_back(CV_IMWRITE_PNG_COMPRESSION);
params.push_back(compression);
}
}
}
NanCallback *callback = new NanCallback(cb.As<Function>());
NanAsyncQueueWorker(new AsyncToBufferWorker(callback, self, ext, params));
NanReturnUndefined();
}
@ -682,6 +686,42 @@ NAN_METHOD(Matrix::Line) {
NanReturnNull();
}
NAN_METHOD(Matrix::FillPoly) {
SETUP_FUNCTION(Matrix)
if(args[0]->IsArray())
{
Local<Array> polyArray = Local<Array>::Cast(args[0]->ToObject());
cv::Point **polygons = new cv::Point*[polyArray->Length()];
int *polySizes = new int[polyArray->Length()];
for(unsigned int i = 0; i < polyArray->Length(); i++)
{
Local<Array> singlePoly = Local<Array>::Cast(polyArray->Get(i)->ToObject());
polygons[i] = new cv::Point[singlePoly->Length()];
polySizes[i] = singlePoly->Length();
for(unsigned int j = 0; j < singlePoly->Length(); j++)
{
Local<Array> point = Local<Array>::Cast(singlePoly->Get(j)->ToObject());
polygons[i][j].x = point->Get(0)->IntegerValue();
polygons[i][j].y = point->Get(1)->IntegerValue();
}
}
cv::Scalar color(0, 0, 255);
if(args[1]->IsArray()) {
Local<Object> objColor = args[1]->ToObject();
color = setColor(objColor);
}
cv::fillPoly(self->mat, (const cv::Point **)polygons, polySizes, polyArray->Length(), color);
}
NanReturnNull();
}
NAN_METHOD(Matrix::Save) {
SETUP_FUNCTION(Matrix)
@ -724,7 +764,7 @@ class AsyncSaveWorker : public NanAsyncWorker {
NanNull()
, NanNew<Number>(res)
};
TryCatch try_catch;
callback->Call(2, argv);
if (try_catch.HasCaught()) {
@ -749,7 +789,7 @@ NAN_METHOD(Matrix::SaveAsync){
NanAsciiString filename(args[0]);
REQ_FUN_ARG(1, cb);
NanCallback *callback = new NanCallback(cb.As<Function>());
NanAsyncQueueWorker(new AsyncSaveWorker(callback, self, *filename));
@ -1014,7 +1054,12 @@ NAN_METHOD(Matrix::AddWeighted) {
float beta = args[3]->NumberValue();
int gamma = 0;
cv::addWeighted(src1->mat, alpha, src2->mat, beta, gamma, self->mat);
try{
cv::addWeighted(src1->mat, alpha, src2->mat, beta, gamma, self->mat);
} catch(cv::Exception& e ){
const char* err_msg = e.what();
NanThrowError(err_msg);
}
NanReturnNull();
@ -1033,7 +1078,7 @@ NAN_METHOD(Matrix::BitwiseXor) {
cv::bitwise_xor(src1->mat, src2->mat, self->mat, mask->mat);
}else{
cv::bitwise_xor(src1->mat, src2->mat, self->mat);
}
}
NanReturnNull();
}
@ -1050,7 +1095,7 @@ NAN_METHOD(Matrix::BitwiseNot) {
cv::bitwise_not(self->mat, dst->mat, mask->mat);
}else{
cv::bitwise_not(self->mat, dst->mat);
}
}
NanReturnNull();
}
@ -1068,7 +1113,7 @@ NAN_METHOD(Matrix::BitwiseAnd) {
cv::bitwise_and(src1->mat, src2->mat, self->mat, mask->mat);
}else{
cv::bitwise_and(src1->mat, src2->mat, self->mat);
}
}
NanReturnNull();
}
@ -1275,7 +1320,7 @@ NAN_METHOD(Matrix::HoughCircles) {
equalizeHist(self->mat, gray);
cv::HoughCircles(gray, circles, CV_HOUGH_GRADIENT, dp, minDist, higherThreshold, accumulatorThreshold, minRadius, maxRadius);
v8::Local<v8::Array> arr = NanNew<Array>(circles.size());
@ -1309,7 +1354,7 @@ cv::Point setPoint(Local<Object> objPoint) {
return cv::Point( objPoint->Get(0)->IntegerValue(), objPoint->Get(1)->IntegerValue() );
}
cv::Rect* setRect(Local<Object> objRect) {
cv::Rect* setRect(Local<Object> objRect, cv::Rect &result) {
if(!objRect->IsArray() || !objRect->Get(0)->IsArray() || !objRect->Get(0)->IsArray() ){
printf("error");
@ -1318,14 +1363,13 @@ cv::Rect* setRect(Local<Object> objRect) {
Local<Object> point = objRect->Get(0)->ToObject();
Local<Object> size = objRect->Get(1)->ToObject();
cv::Rect ret;
ret.x = point->Get(0)->IntegerValue();
ret.y = point->Get(1)->IntegerValue();
ret.width = size->Get(0)->IntegerValue();
ret.height = size->Get(1)->IntegerValue();
result.x = point->Get(0)->IntegerValue();
result.y = point->Get(1)->IntegerValue();
result.width = size->Get(0)->IntegerValue();
result.height = size->Get(1)->IntegerValue();
return (cv::Rect*) &ret;
return &result;
}
@ -1713,10 +1757,11 @@ NAN_METHOD(Matrix::FloodFill){
Local<Object> obj = args[0]->ToObject();
cv::Rect rect;
int ret = cv::floodFill(self->mat, setPoint(obj->Get(NanNew<String>("seedPoint"))->ToObject())
, setColor(obj->Get(NanNew<String>("newColor"))->ToObject())
, obj->Get(NanNew<String>("rect"))->IsUndefined() ? 0 : setRect(obj->Get(NanNew<String>("rect"))->ToObject())
, obj->Get(NanNew<String>("rect"))->IsUndefined() ? 0 : setRect(obj->Get(NanNew<String>("rect"))->ToObject(), rect)
, setColor(obj->Get(NanNew<String>("loDiff"))->ToObject())
, setColor(obj->Get(NanNew<String>("upDiff"))->ToObject())
, 4 );
@ -1725,6 +1770,84 @@ NAN_METHOD(Matrix::FloodFill){
NanReturnValue(NanNew<Number>( ret ));
}
// @author olfox
// Returns an array of the most probable positions
// Usage: output = input.templateMatches(min_probability, max_probability, limit, ascending, min_x_distance, min_y_distance);
NAN_METHOD(Matrix::TemplateMatches){
SETUP_FUNCTION(Matrix)
bool filter_min_probability = (args.Length() >= 1) ? args[0]->IsNumber() : false;
bool filter_max_probability = (args.Length() >= 2) ? args[1]->IsNumber() : false;
double min_probability = filter_min_probability ? args[0]->NumberValue() : 0;
double max_probability = filter_max_probability ? args[1]->NumberValue() : 0;
int limit = (args.Length() >= 3) ? args[2]->IntegerValue() : 0;
bool ascending = (args.Length() >= 4) ? args[3]->BooleanValue() : false;
int min_x_distance = (args.Length() >= 5) ? args[4]->IntegerValue() : 0;
int min_y_distance = (args.Length() >= 6) ? args[5]->IntegerValue() : 0;
cv::Mat_<int> indices;
if (ascending)
cv::sortIdx(self->mat.reshape(0,1), indices, CV_SORT_ASCENDING + CV_SORT_EVERY_ROW);
else
cv::sortIdx(self->mat.reshape(0,1), indices, CV_SORT_DESCENDING + CV_SORT_EVERY_ROW);
cv::Mat hit_mask = cv::Mat::zeros(self->mat.size(), CV_64F);
v8::Local<v8::Array> probabilites_array = NanNew<v8::Array>(limit);
cv::Mat_<float>::const_iterator begin = self->mat.begin<float>();
cv::Mat_<int>::const_iterator it = indices.begin();
cv::Mat_<int>::const_iterator end = indices.end();
int index = 0;
for (; (limit == 0 || index < limit) && it != end; ++it) {
cv::Point pt = (begin + *it).pos();
float probability = self->mat.at<float>(pt.y, pt.x);
if (filter_min_probability && probability < min_probability) {
if (ascending) continue;
else break;
}
if (filter_max_probability && probability > max_probability) {
if (ascending) break;
else continue;
}
if (min_x_distance != 0 || min_y_distance != 0) {
// Check hit mask color for every corner
cv::Size maxSize = hit_mask.size();
int max_x = maxSize.width - 1;
int max_y = maxSize.height - 1;
cv::Point top_left = cv::Point(max(0, pt.x - min_x_distance), max(0, pt.y - min_y_distance));
cv::Point top_right = cv::Point(min(max_x, pt.x + min_x_distance), max(0, pt.y - min_y_distance));
cv::Point bottom_left = cv::Point(max(0, pt.x - min_x_distance), min(max_y, pt.y + min_y_distance));
cv::Point bottom_right = cv::Point(min(max_x, pt.x + min_x_distance), min(max_y, pt.y + min_y_distance));
if (hit_mask.at<double>(top_left.y, top_left.x) > 0) continue;
if (hit_mask.at<double>(top_right.y, top_right.x) > 0) continue;
if (hit_mask.at<double>(bottom_left.y, bottom_left.x) > 0) continue;
if (hit_mask.at<double>(bottom_right.y, bottom_right.x) > 0) continue;
cv::Scalar color(255.0);
cv::rectangle(hit_mask, top_left, bottom_right, color, CV_FILLED);
}
Local<Value> x_value = NanNew<Number>(pt.x);
Local<Value> y_value = NanNew<Number>(pt.y);
Local<Value> probability_value = NanNew<Number>(probability);
Local<Object> probability_object = NanNew<Object>();
probability_object->Set(NanNew<String>("x"), x_value);
probability_object->Set(NanNew<String>("y"), y_value);
probability_object->Set(NanNew<String>("probability"), probability_value);
probabilites_array->Set(index, probability_object);
index++;
}
NanReturnValue(probabilites_array);
}
// @author ytham
// Match Template filter
// Usage: output = input.matchTemplate("templateFileString", method);
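Taken together, the two methods form a small pipeline: matchTemplate produces a probability map and templateMatches pulls the strongest peaks out of it. A hedged sketch, assuming matchTemplate returns that map as a Matrix (its return shape is not shown here), with hypothetical file paths and the numeric method value 5 standing in for CV_TM_CCOEFF_NORMED:

var cv = require('opencv');

cv.readImage('./scene.png', function(err, scene) {
  if (err) throw err;
  // 5 is assumed to map to CV_TM_CCOEFF_NORMED.
  var probMap = scene.matchTemplate('./template.png', 5);
  // Up to 10 hits with probability >= 0.8, best first (ascending = false),
  // suppressing hits within 20px of an earlier, stronger hit.
  var matches = probMap.templateMatches(0.8, 1.0, 10, false, 20, 20);
  matches.forEach(function(m) {
    console.log(m.x, m.y, m.probability);
  });
});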

View File

@ -35,6 +35,7 @@ class Matrix: public node::ObjectWrap {
JSFUNC(Rectangle)
JSFUNC(Line)
JSFUNC(Empty)
JSFUNC(FillPoly)
JSFUNC(Save)
JSFUNC(SaveAsync)
@ -96,6 +97,7 @@ class Matrix: public node::ObjectWrap {
JSFUNC(FloodFill)
JSFUNC(MatchTemplate)
JSFUNC(TemplateMatches)
JSFUNC(MinMaxLoc)
JSFUNC(PushBack)

View File

@ -5,48 +5,47 @@
void
OpenCV::Init(Handle<Object> target) {
NanScope();
// Version string.
char out [21];
int n = sprintf(out, "%i.%i", CV_MAJOR_VERSION, CV_MINOR_VERSION);
target->Set(NanNew<String>("version"), NanNew<String>(out, n));
NODE_SET_METHOD(target, "readImage", ReadImage);
}
}
NAN_METHOD(OpenCV::ReadImage) {
NanEscapableScope();
try{
Local<Object> im_h = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *img = ObjectWrap::Unwrap<Matrix>(im_h);
cv::Mat mat;
REQ_FUN_ARG(1, cb);
if (args[0]->IsNumber() && args[1]->IsNumber()){
int width, height;
width = args[0]->Uint32Value();
height = args[1]->Uint32Value();
mat = *(new cv::Mat(width, height, CV_64FC1));
} else if (args[0]->IsString()) {
std::string filename = std::string(*NanAsciiString(args[0]->ToString()));
mat = cv::imread(filename);
} else if (Buffer::HasInstance(args[0])){
uint8_t *buf = (uint8_t *) Buffer::Data(args[0]->ToObject());
unsigned len = Buffer::Length(args[0]->ToObject());
cv::Mat *mbuf = new cv::Mat(len, 1, CV_64FC1, buf);
mat = cv::imdecode(*mbuf, -1);
if (mat.empty()){
NanThrowTypeError("Error loading file");
}
@ -74,4 +73,4 @@ NAN_METHOD(OpenCV::ReadImage) {
NanThrowError(err_msg);
NanReturnUndefined();
}
};
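For reference, a minimal JavaScript sketch of how ReadImage is driven from the binding, based on the argument handling above (a file path string or a Buffer, plus the required callback); paths are hypothetical and empty() comes from the Matrix wrapper.

var cv = require('opencv');
var fs = require('fs');

// From a file path (loaded with cv::imread above).
cv.readImage('./examples/files/mona.png', function(err, im) {
  if (err) throw err;
  console.log('loaded from disk, empty?', im.empty());
});

// From an in-memory buffer (decoded with cv::imdecode above).
cv.readImage(fs.readFileSync('./examples/files/mona.png'), function(err, im) {
  if (err) throw err;
  console.log('decoded from buffer, empty?', im.empty());
});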

View File

@ -55,4 +55,3 @@ class OpenCV: public node::ObjectWrap{
#endif

323
src/Stereo.cc Normal file
View File

@ -0,0 +1,323 @@
#include "Stereo.h"
#include "Matrix.h"
#include <opencv2/legacy/legacy.hpp>
// Block matching
v8::Persistent<FunctionTemplate> StereoBM::constructor;
void
StereoBM::Init(Handle<Object> target) {
NanScope();
Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(StereoBM::New);
NanAssignPersistent(constructor, ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(NanNew("StereoBM"));
NODE_SET_PROTOTYPE_METHOD(ctor, "compute", Compute);
ctor->Set(NanNew<String>("BASIC_PRESET"), NanNew<Integer>((int)cv::StereoBM::BASIC_PRESET));
ctor->Set(NanNew<String>("FISH_EYE_PRESET"), NanNew<Integer>((int)cv::StereoBM::FISH_EYE_PRESET));
ctor->Set(NanNew<String>("NARROW_PRESET"), NanNew<Integer>((int)cv::StereoBM::NARROW_PRESET));
target->Set(NanNew("StereoBM"), ctor->GetFunction());
}
NAN_METHOD(StereoBM::New) {
NanScope();
if (args.This()->InternalFieldCount() == 0)
NanThrowTypeError("Cannot instantiate without new");
StereoBM *stereo;
if (args.Length() == 0)
{
stereo = new StereoBM();
}
else if (args.Length() == 1)
{
stereo = new StereoBM(args[0]->IntegerValue()); // preset
}
else if (args.Length() == 2)
{
stereo = new StereoBM(args[0]->IntegerValue(), args[1]->IntegerValue()); // preset, disparity search range
}
else
{
stereo = new StereoBM(args[0]->IntegerValue(), args[1]->IntegerValue(), args[2]->IntegerValue()); // preset, disparity search range, sum of absolute differences window size
}
stereo->Wrap(args.Holder());
NanReturnValue(args.Holder());
}
StereoBM::StereoBM(int preset, int ndisparities, int SADWindowSize)
: ObjectWrap(), stereo(preset, ndisparities, SADWindowSize)
{
}
// TODO make this async
NAN_METHOD(StereoBM::Compute)
{
SETUP_FUNCTION(StereoBM)
try {
// Get the arguments
// Arg 0, the 'left' image
Matrix* m0 = ObjectWrap::Unwrap<Matrix>(args[0]->ToObject());
cv::Mat left = m0->mat;
// Arg 1, the 'right' image
Matrix* m1 = ObjectWrap::Unwrap<Matrix>(args[1]->ToObject());
cv::Mat right = m1->mat;
// Optional 3rd arg, the disparity depth
int type = CV_16S;
if(args.Length() > 2)
{
type = args[2]->IntegerValue();
}
// Compute stereo using the block matching algorithm
cv::Mat disparity;
self->stereo(left, right, disparity, type);
// Wrap the returned disparity map
Local<Object> disparityWrap = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *disp = ObjectWrap::Unwrap<Matrix>(disparityWrap);
disp->mat = disparity;
NanReturnValue(disparityWrap);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
};
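A hedged usage sketch for the wrapper above. The preset constants are attached to the constructor in Init, and cv::StereoBM wants 8-bit single-channel input, so the images are converted first; convertGrayscale() is assumed to exist on Matrix (it is not part of this diff), and the file paths are hypothetical.

var cv = require('opencv');

cv.readImage('./left.png', function(err, left) {
  if (err) throw err;
  cv.readImage('./right.png', function(err, right) {
    if (err) throw err;
    // cv::StereoBM expects 8-bit single-channel images (assumed helper).
    left.convertGrayscale();
    right.convertGrayscale();
    // preset, disparity search range, SAD window size
    var bm = new cv.StereoBM(cv.StereoBM.BASIC_PRESET, 64, 21);
    var disparity = bm.compute(left, right); // defaults to CV_16S output
    console.log('disparity computed, empty?', disparity.empty());
  });
});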
// Semi-Global Block matching
v8::Persistent<FunctionTemplate> StereoSGBM::constructor;
void
StereoSGBM::Init(Handle<Object> target) {
NanScope();
Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(StereoSGBM::New);
NanAssignPersistent(constructor, ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(NanNew("StereoSGBM"));
NODE_SET_PROTOTYPE_METHOD(ctor, "compute", Compute);
target->Set(NanNew("StereoSGBM"), ctor->GetFunction());
}
NAN_METHOD(StereoSGBM::New) {
NanScope();
if (args.This()->InternalFieldCount() == 0)
NanThrowTypeError("Cannot instantiate without new");
StereoSGBM *stereo;
if (args.Length() == 0)
{
stereo = new StereoSGBM();
}
else
{
// If passing arguments, must pass the first 3 at least
if (args.Length() >= 3)
{
switch (args.Length())
{
case 3:
stereo = new StereoSGBM(args[0]->IntegerValue(), args[1]->IntegerValue(), args[2]->IntegerValue());
break;
case 4:
stereo = new StereoSGBM(args[0]->IntegerValue(), args[1]->IntegerValue(), args[2]->IntegerValue(), args[3]->IntegerValue());
break;
case 5:
stereo = new StereoSGBM(args[0]->IntegerValue(), args[1]->IntegerValue(), args[2]->IntegerValue(), args[3]->IntegerValue(), args[4]->IntegerValue());
break;
case 6:
stereo = new StereoSGBM(args[0]->IntegerValue(), args[1]->IntegerValue(), args[2]->IntegerValue(), args[3]->IntegerValue(), args[4]->IntegerValue(), args[5]->IntegerValue());
break;
case 7:
stereo = new StereoSGBM(args[0]->IntegerValue(), args[1]->IntegerValue(), args[2]->IntegerValue(), args[3]->IntegerValue(), args[4]->IntegerValue(), args[5]->IntegerValue(), args[6]->IntegerValue());
break;
case 8:
stereo = new StereoSGBM(args[0]->IntegerValue(), args[1]->IntegerValue(), args[2]->IntegerValue(), args[3]->IntegerValue(), args[4]->IntegerValue(), args[5]->IntegerValue(), args[6]->IntegerValue(), args[7]->IntegerValue());
break;
case 9:
stereo = new StereoSGBM(args[0]->IntegerValue(), args[1]->IntegerValue(), args[2]->IntegerValue(), args[3]->IntegerValue(), args[4]->IntegerValue(), args[5]->IntegerValue(), args[6]->IntegerValue(), args[7]->IntegerValue(), args[8]->IntegerValue());
break;
case 10:
stereo = new StereoSGBM(args[0]->IntegerValue(), args[1]->IntegerValue(), args[2]->IntegerValue(), args[3]->IntegerValue(), args[4]->IntegerValue(), args[5]->IntegerValue(), args[6]->IntegerValue(), args[7]->IntegerValue(), args[8]->IntegerValue(), args[9]->IntegerValue());
break;
default:
stereo = new StereoSGBM(args[0]->IntegerValue(), args[1]->IntegerValue(), args[2]->IntegerValue(), args[3]->IntegerValue(), args[4]->IntegerValue(), args[5]->IntegerValue(), args[6]->IntegerValue(), args[7]->IntegerValue(), args[8]->IntegerValue(), args[9]->IntegerValue(), args[10]->ToBoolean()->Value());
break;
}
}
else
{
NanThrowError("If overriding default settings, must pass minDisparity, numDisparities, and SADWindowSize");
NanReturnUndefined();
}
}
stereo->Wrap(args.Holder());
NanReturnValue(args.Holder());
}
StereoSGBM::StereoSGBM()
: ObjectWrap(), stereo()
{
}
StereoSGBM::StereoSGBM(int minDisparity, int ndisparities, int SADWindowSize, int p1, int p2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio, int speckleWindowSize, int speckleRange, bool fullDP)
: ObjectWrap(), stereo(minDisparity, ndisparities, SADWindowSize, p1, p2, disp12MaxDiff, preFilterCap, uniquenessRatio, speckleWindowSize, speckleRange, fullDP)
{
}
// TODO make this async
NAN_METHOD(StereoSGBM::Compute)
{
SETUP_FUNCTION(StereoSGBM)
try {
// Get the arguments
// Arg 0, the 'left' image
Matrix* m0 = ObjectWrap::Unwrap<Matrix>(args[0]->ToObject());
cv::Mat left = m0->mat;
// Arg 1, the 'right' image
Matrix* m1 = ObjectWrap::Unwrap<Matrix>(args[1]->ToObject());
cv::Mat right = m1->mat;
// Compute stereo using the semi-global block matching algorithm
cv::Mat disparity;
self->stereo(left, right, disparity);
// Wrap the returned disparity map
Local<Object> disparityWrap = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *disp = ObjectWrap::Unwrap<Matrix>(disparityWrap);
disp->mat = disparity;
NanReturnValue(disparityWrap);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
};
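The SGBM wrapper follows the same pattern; per the argument check above, at least minDisparity, numDisparities and SADWindowSize must be given when overriding the defaults. A short sketch, reusing left and right from the StereoBM example (SGBM also accepts colour input):

// minDisparity, numDisparities, SADWindowSize; the remaining tuning
// parameters fall back to the defaults declared in Stereo.h.
var sgbm = new cv.StereoSGBM(0, 64, 11);
var sgbmDisparity = sgbm.compute(left, right);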
// Graph cut
v8::Persistent<FunctionTemplate> StereoGC::constructor;
void
StereoGC::Init(Handle<Object> target) {
NanScope();
Local<FunctionTemplate> ctor = NanNew<FunctionTemplate>(StereoGC::New);
NanAssignPersistent(constructor, ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(NanNew("StereoGC"));
NODE_SET_PROTOTYPE_METHOD(ctor, "compute", Compute);
target->Set(NanNew("StereoGC"), ctor->GetFunction());
}
NAN_METHOD(StereoGC::New) {
NanScope();
if (args.This()->InternalFieldCount() == 0)
NanThrowTypeError("Cannot instantiate without new");
StereoGC *stereo;
if (args.Length() == 0)
{
stereo = new StereoGC();
}
else if (args.Length() == 1)
{
stereo = new StereoGC(args[0]->IntegerValue()); // numberOfDisparities
}
else
{
stereo = new StereoGC(args[0]->IntegerValue(), args[1]->IntegerValue()); // numberOfDisparities, maxIterations
}
stereo->Wrap(args.Holder());
NanReturnValue(args.Holder());
}
StereoGC::StereoGC(int numberOfDisparities, int maxIters)
: ObjectWrap()
{
stereo = cvCreateStereoGCState(numberOfDisparities, maxIters);
}
// TODO make this async
NAN_METHOD(StereoGC::Compute)
{
SETUP_FUNCTION(StereoGC)
try {
// Get the arguments
// Arg 0, the 'left' image
Matrix* m0 = ObjectWrap::Unwrap<Matrix>(args[0]->ToObject());
cv::Mat left = m0->mat;
// Arg 1, the 'right' image
Matrix* m1 = ObjectWrap::Unwrap<Matrix>(args[1]->ToObject());
cv::Mat right = m1->mat;
// Compute stereo using the graph cut algorithm
CvMat left_leg = left, right_leg = right;
CvMat *disp_left = cvCreateMat(left.rows, left.cols, CV_16S), *disp_right = cvCreateMat(right.rows, right.cols, CV_16S);
cvFindStereoCorrespondenceGC(&left_leg, &right_leg, disp_left, disp_right, self->stereo, 0);
cv::Mat disp16 = disp_left;
cv::Mat disparity(disp16.rows, disp16.cols, CV_8U);
disp16.convertTo(disparity, CV_8U, -16);
// Wrap the returned disparity map
Local<Object> disparityWrap = NanNew(Matrix::constructor)->GetFunction()->NewInstance();
Matrix *disp = ObjectWrap::Unwrap<Matrix>(disparityWrap);
disp->mat = disparity;
NanReturnValue(disparityWrap);
} catch (cv::Exception &e) {
const char *err_msg = e.what();
NanThrowError(err_msg);
NanReturnUndefined();
}
};
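And the graph-cut variant, again assuming left and right are already-loaded single-channel images; the defaults are numberOfDisparities = 16 and maxIterations = 2 (see Stereo.h below), and compute() converts the raw 16-bit result to an 8-bit Matrix before returning it, so it can be written straight to disk:

var gc = new cv.StereoGC(16, 2);
var gcDisparity = gc.compute(left, right);
gcDisparity.save('./disparity-gc.png'); // save() comes from the Matrix wrapper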

58
src/Stereo.h Normal file
View File

@ -0,0 +1,58 @@
#ifndef __NODE_STEREO_H
#define __NODE_STEREO_H
#include "OpenCV.h"
class StereoBM: public node::ObjectWrap {
public:
cv::StereoBM stereo;
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
static NAN_METHOD(New);
StereoBM(int preset = cv::StereoBM::BASIC_PRESET, int ndisparities = 0, int SADWindowSize=21);
JSFUNC(Compute);
};
class StereoSGBM: public node::ObjectWrap {
public:
cv::StereoSGBM stereo;
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
static NAN_METHOD(New);
StereoSGBM();
StereoSGBM(int minDisparity,
int ndisparities,
int SADWindowSize,
int p1 = 0,
int p2 = 0,
int disp12MaxDiff = 0,
int preFilterCap = 0,
int uniquenessRatio = 0,
int speckleWindowSize = 0,
int speckleRange = 0,
bool fullDP = false);
JSFUNC(Compute);
};
struct CvStereoGCState;
class StereoGC: public node::ObjectWrap {
public:
CvStereoGCState *stereo;
static Persistent<FunctionTemplate> constructor;
static void Init(Handle<Object> target);
static NAN_METHOD(New);
StereoGC(int numberOfDisparities = 16, int maxIterations = 2);
JSFUNC(Compute);
};
#endif

View File

@ -9,21 +9,28 @@
#include "HighGUI.h"
#include "FaceRecognizer.h"
#include "Constants.h"
#include "Calib3D.h"
#include "ImgProc.h"
#include "Stereo.h"
extern "C" void
init(Handle<Object> target) {
NanScope();
OpenCV::Init(target);
Point::Init(target);
Matrix::Init(target);
CascadeClassifierWrap::Init(target);
VideoCaptureWrap::Init(target);
Contour::Init(target);
TrackedObject::Init(target);
NamedWindow::Init(target);
Constants::Init(target);
Calib3D::Init(target);
ImgProc::Init(target);
StereoBM::Init(target);
StereoSGBM::Init(target);
StereoGC::Init(target);
#if CV_MAJOR_VERSION >= 2 && CV_MINOR_VERSION >=4

21
test/examples.js Normal file
View File

@ -0,0 +1,21 @@
var test = require('tape')
, glob = require('glob')
, exec = require('child_process').exec
, path = require('path')
module.exports = function(){
glob.sync('./examples/*.js').forEach(function(example){
test("Example: " + example, function(assert){
var fullName = path.resolve(example)
, examples = path.resolve('./examples')
exec('node ' + fullName, {cwd: examples}, function(error, stdout, stderr){
assert.error(error)
assert.end()
})
})
})
}

View File

@ -275,3 +275,7 @@ test("fonts", function(t) {
});
})
// Test the examples folder.
require('./examples')()

13
utils/opencv_x64.pc Normal file
View File

@ -0,0 +1,13 @@
# Package Information for pkg-config
opencv_prefix=C:/OpenCV249/opencv/build/x64/vc12
exec_prefix=${opencv_prefix}/bin
libdir=${opencv_prefix}/lib
includedir=C:/OpenCV249/opencv/build/include
Name: OpenCV
Description: Open Source Computer Vision Library
Version: 2.4.9
Cflags: ${includedir} ${includedir}/opencv
Libs: ${libdir}/opencv_calib3d249 ${libdir}/opencv_contrib249 ${libdir}/opencv_core249 ${libdir}/opencv_features2d249 ${libdir}/opencv_flann249 ${libdir}/opencv_gpu249 ${libdir}/opencv_highgui249 ${libdir}/opencv_imgproc249 ${libdir}/opencv_legacy249 ${libdir}/opencv_ml249 ${libdir}/opencv_nonfree249 ${libdir}/opencv_objdetect249 ${libdir}/opencv_ocl249 ${libdir}/opencv_photo249 ${libdir}/opencv_stitching249 ${libdir}/opencv_superres249 ${libdir}/opencv_ts249 ${libdir}/opencv_video249 ${libdir}/opencv_videostab249

12
utils/opencv_x86.pc Normal file
View File

@ -0,0 +1,12 @@
# Package Information for pkg-config
opencv_prefix=C:/OpenCV249/opencv/build/x86/vc12
exec_prefix=${opencv_prefix}/bin
libdir=${opencv_prefix}/lib
includedir=C:/OpenCV249/opencv/build/include
Name: OpenCV
Description: Open Source Computer Vision Library
Version: 2.4.9
Cflags: ${includedir} ${includedir}/opencv
Libs: ${libdir}/opencv_calib3d249 ${libdir}/opencv_contrib249 ${libdir}/opencv_core249 ${libdir}/opencv_features2d249 ${libdir}/opencv_flann249 ${libdir}/opencv_gpu249 ${libdir}/opencv_highgui249 ${libdir}/opencv_imgproc249 ${libdir}/opencv_legacy249 ${libdir}/opencv_ml249 ${libdir}/opencv_nonfree249 ${libdir}/opencv_objdetect249 ${libdir}/opencv_ocl249 ${libdir}/opencv_photo249 ${libdir}/opencv_stitching249 ${libdir}/opencv_superres249 ${libdir}/opencv_ts249 ${libdir}/opencv_video249 ${libdir}/opencv_videostab249