박선진

Base64-decode the video file received from the frontend and save it locally.

const express = require('express');
const fs = require('fs');
const {
  cv,
  getDataFilePath,
  drawBlueRect,
  drawGreenRect
} = require('./utils');

const router = express.Router();
// First draft (superseded below): only logs the payload size. Left commented
// out because a second registration of the same route would never run.
// router.post('/videoResult', function (req, res) {
//   const file = req.body[0].preview.split(',')[1];
//   console.log(file.length);
//   return res.json({ data: 'myData' });
// });
//================================================================
router.post('/videoResult', function (req, res) {
  try {
    // Strip the data-URL header and restore '+' characters lost in transport.
    const preview = req.body[0].preview;
    let str = preview.replace(/^data:(.*?);base64,/, '');
    str = str.replace(/ /g, '+');
    // Save the decoded video to a temp file, then split it into frames.
    fs.writeFile('./data/temp.mp4', str, 'base64', function (err) {
      if (err) throw err;
      console.log('saved');
      const vCap = new cv.VideoCapture('./data/temp.mp4');
      let done = false;
      let cnt = 0;
      while (!done) {
        const frame = vCap.read();
        // stop at end of stream instead of looping back to the start
        if (frame.empty) {
          done = true;
        } else {
          cv.imwrite('./data/' + cnt + '.jpg', frame);
          cnt++;
        }
      }
    });
  } catch (err) {
    console.log('err : ' + err);
  }
  return res.json({ data: 'myData' });
});
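The handler reads req.body[0].preview, i.e. a JSON array whose first element carries a data-URL string. A hypothetical frontend call matching that shape (the mount path is an assumption):

// Hypothetical frontend request matching req.body[0].preview above.
fetch('/videoResult', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify([{ preview: 'data:video/mp4;base64,AAAA...' }])
});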
//================================================================
// router.post('/faceRecognition', function (req, res) {
//   try {
//     const preview = req.body[0].preview;
//     let str = preview.replace(/^data:(.*?);base64,/, '');
//     str = str.replace(/ /g, '+');
//     // save the decoded image to a temp file
//     fs.writeFile('./data/temp.jpg', str, 'base64', function (err) {
//       if (err) throw err;
//       console.log('saved');
//       detectFaceAndEyes('./data/temp.jpg');
//     });
//   } catch (err) {
//     console.log('err: ' + err);
//   }
//   return res.json({ data: 'myData' });
// });
//================================================================
// UTF-8 string <-> base64 helpers (handy for quick sanity checks).
function base64encode(plaintext) {
  return Buffer.from(plaintext, 'utf8').toString('base64');
}

function base64decode(base64text) {
  console.log(base64text.length);
  return Buffer.from(base64text, 'base64').toString('utf8');
}
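A quick round-trip check of the two helpers (illustrative only; the logged length comes from base64decode itself):

// 'hello' -> 'aGVsbG8=' -> 'hello'
console.log(base64encode('hello'));                // aGVsbG8=
console.log(base64decode(base64encode('hello'))); // logs 8 (input length), then hello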
// function detectFaceAndEyes(filePath) {
//   const image = cv.imread(filePath);
//   const faceClassifier = new cv.CascadeClassifier(cv.HAAR_FRONTALFACE_DEFAULT);
//   const eyeClassifier = new cv.CascadeClassifier(cv.HAAR_EYE);
//
//   // detect faces
//   const faceResult = faceClassifier.detectMultiScale(image.bgrToGray());
//   if (!faceResult.objects.length) {
//     throw new Error('No faces detected!');
//   }
//
//   const sortByNumDetections = result => result.numDetections
//     .map((num, idx) => ({ num, idx }))
//     .sort((n0, n1) => n1.num - n0.num)
//     .map(({ idx }) => idx);
//
//   // get best result
//   const faceRect = faceResult.objects[sortByNumDetections(faceResult)[0]];
//   console.log('faceRects:', faceResult.objects);
//   console.log('confidences:', faceResult.numDetections);
//
//   // detect eyes
//   const faceRegion = image.getRegion(faceRect);
//   const eyeResult = eyeClassifier.detectMultiScale(faceRegion);
//   console.log('eyeRects:', eyeResult.objects);
//   console.log('confidences:', eyeResult.numDetections);
//
//   // get best result
//   const eyeRects = sortByNumDetections(eyeResult)
//     .slice(0, 2)
//     .map(idx => eyeResult.objects[idx]);
//
//   // draw face detection
//   drawBlueRect(image, faceRect);
//   // draw eyes detection in face region
//   eyeRects.forEach(eyeRect => drawGreenRect(faceRegion, eyeRect));
//
//   cv.imwrite('./data/temp2.jpg', image);
// }
module.exports = router;
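For reference, a minimal sketch of how this router might be wired into an app (file name, port, and the body-size limit are assumptions; express.json() defaults to a 100kb limit, far too small for base64 video):

const express = require('express');
const app = express();

// raise the JSON body limit so base64-encoded videos fit (default is 100kb)
app.use(express.json({ limit: '100mb' }));
app.use(require('./routes')); // this router file; the name is an assumption

app.listen(3000, () => console.log('listening on 3000'));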
//================================================================
// utils.js
//================================================================
const path = require('path');
const cv = require('opencv4nodejs');
exports.cv = cv;

const dataPath = path.resolve(__dirname, './data');
exports.dataPath = dataPath;

exports.getDataFilePath = fileName => path.resolve(dataPath, fileName);
const grabFrames = (videoFile, delay, onFrame) => {
  const cap = new cv.VideoCapture(videoFile);
  let done = false;
  const intvl = setInterval(() => {
    let frame = cap.read();
    // loop back to start on end of stream reached
    if (frame.empty) {
      cap.reset();
      frame = cap.read();
    }
    onFrame(frame);

    const key = cv.waitKey(delay);
    done = key !== -1 && key !== 255;
    if (done) {
      clearInterval(intvl);
      console.log('Key pressed, exiting.');
    }
  }, 0);
};
exports.grabFrames = grabFrames;
exports.runVideoDetection = (src, detect) => {
  grabFrames(src, 1, frame => {
    detect(frame);
  });
};
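A sketch of how a consumer module might drive runVideoDetection, reusing the same Haar cascade as the commented-out detectFaceAndEyes route (assumes a GUI-capable environment for cv.imshow; the video path just mirrors the temp file written by /videoResult):

const { cv, runVideoDetection, drawBlueRect } = require('./utils');

const classifier = new cv.CascadeClassifier(cv.HAAR_FRONTALFACE_DEFAULT);
runVideoDetection('./data/temp.mp4', frame => {
  // detect faces on a grayscale copy, then box them on the original frame
  const { objects } = classifier.detectMultiScale(frame.bgrToGray());
  objects.forEach(rect => drawBlueRect(frame, rect));
  cv.imshow('faces', frame);
});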
exports.drawRectAroundBlobs = (binaryImg, dstImg, minPxSize, fixedRectWidth) => {
  const {
    centroids,
    stats
  } = binaryImg.connectedComponentsWithStats();

  // label 0 is the background component, so start from 1
  for (let label = 1; label < centroids.rows; label += 1) {
    const [x1, y1] = [stats.at(label, cv.CC_STAT_LEFT), stats.at(label, cv.CC_STAT_TOP)];
    const [x2, y2] = [
      x1 + (fixedRectWidth || stats.at(label, cv.CC_STAT_WIDTH)),
      y1 + (fixedRectWidth || stats.at(label, cv.CC_STAT_HEIGHT))
    ];
    const size = stats.at(label, cv.CC_STAT_AREA);
    const blue = new cv.Vec(255, 0, 0);
    if (minPxSize < size) {
      dstImg.drawRectangle(
        new cv.Point(x1, y1),
        new cv.Point(x2, y2),
        { color: blue, thickness: 2 }
      );
    }
  }
};
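drawRectAroundBlobs pairs naturally with a binary foreground mask; a sketch using the opencv4nodejs MOG2 background subtractor (the 500 px area threshold is an arbitrary assumption):

const { cv, runVideoDetection, drawRectAroundBlobs } = require('./utils');

const bgSubtractor = new cv.BackgroundSubtractorMOG2();
runVideoDetection('./data/temp.mp4', frame => {
  // foreground mask; threshold away MOG2's gray shadow pixels (value 127)
  const mask = bgSubtractor.apply(frame).threshold(127, 255, cv.THRESH_BINARY);
  drawRectAroundBlobs(mask, frame, 500);
  cv.imshow('blobs', frame);
});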
const drawRect = (image, rect, color, opts = { thickness: 2 }) =>
  image.drawRectangle(
    rect,
    color,
    opts.thickness,
    cv.LINE_8
  );
exports.drawRect = drawRect;

exports.drawBlueRect = (image, rect, opts = { thickness: 2 }) =>
  drawRect(image, rect, new cv.Vec(255, 0, 0), opts);
exports.drawGreenRect = (image, rect, opts = { thickness: 2 }) =>
  drawRect(image, rect, new cv.Vec(0, 255, 0), opts);
exports.drawRedRect = (image, rect, opts = { thickness: 2 }) =>
  drawRect(image, rect, new cv.Vec(0, 0, 255), opts);