박진형

hapcam source

HapticCam @ 5cecd1ec (git submodule)
Subproject commit 5cecd1eccf2cbab550329d1d89d8de28c78e9056
//
//  AppDelegate.swift
//  HapticCam
//
//  Created by 박진형 on 2020/12/15.
//

import UIKit

@main
class AppDelegate: UIResponder, UIApplicationDelegate {

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // Override point for customization after application launch.
        return true
    }

    // MARK: UISceneSession Lifecycle

    func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration {
        // Called when a new scene session is being created.
        // Use this method to select a configuration to create the new scene with.
        return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role)
    }

    func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set<UISceneSession>) {
        // Called when the user discards a scene session.
        // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions.
        // Use this method to release any resources that were specific to the discarded scenes, as they will not return.
    }
}
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
{
"images" : [
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "20x20"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "20x20"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "29x29"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "29x29"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "40x40"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "40x40"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "60x60"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "60x60"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "20x20"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "20x20"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "29x29"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "29x29"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "40x40"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "40x40"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "76x76"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "76x76"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "83.5x83.5"
},
{
"idiom" : "ios-marketing",
"scale" : "1x",
"size" : "1024x1024"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
{
"info" : {
"author" : "xcode",
"version" : 1
}
}
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" xcode11CocoaTouchSystemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
</document>
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="17506" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<device id="retina5_5" orientation="portrait" appearance="light"/>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="17505"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="System colors in document resources" minToolsVersion="11.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="ViewController" customModule="HapticCam" customModuleProvider="target" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="414" height="736"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
<color key="backgroundColor" systemColor="systemBackgroundColor"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="-210" y="80"/>
</scene>
</scenes>
<resources>
<systemColor name="systemBackgroundColor">
<color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</systemColor>
</resources>
</document>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UIApplicationSceneManifest</key>
<dict>
<key>UIApplicationSupportsMultipleScenes</key>
<false/>
<key>UISceneConfigurations</key>
<dict>
<key>UIWindowSceneSessionRoleApplication</key>
<array>
<dict>
<key>UISceneConfigurationName</key>
<string>Default Configuration</string>
<key>UISceneDelegateClassName</key>
<string>$(PRODUCT_MODULE_NAME).SceneDelegate</string>
<key>UISceneStoryboardFile</key>
<string>Main</string>
</dict>
</array>
</dict>
</dict>
<key>UIApplicationSupportsIndirectInputEvents</key>
<true/>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>NSCameraUsageDescription</key>
<string>HapticCam needs camera access to record video.</string>
<key>NSPhotoLibraryAddUsageDescription</key>
<string>HapticCam saves recorded videos to your photo library.</string>
<key>NSMicrophoneUsageDescription</key>
<string>HapticCam needs microphone access to record audio with your video.</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>HapticCam needs photo library access to save and read recordings.</string>
<!-- Placeholder purpose strings added in editing; the originals were empty,
     which App Review rejects once the corresponding permission is requested. -->
</dict>
</plist>
//
//  SceneDelegate.swift
//  HapticCam
//
//  Created by 박진형 on 2020/12/15.
//

import UIKit

class SceneDelegate: UIResponder, UIWindowSceneDelegate {

    var window: UIWindow?

    func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) {
        // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`.
        // If using a storyboard, the `window` property will automatically be initialized and attached to the scene.
        // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead).
        guard let _ = (scene as? UIWindowScene) else { return }
    }

    func sceneDidDisconnect(_ scene: UIScene) {
        // Called as the scene is being released by the system.
        // This occurs shortly after the scene enters the background, or when its session is discarded.
        // Release any resources associated with this scene that can be re-created the next time the scene connects.
        // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead).
    }

    func sceneDidBecomeActive(_ scene: UIScene) {
        // Called when the scene has moved from an inactive state to an active state.
        // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive.
    }

    func sceneWillResignActive(_ scene: UIScene) {
        // Called when the scene will move from an active state to an inactive state.
        // This may occur due to temporary interruptions (ex. an incoming phone call).
    }

    func sceneWillEnterForeground(_ scene: UIScene) {
        // Called as the scene transitions from the background to the foreground.
        // Use this method to undo the changes made on entering the background.
    }

    func sceneDidEnterBackground(_ scene: UIScene) {
        // Called as the scene transitions from the foreground to the background.
        // Use this method to save data, release shared resources, and store enough scene-specific state information
        // to restore the scene back to its current state.
    }
}
//
//  ViewController.swift
//  HapticCam
//
//  Created by 박진형 on 2020/12/15.
//

import AVFoundation
import UIKit
import RxCocoa
import RxSwift
import CoreMotion
import Then
import SnapKit
import Alamofire
class ViewController: UIViewController {

    // MARK: - Properties

    var disposeBag = DisposeBag()

    let captureSession = AVCaptureSession()
    var videoDevice: AVCaptureDevice!
    var videoInput: AVCaptureDeviceInput!
    var audioInput: AVCaptureDeviceInput!
    var videoOutput: AVCaptureMovieFileOutput!

    lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession).then {
        $0.bounds = CGRect(x: 0, y: 0, width: self.view.bounds.width, height: self.view.bounds.height)
        $0.position = CGPoint(x: self.view.bounds.midX, y: self.view.bounds.midY)
        $0.videoGravity = .resizeAspectFill
    }

    let topContainer = UIView()
    let recordButton = UIButton().then { $0.setTitle("Record", for: .normal) }
    let resultButton = UIButton().then { $0.setTitle("Result", for: .normal) }
    let recordPoint = UIView().then {
        $0.backgroundColor = UIColor(red: 1.0, green: 0.75, blue: 0.01, alpha: 1)
        $0.layer.cornerRadius = 3
        $0.alpha = 0
    }
    let timerLabel = UILabel().then {
        $0.text = "00:00:00"
        $0.textColor = .white
    }

    var outputURL: URL?
    // Optional rather than implicitly unwrapped: the manager is never created in
    // viewDidLoad, so force-unwrapped access would crash in viewWillDisappear.
    var motionManager: CMMotionManager?
    var deviceOrientation: AVCaptureVideoOrientation = .portrait
    var timer: Timer?
    var secondsOfTimer = 0
    // MARK: - Lifecycle Methods

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        if !captureSession.isRunning {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        motionManager?.stopAccelerometerUpdates()
        stopTimer()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        layout()
        bind()
        videoDevice = bestDevice(in: .back)
        setupSession()
    }
    // MARK: - View Rendering

    private func layout() {
        self.view.layer.addSublayer(previewLayer)

        self.view.addSubview(topContainer)
        topContainer.snp.makeConstraints {
            $0.top.equalTo(view.safeAreaLayoutGuide.snp.top).offset(10)
            $0.leading.trailing.equalToSuperview()
            $0.height.equalTo(50)
        }

        topContainer.addSubview(resultButton)
        resultButton.snp.makeConstraints {
            $0.centerY.equalToSuperview()
            $0.trailing.equalToSuperview().offset(-80)
            $0.height.equalTo(40)
        }

        topContainer.addSubview(timerLabel)
        timerLabel.snp.makeConstraints {
            $0.centerX.centerY.equalToSuperview()
        }

        topContainer.addSubview(recordPoint)
        recordPoint.snp.makeConstraints {
            $0.centerY.equalToSuperview()
            $0.trailing.equalTo(timerLabel.snp.leading).offset(-5)
            $0.width.height.equalTo(6)
        }

        self.view.addSubview(recordButton)
        recordButton.snp.makeConstraints {
            $0.centerX.equalToSuperview()
            $0.bottom.equalTo(view.safeAreaLayoutGuide.snp.bottom).offset(-50)
            $0.height.equalTo(40)
        }
    }
    // MARK: - Rx Binding

    private func bind() {
        recordButton.rx.tap
            .subscribe(onNext: { [weak self] in
                guard let self = self else { return }
                if self.videoOutput.isRecording {
                    self.stopRecording()
                    self.recordButton.setTitle("Record", for: .normal)
                } else {
                    self.startRecording()
                    self.recordButton.setTitle("Stop", for: .normal)
                }
            })
            .disposed(by: self.disposeBag)

        resultButton.rx.tap
            .subscribe(onNext: { [weak self] in
                guard let self = self else { return }
                self.openResultpage()
            })
            .disposed(by: self.disposeBag)
    }
    private func setupSession() {
        captureSession.beginConfiguration()
        // Commit even on the error path, so the session is never left
        // stuck inside an open configuration block.
        defer { captureSession.commitConfiguration() }
        do {
            videoInput = try AVCaptureDeviceInput(device: videoDevice)
            if captureSession.canAddInput(videoInput) {
                captureSession.addInput(videoInput)
            }
            // Guard instead of force-unwrapping: a device without an available
            // microphone would otherwise crash here.
            guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
                NSLog("No audio capture device available.")
                return
            }
            audioInput = try AVCaptureDeviceInput(device: audioDevice)
            if captureSession.canAddInput(audioInput) {
                captureSession.addInput(audioInput)
            }
            videoOutput = AVCaptureMovieFileOutput()
            if captureSession.canAddOutput(videoOutput) {
                captureSession.addOutput(videoOutput)
            }
        } catch let error as NSError {
            NSLog("\(error), \(error.localizedDescription)")
        }
    }
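
    // NOTE: Added in editing, not in the original source — a minimal sketch of
    // an explicit permission check before the session starts. The original code
    // relies on iOS prompting automatically when the capture inputs are first
    // created; a check like this lets the UI react when access has been denied.
    private func requestCaptureAuthorization(completion: @escaping (Bool) -> Void) {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            completion(true)
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { granted in
                DispatchQueue.main.async { completion(granted) }
            }
        default:
            // .denied or .restricted: the session would produce no frames.
            completion(false)
        }
    }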
    private func bestDevice(in position: AVCaptureDevice.Position) -> AVCaptureDevice {
        var deviceTypes: [AVCaptureDevice.DeviceType]
        if #available(iOS 11.1, *) {
            deviceTypes = [.builtInTrueDepthCamera, .builtInDualCamera, .builtInWideAngleCamera]
        } else {
            deviceTypes = [.builtInDualCamera, .builtInWideAngleCamera]
        }
        let discoverySession = AVCaptureDevice.DiscoverySession(
            deviceTypes: deviceTypes,
            mediaType: .video,
            position: .unspecified
        )
        guard let device = discoverySession.devices.first(where: { $0.position == position }) else {
            fatalError("Missing capture device for position \(position.rawValue).")
        }
        return device
    }
    private func openResultpage() {
        // `openURL(_:)` has been deprecated since iOS 10; use `open(_:options:completionHandler:)`.
        guard let url = URL(string: "http://192.168.43.75") else { return }
        UIApplication.shared.open(url, options: [:], completionHandler: nil)
    }
    private func swapCameraType() {
        guard let input = captureSession.inputs.first(where: { input in
            guard let input = input as? AVCaptureDeviceInput else { return false }
            return input.device.hasMediaType(.video)
        }) as? AVCaptureDeviceInput else { return }

        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        // Create the capture device on the opposite side.
        let newDevice = bestDevice(in: input.device.position == .back ? .front : .back)
        do {
            videoInput = try AVCaptureDeviceInput(device: newDevice)
        } catch let error {
            NSLog("\(error), \(error.localizedDescription)")
            return
        }

        // Swap the capture device inputs.
        captureSession.removeInput(input)
        captureSession.addInput(videoInput)
    }
    // MARK: - Recording Methods

    private func post(_ inputURL: String) {
        // Prepare the URL.
        guard let requestUrl = URL(string: inputURL) else { fatalError("Invalid URL: \(inputURL)") }

        // Prepare the URL request object.
        var request = URLRequest(url: requestUrl)
        request.httpMethod = "POST"
        // Declare the body encoding; some servers reject form data without it.
        request.setValue("application/x-www-form-urlencoded", forHTTPHeaderField: "Content-Type")

        // Form parameters sent in the HTTP request body.
        let postString = "userId=300&title=My urgent task&completed=false"
        request.httpBody = postString.data(using: .utf8)

        // Perform the HTTP request.
        let task = URLSession.shared.dataTask(with: request) { data, response, error in
            // Check for an error.
            if let error = error {
                print("Error took place: \(error)")
                return
            }
            // Convert the HTTP response data to a string.
            if let data = data, let dataString = String(data: data, encoding: .utf8) {
                print("Response data string:\n\(dataString)")
            }
        }
        task.resume()
    }
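
    // NOTE: Added in editing — the form string above is sent unencoded, so a
    // value containing '&' or '=' would corrupt the body. A hypothetical helper
    // (not in the original source) that percent-encodes form parameters via
    // URLComponents:
    private func formBody(_ parameters: [String: String]) -> Data? {
        var components = URLComponents()
        components.queryItems = parameters.map { URLQueryItem(name: $0.key, value: $0.value) }
        // `percentEncodedQuery` is the encoded "key=value&key=value" string,
        // ready to use as an x-www-form-urlencoded body.
        return components.percentEncodedQuery?.data(using: .utf8)
    }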
    private func startRecording() {
        // Tell the capture server to start haptic recording.
        self.post("http://192.168.43.228/record")

        // Apply the current device orientation when the connection supports it.
        if let connection = videoOutput.connection(with: .video),
           connection.isVideoOrientationSupported {
            connection.videoOrientation = self.deviceOrientation
        }

        let device = videoInput.device
        if device.isSmoothAutoFocusSupported {
            do {
                try device.lockForConfiguration()
                device.isSmoothAutoFocusEnabled = false
                device.unlockForConfiguration()
            } catch {
                print("Error setting configuration: \(error)")
            }
        }

        // Blinking recording indicator.
        recordPoint.alpha = 1
        self.fadeViewInThenOut(view: recordPoint, delay: 0)
        self.startTimer()

        guard let url = tempURL() else {
            print("Could not create a temporary file URL.")
            return
        }
        outputURL = url
        videoOutput.startRecording(to: url, recordingDelegate: self)
    }
    private func stopRecording() {
        // Tell the capture server to stop haptic recording.
        self.post("http://192.168.43.228/stop")
        if videoOutput.isRecording {
            self.stopTimer()
            videoOutput.stopRecording()
            recordPoint.layer.removeAllAnimations()
        }
    }
    private func fadeViewInThenOut(view: UIView, delay: TimeInterval) {
        let animationDuration = 0.5
        UIView.animate(withDuration: animationDuration, delay: delay, options: [.autoreverse, .repeat], animations: {
            view.alpha = 0
        }, completion: nil)
    }
    private func tempURL() -> URL? {
        let directory = NSTemporaryDirectory() as NSString
        if directory != "" {
            let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
            return URL(fileURLWithPath: path)
        }
        return nil
    }
    // MARK: - Timer Methods

    private func startTimer() {
        timer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { [weak self] _ in
            guard let self = self else { return }
            self.secondsOfTimer += 1
            self.timerLabel.text = Double(self.secondsOfTimer).format(units: [.hour, .minute, .second])
        }
    }

    private func stopTimer() {
        timer?.invalidate()
        // Reset the counter so the next recording starts from zero.
        secondsOfTimer = 0
        self.timerLabel.text = "00:00:00"
    }
}
extension ViewController: AVCaptureFileOutputRecordingDelegate {
    // Called when recording starts.
    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
    }

    // Called when recording finishes.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if let error = error {
            print("Error recording movie: \(error.localizedDescription)")
        } else {
            // Use the URL the delegate hands back rather than force-unwrapping outputURL.
            let videoRecorded = outputFileURL
            // Upload the recording so the server can combine it with the haptic track.
            Alamofire.upload(multipartFormData: { multipartFormData in
                multipartFormData.append(videoRecorded, withName: "Video")
            }, to: "http://192.168.43.75/combine", encodingCompletion: { encodingResult in
                switch encodingResult {
                case .success(let upload, _, _):
                    upload.responseString { response in
                        print("Upload response: \(String(describing: response.value))")
                    }
                case .failure(let encodingError):
                    print("Upload encoding error: \(encodingError)")
                }
            })
            self.dismiss(animated: true, completion: nil)
            UISaveVideoAtPathToSavedPhotosAlbum(videoRecorded.path, nil, nil, nil)
        }
    }
}
extension Double {
    /// Formats a number of seconds as a padded positional time string,
    /// e.g. `Double(3725).format(units: [.hour, .minute, .second])` -> "01:02:05".
    func format(units: NSCalendar.Unit) -> String {
        let formatter = DateComponentsFormatter()
        formatter.unitsStyle = .positional
        formatter.allowedUnits = units
        formatter.zeroFormattingBehavior = [.pad]
        return formatter.string(from: self)!
    }
}
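
Note added in editing: `motionManager` and `deviceOrientation` are declared above, but motion updates are never started, so recordings are always tagged as portrait. Below is a minimal, hypothetical sketch of wiring them together with the already-imported CoreMotion; the method name, update interval, and axis mapping are assumptions, not part of the original source.

extension ViewController {
    // Sketch: start accelerometer updates (e.g. from viewDidLoad) so that
    // `deviceOrientation` tracks how the phone is actually held.
    func startOrientationUpdates() {
        let manager = motionManager ?? CMMotionManager()
        motionManager = manager
        guard manager.isAccelerometerAvailable else { return }
        manager.accelerometerUpdateInterval = 0.2
        manager.startAccelerometerUpdates(to: .main) { [weak self] data, _ in
            guard let self = self, let g = data?.acceleration else { return }
            if abs(g.y) >= abs(g.x) {
                // Gravity mostly along the long axis: portrait-ish.
                self.deviceOrientation = g.y < 0 ? .portrait : .portraitUpsideDown
            } else {
                // Video orientation is mirrored relative to device
                // orientation in landscape.
                self.deviceOrientation = g.x < 0 ? .landscapeRight : .landscapeLeft
            }
        }
    }
}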
# Uncomment the next line to define a global platform for your project
# platform :ios, '9.0'

target 'HapticCam' do
  # Comment the next line if you don't want to use dynamic frameworks
  use_frameworks!

  # Pods for HapticCam
  pod 'RxSwift', '~> 4.5.0'
  pod 'RxCocoa', '~> 4.5.0'
  pod 'Alamofire', '~> 4.8.2'
  pod 'Then'
  pod 'SnapKit'

  target 'HapticCamTests' do
    inherit! :search_paths
    # Pods for testing
  end

  target 'HapticCamUITests' do
    # Pods for testing
  end
end
(One remaining file omitted: too large to display.)