Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
119 changes: 93 additions & 26 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@ The ThinkSys Mediapipe enables pose detection for React Native apps, providing a
</p>

## Requirement
* Gradle minimum SDK 24 or higher
* iOS 13 or higher
* Android SDK Version 26 or higher


Expand Down Expand Up @@ -36,35 +36,102 @@ Add these to your project's manifest.
```

## Usage

### Basic

```js
import { RNMediapipe } from 'react-native-thinksys-mediapipe';

export default function App() {

return (
<View>
<RNMediapipe
width={400}
height={300}
/>
</View>
)
}
```

### Usage with body prop

#### Used to show/hide any body part overlay
#### By default, the body prop is set to true

```js
import { RNMediapipe } from 'react-native-thinksys-mediapipe';

export default function App() {

return (
<View>
<RNMediapipe
width={400}
height={300}
face={true}
leftArm={true}
rightArm={true}
leftWrist={true}
rightWrist={true}
torso={true}
leftLeg={true}
rightLeg={true}
leftAnkle={true}
rightAnkle={true}
/>
</View>
)
}
```

### Usage with switch camera method

```js
import { RNMediapipe, switchCamera } from 'react-native-thinksys-mediapipe';

export default function App() {

const onFlip = () => {
switchCamera();
};

return (
<View>
<RNMediapipe
width={400}
height={300}
/>

<TouchableOpacity onPress={onFlip} style={styles.btnView}>
<Text style={styles.btnTxt}>Switch Camera</Text>
</TouchableOpacity>
</View>
)
}

```

### Usage with onLandmark prop

```js
import { RNMediapipe } from 'react-native-thinksys-mediapipe';

export default function App() {

return (
<View>
<RNMediapipe
width={400}
height={300}
onLandmark={(data) => {
console.log('Body Landmark Data:', data);
}}
/>
</View>
)
}

```

Expand Down
4 changes: 2 additions & 2 deletions example/ios/Podfile.lock
Original file line number Diff line number Diff line change
Expand Up @@ -938,7 +938,7 @@ PODS:
- React-Mapbuffer (0.74.2):
- glog
- React-debug
- react-native-thinksys-mediapipe (0.0.1):
- react-native-thinksys-mediapipe (0.0.7):
- DoubleConversion
- glog
- hermes-engine
Expand Down Expand Up @@ -1404,7 +1404,7 @@ SPEC CHECKSUMS:
React-jsitracing: 0fa7f78d8fdda794667cb2e6f19c874c1cf31d7e
React-logger: 29fa3e048f5f67fe396bc08af7606426d9bd7b5d
React-Mapbuffer: bf56147c9775491e53122a94c423ac201417e326
react-native-thinksys-mediapipe: 11ea8a45520fc1726b98f46519ed2ef7ddc418a5
react-native-thinksys-mediapipe: 6467414fb6e2621c0008c9de9deba4379d973cdc
React-nativeconfig: 9f223cd321823afdecf59ed00861ab2d69ee0fc1
React-NativeModulesApple: ff7efaff7098639db5631236cfd91d60abff04c0
React-perflogger: 32ed45d9cee02cf6639acae34251590dccd30994
Expand Down
37 changes: 5 additions & 32 deletions ios/Services/CameraFeedService.swift
Original file line number Diff line number Diff line change
Expand Up @@ -623,29 +623,22 @@ extension CameraFeedService: AVCaptureVideoDataOutputSampleBufferDelegate {
//
if self.isPoseStarted {
let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
// // print("Frame timestamp: \(timestamp.seconds)")
//

if let lastTimestamp = lastTimestamp {
let elapsed = CMTimeSubtract(timestamp, lastTimestamp)
let seconds = CMTimeGetSeconds(elapsed)

frameCount += 1
self.frameRate = Double(frameCount) / seconds
// print("seconds: \(seconds)")
// print("Current Frame Rate: \(frameRate)")

} else {
// Initial timestamp
lastTimestamp = timestamp
frameCount = 0
}

var currentTimeStamp = Int(Date().timeIntervalSince1970 * 1000)
//
// let limit = frameRate/16.5
// poseCount = poseCount + 1
// if (poseCount > limit){
// poseCount = 0
// if shouldProcessSampleBuffer(currentTimestamp: timestamp) {

let data = LandmarkData(height: imageSize.height, width: imageSize.width, frameNumber: frameCount, presentationTimeStamp: Double(timestamp.value), frameRate: self.frameRate, startTimestamp:currentTimeStamp)

delegate?.didOutput(sampleBuffer: sampleBuffer, orientation: UIImage.Orientation.from(deviceOrientation: UIDevice.current.orientation), landmarkData: data)
Expand Down Expand Up @@ -676,17 +669,7 @@ extension CameraFeedService: AVCaptureVideoDataOutputSampleBufferDelegate {


func processSampleBuffer( imageBuffer : CVImageBuffer,sampleBuffer:CMSampleBuffer, imageSize: CGSize) -> CMSampleBuffer? {
// Extract image buffer from sample buffer
// guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
// print("Error: Failed to get image buffer from sample buffer")
// return nil
// }
//
// var dateFormatter = DateFormatter()
// dateFormatter.dateFormat = "mm:ss" // HH for 24h clock

// let date = Date()
// let timeString = dateFormatter.string(from: date)

var timerString: String = "00:00"
let date = Date()
let currentTimestamp = Int(date.timeIntervalSince1970 * 1000)
Expand All @@ -699,14 +682,6 @@ extension CameraFeedService: AVCaptureVideoDataOutputSampleBufferDelegate {
// Convert image buffer to CIImage
let ciImage = CIImage(cvImageBuffer: imageBuffer)

// var data = compressCIImage(ciImage: ciImage11, compressionQuality: 0.3)
//
// let ciImage = ciImageFromJPEGData(jpegData: data!)

// Apply manipulations to the CIImage (e.g., add watermark)
// let watermarkedImage = generateWatermarkImage(baseImage: image, topLeftText: "1", topRightText: "2",bottomLeftText: "3",bottomRightText: "4")


var timerRepStrig = "Timer "+timerString
if (self.repCount != nil){
timerRepStrig = "Timer "+timerString+"\n"+self.repCount!
Expand All @@ -721,9 +696,7 @@ extension CameraFeedService: AVCaptureVideoDataOutputSampleBufferDelegate {
counter = 0
watermarkImage = generateWatermarkImage(imageSize: imageSize, texts: texts, logo: logo)
}
// let texts = ["Top Left", "Top Right", "Bottom Left", "Bottom Right"]

// // let watermarkImage = generateWatermarkImage(timeString: timeString)

let watermarkedImage = watermarkImage!.composited(over: ciImage)

// Render the modified CIImage back to a pixel buffer
Expand Down