diff --git a/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/.gitignore b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/.gitignore
new file mode 100644
index 0000000..35410ca
--- /dev/null
+++ b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
diff --git a/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/misc.xml b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/misc.xml
new file mode 100644
index 0000000..1b2d693
--- /dev/null
+++ b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/misc.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/modules.xml b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/modules.xml
new file mode 100644
index 0000000..3c4f5d2
--- /dev/null
+++ b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/vcs.xml b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/vcs.xml
new file mode 100644
index 0000000..07117e4
--- /dev/null
+++ b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/AndroidARView.kt b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/AndroidARView.kt
index f114ad9..7afb175 100644
--- a/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/AndroidARView.kt
+++ b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/AndroidARView.kt
@@ -1,13 +1,14 @@
package io.carius.lars.ar_flutter_plugin_flutterflow
-
import android.app.Activity
import android.app.Application
import android.content.Context
import android.graphics.Bitmap
+import android.media.Image
import android.net.Uri
import android.os.Bundle
import android.os.Handler
import android.os.HandlerThread
+import android.os.Looper
import android.util.Log
import android.view.MotionEvent
import android.view.PixelCopy
@@ -32,6 +33,9 @@ import java.io.ByteArrayOutputStream
import java.io.IOException
import java.nio.FloatBuffer
import java.util.concurrent.CompletableFuture
+//import com.chaquo.python.Python
+//import com.chaquo.python.PyObject
+import kotlinx.coroutines.*
import android.R
import com.google.ar.sceneform.rendering.*
@@ -40,17 +44,8 @@ import android.view.ViewGroup
import com.google.ar.core.TrackingState
-
-
-
-
-
-
-
-
-
-
-
+import DepthImgUtil
+import ImageUtil
internal class AndroidARView(
@@ -94,6 +89,13 @@ internal class AndroidARView(
private var planeCount = 0
private var isCameraEnabled = true
+ // newly added from Miranda
+ private val imageUtil = ImageUtil()
+ private val depthImgUtil = DepthImgUtil()
+
+ private val imageFetchingScope = CoroutineScope(Dispatchers.IO + SupervisorJob())
+ private var trackingProgress = 0
+
// Method channel handlers
private val onSessionMethodCall =
object : MethodChannel.MethodCallHandler {
@@ -163,6 +165,27 @@ internal class AndroidARView(
isCameraEnabled = true
}
+ // newly added
+ "getCameraImage" -> {
+ val imageMap = getCameraImage()
+ result.success(imageMap)
+ }
+ "getDepthImage" -> {
+ val imageMap = getDepthImage()
+ result.success(imageMap)
+ }
+ "startFetchingImages" -> {
+ startFetchingImages()
+ result.success(null)
+ }
+ "stopFetchingImages" -> {
+ stopFetchingImages()
+ result.success(null)
+ }
+ "getCameraIntrinsics" -> {
+ val intrinsics = getCameraIntrinsics()
+ result.success(intrinsics)
+ }
else -> {}
}
}
@@ -413,6 +436,8 @@ internal class AndroidARView(
val config = Config(session)
config.updateMode = Config.UpdateMode.LATEST_CAMERA_IMAGE
config.focusMode = Config.FocusMode.AUTO
+ // newly added
+ config.depthMode = Config.DepthMode.AUTOMATIC
session.configure(config)
arSceneView.setupSession(session)
}
@@ -615,32 +640,96 @@ internal class AndroidARView(
result.success(null)
}
private fun onFrame(frameTime: FrameTime) {
- if(isCameraEnabled){
- if (arSceneView.arFrame != null){
- for (plane in arSceneView.arFrame!!.getUpdatedTrackables(Plane::class.java)) {
- if (plane.trackingState == TrackingState.TRACKING && !detectedPlanes.contains(plane)) {
- detectedPlanes.add(plane)
- planeCount++
- sessionManagerChannel.invokeMethod("onPlaneDetected", planeCount)
- }
- }}
// hide instructions view if no longer required
- if (showAnimatedGuide && arSceneView.arFrame != null){
+ if (showAnimatedGuide && arSceneView.arFrame != null && trackingProgress < 100) {
for (plane in arSceneView.arFrame!!.getUpdatedTrackables(Plane::class.java)) {
if (plane.trackingState === TrackingState.TRACKING) {
- val view = activity.findViewById(R.id.content) as ViewGroup
- view.removeView(animatedGuide)
- showAnimatedGuide = false
- break
+ trackingProgress = minOf(trackingProgress + 5, 100)
+ sessionManagerChannel.invokeMethod("motionData", trackingProgress)
}
}
+
+ if (trackingProgress >= 100) {
+ val view = activity.findViewById(R.id.content) as ViewGroup
+ view.removeView(animatedGuide)
+ showAnimatedGuide = false
+ }
}
if (showFeaturePoints) {
// remove points from last frame
while (pointCloudNode.children?.size
- ?: 0 > 0) {
+ ?: 0 > 0) {
pointCloudNode.children?.first()?.setParent(null)
}
var pointCloud = arSceneView.arFrame?.acquirePointCloud()
@@ -652,36 +741,34 @@ internal class AndroidARView(
for (index in 0 until points.limit() / 4) {
// Add feature point to scene
val featurePoint =
- modelBuilder.makeFeaturePointNode(
- viewContext,
- points.get(4 * index),
- points.get(4 * index + 1),
- points.get(4 * index + 2))
+ modelBuilder.makeFeaturePointNode(
+ viewContext,
+ points.get(4 * index),
+ points.get(4 * index + 1),
+ points.get(4 * index + 2))
featurePoint.setParent(pointCloudNode)
}
}
// Release resources
pointCloud?.release()
- }}
+ }
val updatedAnchors = arSceneView.arFrame!!.updatedAnchors
// Notify the cloudManager of all the updates.
if (this::cloudAnchorHandler.isInitialized) {cloudAnchorHandler.onUpdate(updatedAnchors)}
- if(isCameraEnabled) {
- if (keepNodeSelected && transformationSystem.selectedNode != null && transformationSystem.selectedNode!!.isTransforming) {
- // If the selected node is currently transforming, we want to deselect it as soon as the transformation is done
- keepNodeSelected = false
- }
- if (!keepNodeSelected && transformationSystem.selectedNode != null && !transformationSystem.selectedNode!!.isTransforming) {
- // once the transformation is done, deselect the node and allow selection of another node
- transformationSystem.selectNode(null)
- keepNodeSelected = true
- }
- if (!enablePans && !enableRotation) {
- //unselect all nodes as we do not want the selection visualizer
- transformationSystem.selectNode(null)
- }
- }
+ if (keepNodeSelected && transformationSystem.selectedNode != null && transformationSystem.selectedNode!!.isTransforming){
+ // If the selected node is currently transforming, we want to deselect it as soon as the transformation is done
+ keepNodeSelected = false
+ }
+ if (!keepNodeSelected && transformationSystem.selectedNode != null && !transformationSystem.selectedNode!!.isTransforming){
+ // once the transformation is done, deselect the node and allow selection of another node
+ transformationSystem.selectNode(null)
+ keepNodeSelected = true
+ }
+ if (!enablePans && !enableRotation){
+ //unselect all nodes as we do not want the selection visualizer
+ transformationSystem.selectNode(null)
+ }
}
private fun addNode(dict_node: HashMap<String, Any>, dict_anchor: HashMap<String, Any>? = null): CompletableFuture<Boolean>{
@@ -896,6 +983,185 @@ internal class AndroidARView(
}
}
+ // newly added from Miranda
+
+ private fun getCameraIntrinsics(): HashMap<String, Any>? {
+ val arFrame = arSceneView.arFrame ?: return null
+ val cameraIntrinsics = arFrame.camera.getImageIntrinsics()
+
+ // Fetching the focal length ([fx, fy])
+ val focalLength = cameraIntrinsics.focalLength
+
+ // Fetching the image dimensions
+ val imageDimensions = cameraIntrinsics.imageDimensions
+
+ // Fetching the principal point ([cx, cy])
+ val principalPoint = cameraIntrinsics.principalPoint
+
+ // Constructing the map to return
+ val intrinsicsMap = hashMapOf(
+ "focalLengthX" to focalLength[0],
+ "focalLengthY" to focalLength[1],
+ "imageWidth" to imageDimensions[0],
+ "imageHeight" to imageDimensions[1],
+ "principalPointX" to principalPoint[0],
+ "principalPointY" to principalPoint[1]
+ )
+
+ return intrinsicsMap
+ }
+
+
+ private fun getCameraImage(): HashMap<String, Any>? {
+ val arFrame = arSceneView.arFrame ?: return null
+
+ val cameraImage: Image = arFrame.acquireCameraImage()
+ val bytes = imageUtil.imageToByteArray(cameraImage) ?: byteArrayOf()
+
+ val imageMap = hashMapOf(
+ "bytes" to bytes,
+ "width" to cameraImage.width,
+ "height" to cameraImage.height
+ )
+
+ cameraImage.close()
+
+ return imageMap
+ }
+
+ private fun getFullDepthOnly(): HashMap<String, Any>? {
+ val arFrame = arSceneView.arFrame ?: return null
+ try {
+ val depthImage: Image = arFrame.acquireDepthImage16Bits()
+
+ val array = depthImgUtil.parseImg(depthImage)
+
+ val buffer = depthImage.planes[0].buffer
+ val bytes = ByteArray(buffer.remaining())
+ buffer.get(bytes)
+
+ val imageMap = hashMapOf(
+ "depthImgBytes" to bytes,
+ "width" to depthImage.width,
+ "height" to depthImage.height,
+ "depthImgArrays" to mapOf(
+ "xBuffer" to array.xBuffer.map { it.toInt() },
+ "yBuffer" to array.yBuffer.map { it.toInt() },
+ "dBuffer" to array.dBuffer.toList(),
+ "percentageBuffer" to array.percentageBuffer.toList(),
+ "length" to array.length
+ ),
+ )
+ depthImage.close()
+ return imageMap
+ } catch (e: NotYetAvailableException) {
+ // This means that depth data is not available yet.
+ // Depth data will not be available if there are no tracked
+ // feature points. This can happen when there is no motion, or when the
+ // camera loses its ability to track objects in the surrounding
+ // environment.
+ }
+ return null
+ }
+
+ private fun getDepthImage(): HashMap<String, Any>? {
+ val arFrame = arSceneView.arFrame ?: return null
+
+ val depth: Image
+ val rawDepth: Image
+ val rawDepthConfidence: Image
+ try {
+ depth = arFrame.acquireDepthImage16Bits()
+ rawDepth = arFrame.acquireRawDepthImage16Bits()
+ rawDepthConfidence = arFrame.acquireRawDepthConfidenceImage()
+ } catch (e: NotYetAvailableException) {
+ // Depth data is not available yet (no motion, or tracking was lost).
+ return null
+ }
+
+ val array = depthImgUtil.parseImg(depth)
+ val rawArray = depthImgUtil.parseImg(rawDepth)
+ val confidenceArray = depthImgUtil.parseImg(rawDepthConfidence)
+
+ val depthBytes = ByteArray(depth.planes[0].buffer.remaining()).apply { depth.planes[0].buffer.get(this) }
+ val rawDepthBytes = ByteArray(rawDepth.planes[0].buffer.remaining()).apply { rawDepth.planes[0].buffer.get(this) }
+ val confidenceBytes = ByteArray(rawDepthConfidence.planes[0].buffer.remaining()).apply { rawDepthConfidence.planes[0].buffer.get(this) }
+
+ val imageMap = hashMapOf(
+ "depthImgBytes" to depthBytes,
+ "rawDepthImgBytes" to rawDepthBytes,
+ "confidenceImgBytes" to confidenceBytes,
+ "width" to depth.width,
+ "height" to depth.height,
+ "depthImgArrays" to mapOf(
+ "xBuffer" to array.xBuffer.map { it.toInt() },
+ "yBuffer" to array.yBuffer.map { it.toInt() },
+ "dBuffer" to array.dBuffer.toList(),
+ "percentageBuffer" to array.percentageBuffer.toList(),
+ "length" to array.length
+ ),
+ "rawDepthImgArrays" to mapOf(
+ "xBuffer" to rawArray.xBuffer.map { it.toInt() },
+ "yBuffer" to rawArray.yBuffer.map { it.toInt() },
+ "dBuffer" to rawArray.dBuffer.toList(),
+ "percentageBuffer" to rawArray.percentageBuffer.toList(),
+ "length" to rawArray.length
+ ),
+ "confidenceImgArrays" to mapOf(
+ "xBuffer" to confidenceArray.xBuffer.map { it.toInt() },
+ "yBuffer" to confidenceArray.yBuffer.map { it.toInt() },
+ "dBuffer" to confidenceArray.dBuffer.toList(),
+ "percentageBuffer" to confidenceArray.percentageBuffer.toList(),
+ "length" to confidenceArray.length
+ )
+ )
+
+ depth.close()
+ rawDepth.close()
+ rawDepthConfidence.close()
+
+ return imageMap
+ }
+
+ fun startFetchingImages() {
+ imageFetchingScope.launch {
+ fetchImages()
+ }
+ }
+
+ private suspend fun fetchImages() {
+ while (currentCoroutineContext().isActive) {
+ val arFrame = arSceneView.arFrame
+ if (arFrame == null || arFrame.camera.trackingState != TrackingState.TRACKING) {
+ delay(1000L / 30) // wait ~one frame (30 fps) and retry
+ continue
+ }
+
+ var cameraImage: Image? = null
+
+ try {
+ cameraImage = arFrame.acquireCameraImage()
+ val bytes = imageUtil.yuvToJpegByteArray(cameraImage)
+
+// withContext(Dispatchers.Default) {
+// val pyResult: PyObject = pythonModule.callAttr("get_ratio_inside_box", bytes)
+// val result: Double = pyResult.toDouble()
+// withContext(Dispatchers.Main) {
+// sessionManagerChannel.invokeMethod("imageData", result)
+// }
+// }
+ Log.d("ARCore", "Fetched camera frame (${bytes.size} JPEG bytes)")
+ } catch (e: NotYetAvailableException) {
+ // No camera image for this frame yet; try again on the next tick.
+ } catch (e: DeadlineExceededException) {
+ Log.e("ARCore", "Deadline exceeded when trying to acquire resources.")
+ } finally {
+ cameraImage?.close()
+ }
+
+ delay(1000L / 30) // for ~30fps
+ }
+ }
+
+ fun stopFetchingImages() {
+ // Cancel only the children so the scope can be reused by startFetchingImages().
+ imageFetchingScope.coroutineContext.cancelChildren()
+ }
+
+ // newly added over
+
private inner class cloudAnchorUploadedListener: CloudAnchorHandler.CloudAnchorListener {
override fun onCloudTaskComplete(anchorName: String?, anchor: Anchor?) {
val cloudState = anchor!!.cloudAnchorState
diff --git a/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/utils/DepthImgUtil.kt b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/utils/DepthImgUtil.kt
new file mode 100644
index 0000000..9016f5f
--- /dev/null
+++ b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/utils/DepthImgUtil.kt
@@ -0,0 +1,45 @@
+import android.media.Image
+import java.nio.ShortBuffer
+
+class DepthImgUtil {
+ inner class DepthImgArrays(size: Int) {
+ var xBuffer: ShortArray = ShortArray(size)
+ var yBuffer: ShortArray = ShortArray(size)
+ var dBuffer: FloatArray = FloatArray(size)
+ var percentageBuffer: FloatArray = FloatArray(size)
+ var length: Int = size
+ }
+
+ fun parseImg(depthImg: Image): DepthImgArrays {
+ // Buffers for storing TOF output
+ val arrays = DepthImgArrays(depthImg.width * depthImg.height)
+ val plane = depthImg.planes[0]
+ val shortDepthBuffer: ShortBuffer = plane.buffer.asShortBuffer()
+
+ val stride = plane.rowStride // row stride in *bytes*
+ var i = 0
+ for (y in 0 until depthImg.height) {
+ for (x in 0 until depthImg.width) {
+ // Each DEPTH16 sample packs [3-bit confidence | 13-bit range in mm].
+ // rowStride is in bytes but the ShortBuffer is indexed in 16-bit
+ // samples, so the row offset is y * stride / 2.
+ var depthSample =
+ shortDepthBuffer[y * stride / 2 + x].toInt() and 0xFFFF
+ // Swap bytes: the samples are little-endian, the buffer is read big-endian.
+ depthSample =
+ (depthSample and 0xFF shl 8 and 0xFF00) or ((depthSample and 0xFF00 shr 8) and 0xFF)
+ val depthSampleShort = depthSample.toShort()
+ val depthRange = (depthSampleShort.toInt() and 0x1FFF).toShort()
+ val depthConfidence = ((depthSampleShort.toInt() shr 13) and 0x7).toShort()
+ val depthPercentage = if (depthConfidence.toInt() == 0) 1f else (depthConfidence - 1) / 7f
+
+ // Store data in buffer
+ arrays.xBuffer[i] = x.toShort()
+ arrays.yBuffer[i] = y.toShort()
+ arrays.dBuffer[i] = depthRange / 1000.0f
+ arrays.percentageBuffer[i] = depthPercentage
+ i++
+ }
+ }
+ return arrays
+ }
+}
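
For reference, consumers of the raw `depthImgBytes` entry can do the same DEPTH16 unpacking on the Dart side. A minimal sketch (the `decodeDepth16` helper is hypothetical, not part of this diff), assuming little-endian 16-bit samples and no row padding:

```dart
import 'dart:typed_data';

/// Decodes a DEPTH16 buffer into per-pixel range in meters.
/// Each sample packs [3-bit confidence | 13-bit range in millimeters].
/// Assumes no row padding (rowStride == 2 * width).
List<double> decodeDepth16(Uint8List bytes, int width, int height) {
  final data = ByteData.sublistView(bytes);
  final meters = List<double>.filled(width * height, 0);
  for (var i = 0; i < width * height; i++) {
    final sample = data.getUint16(i * 2, Endian.little);
    meters[i] = (sample & 0x1FFF) / 1000.0; // lower 13 bits, mm -> m
  }
  return meters;
}
```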
diff --git a/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/utils/ImageUtil.kt b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/utils/ImageUtil.kt
new file mode 100644
index 0000000..f2fa6fa
--- /dev/null
+++ b/android/src/main/kotlin/io/carius/lars/ar_flutter_plugin_flutterflow/utils/ImageUtil.kt
@@ -0,0 +1,122 @@
+import android.graphics.ImageFormat
+import android.graphics.Rect
+import android.graphics.YuvImage
+import android.media.Image
+import java.io.ByteArrayOutputStream
+
+class ImageUtil {
+ // Fast path: packs the Y, V, U planes directly in NV21 order and JPEG-encodes.
+ // Assumes interleaved chroma planes (pixelStride == 2), which holds on most
+ // devices; imageToByteArray() below handles the general YUV_420_888 layout.
+ fun yuvToJpegByteArray(yuvImage: Image): ByteArray {
+ val width = yuvImage.width
+ val height = yuvImage.height
+
+ val yBuffer = yuvImage.planes[0].buffer
+ val uBuffer = yuvImage.planes[1].buffer
+ val vBuffer = yuvImage.planes[2].buffer
+
+ val ySize = yBuffer.remaining()
+ val uSize = uBuffer.remaining()
+ val vSize = vBuffer.remaining()
+
+ val nv21 = ByteArray(ySize + uSize + vSize)
+
+ yBuffer.get(nv21, 0, ySize)
+ vBuffer.get(nv21, ySize, vSize)
+ uBuffer.get(nv21, ySize + vSize, uSize)
+
+ val yuv = YuvImage(nv21, ImageFormat.NV21, width, height, null)
+ val out = ByteArrayOutputStream()
+ yuv.compressToJpeg(Rect(0, 0, width, height), 90, out) // can adjust the quality here
+ return out.toByteArray()
+ }
+
+
+ fun imageToByteArray(image: Image): ByteArray? {
+ var data: ByteArray? = null
+ if (image.format == ImageFormat.YUV_420_888) {
+ data = NV21toJPEG(
+ YUV_420_888toNV21(image),
+ image.width, image.height)
+ }
+ return data
+ }
+
+ fun YUV_420_888toNV21(image: Image): ByteArray {
+
+ val width = image.width
+ val height = image.height
+ val ySize = width * height
+ val uvSize = width * height / 4
+
+ val nv21 = ByteArray(ySize + uvSize * 2)
+
+ val yBuffer = image.planes[0].buffer // Y
+ val uBuffer = image.planes[1].buffer // U
+ val vBuffer = image.planes[2].buffer // V
+
+ var rowStride = image.planes[0].rowStride
+ check(image.planes[0].pixelStride == 1)
+
+ var pos = 0
+
+ if (rowStride == width) { // likely
+ yBuffer.get(nv21, 0, ySize)
+ pos += ySize
+ } else {
+ var yBufferPos = 0
+ while (pos < ySize) {
+ yBuffer.position(yBufferPos)
+ yBuffer.get(nv21, pos, width)
+ yBufferPos += rowStride // advance a full row; stride includes padding
+ pos += width
+ }
+ }
+
+ rowStride = image.planes[2].rowStride
+ val pixelStride = image.planes[2].pixelStride
+
+ check(rowStride == image.planes[1].rowStride)
+ check(pixelStride == image.planes[1].pixelStride)
+
+ // Copy the V plane so we can index it safely (plane buffers are read-only).
+ val vBufferCopy = ByteArray(vBuffer.remaining())
+ vBuffer.get(vBufferCopy)
+ vBuffer.rewind()
+
+ // Interleave V and U into the NV21 chroma plane. This covers both the
+ // interleaved (pixelStride == 2) and planar (pixelStride == 1) layouts, so
+ // the fragile "overlapping planes" shortcut from the original snippet is dropped.
+ for (row in 0 until height / 2) {
+ for (col in 0 until width / 2) {
+ val vuPos = col * pixelStride + row * rowStride
+ nv21[pos++] = vBufferCopy[vuPos]
+ nv21[pos++] = uBuffer.get(vuPos)
+ }
+ }
+ return nv21
+ }
+
+ private fun NV21toJPEG(nv21: ByteArray, width: Int, height: Int): ByteArray {
+ val out = ByteArrayOutputStream()
+ val yuv = YuvImage(nv21, ImageFormat.NV21, width, height, null)
+ yuv.compressToJpeg(Rect(0, 0, width, height), 100, out)
+ return out.toByteArray()
+ }
+}
\ No newline at end of file
diff --git a/lib/managers/ar_session_manager.dart b/lib/managers/ar_session_manager.dart
index 0a78984..2f94cfa 100644
--- a/lib/managers/ar_session_manager.dart
+++ b/lib/managers/ar_session_manager.dart
@@ -1,14 +1,19 @@
import 'dart:math' show sqrt;
+import 'dart:async';
import 'dart:typed_data';
import 'package:ar_flutter_plugin_flutterflow/datatypes/config_planedetection.dart';
import 'package:ar_flutter_plugin_flutterflow/models/ar_anchor.dart';
import 'package:ar_flutter_plugin_flutterflow/models/ar_hittest_result.dart';
+import 'package:ar_flutter_plugin_flutterflow/models/camera_intrinsics.dart';
import 'package:ar_flutter_plugin_flutterflow/utils/json_converters.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:vector_math/vector_math_64.dart';
+import '../models/ar_image.dart';
+import '../models/camera_image.dart';
+
// Type definitions to enforce a consistent use of the API
typedef ARHitResultHandler = void Function(List<ARHitTestResult> hits);
typedef ARPlaneResultHandler = void Function(int planeCount);
@@ -37,6 +42,12 @@ class ARSessionManager {
/// Callback that is triggered once error is triggered
ErrorHandler? onError;
+ /// Stream of image quality values
+ static StreamController<double>? _imageStreamController;
+
+ /// Stream of motion progress updates
+ static StreamController<int>? _progressController;
+
ARSessionManager(int id, this.buildContext, this.planeDetectionConfig,
{this.debug = false}) {
_channel = MethodChannel('arsession_$id');
@@ -121,6 +132,59 @@ class ARSessionManager {
_channel.invokeMethod('enableCamera');
}
+
+ /// Returns the camera intrinsics
+ Future<CameraIntrinsics> getCameraIntrinsics() async {
+ final Map result =
+ await _channel.invokeMethod('getCameraIntrinsics');
+ return CameraIntrinsics.fromMap(result);
+ }
+
+ /// Returns the camera image
+ Future<CameraImage> getCameraImage() async {
+ final Map result =
+ await _channel.invokeMethod('getCameraImage');
+ return CameraImage.fromMap(result);
+ }
+
+ /// Returns the depth image
+ Future<ARImage> getDepthImage() async {
+ final Map result =
+ await _channel.invokeMethod('getDepthImage');
+ return ARImage.fromMap(result);
+ }
+
+ /// Starts depth images stream
+ void startFetchingImages() {
+ _channel.invokeMethod('startFetchingImages');
+ }
+
+ /// Stops depth images stream
+ void stopFetchingImages() {
+ _channel.invokeMethod('stopFetchingImages');
+ _imageStreamController?.close();
+ _imageStreamController = null;
+ }
+
+ /// Returns the depth image quality stream for listening
+ Stream<double> get depthQualityStream {
+ _imageStreamController ??= StreamController<double>.broadcast();
+ return _imageStreamController!.stream;
+ }
+
+ /// Stops motion progress stream
+ void stopMotionUpdates() {
+ _channel.invokeMethod('stopMotionUpdates');
+ _progressController?.close();
+ _progressController = null;
+ }
+
+ /// Returns the motion progress stream for listening
+ Stream<int> get motionUpdatesStream {
+ _progressController ??= StreamController<int>.broadcast();
+ return _progressController!.stream;
+ }
+
Future _platformCallHandler(MethodCall call) {
if (debug) {
print('_platformCallHandler call ${call.method} ${call.arguments}');
@@ -163,6 +227,14 @@ class ARSessionManager {
case 'dispose':
_channel.invokeMethod("dispose");
break;
+ // case 'imageData':
+ // double imageQuality = call.arguments as double;
+ // _imageStreamController?.add(imageQuality);
+ // break;
+ case 'motionData':
+ int motionProgress = call.arguments as int;
+ _progressController?.add(motionProgress);
+ break;
default:
if (debug) {
print('Unimplemented method ${call.method} ');
@@ -200,12 +272,25 @@ class ARSessionManager {
});
}
/// Dispose the AR view on the platforms to pause the scenes and disconnect the platform handlers.
/// You should call this before removing the AR view to prevent out of memory errors
dispose() async {
try {
await _channel.invokeMethod("dispose");
+ _imageStreamController?.close();
+ _imageStreamController = null;
+ _progressController?.close();
+ _progressController = null;
} catch (e) {
print(e);
}
diff --git a/lib/models/ar_image.dart b/lib/models/ar_image.dart
new file mode 100644
index 0000000..ba030c9
--- /dev/null
+++ b/lib/models/ar_image.dart
@@ -0,0 +1,46 @@
+import 'dart:typed_data';
+
+import 'depth_img_array.dart';
+
+class ARImage {
+ ARImage({
+ this.width,
+ this.height,
+ this.depthImgBytes,
+ this.depthImgArrays,
+ this.rawDepthImgBytes,
+ this.confidenceImgBytes,
+ this.rawDepthImgArrays,
+ this.confidenceImgArrays,
+ }) : assert(depthImgBytes != null),
+ assert(width != null && width > 0),
+ assert(height != null && height > 0);
+
+ final Uint8List? depthImgBytes;
+ final Uint8List? rawDepthImgBytes;
+ final Uint8List? confidenceImgBytes;
+ final int? width;
+ final int? height;
+ final DepthImgArrays? depthImgArrays;
+ final DepthImgArrays? rawDepthImgArrays;
+ final DepthImgArrays? confidenceImgArrays;
+
+ static ARImage fromMap(Map map) {
+ return ARImage(
+ depthImgBytes: map['depthImgBytes'],
+ rawDepthImgBytes: map['rawDepthImgBytes'],
+ confidenceImgBytes: map['confidenceImgBytes'],
+ width: map['width'],
+ height: map['height'],
+ depthImgArrays:
+ map['depthImgArrays'] == null
+ ? null
+ : DepthImgArrays.fromMap(Map.from(map['depthImgArrays'])),
+ rawDepthImgArrays:
+ map['rawDepthImgArrays'] == null
+ ? null
+ : DepthImgArrays.fromMap(Map.from(map['rawDepthImgArrays'])),
+ confidenceImgArrays:
+ map['confidenceImgArrays'] == null
+ ? null
+ : DepthImgArrays.fromMap(Map.from(map['confidenceImgArrays'])),
+ );
+ }
+}
diff --git a/lib/models/camera_image.dart b/lib/models/camera_image.dart
new file mode 100644
index 0000000..012441a
--- /dev/null
+++ b/lib/models/camera_image.dart
@@ -0,0 +1,29 @@
+import 'dart:typed_data';
+
+class CameraImage {
+ CameraImage({
+ this.bytes,
+ this.width,
+ this.height,
+ }) : assert(bytes != null),
+ assert(width != null && width > 0),
+ assert(height != null && height > 0);
+
+ final Uint8List? bytes;
+ final int? width;
+ final int? height;
+
+ static CameraImage fromMap(Map map) {
+ return CameraImage(
+ bytes: map['bytes'],
+ width: map['width'],
+ height: map['height'],
+ );
+ }
+
+ Map<String, dynamic> toMap() => {
+ 'bytes': bytes,
+ 'width': width,
+ 'height': height
+ }..removeWhere((String k, dynamic v) => v == null);
+}
\ No newline at end of file
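
Because the Kotlin side JPEG-encodes the frame before sending it over the channel, the bytes can be rendered directly. A small usage sketch (the widget helper is illustrative, not part of this diff):

```dart
import 'package:flutter/material.dart';
import 'package:ar_flutter_plugin_flutterflow/models/camera_image.dart';

/// Renders a captured frame; the plugin delivers JPEG bytes,
/// so Image.memory can decode them without further conversion.
Widget buildFramePreview(CameraImage frame) {
  final bytes = frame.bytes;
  if (bytes == null) return const SizedBox.shrink();
  return Image.memory(bytes, fit: BoxFit.contain);
}
```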
diff --git a/lib/models/camera_intrinsics.dart b/lib/models/camera_intrinsics.dart
new file mode 100644
index 0000000..e8e2467
--- /dev/null
+++ b/lib/models/camera_intrinsics.dart
@@ -0,0 +1,39 @@
+class CameraIntrinsics {
+ final double focalLengthX;
+ final double focalLengthY;
+ final int imageWidth;
+ final int imageHeight;
+ final double principalPointX;
+ final double principalPointY;
+
+ CameraIntrinsics({
+ required this.focalLengthX,
+ required this.focalLengthY,
+ required this.imageWidth,
+ required this.imageHeight,
+ required this.principalPointX,
+ required this.principalPointY,
+ });
+
+ static CameraIntrinsics fromMap(Map map) {
+ return CameraIntrinsics(
+ focalLengthX: map['focalLengthX'],
+ focalLengthY: map['focalLengthY'],
+ imageWidth: map['imageWidth'],
+ imageHeight: map['imageHeight'],
+ principalPointX: map['principalPointX'],
+ principalPointY: map['principalPointY'],
+ );
+ }
+
+ Map<String, dynamic> toMap() {
+ return {
+ 'focalLengthX': focalLengthX,
+ 'focalLengthY': focalLengthY,
+ 'imageWidth': imageWidth,
+ 'imageHeight': imageHeight,
+ 'principalPointX': principalPointX,
+ 'principalPointY': principalPointY,
+ };
+ }
+}
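
These values follow the standard pinhole model: a camera-space point (x, y, z) with z > 0 lands at pixel u = fx·x/z + cx, v = fy·y/z + cy. A sketch of that projection using the model above (the helper name is illustrative):

```dart
import 'package:ar_flutter_plugin_flutterflow/models/camera_intrinsics.dart';

/// Projects a camera-space point (in meters, z > 0) to pixel coordinates
/// using the pinhole model: u = fx * x / z + cx, v = fy * y / z + cy.
List<double> projectToPixel(CameraIntrinsics k, double x, double y, double z) {
  final u = k.focalLengthX * x / z + k.principalPointX;
  final v = k.focalLengthY * y / z + k.principalPointY;
  return [u, v];
}
```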
diff --git a/lib/models/depth_img_array.dart b/lib/models/depth_img_array.dart
new file mode 100644
index 0000000..97f7c47
--- /dev/null
+++ b/lib/models/depth_img_array.dart
@@ -0,0 +1,32 @@
+class DepthImgArrays {
+ List<int> xBuffer;
+ List<int> yBuffer;
+ List<double> dBuffer;
+ List<double> percentageBuffer;
+ int length;
+
+ DepthImgArrays(
+ {required this.xBuffer,
+ required this.yBuffer,
+ required this.dBuffer,
+ required this.percentageBuffer,
+ required this.length});
+
+ factory DepthImgArrays.fromMap(Map map) {
+ return DepthImgArrays(
+ xBuffer: List<int>.from(map['xBuffer']),
+ yBuffer: List<int>.from(map['yBuffer']),
+ dBuffer: List<double>.from(map['dBuffer']),
+ percentageBuffer: List<double>.from(map['percentageBuffer']),
+ length: map['length'],
+ );
+ }
+
+ Map<String, dynamic> toJson() => {
+ 'xBuffer': xBuffer,
+ 'yBuffer': yBuffer,
+ 'dBuffer': dBuffer,
+ 'percentageBuffer': percentageBuffer,
+ 'length': length,
+ };
+}
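
Taken together, a caller might exercise the new session-manager surface like this (a sketch only: error handling and widget scaffolding are omitted, and the captures will fail if no ARCore frame or depth data is available yet):

```dart
import 'package:ar_flutter_plugin_flutterflow/managers/ar_session_manager.dart';

Future<void> captureDepthSnapshot(ARSessionManager session) async {
  // Progress events are emitted from onFrame while the animated guide is shown.
  final sub = session.motionUpdatesStream.listen((progress) {
    print('tracking progress: $progress%');
  });

  final intrinsics = await session.getCameraIntrinsics();
  print('fx=${intrinsics.focalLengthX} cx=${intrinsics.principalPointX}');

  // One-shot captures of the current ARCore frame.
  final rgb = await session.getCameraImage();
  final depth = await session.getDepthImage();
  print('rgb ${rgb.width}x${rgb.height}, depth ${depth.width}x${depth.height}');

  await sub.cancel();
}
```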