📸 Alttaki kod ile kameraya gelen görüntüyü ekrana basacağız
👮‍♂️ cameraProviderFuture.get() ile kameranın olduğundan emin oluyoruz
🎳 PreviewView.ImplementationMode.TEXTURE_VIEW animasyonları ve dönüşümleri destekler, daha fazla memory kullanır
🕊️ PreviewView.ImplementationMode.SURFACE_VIEW daha hızlı ve basit çalışan bir yapıdır
class CameraXActivity : AppCompatActivity() {
    private lateinit var cameraProviderFuture:
        ListenableFuture<ProcessCameraProvider>
    // ...

    /**
     * Initializes CameraX and binds a back-camera preview use case to this
     * activity's lifecycle, rendering into [pvCameraX].
     *
     * Runs on the main executor once the camera provider future completes.
     */
    private fun startCamera() {
        cameraProviderFuture = ProcessCameraProvider.getInstance(this)
        // SAM conversion: no explicit Runnable wrapper needed in Kotlin
        cameraProviderFuture.addListener({
            // The future is complete here, so get() returns without blocking
            val cameraProvider = cameraProviderFuture.get()
            // TEXTURE_VIEW supports animations and transforms but uses more memory
            // than SURFACE_VIEW — https://stackoverflow.com/a/28620918
            pvCameraX.implementationMode = PreviewView.ImplementationMode.TEXTURE_VIEW
            // Describe the camera preview use case
            val cameraPreview = Preview.Builder().apply {
                setTargetRotation(pvCameraX.display.rotation)
                setTargetAspectRatio(AspectRatio.RATIO_16_9)
                setTargetName("Preview")
            }.build().apply { setSurfaceProvider(pvCameraX.previewSurfaceProvider) }
            // Choose between front and back lens
            val cameraSelector = CameraSelector.Builder()
                .requireLensFacing(CameraSelector.LENS_FACING_BACK)
                .build()
            // Bind the use case to this activity's lifecycle.
            // AppCompatActivity already implements LifecycleOwner, so the
            // original `this as LifecycleOwner` cast was redundant; the returned
            // Camera handle was unused, so it is not captured.
            cameraProvider.bindToLifecycle(this, cameraSelector, cameraPreview)
        }, ContextCompat.getMainExecutor(this))
    }
}
/**
 * Variant of [startCamera] that registers an [ImageAnalysis.Analyzer]: each frame
 * delivered by CameraX is converted to a [FirebaseVisionImage] (using the frame's
 * reported rotation in degrees) and passed to face detection; the detected face
 * count is logged. Body is elided here ([imageAnalysis] and [executor] are set up
 * outside this excerpt).
 */
private fun startCamera() {
// ...
imageAnalysis.setAnalyzer(
executor,
ImageAnalysis.Analyzer { imageProxy ->
// Process image if exists
imageProxy.image?.let { image ->
val fvImage =
image.toFvImage(imageProxy.imageInfo.rotationDegrees, isDegree = true)
// Asynchronous: the success listener fires later with the face list
fvImage.detectFaces {
Log.i(TAG, "startCamera: Face count: ${it.size}")
}
}
// val rotationDegree = image.imageInfo.rotationDegrees
// Log.i("TEMP", "startCamera: Image received ${System.currentTimeMillis()}")
// Once the image being analyzed is closed by calling ImageProxy.close(),
// the next latest image will be delivered.
// Important: The Analyzer method implementation must call image.close()
// on received images when finished using them.
// Otherwise, new images may not be received or the camera may stall,
// depending on back pressure setting.
// NOTE(review): close() runs while detectFaces is still in flight; this
// presumably relies on fromMediaImage copying the frame — confirm the image
// is not released before ML Kit reads it.
imageProxy.close()
})
// ...
}
/**
 * Detects faces in this image and invokes [onDetected] with the detected faces
 * once detection completes. Failures are only printed to the error stream.
 *
 * @param onDetected callback receiving the (possibly empty) list of detected faces
 * @return the detection [Task], so callers can attach further listeners
 */
fun FirebaseVisionImage.detectFaces(onDetected: (List<FirebaseVisionFace>) -> Unit): Task<MutableList<FirebaseVisionFace>> {
    // BUG FIX: the original called setClassificationMode(ACCURATE) — ACCURATE is a
    // performance-mode constant, and that call was immediately overwritten by the
    // second setClassificationMode(ALL_CLASSIFICATIONS). It should have been
    // setPerformanceMode(ACCURATE).
    val options = FirebaseVisionFaceDetectorOptions.Builder()
        .setPerformanceMode(FirebaseVisionFaceDetectorOptions.ACCURATE)
        .setLandmarkMode(FirebaseVisionFaceDetectorOptions.ALL_LANDMARKS)
        .setClassificationMode(FirebaseVisionFaceDetectorOptions.ALL_CLASSIFICATIONS)
        .setMinFaceSize(0.15f)     // ignore faces smaller than 15% of the image width
        .enableTracking()          // assign stable IDs to faces across frames
        .build()
    val detector = FirebaseVision.getInstance().getVisionFaceDetector(options)
    return detector.detectInImage(this)
        .addOnSuccessListener(onDetected)
        .addOnFailureListener(Throwable::printStackTrace)
}
/**
 * Converts this [Image] into a [FirebaseVisionImage].
 *
 * @param rotation either a Firebase ROTATION_* constant (default) or, when
 *   [isDegree] is true, a plain rotation in degrees (0/90/180/270) that is
 *   first mapped via [degreesToFirebaseRotation]
 * @param isDegree whether [rotation] is expressed in degrees
 */
fun Image.toFvImage(rotation: Int, isDegree: Boolean = false): FirebaseVisionImage {
    val fvRotation = if (isDegree) degreesToFirebaseRotation(rotation) else rotation
    return FirebaseVisionImage.fromMediaImage(this, fvRotation)
}
/**
 * Maps a rotation expressed in degrees to the corresponding
 * [FirebaseVisionImageMetadata] ROTATION_* constant.
 *
 * @throws IllegalArgumentException if [degrees] is not one of 0, 90, 180, 270
 */
fun degreesToFirebaseRotation(degrees: Int): Int = when (degrees) {
    0 -> FirebaseVisionImageMetadata.ROTATION_0
    90 -> FirebaseVisionImageMetadata.ROTATION_90
    180 -> FirebaseVisionImageMetadata.ROTATION_180
    270 -> FirebaseVisionImageMetadata.ROTATION_270
    else -> throw IllegalArgumentException("Rotation must be 0, 90, 180, or 270.")
}