diff --git a/app/proguard-rules.pro b/app/proguard-rules.pro index f72d4d0..2fb4752 100644 --- a/app/proguard-rules.pro +++ b/app/proguard-rules.pro @@ -1,10 +1,5 @@ # Add project specific ProGuard rules here. +# CameraX and GMS Location ship their own consumer ProGuard rules. -# Keep CameraX classes --keep class androidx.camera.** { *; } - -# Keep OpenGL shader-related code +# Keep OpenGL shader-related code (accessed via reflection by GLSL pipeline) -keep class no.naiv.tiltshift.effect.** { *; } - -# Keep location provider --keep class com.google.android.gms.location.** { *; } diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml index 8cd08b7..c56c4e9 100644 --- a/app/src/main/AndroidManifest.xml +++ b/app/src/main/AndroidManifest.xml @@ -6,10 +6,13 @@ - + + + + @@ -30,7 +33,7 @@ android:exported="true" android:screenOrientation="fullSensor" android:configChanges="orientation|screenSize|screenLayout|keyboardHidden" - android:windowSoftInputMode="adjustResize" + android:windowSoftInputMode="adjustNothing" android:theme="@style/Theme.TiltShiftCamera"> diff --git a/app/src/main/java/no/naiv/tiltshift/camera/CameraManager.kt b/app/src/main/java/no/naiv/tiltshift/camera/CameraManager.kt index 20104bd..9c440ac 100644 --- a/app/src/main/java/no/naiv/tiltshift/camera/CameraManager.kt +++ b/app/src/main/java/no/naiv/tiltshift/camera/CameraManager.kt @@ -18,7 +18,9 @@ import androidx.lifecycle.LifecycleOwner import kotlinx.coroutines.flow.MutableStateFlow import kotlinx.coroutines.flow.StateFlow import kotlinx.coroutines.flow.asStateFlow +import java.lang.ref.WeakReference import java.util.concurrent.Executor +import java.util.concurrent.ExecutorService import java.util.concurrent.Executors /** @@ -58,11 +60,12 @@ class CameraManager(private val context: Context) { val isFrontCamera: StateFlow = _isFrontCamera.asStateFlow() /** Background executor for image capture callbacks to avoid blocking the main thread. 
*/ - private val captureExecutor: Executor = Executors.newSingleThreadExecutor() + private val captureExecutor: ExecutorService = Executors.newSingleThreadExecutor() private var surfaceTextureProvider: (() -> SurfaceTexture?)? = null private var surfaceSize: Size = Size(1920, 1080) - private var lifecycleOwnerRef: LifecycleOwner? = null + /** Weak reference to avoid preventing Activity GC across config changes. */ + private var lifecycleOwnerRef: WeakReference<LifecycleOwner>? = null /** * Starts the camera with the given lifecycle owner. */ @@ -73,7 +76,7 @@ class CameraManager(private val context: Context) { surfaceTextureProvider: () -> SurfaceTexture? ) { this.surfaceTextureProvider = surfaceTextureProvider - this.lifecycleOwnerRef = lifecycleOwner + this.lifecycleOwnerRef = WeakReference(lifecycleOwner) val cameraProviderFuture = ProcessCameraProvider.getInstance(context) cameraProviderFuture.addListener({ @@ -89,7 +92,10 @@ class CameraManager(private val context: Context) { } private fun bindCameraUseCases(lifecycleOwner: LifecycleOwner) { - val provider = cameraProvider ?: return + val provider = cameraProvider ?: run { + Log.w(TAG, "bindCameraUseCases called before camera provider initialized") + return + } // Unbind all use cases before rebinding provider.unbindAll() @@ -164,12 +170,24 @@ class CameraManager(private val context: Context) { } /** - * Sets the zoom ratio. + * Sets the zoom ratio. Updates UI state only after the camera confirms the change. */ fun setZoom(ratio: Float) { val clamped = ratio.coerceIn(_minZoomRatio.value, _maxZoomRatio.value) - camera?.cameraControl?.setZoomRatio(clamped) - _zoomRatio.value = clamped + val future = camera?.cameraControl?.setZoomRatio(clamped) + if (future != null) { + future.addListener({ + try { + future.get() + _zoomRatio.value = clamped + } catch (e: Exception) { + Log.w(TAG, "Zoom operation failed", e) + } + }, ContextCompat.getMainExecutor(context)) + } else { + // Optimistic update when camera not available (e.g. 
during init) + _zoomRatio.value = clamped + } } /** @@ -185,7 +203,7 @@ class CameraManager(private val context: Context) { fun switchCamera() { _isFrontCamera.value = !_isFrontCamera.value _zoomRatio.value = 1.0f // Reset zoom when switching - lifecycleOwnerRef?.let { bindCameraUseCases(it) } + lifecycleOwnerRef?.get()?.let { bindCameraUseCases(it) } } /** @@ -207,10 +225,11 @@ class CameraManager(private val context: Context) { } /** - * Releases camera resources. + * Releases camera resources and shuts down the background executor. */ fun release() { cameraProvider?.unbindAll() + captureExecutor.shutdown() camera = null preview = null imageCapture = null diff --git a/app/src/main/java/no/naiv/tiltshift/camera/ImageCaptureHandler.kt b/app/src/main/java/no/naiv/tiltshift/camera/ImageCaptureHandler.kt index 04cc1dd..4736e52 100644 --- a/app/src/main/java/no/naiv/tiltshift/camera/ImageCaptureHandler.kt +++ b/app/src/main/java/no/naiv/tiltshift/camera/ImageCaptureHandler.kt @@ -36,6 +36,8 @@ class ImageCaptureHandler( private const val TAG = "ImageCaptureHandler" /** Maximum decoded image dimension to prevent OOM from huge gallery images. */ private const val MAX_IMAGE_DIMENSION = 4096 + /** Scale factor for downscaling blur and mask computations. */ + private const val SCALE_FACTOR = 4 } /** @@ -51,7 +53,7 @@ class ImageCaptureHandler( * Captures a photo and applies the tilt-shift effect. 
* * Phase 1 (inside suspendCancellableCoroutine / camera callback): - * decode → rotate → apply effect (synchronous CPU work only) + * decode -> rotate -> apply effect (synchronous CPU work only) * * Phase 2 (after continuation resumes, back in coroutine context): * save bitmap via PhotoSaver (suspend-safe) @@ -75,7 +77,6 @@ class ImageCaptureHandler( val imageRotation = imageProxy.imageInfo.rotationDegrees currentBitmap = imageProxyToBitmap(imageProxy) - imageProxy.close() if (currentBitmap == null) { continuation.resume( @@ -97,6 +98,8 @@ class ImageCaptureHandler( continuation.resume( CaptureOutcome.Failed(SaveResult.Error("Failed to process image. Please try again.", e)) ) + } finally { + imageProxy.close() } } @@ -114,8 +117,9 @@ class ImageCaptureHandler( return when (captureResult) { is CaptureOutcome.Failed -> captureResult.result is CaptureOutcome.Processed -> { + var thumbnail: Bitmap? = null try { - val thumbnail = createThumbnail(captureResult.processed) + thumbnail = createThumbnail(captureResult.processed) val result = photoSaver.saveBitmapPair( original = captureResult.original, processed = captureResult.processed, @@ -123,12 +127,14 @@ class ImageCaptureHandler( location = location ) if (result is SaveResult.Success) { - result.copy(thumbnail = thumbnail) + val output = result.copy(thumbnail = thumbnail) + thumbnail = null // prevent finally from recycling the returned thumbnail + output } else { - thumbnail?.recycle() result } } finally { + thumbnail?.recycle() captureResult.original.recycle() captureResult.processed.recycle() } @@ -206,7 +212,7 @@ class ImageCaptureHandler( /** * Processes an existing image from the gallery through the tilt-shift pipeline. - * Loads the image, applies EXIF rotation, processes the effect, and saves both versions. + * Loads the image, applies EXIF rotation, processes the effect, and saves the result. 
*/ suspend fun processExistingImage( imageUri: Uri, @@ -215,6 +221,7 @@ class ImageCaptureHandler( ): SaveResult = withContext(Dispatchers.IO) { var originalBitmap: Bitmap? = null var processedBitmap: Bitmap? = null + var thumbnail: Bitmap? = null try { originalBitmap = loadBitmapFromUri(imageUri) ?: return@withContext SaveResult.Error("Failed to load image") @@ -223,7 +230,7 @@ class ImageCaptureHandler( processedBitmap = applyTiltShiftEffect(originalBitmap, blurParams) - val thumbnail = createThumbnail(processedBitmap) + thumbnail = createThumbnail(processedBitmap) val result = photoSaver.saveBitmap( bitmap = processedBitmap, @@ -232,9 +239,10 @@ class ImageCaptureHandler( ) if (result is SaveResult.Success) { - result.copy(thumbnail = thumbnail) + val output = result.copy(thumbnail = thumbnail) + thumbnail = null // prevent finally from recycling the returned thumbnail + output } else { - thumbnail?.recycle() result } } catch (e: SecurityException) { @@ -244,6 +252,7 @@ class ImageCaptureHandler( Log.e(TAG, "Gallery image processing failed", e) SaveResult.Error("Failed to process image. 
Please try again.", e) } finally { + thumbnail?.recycle() originalBitmap?.recycle() processedBitmap?.recycle() } @@ -259,6 +268,14 @@ class ImageCaptureHandler( val options = BitmapFactory.Options().apply { inJustDecodeBounds = true } context.contentResolver.openInputStream(uri)?.use { stream -> BitmapFactory.decodeStream(stream, null, options) + } ?: run { + Log.e(TAG, "Could not open input stream for URI (dimensions pass): $uri") + return null + } + + if (options.outWidth <= 0 || options.outHeight <= 0) { + Log.e(TAG, "Image has invalid dimensions: ${options.outWidth}x${options.outHeight}, mime: ${options.outMimeType}") + return null } // Calculate sample size to stay within MAX_IMAGE_DIMENSION @@ -271,9 +288,15 @@ class ImageCaptureHandler( // Second pass: decode with sample size val decodeOptions = BitmapFactory.Options().apply { inSampleSize = sampleSize } - context.contentResolver.openInputStream(uri)?.use { stream -> + val bitmap = context.contentResolver.openInputStream(uri)?.use { stream -> BitmapFactory.decodeStream(stream, null, decodeOptions) } + + if (bitmap == null) { + Log.e(TAG, "BitmapFactory.decodeStream returned null for URI: $uri (mime: ${options.outMimeType})") + } + + bitmap } catch (e: SecurityException) { Log.e(TAG, "Permission denied loading bitmap from URI", e) null @@ -340,6 +363,9 @@ class ImageCaptureHandler( * Applies tilt-shift blur effect to a bitmap. * Supports both linear and radial modes. * + * The gradient mask is computed at 1/4 resolution (matching the blur downscale) + * and upscaled for compositing, reducing peak memory by ~93%. + * * All intermediate bitmaps are tracked and recycled in a finally block * so that an OOM or other exception does not leak native memory. */ @@ -351,14 +377,12 @@ class ImageCaptureHandler( var scaled: Bitmap? = null var blurred: Bitmap? = null var blurredFullSize: Bitmap? = null - var mask: Bitmap? 
= null try { result = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888) - val scaleFactor = 4 - val blurredWidth = width / scaleFactor - val blurredHeight = height / scaleFactor + val blurredWidth = width / SCALE_FACTOR + val blurredHeight = height / SCALE_FACTOR scaled = Bitmap.createScaledBitmap(source, blurredWidth, blurredHeight, true) @@ -370,24 +394,22 @@ class ImageCaptureHandler( blurred.recycle() blurred = null - mask = createGradientMask(width, height, params) + // Compute mask at reduced resolution and upscale to avoid full-res per-pixel trig + val maskPixels = createGradientMaskPixels(blurredWidth, blurredHeight, params) + val fullMaskPixels = upscaleMask(maskPixels, blurredWidth, blurredHeight, width, height) // Composite: blend original with blurred based on mask val pixels = IntArray(width * height) val blurredPixels = IntArray(width * height) - val maskPixels = IntArray(width * height) source.getPixels(pixels, 0, width, 0, 0, width, height) blurredFullSize.getPixels(blurredPixels, 0, width, 0, 0, width, height) - mask.getPixels(maskPixels, 0, width, 0, 0, width, height) blurredFullSize.recycle() blurredFullSize = null - mask.recycle() - mask = null for (i in pixels.indices) { - val maskAlpha = (maskPixels[i] and 0xFF) / 255f + val maskAlpha = (fullMaskPixels[i] and 0xFF) / 255f val origR = (pixels[i] shr 16) and 0xFF val origG = (pixels[i] shr 8) and 0xFF val origB = pixels[i] and 0xFF @@ -412,16 +434,14 @@ class ImageCaptureHandler( scaled?.recycle() blurred?.recycle() blurredFullSize?.recycle() - mask?.recycle() } } /** - * Creates a gradient mask for the tilt-shift effect. - * Supports both linear and radial modes. + * Creates a gradient mask as a pixel array at the given dimensions. + * Returns packed ARGB ints where the blue channel encodes blur amount. 
*/ - private fun createGradientMask(width: Int, height: Int, params: BlurParameters): Bitmap { - val mask = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888) + private fun createGradientMaskPixels(width: Int, height: Int, params: BlurParameters): IntArray { val pixels = IntArray(width * height) val centerX = width * params.positionX @@ -437,32 +457,22 @@ class ImageCaptureHandler( for (x in 0 until width) { val dist = when (params.mode) { BlurMode.LINEAR -> { - // Rotate point around focus center val dx = x - centerX val dy = y - centerY val rotatedY = -dx * sinAngle + dy * cosAngle kotlin.math.abs(rotatedY) } BlurMode.RADIAL -> { - // Calculate elliptical distance from center var dx = x - centerX var dy = y - centerY - - // Adjust for screen aspect ratio dx *= screenAspect - - // Rotate val rotatedX = dx * cosAngle - dy * sinAngle val rotatedY = dx * sinAngle + dy * cosAngle - - // Apply ellipse aspect ratio val adjustedX = rotatedX / params.aspectRatio - sqrt(adjustedX * adjustedX + rotatedY * rotatedY) } } - // Calculate blur amount based on distance from focus region val blurAmount = when { dist < focusSize -> 0f dist < focusSize + transitionSize -> { @@ -476,8 +486,48 @@ class ImageCaptureHandler( } } - mask.setPixels(pixels, 0, width, 0, 0, width, height) - return mask + return pixels + } + + /** + * Bilinear upscale of a mask pixel array from small dimensions to full dimensions. 
+ */ + private fun upscaleMask( + smallPixels: IntArray, + smallW: Int, smallH: Int, + fullW: Int, fullH: Int + ): IntArray { + val fullPixels = IntArray(fullW * fullH) + val xRatio = smallW.toFloat() / fullW + val yRatio = smallH.toFloat() / fullH + + for (y in 0 until fullH) { + val srcY = y * yRatio + val y0 = srcY.toInt().coerceIn(0, smallH - 1) + val y1 = (y0 + 1).coerceIn(0, smallH - 1) + val yFrac = srcY - y0 + + for (x in 0 until fullW) { + val srcX = x * xRatio + val x0 = srcX.toInt().coerceIn(0, smallW - 1) + val x1 = (x0 + 1).coerceIn(0, smallW - 1) + val xFrac = srcX - x0 + + // Bilinear interpolation on the blue channel (all channels are equal) + val v00 = smallPixels[y0 * smallW + x0] and 0xFF + val v10 = smallPixels[y0 * smallW + x1] and 0xFF + val v01 = smallPixels[y1 * smallW + x0] and 0xFF + val v11 = smallPixels[y1 * smallW + x1] and 0xFF + + val top = v00 + (v10 - v00) * xFrac + val bottom = v01 + (v11 - v01) * xFrac + val gray = (top + (bottom - top) * yFrac).toInt().coerceIn(0, 255) + + fullPixels[y * fullW + x] = (0xFF shl 24) or (gray shl 16) or (gray shl 8) or gray + } + } + + return fullPixels } /** diff --git a/app/src/main/java/no/naiv/tiltshift/effect/TiltShiftRenderer.kt b/app/src/main/java/no/naiv/tiltshift/effect/TiltShiftRenderer.kt index cd1873c..b3bf963 100644 --- a/app/src/main/java/no/naiv/tiltshift/effect/TiltShiftRenderer.kt +++ b/app/src/main/java/no/naiv/tiltshift/effect/TiltShiftRenderer.kt @@ -65,6 +65,7 @@ class TiltShiftRenderer( 1f, 0f // Top right of screen ) + @Volatile private var currentTexCoords = texCoordsBack override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) 
{ diff --git a/app/src/main/java/no/naiv/tiltshift/effect/TiltShiftShader.kt b/app/src/main/java/no/naiv/tiltshift/effect/TiltShiftShader.kt index 6b962b5..2c3a7d5 100644 --- a/app/src/main/java/no/naiv/tiltshift/effect/TiltShiftShader.kt +++ b/app/src/main/java/no/naiv/tiltshift/effect/TiltShiftShader.kt @@ -4,6 +4,8 @@ import android.content.Context import android.opengl.GLES11Ext import android.opengl.GLES20 import no.naiv.tiltshift.R +import kotlin.math.cos +import kotlin.math.sin import java.io.BufferedReader import java.io.InputStreamReader @@ -33,6 +35,8 @@ class TiltShiftShader(private val context: Context) { private var uFalloffLocation: Int = 0 private var uAspectRatioLocation: Int = 0 private var uResolutionLocation: Int = 0 + private var uCosAngleLocation: Int = 0 + private var uSinAngleLocation: Int = 0 /** * Compiles and links the shader program. @@ -75,6 +79,8 @@ class TiltShiftShader(private val context: Context) { uFalloffLocation = GLES20.glGetUniformLocation(programId, "uFalloff") uAspectRatioLocation = GLES20.glGetUniformLocation(programId, "uAspectRatio") uResolutionLocation = GLES20.glGetUniformLocation(programId, "uResolution") + uCosAngleLocation = GLES20.glGetUniformLocation(programId, "uCosAngle") + uSinAngleLocation = GLES20.glGetUniformLocation(programId, "uSinAngle") // Clean up shaders (they're linked into program now) GLES20.glDeleteShader(vertexShader) @@ -103,6 +109,16 @@ class TiltShiftShader(private val context: Context) { GLES20.glUniform1f(uFalloffLocation, params.falloff) GLES20.glUniform1f(uAspectRatioLocation, params.aspectRatio) GLES20.glUniform2f(uResolutionLocation, width.toFloat(), height.toFloat()) + + // Precompute angle trig on CPU to avoid per-fragment transcendental calls. + // The adjusted angle accounts for the 90deg coordinate transform. 
+ val adjustedAngle = if (isFrontCamera) { + -params.angle - (Math.PI / 2).toFloat() + } else { + params.angle + (Math.PI / 2).toFloat() + } + GLES20.glUniform1f(uCosAngleLocation, cos(adjustedAngle)) + GLES20.glUniform1f(uSinAngleLocation, sin(adjustedAngle)) } /** diff --git a/app/src/main/java/no/naiv/tiltshift/storage/PhotoSaver.kt b/app/src/main/java/no/naiv/tiltshift/storage/PhotoSaver.kt index 083f6cd..8facb4c 100644 --- a/app/src/main/java/no/naiv/tiltshift/storage/PhotoSaver.kt +++ b/app/src/main/java/no/naiv/tiltshift/storage/PhotoSaver.kt @@ -102,7 +102,10 @@ class PhotoSaver(private val context: Context) { ) ?: return SaveResult.Error("Failed to create MediaStore entry") contentResolver.openOutputStream(uri)?.use { outputStream -> - bitmap.compress(Bitmap.CompressFormat.JPEG, 95, outputStream) + if (!bitmap.compress(Bitmap.CompressFormat.JPEG, 95, outputStream)) { + Log.e(TAG, "Bitmap compression returned false") + return SaveResult.Error("Failed to compress image") + } } ?: return SaveResult.Error("Failed to open output stream") writeExifToUri(uri, orientation, location) @@ -121,8 +124,11 @@ class PhotoSaver(private val context: Context) { } } - private fun writeExifToUri(uri: Uri, orientation: Int, location: Location?) { - try { + /** + * Writes EXIF metadata to a saved image. Returns false if writing failed. 
+ */ + private fun writeExifToUri(uri: Uri, orientation: Int, location: Location?): Boolean { + return try { context.contentResolver.openFileDescriptor(uri, "rw")?.use { pfd -> val exif = ExifInterface(pfd.fileDescriptor) @@ -142,8 +148,10 @@ class PhotoSaver(private val context: Context) { exif.saveAttributes() } + true } catch (e: Exception) { - Log.w(TAG, "Failed to write EXIF data", e) + Log.e(TAG, "Failed to write EXIF data", e) + false } } diff --git a/app/src/main/java/no/naiv/tiltshift/ui/CameraScreen.kt b/app/src/main/java/no/naiv/tiltshift/ui/CameraScreen.kt index 7c2838e..6cb7705 100644 --- a/app/src/main/java/no/naiv/tiltshift/ui/CameraScreen.kt +++ b/app/src/main/java/no/naiv/tiltshift/ui/CameraScreen.kt @@ -165,10 +165,19 @@ fun CameraScreen( } } - // Cleanup GL resources (ViewModel handles its own cleanup in onCleared) + // Pause/resume GLSurfaceView when entering/leaving gallery preview + LaunchedEffect(isGalleryPreview) { + if (isGalleryPreview) { + glSurfaceView?.onPause() + } else { + glSurfaceView?.onResume() + } + } + + // Cleanup GL resources on GL thread (ViewModel handles its own cleanup in onCleared) DisposableEffect(Unit) { onDispose { - renderer?.release() + glSurfaceView?.queueEvent { renderer?.release() } } } @@ -460,6 +469,7 @@ fun CameraScreen( context.startActivity(intent) } catch (e: android.content.ActivityNotFoundException) { Log.w("CameraScreen", "No activity found to view image", e) + viewModel.showCameraError("No app available to view photos") } } }, diff --git a/app/src/main/java/no/naiv/tiltshift/ui/CameraViewModel.kt b/app/src/main/java/no/naiv/tiltshift/ui/CameraViewModel.kt index cc9eef8..9595e21 100644 --- a/app/src/main/java/no/naiv/tiltshift/ui/CameraViewModel.kt +++ b/app/src/main/java/no/naiv/tiltshift/ui/CameraViewModel.kt @@ -10,6 +10,7 @@ import androidx.lifecycle.AndroidViewModel import androidx.lifecycle.viewModelScope import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Job +import 
kotlinx.coroutines.delay import kotlinx.coroutines.flow.MutableStateFlow import kotlinx.coroutines.flow.StateFlow import kotlinx.coroutines.flow.asStateFlow @@ -28,6 +29,12 @@ import no.naiv.tiltshift.util.OrientationDetector /** * ViewModel for the camera screen. * Survives configuration changes (rotation) and process death (via SavedStateHandle for primitives). + * + * Bitmap lifecycle: bitmaps emitted to StateFlows are never eagerly recycled, + * because Compose may still be drawing them on the next frame. Instead, the + * previous bitmap is stored and recycled only when the *next* replacement arrives, + * giving Compose at least one full frame to finish. Final cleanup happens in + * cancelGalleryPreview() and onCleared(). */ class CameraViewModel(application: Application) : AndroidViewModel(application) { @@ -35,6 +42,8 @@ class CameraViewModel(application: Application) : AndroidViewModel(application) private const val TAG = "CameraViewModel" /** Max dimension for the preview source bitmap to keep effect computation fast. */ private const val PREVIEW_MAX_DIMENSION = 1024 + /** Debounce delay before recomputing preview to reduce GC pressure during slider drags. */ + private const val PREVIEW_DEBOUNCE_MS = 80L } val cameraManager = CameraManager(application) @@ -75,12 +84,16 @@ class CameraViewModel(application: Application) : AndroidViewModel(application) val galleryImageUri: StateFlow = _galleryImageUri.asStateFlow() /** Downscaled source for fast preview recomputation. */ + @Volatile private var galleryPreviewSource: Bitmap? = null /** Processed preview bitmap shown in the UI. */ private val _galleryPreviewBitmap = MutableStateFlow(null) val galleryPreviewBitmap: StateFlow = _galleryPreviewBitmap.asStateFlow() + /** Previous preview bitmap, kept alive one extra cycle so Compose can finish drawing it. */ + private var pendingRecyclePreview: Bitmap? = null + private var previewJob: Job? 
= null val isGalleryPreview: Boolean get() = _galleryBitmap.value != null @@ -96,6 +109,10 @@ class CameraViewModel(application: Application) : AndroidViewModel(application) private val _isProcessing = MutableStateFlow(false) val isProcessing: StateFlow = _isProcessing.asStateFlow() + // Dismiss jobs for timed indicators + private var errorDismissJob: Job? = null + private var successDismissJob: Job? = null + fun updateBlurParams(params: BlurParameters) { _blurParams.value = params } @@ -127,22 +144,34 @@ class CameraViewModel(application: Application) : AndroidViewModel(application) _galleryImageUri.value = uri // Create downscaled source for fast preview recomputation - galleryPreviewSource = withContext(Dispatchers.IO) { - val maxDim = maxOf(bitmap.width, bitmap.height) - if (maxDim > PREVIEW_MAX_DIMENSION) { - val scale = PREVIEW_MAX_DIMENSION.toFloat() / maxDim - Bitmap.createScaledBitmap( - bitmap, - (bitmap.width * scale).toInt(), - (bitmap.height * scale).toInt(), - true - ) - } else { - bitmap.copy(bitmap.config ?: Bitmap.Config.ARGB_8888, false) + val previewSource = try { + withContext(Dispatchers.IO) { + val maxDim = maxOf(bitmap.width, bitmap.height) + if (maxDim > PREVIEW_MAX_DIMENSION) { + val scale = PREVIEW_MAX_DIMENSION.toFloat() / maxDim + Bitmap.createScaledBitmap( + bitmap, + (bitmap.width * scale).toInt(), + (bitmap.height * scale).toInt(), + true + ) + } else { + bitmap.copy(bitmap.config ?: Bitmap.Config.ARGB_8888, false) + } } + } catch (e: Exception) { + Log.e(TAG, "Failed to create preview source", e) + null } - startPreviewLoop() + if (previewSource != null) { + galleryPreviewSource = previewSource + startPreviewLoop() + } else { + haptics.error() + showError("Failed to prepare image for preview") + cancelGalleryPreview() + } } else { haptics.error() showError("Failed to load image") @@ -150,38 +179,55 @@ class CameraViewModel(application: Application) : AndroidViewModel(application) } } - /** Reactively recomputes the tilt-shift preview 
when blur params change. */ + /** + * Reactively recomputes the tilt-shift preview when blur params change. + * Uses debounce to reduce allocations during rapid slider drags. + * Old preview bitmaps are recycled one cycle late to avoid racing with Compose draws. + */ private fun startPreviewLoop() { previewJob?.cancel() previewJob = viewModelScope.launch { _blurParams.collectLatest { params -> + delay(PREVIEW_DEBOUNCE_MS) val source = galleryPreviewSource ?: return@collectLatest try { val processed = captureHandler.applyTiltShiftPreview(source, params) - val old = _galleryPreviewBitmap.value + // Recycle the bitmap from two updates ago (Compose has had time to finish) + pendingRecyclePreview?.recycle() + // The current preview becomes pending; the new one becomes current + pendingRecyclePreview = _galleryPreviewBitmap.value _galleryPreviewBitmap.value = processed - old?.recycle() } catch (e: Exception) { - Log.w(TAG, "Preview computation failed", e) + Log.e(TAG, "Preview computation failed", e) + showError("Preview update failed") } } } } fun cancelGalleryPreview() { - previewJob?.cancel() + // Cancel the preview job and wait for its CPU work to finish + // so we don't recycle galleryPreviewSource while it's being read + val job = previewJob previewJob = null + job?.cancel() - val oldGallery = _galleryBitmap.value - val oldPreview = _galleryPreviewBitmap.value - _galleryBitmap.value = null - _galleryImageUri.value = null - _galleryPreviewBitmap.value = null + viewModelScope.launch { + job?.join() - oldGallery?.recycle() - oldPreview?.recycle() - galleryPreviewSource?.recycle() - galleryPreviewSource = null + val oldGallery = _galleryBitmap.value + val oldPreview = _galleryPreviewBitmap.value + _galleryBitmap.value = null + _galleryImageUri.value = null + _galleryPreviewBitmap.value = null + + oldGallery?.recycle() + oldPreview?.recycle() + pendingRecyclePreview?.recycle() + pendingRecyclePreview = null + galleryPreviewSource?.recycle() + galleryPreviewSource = null 
+ } } fun applyGalleryEffect() { @@ -226,17 +272,22 @@ class CameraViewModel(application: Application) : AndroidViewModel(application) } } + /** Previous thumbnail kept one cycle for Compose to finish drawing. */ + private var pendingRecycleThumbnail: Bitmap? = null + private fun handleSaveResult(result: SaveResult) { when (result) { is SaveResult.Success -> { haptics.success() - val oldThumb = _lastThumbnailBitmap.value + // Recycle the thumbnail from two updates ago (safe from Compose) + pendingRecycleThumbnail?.recycle() + pendingRecycleThumbnail = _lastThumbnailBitmap.value _lastThumbnailBitmap.value = result.thumbnail _lastSavedUri.value = result.uri - oldThumb?.recycle() - viewModelScope.launch { + successDismissJob?.cancel() + successDismissJob = viewModelScope.launch { _showSaveSuccess.value = true - kotlinx.coroutines.delay(1500) + delay(1500) _showSaveSuccess.value = false } } @@ -248,9 +299,10 @@ class CameraViewModel(application: Application) : AndroidViewModel(application) } private fun showError(message: String) { - viewModelScope.launch { - _showSaveError.value = message - kotlinx.coroutines.delay(2000) + errorDismissJob?.cancel() + _showSaveError.value = message + errorDismissJob = viewModelScope.launch { + delay(2000) _showSaveError.value = null } } @@ -263,8 +315,10 @@ class CameraViewModel(application: Application) : AndroidViewModel(application) super.onCleared() cameraManager.release() _lastThumbnailBitmap.value?.recycle() + pendingRecycleThumbnail?.recycle() _galleryBitmap.value?.recycle() _galleryPreviewBitmap.value?.recycle() + pendingRecyclePreview?.recycle() galleryPreviewSource?.recycle() } } diff --git a/app/src/main/java/no/naiv/tiltshift/util/LocationProvider.kt b/app/src/main/java/no/naiv/tiltshift/util/LocationProvider.kt index f8d7b53..ae8fb34 100644 --- a/app/src/main/java/no/naiv/tiltshift/util/LocationProvider.kt +++ b/app/src/main/java/no/naiv/tiltshift/util/LocationProvider.kt @@ -79,6 +79,10 @@ class LocationProvider(private 
val context: Context) { return ContextCompat.checkSelfPermission( context, Manifest.permission.ACCESS_FINE_LOCATION + ) == PackageManager.PERMISSION_GRANTED || + ContextCompat.checkSelfPermission( + context, + Manifest.permission.ACCESS_COARSE_LOCATION ) == PackageManager.PERMISSION_GRANTED } } diff --git a/app/src/main/res/raw/tiltshift_fragment.glsl b/app/src/main/res/raw/tiltshift_fragment.glsl index db1b415..f1618e4 100644 --- a/app/src/main/res/raw/tiltshift_fragment.glsl +++ b/app/src/main/res/raw/tiltshift_fragment.glsl @@ -20,6 +20,10 @@ uniform float uFalloff; // Transition sharpness (0-1, higher = more grad uniform float uAspectRatio; // Ellipse aspect ratio for radial mode uniform vec2 uResolution; // Texture resolution for proper sampling +// Precomputed trig for the adjusted angle (avoids per-fragment cos/sin calls) +uniform float uCosAngle; +uniform float uSinAngle; + varying vec2 vTexCoord; // Calculate signed distance from the focus region for LINEAR mode @@ -37,25 +41,11 @@ float linearFocusDistance(vec2 uv) { vec2 offset = uv - center; // Correct for screen aspect ratio to make coordinate space square - // After transform: offset.x = screen Y direction, offset.y = screen X direction - // Scale offset.y to match the scale of offset.x (height units) float screenAspect = uResolution.x / uResolution.y; offset.y *= screenAspect; - // Adjust angle to compensate for the coordinate transformation - // Back camera: +90° for the 90° CW rotation - // Front camera: -90° (negated due to X flip mirror effect) - float adjustedAngle; - if (uIsFrontCamera == 1) { - adjustedAngle = -uAngle - 1.5707963; - } else { - adjustedAngle = uAngle + 1.5707963; - } - float cosA = cos(adjustedAngle); - float sinA = sin(adjustedAngle); - - // After rotation, measure perpendicular distance from center line - float rotatedY = -offset.x * sinA + offset.y * cosA; + // Use precomputed cos/sin for the adjusted angle + float rotatedY = -offset.x * uSinAngle + offset.y * uCosAngle; 
return abs(rotatedY); } @@ -63,7 +53,6 @@ float linearFocusDistance(vec2 uv) { // Calculate signed distance from the focus region for RADIAL mode float radialFocusDistance(vec2 uv) { // Center point of the focus region - // Transform from screen coordinates to texture coordinates vec2 center; if (uIsFrontCamera == 1) { center = vec2(1.0 - uPositionY, 1.0 - uPositionX); @@ -72,24 +61,14 @@ float radialFocusDistance(vec2 uv) { } vec2 offset = uv - center; - // Correct for screen aspect ratio to make coordinate space square - // After transform: offset.x = screen Y direction, offset.y = screen X direction - // Scale offset.y to match the scale of offset.x (height units) + // Correct for screen aspect ratio float screenAspect = uResolution.x / uResolution.y; offset.y *= screenAspect; - // Apply rotation with angle adjustment for coordinate transformation - float adjustedAngle; - if (uIsFrontCamera == 1) { - adjustedAngle = -uAngle - 1.5707963; - } else { - adjustedAngle = uAngle + 1.5707963; - } - float cosA = cos(adjustedAngle); - float sinA = sin(adjustedAngle); + // Use precomputed cos/sin for rotation vec2 rotated = vec2( - offset.x * cosA - offset.y * sinA, - offset.x * sinA + offset.y * cosA + offset.x * uCosAngle - offset.y * uSinAngle, + offset.x * uSinAngle + offset.y * uCosAngle ); // Apply ellipse aspect ratio @@ -114,26 +93,12 @@ float blurFactor(float dist) { return smoothstep(0.0, 1.0, normalizedDist) * uBlurAmount; } -// Get Gaussian weight for blur kernel (9-tap, sigma ~= 2.0) -float getWeight(int i) { - if (i == 0) return 0.0162; - if (i == 1) return 0.0540; - if (i == 2) return 0.1216; - if (i == 3) return 0.1933; - if (i == 4) return 0.2258; - if (i == 5) return 0.1933; - if (i == 6) return 0.1216; - if (i == 7) return 0.0540; - return 0.0162; // i == 8 -} - -// Sample with Gaussian blur +// Sample with Gaussian blur (9-tap, sigma ~= 2.0, unrolled for GLSL ES 1.00 compatibility) vec4 sampleBlurred(vec2 uv, float blur) { if (blur < 0.01) { return 
texture2D(uTexture, uv); } - vec4 color = vec4(0.0); vec2 texelSize = 1.0 / uResolution; // For radial mode, blur in radial direction from center @@ -141,7 +106,6 @@ vec4 sampleBlurred(vec2 uv, float blur) { vec2 blurDir; if (uMode == 1) { // Radial: blur away from center - // Transform from screen coordinates to texture coordinates vec2 center; if (uIsFrontCamera == 1) { center = vec2(1.0 - uPositionY, 1.0 - uPositionX); @@ -156,26 +120,25 @@ vec4 sampleBlurred(vec2 uv, float blur) { blurDir = vec2(1.0, 0.0); } } else { - // Linear: blur perpendicular to focus line - // Adjust angle for coordinate transformation - float blurAngle; - if (uIsFrontCamera == 1) { - blurAngle = -uAngle - 1.5707963; - } else { - blurAngle = uAngle + 1.5707963; - } - blurDir = vec2(cos(blurAngle), sin(blurAngle)); + // Linear: blur perpendicular to focus line using precomputed trig + blurDir = vec2(uCosAngle, uSinAngle); } // Scale blur radius by blur amount float radius = blur * 20.0; + vec2 sampleStep = blurDir * texelSize * radius; - // 9-tap Gaussian blur - for (int i = 0; i < 9; i++) { - float offset = float(i) - 4.0; - vec2 samplePos = uv + blurDir * texelSize * offset * radius; - color += texture2D(uTexture, samplePos) * getWeight(i); - } + // Unrolled 9-tap Gaussian blur (avoids integer-branched weight lookup) + vec4 color = vec4(0.0); + color += texture2D(uTexture, uv + sampleStep * -4.0) * 0.0162; + color += texture2D(uTexture, uv + sampleStep * -3.0) * 0.0540; + color += texture2D(uTexture, uv + sampleStep * -2.0) * 0.1216; + color += texture2D(uTexture, uv + sampleStep * -1.0) * 0.1933; + color += texture2D(uTexture, uv) * 0.2258; + color += texture2D(uTexture, uv + sampleStep * 1.0) * 0.1933; + color += texture2D(uTexture, uv + sampleStep * 2.0) * 0.1216; + color += texture2D(uTexture, uv + sampleStep * 3.0) * 0.0540; + color += texture2D(uTexture, uv + sampleStep * 4.0) * 0.0162; return color; } diff --git a/app/src/main/res/values-night/themes.xml b/app/src/main/res/values-night/themes.xml new file mode 100644 index 
0000000..e92f290 --- /dev/null +++ b/app/src/main/res/values-night/themes.xml @@ -0,0 +1,9 @@ + + + + diff --git a/app/src/main/res/values/themes.xml b/app/src/main/res/values/themes.xml index c1f6d5a..50888f8 100644 --- a/app/src/main/res/values/themes.xml +++ b/app/src/main/res/values/themes.xml @@ -1,8 +1,9 @@ -