@Published Array Not Updating Despite Correct Values in Async Closure

Hi! I'm very new to Swift development. I'm working on an app that reads text from an image and applies different logic to each piece of text depending on its position in the image. The issue is that TextRecognitionViewModel.recognizedTexts never seems to receive the values I assign to it from mappedTexts inside the async closure. I've already checked: mappedTexts has the strings and bounding boxes perfectly (the print right before the assignment confirms it). Thanks for the help!


//
//  ContentView.swift
//  receipt
//
//  Created by Daniel Elek on 27/10/2024.
//

import SwiftUI
import Vision

struct ContentView: View {
    @State private var isShowingCamera = false
    @State private var imageTaken: UIImage?
    @StateObject private var recogViewModel = TextRecognitionViewModel()

    func performHapticFeedback() {
        let generator = UIImpactFeedbackGenerator(style: .light)
        generator.impactOccurred()
    }

    var body: some View {
        ZStack {
            if imageTaken == nil {
                Color(.blue).ignoresSafeArea()

                VStack {
                    Text("Recognizing Text")
                        .bold()
                        .font(.system(size: 40, weight: .medium))
                        .foregroundColor(.white)
                        .padding(50)
                    Spacer()

                    Button {
                        performHapticFeedback()
                        self.isShowingCamera.toggle()
                    } label: {
                        Text("Take Photo")
                            .frame(width: 200, height: 44)
                            .background(Color.black)
                            .cornerRadius(10)
                            .font(.system(size: 20, weight: .medium))
                            .foregroundColor(.white)
                            .padding(.bottom, 80)
                    }
                }
            } else {
                VStack {
                    if recogViewModel.isLoading {
                        ProgressView("Processing Image...").padding()
                    } else {
                        VStack {
                            Image(uiImage: imageTaken ?? UIImage())
                                .resizable()
                                .aspectRatio(contentMode: .fit)
                                .frame(maxWidth: .infinity)
                                .padding()

                            Button(action: {
                                self.imageTaken = nil
                                recogViewModel.recognizedTexts = []
                            }) {
                                HStack {
                                    Image(systemName: "camera")
                                    Text("Re-take Picture")
                                }
                            }
                            .padding()

                            if !recogViewModel.recognizedTexts.isEmpty {
                                // DetectedText is Identifiable, so no explicit id key path is needed.
                                List(recogViewModel.recognizedTexts) { item in
                                    Text(item.text)
                                }
                            }
                        }
                    }
                }
            }
        }
        .fullScreenCover(isPresented: $isShowingCamera) {
            CameraView(image: $imageTaken)
                .edgesIgnoringSafeArea(.all)
                .onDisappear {
                    // Kick off recognition once the camera sheet is dismissed
                    // and a photo was actually taken.
                    if imageTaken != nil {
                        recogViewModel.recognizeCardText(from: imageTaken)
                    }
                }
        }
    }
}


struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}


//
//  TextRecognitionViewModel.swift
//  receipt
//
//  Created by Dániel Elek on 05/11/2024.
//


import SwiftUI
import Vision
import Combine

class TextRecognitionViewModel: ObservableObject {
    @Published var recognizedTexts: [DetectedText] = []
    @Published var isLoading = false
    
    struct DetectedText: Identifiable, Hashable {
        let id: UUID
        let text: String
        let boundingBox: CGRect
    }

    func recognizeCardText(from image: UIImage?) {
        guard let cgImage = image?.cgImage else {
            print("Failed to get CGImage from UIImage")
            self.isLoading = false
            return
        }

        let requestHandler = VNImageRequestHandler(cgImage: cgImage, options: [:])
        let recognizeTextRequest = VNRecognizeTextRequest { [weak self] (request, error) in
            if let error = error {
                print("Error during text recognition: \(error.localizedDescription)")
                DispatchQueue.main.async {
                    self?.isLoading = false
                }
                return
            }

            guard let observations = request.results as? [VNRecognizedTextObservation] else {
                print("No text found.")
                DispatchQueue.main.async {
                    self?.isLoading = false
                }
                return
            }

            var textsWithBoundingBoxes: [(text: String, boundingBox: CGRect)] = []
            for observation in observations {
                if let candidate = observation.topCandidates(1).first {
                    let recognizedText = candidate.string
                    let boundingBox = observation.boundingBox
                    textsWithBoundingBoxes.append((text: recognizedText, boundingBox: boundingBox))
                }
            }

            DispatchQueue.main.async {
                let mappedTexts = textsWithBoundingBoxes.map { (text, boundingBox) in
                    TextRecognitionViewModel.DetectedText(id: UUID(), text: text, boundingBox: boundingBox)
                }
                print("Mapped Texts:", mappedTexts)  // Verify this has values

                self?.recognizedTexts = mappedTexts
                self?.isLoading = false
            }
        }
        
        recognizeTextRequest.recognitionLevel = .accurate
        // Called from onDisappear, i.e. on the main thread, so this
        // @Published mutation is safe here.
        self.isLoading = true

        // Perform the Vision request off the main thread.
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            do {
                try requestHandler.perform([recognizeTextRequest])
            } catch {
                print("Failed to perform text recognition: \(error.localizedDescription)")
                DispatchQueue.main.async {
                    self?.isLoading = false
                }
            }
        }
    }
}
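
One extra check I can add inside TextRecognitionViewModel is a sink on the publisher itself, so every assignment to recognizedTexts gets logged no matter what the view does (just a sketch; the cancellables property and the init are debugging additions, and Combine is already imported above):

private var cancellables = Set<AnyCancellable>()

init() {
    // Debugging sketch: log every value the publisher emits.
    $recognizedTexts
        .sink { texts in
            print("recognizedTexts published \(texts.count) item(s)")
        }
        .store(in: &cancellables)
}

If that sink prints but the List never updates, the problem is on the view side; if it never prints, the assignment itself isn't going through.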

This should be in the "Using Swift" category. (I wanted to move it myself, but apparently I've lost the superpowers.)


Distill this down to what's essential. For example:

import SwiftUI

struct DetectedText: Identifiable, Hashable {
    let id: UUID
    let text: String
}
class Model: ObservableObject {
    @Published var recognizedTexts: [DetectedText] = []
    private var n = 0
    static let texts: [[DetectedText]] = [
        [],
        [.init(id: .init(), text: "Hej!")],
        [.init(id: .init(), text: "Hello"), .init(id: .init(), text: "World")]
    ]
    init() {
        // The timer fires on the main run loop, so this @Published mutation
        // happens on the main thread. (The closure retains self, which is
        // fine for a throwaway demo.)
        Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { _ in
            self.recognizedTexts = Self.texts[self.n % 3]
            self.n += 1
        }
    }
}
struct ContentView: View {
    @StateObject private var model = Model()

    var body: some View {
        if !model.recognizedTexts.isEmpty {
            List(model.recognizedTexts) { item in
                Text(item.text)
            }
        } else {
            Text("empty list")
        }
    }
}

(BTW, this works as expected).


By distilling it down to minimum minimorum you'll:

  • make it easy for readers to understand,
  • likely find what's triggering the error (and thus how to fix it; see the sketch below),
  • or, if the bug is still there in a minimal app and nothing is obviously wrong, you may have found a bug in SwiftUI (unlikely in this case), and the distilled sample would be the perfect payload to go with the bug report.
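
For example, the async half of your code distills to something like this (a sketch: Vision is replaced by a fake background computation and all names are placeholders, but the background queue, the hop to the main queue, and the @Published assignment are kept):

import SwiftUI

// Sketch: the publishing path from the original code, minus Vision.
class AsyncModel: ObservableObject {
    @Published var recognizedTexts: [String] = []
    @Published var isLoading = false

    func recognize() {
        isLoading = true
        DispatchQueue.global(qos: .userInitiated).async {
            let results = ["Hello", "World"]  // stand-in for Vision output
            DispatchQueue.main.async { [weak self] in
                self?.recognizedTexts = results
                self?.isLoading = false
            }
        }
    }
}

struct AsyncContentView: View {
    @StateObject private var model = AsyncModel()

    var body: some View {
        VStack {
            if model.isLoading {
                ProgressView()
            } else {
                List(model.recognizedTexts, id: \.self) { Text($0) }
            }
            Button("Recognize") { model.recognize() }
        }
    }
}

If the list updates here but not in your app, the publishing path is fine and the bug is upstream (the camera flow or Vision); if it doesn't update, you've cornered the bug in a couple dozen lines.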