I'm working on a lung cancer scanning app for iOS with a Core ML model, and when I test the app on a physical device the model returns the same prediction 100% of the time. I even swapped the label names around and it still landed on the same case. I've listed my labels as cases in a switch statement, and it's just stuck on the same one (case 1).
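For reference, here is a minimal debug sketch of the kind of check I want to add, to see whether the raw logits actually change between scans. It is only a sketch: it assumes the generated LungAI_image_input class, its conv2d_input input, and its Identity output exactly as they appear in my code below, and it is not wired into the app yet.

import CoreML
import CoreVideo

// Debug sketch: print the raw scores for one pixel buffer so I can compare
// them across different scans. If they are identical for every image, the
// problem is before the switch statement that maps class index to label.
func dumpRawLogits(_ pixelBuffer: CVPixelBuffer, model: LungAI_image_input) {
    do {
        let prediction = try model.prediction(conv2d_input: pixelBuffer)
        let logits = prediction.Identity // MLMultiArray of raw scores
        for i in 0..<logits.count {
            print("class \(i): \(logits[i].floatValue)")
        }
    } catch {
        print("Debug prediction failed: \(error)")
    }
}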
My code is below:
import SwiftUI
import UIKit
import CoreML
import FirebaseAuth
import Firebase
import FirebaseFirestore
import Foundation
let firestoreManager = AppDelegate()
struct ConditionsListView: View {
let conditions: [String]
@Binding var showConditions: Bool
var body: some View {
NavigationView {
VStack {
Text("App is 99% accurate in diagnosing these conditions. This is not a comprehensive list of all abnormalities.\n")
.font(.subheadline)
.foregroundColor(.black)
.padding(.horizontal, 15)
.padding(.bottom, 3)
List(conditions, id: \.self) { condition in
Text(condition)
}
}
.navigationTitle("Trained Conditions")
.toolbar {
ToolbarItem(placement: .navigationBarLeading) {
Button(action: {
showConditions = false
}) {
Image(systemName: "xmark")
.foregroundColor(.black)
}
}
}
.background(Color(hex: colorPalette[0]))
.edgesIgnoringSafeArea(.bottom)
}
}
}
struct ImagePicker: UIViewControllerRepresentable {
class Coordinator: NSObject, UINavigationControllerDelegate, UIImagePickerControllerDelegate {
var parent: ImagePicker
init(parent: ImagePicker) {
self.parent = parent
}
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
if let image = info[.originalImage] as? UIImage {
parent.selectedImage = image
}
parent.presentationMode.wrappedValue.dismiss()
}
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
parent.presentationMode.wrappedValue.dismiss()
}
}
@Binding var selectedImage: UIImage?
@Environment(\.presentationMode) var presentationMode
func makeCoordinator() -> Coordinator {
Coordinator(parent: self)
}
func makeUIViewController(context: Context) -> UIImagePickerController {
let imagePicker = UIImagePickerController()
imagePicker.sourceType = .camera
imagePicker.delegate = context.coordinator
return imagePicker
}
func updateUIViewController(_ uiViewController: UIImagePickerController, context: Context) {}
}
struct BodyImageView: View {
@Binding var showImagePicker: Bool
@Binding var selectedLocation: String?
@Binding var showPopup: Bool
let bodyAreas = [
//(label: "Torso", rect: CGRect(x: 140, y: 110, width: 80, height: 170)),
//(label: "Head/Shoulders", rect: CGRect(x: 100, y: 0, width: 170, height: 100)),
//(label: "Right Arm", rect: CGRect(x: 220, y: 90, width: 60, height: 220)),
//(label: "Left Arm", rect: CGRect(x: 80, y: 90, width: 60, height: 220)),
(label: "Right Lung", rect: CGRect(x: 180, y: 250, width: 50, height: 260)),
(label: "Left Lung", rect: CGRect(x: 100, y: 250, width: 50, height: 260))
]
var body: some View {
VStack {
ZStack {
Image("lung")
.resizable()
.scaledToFit()
.frame(maxWidth: 300, maxHeight: 500)
.padding(.bottom, 5)
.gesture(
DragGesture(minimumDistance: 0)
.onEnded { value in
let tapLocation = value.location
if let area = checkIfInsideOutlinedArea(location: tapLocation) {
selectedLocation = area
}
}
)
ForEach(bodyAreas, id: \.label) { area in
RoundedRectangle(cornerRadius: 10)
.stroke(Color.purple, lineWidth: 2)
.frame(width: area.rect.width, height: area.rect.height)
.position(x: area.rect.midX, y: area.rect.midY)
.opacity(0)
}
}
HStack {
if let location = selectedLocation {
//Text("\n\n")
ZStack {
Rectangle()
.fill(Color(hex: colorPalette[0]))
.frame(width: 230, height: 60)
.cornerRadius(10)
Text("Selected: \(location)")
.foregroundColor(.black)
.padding(5)
.cornerRadius(5)
}
} else {
ZStack {
Rectangle()
.fill(Color(hex: colorPalette[0]))
.frame(width: 230, height: 60)
.cornerRadius(10)
Text("Selected: ")
.foregroundColor(.black)
.padding(5)
.cornerRadius(5)
}
.cornerRadius(10)
}
Button(action: {
showPopup = true
}) {
Image(systemName: "camera.fill")
.resizable()
.frame(width: 30, height: 24)
.padding()
.foregroundColor(.black)
.background(Color(hex: colorPalette[2]))
.cornerRadius(10)
}
.sheet(isPresented: $showPopup) {
ZStack {
VStack(spacing: 20) {
Text("Take a clear picture with the CT Scan **centered** and **in focus**. Hold the phone 90 degrees to the scan.")
.font(.subheadline)
.padding()
.frame(maxWidth: .infinity)
.multilineTextAlignment(.leading)
.cornerRadius(10)
.foregroundColor(.white)
Text("If a **non-scan image** is submitted, the results are not applicable.")
.font(.subheadline)
.padding()
.frame(maxWidth: .infinity)
.cornerRadius(10)
.foregroundColor(.white)
Button(action: {
showPopup = false
showImagePicker = true
}) {
Image(systemName: "xmark")
.padding()
.foregroundColor(Color(hex: colorPalette[4]))
.background(Color.white)
.cornerRadius(10)
}
}
.padding()
.background(Color(hex: colorPalette[4]))
.cornerRadius(20)
.shadow(radius: 15)
.frame(width: 320)
}
}
if showImagePicker {
ImagePicker(selectedImage: .constant(nil))
.zIndex(1)
}
}
}
}
private func checkIfInsideOutlinedArea(location: CGPoint) -> String? {
for area in bodyAreas {
if area.rect.contains(location) {
return area.label
}
}
return nil
}
}
struct SecondView: View {
@State private var showImagePicker = false
@State private var image: UIImage?
@State private var firstName = ""
@State private var lastName = ""
@State private var region = ""
@State private var showForm = false
@State private var predictionResult: String?
@State private var diagResult: Int?
@State private var diagResultString: String?
@State private var documentID: String? = nil
@State private var result: String?
@State private var selectedLocation: String?
@State private var showPopup = false
@State private var showAlert = false
@State private var alertMessage = ""
private let conditions = [
"Large Cell Carcinoma"
, "Adenocarcinoma"
, "Squamous Cell Carcinoma"
]
/*
private let riskClassifier: RiskClassifier = {
do {
let configuration = MLModelConfiguration()
return try RiskClassifier(configuration: configuration)
} catch {
fatalError("Couldn't load RiskClassifier model: \(error)")
}
}()
*/
private let riskClassifier: LungAI_image_input = {
do {
let configuration = MLModelConfiguration()
return try LungAI_image_input(configuration: configuration)
} catch {
fatalError("Couldn't load LungAI model: \(error)")
}
}()
private let diagnoser: Diagnoser = {
do {
let configuration = MLModelConfiguration()
return try Diagnoser(configuration: configuration)
} catch {
fatalError("Couldn't load Diagnoser model: \(error)")
}
}()
var body: some View {
ScrollView {
HStack {
VStack {
if let image = image {
} else {
//Text("\nTap the body part being scanned, then click the pink camera icon.\n")
// .multilineTextAlignment(.center)
//.padding(.horizontal, 12)
//BodyImageView(showImagePicker: $showImagePicker, selectedLocation: $selectedLocation, showPopup: $showPopup)
BodyImageView(showImagePicker: $showImagePicker, selectedLocation: $selectedLocation, showPopup: $showPopup)
}
VStack {
if let result = predictionResult, let diagnosis = diagResultString {
ZStack {
VStack {
HStack(spacing: 10) {
ZStack {
VStack {
Text("Prediction:")
.font(.headline)
.fontWeight(.bold)
.padding(.top, 10)
.multilineTextAlignment(.center)
.foregroundColor(.white)
Text("\(result)")
.font(.subheadline)
.padding()
.lineLimit(nil)
.multilineTextAlignment(.center)
.foregroundColor(.white)
Button(action: {
if result.lowercased() == "normal" {
showAlert(message: "Normal means that the condition is not cancerous.")
} else if result.lowercased() == "large_cell_carcinoma" {
showAlert(message: "Large Cell Carcinoma means that the condition is harmful and is cancerous.")
} else if result.lowercased() == "lung_adenocarcinoma" {
showAlert(message: "Adenocarcinoma means that the condition is harmful and is cancerous.")
} else if result.lowercased() == "lung squamous_cell_carcinoma" {
showAlert(message: "Squamous Cell Carcinoma means that the condition is harmful and is cancerous.")
}
}) {
Image(systemName: "questionmark.circle.fill")
.font(.title)
.foregroundColor(.white)
}
.padding(.top, 1)
}
.padding()
.frame(width: 170, height: 200)
.background(
LinearGradient(
gradient: Gradient(colors: [
Color(hex: colorPalette[0]).opacity(0.6),
Color(hex: colorPalette[1]).opacity(1.4)
]),
startPoint: .topLeading,
endPoint: .bottomTrailing
)
)
.cornerRadius(12)
.shadow(radius: 2)
.padding(.top, 40)
}
ZStack {
VStack {
Text("Diagnosis:")
.font(.headline)
.fontWeight(.bold)
.padding(.top, 10)
.multilineTextAlignment(.center)
.foregroundColor(.white)
Text("\(diagnosis)")
.font(.subheadline)
.padding()
.multilineTextAlignment(.center)
.lineLimit(nil)
.foregroundColor(.white)
Button(action: {
if diagnosis.lowercased() == "normal" {
showAlert(message: "You're most likely healthy.")
} else {
showAlert(message: "Learn more about your diagnosis by asking the Detect to Protect Radiologist in the Chat tab.")
}
}) {
Image(systemName: "questionmark.circle.fill")
.font(.title)
.foregroundColor(.white)
}
// .padding(.top, 5)
}
.padding()
.frame(width: 170, height: 200)
.background(
LinearGradient(
gradient: Gradient(colors: [
Color(hex: colorPalette[2]).opacity(0.8),
Color(hex: colorPalette[3]).opacity(1.3)
]),
startPoint: .topLeading,
endPoint: .bottomTrailing
)
)
.cornerRadius(12)
.shadow(radius: 2)
.padding(.top, 40)
}
}
.padding(.horizontal, 20)
.padding(.top, 10)
HStack(spacing: 10) {
VStack {
ZStack {
Text("This is not a substitute for medical advice. Please see a licensed physician for a further diagnosis.")
.multilineTextAlignment(.center)
.padding(.vertical, 15)
.font(.subheadline)
.foregroundColor(.white)
.padding(.horizontal, 15)
}
.frame(width: 170, height: 250)
.background(
LinearGradient(
gradient: Gradient(colors: [
Color(hex: colorPalette[4]).opacity(0.64),
Color(hex: colorPalette[4]).opacity(1.6)
]),
startPoint: .topLeading,
endPoint: .bottomTrailing
)
)
.cornerRadius(12)
}
VStack {
ZStack {
Image(uiImage: image!)
.resizable()
.scaledToFit()
.aspectRatio(contentMode: .fill)
.frame(width: 170, height: 250)
.cornerRadius(12)
}
}
}
.padding(.bottom, 20)
.padding()
Button(action: {
resetView()
}) {
Text("Scan Again")
.padding(.horizontal, 30)
.padding(.vertical, 10)
.font(.headline)
.foregroundColor(.white)
.background(Color(hex: colorPalette[5]))
.cornerRadius(12)
}
}
.padding(.horizontal, 20)
.padding(.top, 5)
.background(Color.clear)
.cornerRadius(15)
}
}
}
.sheet(isPresented: $showImagePicker) {
ImagePicker(selectedImage: $image)
}
.alert(isPresented: $showAlert) {
Alert(title: Text("Learn More"), message: Text(alertMessage), dismissButton: .default(Text("OK")))
}
.onChange(of: image) { oldValue, newImage in
if let image = newImage {
predictImage(image: image)
}
}
}
}
}
}
private func resetView() {
image = nil
predictionResult = nil
diagResult = nil
diagResultString = nil
documentID = nil
result = nil
selectedLocation = nil
showForm = false
}
func showAlert(message: String) {
alertMessage = message
showAlert = true
}
private func predictImage(image: UIImage) {
let currentLocation = selectedLocation
guard let pixelBuffer = image.toCVPixelBuffer() else {
print("Failed to convert image to pixel buffer")
return
}
DispatchQueue.global(qos: .userInitiated).async {
do {
let riskPrediction = try self.riskClassifier.prediction(conv2d_input: pixelBuffer)
// Access the output using the correct name: "Identity"
let riskLogits = riskPrediction.Identity
let riskProbabilities = self.softmaxFloat32(riskLogits)
let predictedRiskClass = riskProbabilities.argmax()
let riskResult: String
// Assuming the classes are ordered: 0, 1, 2, 3
switch predictedRiskClass {
case 0:
riskResult = "Lung_adenocarcinoma"
case 1:
riskResult = "Lung squamous_cell_carcinoma"
case 2:
riskResult = "Lung_benign_tissue"
case 3:
riskResult = "large_cell_carcinoma"
default:
riskResult = "Unknown"
}
var diagResultValue: Int? = nil
var diagResultStringValue: String = "Diagnosis not available"
// ... (rest of the code to process the diagnosis and update the UI)
DispatchQueue.main.async {
self.predictionResult = riskResult
self.result = riskResult
self.diagResult = diagResultValue
self.diagResultString = diagResultStringValue
}
} catch {
print("Prediction error: \(error.localizedDescription)")
DispatchQueue.main.async {
self.predictionResult = "Prediction failed"
self.diagResult = 0
}
}
}
}
// Function to calculate softmax for Float32 MLMultiArray
private func softmaxFloat32(_ logits: MLMultiArray) -> [Float32] {
// Numerically stable softmax over the raw logits
let values = (0..<logits.count).map { Float32(truncating: logits[$0]) }
let maxValue = values.max() ?? 0
let expValues = values.map { exp($0 - maxValue) }
let sumOfExp = expValues.reduce(0, +)
return expValues.map { $0 / sumOfExp }
}
// NOTE: the next two function signatures got cut off when I pasted the code;
// the names here are placeholders, the bodies are as in my project.
// Converts a model output to a plain [Float32]
private func outputToFloatArray(_ output: Any) -> [Float32] {
guard let array = output as? MLMultiArray else {
print("Failed to cast output to MLMultiArray")
return []
}
return (0..<array.count).map { Float32(truncating: array[$0]) }
}
// Loads a UIImage from a file path
func loadImage(path: String) -> UIImage? {
let fileManager = FileManager.default
guard fileManager.fileExists(atPath: path) else {
print("File does not exist at path: \(path)")
return nil
}
return UIImage(contentsOfFile: path)
}
}
extension Array where Element == Double {
func argmax() -> Int {
guard !isEmpty else { return -1 }
return self.enumerated().max(by: { $0.element < $1.element })?.offset ?? 0
}
}
extension Array where Element == Float32 {
func argmax() -> Int {
guard !isEmpty else { return -1 }
return self.enumerated().max(by: { $0.element < $1.element })?.offset ?? 0
}
}
extension MLMultiArray {
func argmax() -> Int {
var maxIndex = 0
var maxValue = self[0].doubleValue
for i in 1..<self.count {
let currentValue = self[i].doubleValue
if currentValue > maxValue {
maxIndex = i
maxValue = currentValue
}
}
return maxIndex
}
}
extension UIImage {
func toCVPixelBuffer() -> CVPixelBuffer? {
let targetSize = CGSize(width: 224, height: 224)
UIGraphicsBeginImageContextWithOptions(targetSize, false, 1.0)
draw(in: CGRect(origin: .zero, size: targetSize))
let resizedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
guard let cgImage = resizedImage?.cgImage else {
print("Failed to get CGImage from resized UIImage")
return nil
}
let options: [CFString: Any] = [
kCVPixelBufferCGImageCompatibilityKey: true,
kCVPixelBufferCGBitmapContextCompatibilityKey: true
]
var pixelBuffer: CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(targetSize.width), Int(targetSize.height), kCVPixelFormatType_32ARGB, options as CFDictionary, &pixelBuffer)
guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
print("Failed to create pixel buffer, status: \(status)")
return nil
}
CVPixelBufferLockBaseAddress(buffer, .init(rawValue: 0))
let pixelData = CVPixelBufferGetBaseAddress(buffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGContext(data: pixelData,
width: Int(targetSize.width),
height: Int(targetSize.height),
bitsPerComponent: 8,
bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
space: rgbColorSpace,
bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
context?.draw(cgImage, in: CGRect(x: 0, y: 0, width: targetSize.width, height: targetSize.height))
CVPixelBufferUnlockBaseAddress(buffer, .init(rawValue: 0))
return buffer
}
}
#Preview {
SecondView()
.previewDevice("iPhone 15 Pro")
}
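One thing I still want to verify is what the compiled model actually reports as its expected input (image size, pixel format) and output names, in case the 224x224 ARGB buffer from my toCVPixelBuffer() does not match what the model was trained on. A rough sketch of that check, using only the standard MLModel modelDescription API (the helper name is my own):

import CoreML

// Sketch: dump the compiled model's declared inputs and outputs so I can
// compare them against the buffer produced by toCVPixelBuffer().
func dumpModelDescription(_ classifier: LungAI_image_input) {
    let description = classifier.model.modelDescription
    for (name, feature) in description.inputDescriptionsByName {
        print("input \(name): \(feature)")
    }
    for (name, feature) in description.outputDescriptionsByName {
        print("output \(name): \(feature)")
    }
}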