This code compiles and runs just fine in a small SwiftUI project:
extension Int {
    // collect digits, ignoring any negative sign
    var digits: [Int] {
        if self == 0 {
            return [0]
        }
        var result = [Int]()
        var n = abs(self)
        while (n > 0) {
            result.append(n % 10)
            n = n / 10
        }
        return result.reversed()
    }

    // Unicode superscript digits 0–9
    static let superscriptMap: [Character] = [
        "\u{2070}",
        "\u{00B9}",
        "\u{00B2}",
        "\u{00B3}",
        "\u{2074}",
        "\u{2075}",
        "\u{2076}",
        "\u{2077}",
        "\u{2078}",
        "\u{2079}"
    ]

    // Unicode subscript digits 0–9
    static let subscriptMap: [Character] = [
        "\u{2080}",
        "\u{2081}",
        "\u{2082}",
        "\u{2083}",
        "\u{2084}",
        "\u{2085}",
        "\u{2086}",
        "\u{2087}",
        "\u{2088}",
        "\u{2089}"
    ]

    // render the integer with superscript digits, prefixing superscript minus for negatives
    var superscriptString: String {
        let ds = digits.map { Self.superscriptMap[$0] }
        if self >= 0 {
            return String(ds)
        } else {
            return "\u{207B}" + String(ds)
        }
    }

    // render the integer with subscript digits, prefixing subscript minus for negatives
    var subscriptString: String {
        let ds = digits.map { Self.subscriptMap[$0] }
        if self >= 0 {
            return String(ds)
        } else {
            return "\u{208B}" + String(ds)
        }
    }
}
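For reference, this is the kind of usage involved (FormulaView and the sample values are just illustrative, not taken from the failing project; the commented results follow from the code above):

import SwiftUI

// Example usage of the extension.
let n = 42
print(n.digits)              // [4, 2]
print(n.superscriptString)   // "⁴²"
print((-42).subscriptString) // "₋₄₂"

// And in a SwiftUI view:
struct FormulaView: View {
    var body: some View {
        Text("x\(2.superscriptString)")  // shows "x²"
    }
}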
In a large SwiftUI project with many source files, the same extension does not compile.
The error messages seem to indicate that the compiler cannot tell the string literals are of type Character; it treats them as String literals.
This is Xcode Version 13.2 beta (13C5066c). Is something wrong with the Swift compiler or Xcode?
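A possible workaround, assuming the diagnosis above is correct and the problem really is literal type inference, would be to build the maps from a single String so the element type is never in question, along these lines (a sketch only):

// Same property names as above, declared from one String literal each;
// Array(String) yields [Character], so no per-literal inference is needed.
static let superscriptMap: [Character] =
    Array("\u{2070}\u{00B9}\u{00B2}\u{00B3}\u{2074}\u{2075}\u{2076}\u{2077}\u{2078}\u{2079}")
static let subscriptMap: [Character] =
    Array("\u{2080}\u{2081}\u{2082}\u{2083}\u{2084}\u{2085}\u{2086}\u{2087}\u{2088}\u{2089}")

An explicit coercion on each element ("\u{2070}" as Character, and so on) should have a similar effect, but neither sketch explains why the original form compiles in one project and not the other.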
