AnyObject on Linux may give different results for ObjectIdentifier.init()

  1. On an Objective-C supported platform, a class type T's metatype T.self is treated as AnyObject implicitly. On Linux, we need to spell the conversion out explicitly via as AnyObject.

  2. Depending on how the value is passed, passing X.self to ObjectIdentifier.init leads to different results.

import Foundation
class X {}
func test1(_ obj: AnyObject) -> UInt64 {
    UInt64(UInt(bitPattern: ObjectIdentifier(obj)))
}
func test2() {
    print("RawValue: \(test1(X.self as AnyObject))")
}
test2()
test2()
// Result will be x and y (different value/unexpected)
import Foundation
class X {}
func test1(_ obj: AnyObject) -> UInt64 {
    UInt64(UInt(bitPattern: ObjectIdentifier(obj)))
}
func test2() {
    let a = X.self as AnyObject
    print("RawValue: \(test1(a))")
}
test2()
test2()
// Result will be x and x (same value/expected)

See more context here: Passing class-constrained type as AnyObject instance behavior is not matched on Darwin and non-Darwin platform · Issue #70645 · apple/swift · GitHub

Could anyone familiar with the Swift compiler on Linux give some help here?

I checked on godbolt: I can see this issue on 5.2 but not on anything newer.

I can reproduce it with the latest Swift 5.9.2 on Ubuntu 22.04.

Full app
import Foundation
class X {}
func test1(_ obj: AnyObject) -> UInt64 {
    UInt64(UInt(bitPattern: ObjectIdentifier(obj)))
}
func test2() {
    print("RawValue: \(test1(X.self as AnyObject))")
}
test2()
test2()

var info = utsname()
uname(&info)
print(withUnsafeBytes(of: info.sysname) { String(cString: $0.baseAddress!.assumingMemoryBound(to: UInt8.self)) })
print(withUnsafeBytes(of: info.nodename) { String(cString: $0.baseAddress!.assumingMemoryBound(to: UInt8.self)) })
print(withUnsafeBytes(of: info.release) { String(cString: $0.baseAddress!.assumingMemoryBound(to: UInt8.self)) })
print(withUnsafeBytes(of: info.version) { String(cString: $0.baseAddress!.assumingMemoryBound(to: UInt8.self)) })
print(withUnsafeBytes(of: info.machine) { String(cString: $0.baseAddress!.assumingMemoryBound(to: UInt8.self)) })

Output:

RawValue: 94716013974832
RawValue: 94716013974832
Linux
ce
6.2.0-1016-aws
#16~22.04.1-Ubuntu SMP Sun Nov  5 20:08:16 UTC 2023
x86_64

The Swift version is 5.9 (not sure about the bug-fix digit; it's not shown in the menu).

Running your "Full app" gives me the following output:

RawValue: 187650376148288
RawValue: 187650376149328
Linux
ubuntu-2204
6.5.13-orbstack-00121-ge428743e4e98
#1 SMP Wed Dec 27 10:22:46 UTC 2023
aarch64
swift --version
Swift version 5.9.2 (swift-5.9.2-RELEASE)
Target: aarch64-unknown-linux-gnu

The differences between your environment and mine are "aarch64" vs "x86_64" and "Swift 5.9.2" vs "Swift 5.9".

Hmm, ARM vs x86 difference?

BTW, is there a way to print the exact Swift version from the app?

No (https://forums.swift.org/t/get-swift-language-version-programmatically/)

Ouch.
I made a function that generates the source for a "let swiftVersion: String" variable:

generator
func generateSwiftVers(minMajor: Int = 1, maxMajor: Int = 20, minMinor: Int = 0, maxMinor: Int = 20, minBug: Int = 0, maxBug: Int = 20, distantMajor: String = "X", distantMinor: String = "X", distantBug: String = "X") {
    for major in minMajor ... maxMajor {
        if major == minMajor {
            print("#if swift(<\(major + 1))")
        } else {
            print("#elseif swift(<\(major + 1))")
        }
        for minor in minMinor ... maxMinor {
            if minor == minMinor {
                print("    #if swift(<\(major).\(minor + 1))")
            } else {
                print("    #elseif swift(<\(major).\(minor + 1))")
            }
            for bug in minBug ... maxBug {
                if bug == minBug {
                    print("        #if swift(<\(major).\(minor).\(bug + 1))")
                } else {
                    print("        #elseif swift(<\(major).\(minor).\(bug + 1))")
                }
                if bug == 0 {
                    print("            let swiftVersion = \"\(major).\(minor)\"")
                } else {
                    print("            let swiftVersion = \"\(major).\(minor).\(bug)\"")
                }
            }
            print("        #else")
            print("            let swiftVersion = \"\(major).\(minor).\(distantBug)\"")
            print("        #endif")
        }
        print("    #else")
        print("        let swiftVersion = \"\(major).\(distantMinor)\"")
        print("    #endif")
    }
    print("#else")
    print("    let swiftVersion = \"\(distantMajor)\"")
    print("#endif")
}

If you run it without parameters it'll generate a 19K-line file that will correctly determine the Swift version in the range 1.0.0 ... 20.20.20 (20 is chosen as the default maximum value for the major / minor / bugFix version numbers). The good thing is that this source doesn't contribute to the binary size at all.

Here's an example output for a narrow range of versions (to keep this post small):

generateSwiftVers(minMajor: 5, maxMajor: 5, minMinor: 7, maxMinor: 9, maxBug: 9)

Output:

#if swift(<6)
    #if swift(<5.8)
        #if swift(<5.7.1)
            let swiftVersion = "5.7"
        #elseif swift(<5.7.2)
            let swiftVersion = "5.7.1"
        #elseif swift(<5.7.3)
            let swiftVersion = "5.7.2"
        #elseif swift(<5.7.4)
            let swiftVersion = "5.7.3"
        #elseif swift(<5.7.5)
            let swiftVersion = "5.7.4"
        #elseif swift(<5.7.6)
            let swiftVersion = "5.7.5"
        #elseif swift(<5.7.7)
            let swiftVersion = "5.7.6"
        #elseif swift(<5.7.8)
            let swiftVersion = "5.7.7"
        #elseif swift(<5.7.9)
            let swiftVersion = "5.7.8"
        #elseif swift(<5.7.10)
            let swiftVersion = "5.7.9"
        #else
            let swiftVersion = "5.7.X"
        #endif
    #elseif swift(<5.9)
        #if swift(<5.8.1)
            let swiftVersion = "5.8"
        #elseif swift(<5.8.2)
            let swiftVersion = "5.8.1"
        #elseif swift(<5.8.3)
            let swiftVersion = "5.8.2"
        #elseif swift(<5.8.4)
            let swiftVersion = "5.8.3"
        #elseif swift(<5.8.5)
            let swiftVersion = "5.8.4"
        #elseif swift(<5.8.6)
            let swiftVersion = "5.8.5"
        #elseif swift(<5.8.7)
            let swiftVersion = "5.8.6"
        #elseif swift(<5.8.8)
            let swiftVersion = "5.8.7"
        #elseif swift(<5.8.9)
            let swiftVersion = "5.8.8"
        #elseif swift(<5.8.10)
            let swiftVersion = "5.8.9"
        #else
            let swiftVersion = "5.8.X"
        #endif
    #elseif swift(<5.10)
        #if swift(<5.9.1)
            let swiftVersion = "5.9"
        #elseif swift(<5.9.2)
            let swiftVersion = "5.9.1"
        #elseif swift(<5.9.3)
            let swiftVersion = "5.9.2"
        #elseif swift(<5.9.4)
            let swiftVersion = "5.9.3"
        #elseif swift(<5.9.5)
            let swiftVersion = "5.9.4"
        #elseif swift(<5.9.6)
            let swiftVersion = "5.9.5"
        #elseif swift(<5.9.7)
            let swiftVersion = "5.9.6"
        #elseif swift(<5.9.8)
            let swiftVersion = "5.9.7"
        #elseif swift(<5.9.9)
            let swiftVersion = "5.9.8"
        #elseif swift(<5.9.10)
            let swiftVersion = "5.9.9"
        #else
            let swiftVersion = "5.9.X"
        #endif
    #else
        let swiftVersion = "5.X"
    #endif
#else
    let swiftVersion = "X"
#endif

I ran the full 19K-line detector on godbolt; the detected version is "5.9" when "5.9" is selected in the menu. For "nightly" the detected version is "5.11".

Let's end the discussion of the Swift version and go back to the original problem. What is the Swift version on your Linux test machine? Is it convenient to install Swift 5.9.2 for testing? If the result is still the same on 5.9.2, then the only remaining difference will be the architecture.

I don't have a Linux machine; I ran the app on godbolt, which happened to be a Linux machine (see the details above), and now, thanks to my version detector, we know the Swift version: 5.9 (or 5.11 for "nightly").

Class metatype values are only usable as objects themselves with Objective-C interop. On Linux, class metatypes are not objects, and as AnyObject just wraps the value in a generic object wrapper.
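
As an illustration of that boxing, here is a minimal sketch of my own (assuming a native Linux toolchain; the exact behaviour is runtime-dependent):

class Y {}

// On Linux, each `as AnyObject` conversion is expected to allocate a fresh wrapper box,
// so two conversions of the same metatype are not identical as objects.
let first = Y.self as AnyObject
let second = Y.self as AnyObject
print(first === second)   // expected: false on Linux, true on Darwin

// The metatype itself has a single, stable identity on both platforms.
print(ObjectIdentifier(Y.self) == ObjectIdentifier(Y.self))   // true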


Will this work for you on Linux?

unsafeBitCast(X.self, to: ObjectIdentifier.self)

If all you want is an ObjectIdentifier from a type, there is already an initializer to do that directly, ObjectIdentifier.init(_: Any.Type).
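
For example, a minimal sketch of using that initializer directly (no AnyObject conversion involved, so the value is stable across calls):

class X {}

// ObjectIdentifier(_: Any.Type) takes the metatype directly, bypassing the
// `as AnyObject` box, so repeated calls produce the same identifier.
let id1 = ObjectIdentifier(X.self)
let id2 = ObjectIdentifier(X.self)
print(id1 == id2)                       // true
print(UInt64(UInt(bitPattern: id1)))    // same raw value on every call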


So should the original snippet be rewritten as a generic?

class X {}
func test1<T>(_ t: T.Type = T.self) -> UInt64 {
    UInt64(UInt(bitPattern: ObjectIdentifier(t)))
}
func test2() {
    print("RawValue: \(test1(X.self))")
}
test2()
test2()

Seems to work as expected for me:

# Execution result with exit code 0
# Standard out:
RawValue: 94730000371904
RawValue: 94730000371904

LGTM. What is the reason for having the default value "= T.self"?

It’s just a reflex. If T is in return position, the default argument makes it possible to call the function without any shenanigans:

func getZero<T: BinaryInteger>(_: T.Type = T.self) -> T {
  return T.zero
}

let zero: UInt = getZero() // don’t need to say `UInt.self`

OK, it's neither. I changed your original example and now can see the issue you are talking about on godbolt's 5.9 and 5.11:

import Foundation
class X {}
func test1(_ obj: AnyObject) -> UInt64 {
    UInt64(UInt(bitPattern: ObjectIdentifier(obj)))
}
func test2() {
    let a = X.self as AnyObject
    let b = X.self as AnyObject
    let c = X.self as AnyObject
    print("RawValue: \(test1(a))")
    print("RawValue: \(test1(b))")
    print("RawValue: \(test1(c))")
}
test2()

It "worked" before because the reference object was allocated (by luck) at the same address as the previously deallocated object.

I see. It just looks odd above, as it won't be possible to call test1() without parameters.

BTW, these three should do the same:

let zero: UInt = getZero()
let zero = getZero(UInt.self)
let zero = getZero() as UInt

so unless there are particular aesthetic reasons, the second form is not required.

This does not explain the following output here on ubuntu-2204 aarch64 with Swift 5.9.2:

import Foundation
class X {}
func test1(_ obj: AnyObject) -> UInt64 {
    UInt64(UInt(bitPattern: ObjectIdentifier(obj)))
}
func test2() {
    print("RawValue: \(test1(X.self as AnyObject))")
}
test2()
test2()
test2()
test2()
// Result will be x y y y
import Foundation
class X {}
func test1(_ obj: AnyObject) -> UInt64 {
    UInt64(UInt(bitPattern: ObjectIdentifier(obj)))
}
func test2() {
    let a = X.self as AnyObject
    print("RawValue: \(test1(a))")
}
test2()
test2()
test2()
test2()
// Result will be x x x x
import Foundation
class X {}
func test1(_ obj: AnyObject) -> UInt64 {
    UInt64(UInt(bitPattern: ObjectIdentifier(obj)))
}
func test2() {
    let a = X.self
    print("RawValue: \(test1(a as AnyObject))")
}
test2()
test2()
test2()
test2()
// Result will be x y y y

It should be either x x x x for all three examples or x y z w for all three examples.

But instead, some ways of passing the value produce a different result for the first call only, and that's what confuses me.

Also, is there a plan to fix/unify this? cc @Joe_Groff
Someone also noted that "That would be a step towards resolving a number of other related problems with metatypes on Linux."

Context: Passing class-constrained type as AnyObject instance behavior is not matched on Darwin and non-Darwin platform · Issue #70645 · apple/swift · GitHub