I have always thought that `Self` was static in reference types, regardless of whether a type has subtypes, and that it was always interchangeable with the name of the type.
// Illustrative class: `f(_:)` downcasts its argument with `as? Self`
// instead of hardwiring the concrete class name `U`.
class U {
// Attempts to cast `u` to `Self` and, on success, forwards it to `g(_:)`.
// NOTE(review): in a class method, `Self` refers to the receiver's
// *dynamic* type, not the lexical type `U` — so the cast target varies
// per subclass; confirm against the Swift reference on dynamic Self.
func f (_ u: AnyObject) {
// Don't hardwire `U` but use `Self` instead
guard let v = u as? Self else {
// Reached when `u`'s class is not the receiver's dynamic type
// (or a subclass of it). `Self.self` prints that dynamic type.
print (#function, "expecting U", "got", Self.self)
return
}
g (v)
}
// Subclasses to override
func g (_ u: U) {
// `Self.self` evaluates to the runtime class of the receiver.
print (#function, Self.self)
}
}
But, to my great surprise, I have discovered that this is not the case at all when a type has subtypes. Below, simply changing the typealias in `U.f` changes the output of the test.
// Entry point: exercises `U.f(_:)` with matching and mismatching
// receiver/argument dynamic types.
@main
enum ReferenceTypes {
static func main () {
let first = V()
first.f(first)   // argument's class matches the receiver's dynamic type (V)
let second = W()
first.f(second)  // a W passed to a V receiver
second.f(first)  // a V passed to a W receiver
}
}
// Second variant of `U`: the cast target is routed through a local
// typealias so the two behaviors can be compared by swapping one line.
class U {
// Casts `u` to `T` and, on success, forwards it to `g(_:)`.
func f (_ u: AnyObject) {
// `T = Self` makes the cast target the receiver's *dynamic* type;
// the commented alternative pins it to the static type `U`.
// NOTE(review): this one-line swap is what changes the test output
// shown below — confirm against the Swift reference on dynamic Self.
typealias T = Self
// typealias T = U
guard let v = u as? T else {
// Reached when `u`'s class is not (a subclass of) `T`.
print (#function, "expecting U", "got", Self.self)
return
}
g (v)
}
// Subclasses to override
func g (_ u: U) {
// `Self.self` evaluates to the runtime class of the receiver.
print (#function, Self.self)
}
}
// Subclass that reports its own dynamic type from `g(_:)`.
class V: U {
override func g (_ other: U) {
let dynamicType = Self.self
print (#function, dynamicType)
}
}
// Subclass that reports its own dynamic type from `g(_:)`.
class W: U {
override func g (_ other: U) {
let dynamicType = Self.self
print (#function, dynamicType)
}
}
With typealias T = U
in U.f
, the output is as I expect it.
// typealias T = U in U.f
g(_:) V
g(_:) V
g(_:) W
However, with typealias T = Self
in U.f
, the output is not as I expect it.
// typealias T = Self in U.f
g(_:) V
f(_:) expecting U got V
f(_:) expecting U got W
Can anyone explain the reason behind this?