I have a plain-old-data IPv6 type defined with a layout matching the “natural” UInt16x8 format:
/// A plain-old-data IPv6 address stored as eight 16-bit words.
///
/// The stored properties are raw storage only; each word is held in
/// big-endian byte order (no byte-swapping is performed by this type).
@frozen
public struct IPv6: Equatable, Hashable, Sendable {
    /// Words of the address, most significant first, each big-endian.
    public var a: UInt16
    public var b: UInt16
    public var c: UInt16
    public var d: UInt16
    public var e: UInt16
    public var f: UInt16
    public var g: UInt16
    public var h: UInt16
}
The stored properties are just raw storage and hold the words in big-endian representation.
Out of curiosity, I plugged this into Godbolt to see what the synthesized Equatable
conformance compiles to:
/// Thin wrapper around the synthesized `Equatable` conformance, used to
/// inspect the generated machine code for `IPv6.==`.
public func eq(a: IPv6, b: IPv6) -> Bool {
    return a == b
}
-O
-whole-module-optimization
output.eq(a: output.IPv6, b: output.IPv6) -> Swift.Bool:
xor eax, eax
cmp rdi, rdx
jne .LBB41_5
cmp si, cx
jne .LBB41_5
mov rdx, rsi
shr rdx, 16
mov rdi, rcx
shr rdi, 16
cmp dx, di
jne .LBB41_5
mov rdx, rsi
shr rdx, 32
mov rdi, rcx
shr rdi, 32
cmp dx, di
jne .LBB41_5
shr rcx, 48
shr rsi, 48
cmp si, cx
sete al
.LBB41_5:
ret
That's… yikes! Is it really comparing the addresses 16 bits at a time?
If I lay out the type like:
/// A plain-old-data IPv6 address stored as two 64-bit halves.
///
/// Both halves are raw storage in big-endian byte order; this layout lets
/// the synthesized `Equatable` conformance compare the address in two
/// 64-bit operations instead of eight 16-bit ones.
@frozen
public struct IPv6: Equatable, Hashable, Sendable {
    /// The prefix address (upper 64 bits), in big-endian byte order.
    public var prefix: UInt64

    /// The subnet address (lower 64 bits), in big-endian byte order.
    public var subnet: UInt64

    /// Creates an address from its big-endian halves.
    /// - Parameters:
    ///   - prefix: The upper 64 bits, big-endian.
    ///   - subnet: The lower 64 bits, big-endian.
    @inlinable
    public init(prefix: UInt64, subnet: UInt64) {
        self.prefix = prefix
        self.subnet = subnet
    }
}
instead, I get:
output.eq(a: output.IPv6, b: output.IPv6) -> Swift.Bool:
xor rdi, rdx
xor rsi, rcx
or rsi, rdi
sete al
ret
Any reason why the compiler can't perform that optimization on its own?