Fix codegen when using a bitstruct constant defined via a cast combined with an operator (#2248).

This commit is contained in:
Christoffer Lerno
2025-07-07 17:09:32 +02:00
parent dc23cef59a
commit 5b835bec3e
4 changed files with 143 additions and 45 deletions

View File

@@ -0,0 +1,31 @@
// #target: macos-x64
module test;
import std;
// Two boolean flags packed into a uint backing store.
// The expected IR below shows first|second folding to 3, i.e.
// `first` occupies bit 0 and `second` bit 1.
bitstruct BitstructFlags : uint
{
bool first;  // bit 0
bool second; // bit 1
}
// B_FIRST is defined with an initializer list; B_FIRST2 is the same value
// defined via an integer cast. The cast-defined form is the case that
// previously miscompiled when combined with an operator (#2248).
const BitstructFlags B_FIRST = { true, false };
const BitstructFlags B_FIRST2 = (BitstructFlags) 1;
const BitstructFlags B_SECOND = { false, true };
// Both x and y must constant-fold to 3: the cast-defined constant
// B_FIRST2 must behave identically to the initializer-defined B_FIRST
// when used with `|` (see the #expect IR below: two `store i32 3`).
fn int main()
{
BitstructFlags x = B_FIRST | B_SECOND;
BitstructFlags y = B_FIRST2 | B_SECOND;
io::printfn("X: %d Y: %d", (uint)x, (uint)y);
return 0;
}
/* #expect: test.ll
define i32 @main() #0 {
entry:
%x = alloca i32, align 4
%y = alloca i32, align 4
%varargslots = alloca [2 x %any], align 16
%retparam = alloca i64, align 8
store i32 3, ptr %x, align 4
store i32 3, ptr %y, align 4

View File

@@ -24,6 +24,6 @@ fn void main()
define void @test.main() #0 {
entry:
%x = alloca i32, align 4
store i32 1, ptr %x, align 4
store i32 3, ptr %x, align 4
ret void
}