Skip to content

Commit 819fc7e

Browse files
committed
codegen: mark write barrier field load as volatile
This is an alternative to changing all allocation functions to be marked as mutating all memory and returning an aliasing pointer. This pass usually runs late in the pipeline, so either approach should not make much difference, but marking the load volatile should suffice to prevent any de-duplication of this load, and it is also the most conservative option. Fixes #59547. Written by Claude.
1 parent 8349aed commit 819fc7e

File tree

2 files changed

+44
-0
lines changed

2 files changed

+44
-0
lines changed

src/llvm-late-gc-lowering.cpp

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1913,6 +1913,9 @@ Value *LateLowerGCFrame::EmitLoadTag(IRBuilder<> &builder, Type *T_size, Value *
19131913
auto &M = *builder.GetInsertBlock()->getModule();
19141914
LoadInst *load = builder.CreateAlignedLoad(T_size, addr, M.getDataLayout().getPointerABIAlignment(0), V->getName() + ".tag");
19151915
load->setOrdering(AtomicOrdering::Unordered);
1916+
// Mark as volatile to prevent optimizers from treating GC tag loads as constants
1917+
// since GC mark bits can change during runtime (issue #59547)
1918+
load->setVolatile(true);
19161919
load->setMetadata(LLVMContext::MD_tbaa, tbaa_tag);
19171920
MDBuilder MDB(load->getContext());
19181921
auto *NullInt = ConstantInt::get(T_size, 0);
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
; This file is a part of Julia. License is MIT: https://julialang.org/license
2+
3+
; RUN: opt --load-pass-plugin=libjulia-codegen%shlibext -passes='function(LateLowerGCFrame,FinalLowerGC,gvn)' -S %s | FileCheck %s
4+
5+
; Test for issue #59547: Ensure write barrier GC tag loads are volatile
6+
; This test verifies that the LateLowerGCFrame pass marks GC tag loads as volatile
7+
; to prevent GVN from incorrectly constant-folding them, which would eliminate
8+
; necessary write barrier checks.
9+
10+
@tag = external addrspace(10) global {}, align 16
11+
12+
declare void @julia.write_barrier({} addrspace(10)*, {} addrspace(10)*)
13+
declare {}*** @julia.get_pgcstack()
14+
declare {} addrspace(10)* @julia.gc_alloc_obj({}**, i64, {} addrspace(10)*)
15+
16+
; Test that write barrier expansion produces volatile GC tag loads
17+
; CHECK-LABEL: @test_writebarrier_volatile_tags
18+
; Allocates two fresh objects and records a parent -> child store via the
; write-barrier intrinsic; the CHECK lines below verify that, after
; LateLowerGCFrame + FinalLowerGC + gvn, the expanded barrier's GC tag
; loads are still present and marked `atomic volatile` (issue #59547).
define {} addrspace(10)* @test_writebarrier_volatile_tags() {
19+
top:
20+
; fetch the GC state pointer and derive the current task pointer from it,
; as required by the gc_alloc_obj signature declared above
%pgcstack = call {}*** @julia.get_pgcstack()
21+
%current_task = bitcast {}*** %pgcstack to {}**
22+
; two 8-byte allocations with the same type tag @tag
%parent = call {} addrspace(10)* @julia.gc_alloc_obj({}** %current_task, i64 8, {} addrspace(10)* @tag)
23+
%child = call {} addrspace(10)* @julia.gc_alloc_obj({}** %current_task, i64 8, {} addrspace(10)* @tag)
24+
; the barrier the pass lowers into the tag-load / mark-bit checks matched below
call void @julia.write_barrier({} addrspace(10)* %parent, {} addrspace(10)* %child)
25+
ret {} addrspace(10)* %parent
26+
27+
; The critical test: GC tag loads must be volatile to prevent constant folding
28+
; CHECK: load atomic volatile i64, ptr {{.*}} unordered, align 8, {{.*}}!tbaa
29+
; CHECK: and i64 {{.*}}, 3
30+
; CHECK: icmp eq i64 {{.*}}, 3
31+
; CHECK: br i1 {{.*}}, label %may_trigger_wb, label
32+
33+
; CHECK: may_trigger_wb:
34+
; CHECK: load atomic volatile i64, ptr {{.*}} unordered, align 8, {{.*}}!tbaa
35+
; CHECK: and i64 {{.*}}, 1
36+
; CHECK: icmp eq i64 {{.*}}, 0
37+
; CHECK: br i1 {{.*}}, label %trigger_wb, label
38+
39+
; CHECK: trigger_wb:
40+
; CHECK: call void @ijl_gc_queue_root(ptr {{.*}})
41+
}

0 commit comments

Comments
 (0)