; RUN: llc < %s -emulated-tls -mtriple=i386-linux-gnu | FileCheck -check-prefix=X32 %s
; RUN: llc < %s -emulated-tls -mtriple=x86_64-linux-gnu | FileCheck -check-prefix=X64 %s
; RUN: llc < %s -emulated-tls -mtriple=i386-linux-android | FileCheck -check-prefix=X32 %s
; RUN: llc < %s -emulated-tls -mtriple=x86_64-linux-android | FileCheck -check-prefix=X64 %s

; RUN: llc < %s -mtriple=i386-linux-gnu | FileCheck -check-prefix=NoEMU %s
; RUN: llc < %s -mtriple=x86_64-linux-gnu | FileCheck -check-prefix=NoEMU %s
; RUN: llc < %s -mtriple=i386-linux-android | FileCheck -check-prefix=X32 %s
; RUN: llc < %s -mtriple=x86_64-linux-android | FileCheck -check-prefix=X64 %s

; Copied from tls.ll; emulated TLS model is not implemented
; for *-pc-win32 and *-pc-windows targets yet.

; NoEMU-NOT: __emutls

; Use my_emutls_get_address like __emutls_get_address.
@my_emutls_v_xyz = external global i8*, align 4
declare i8* @my_emutls_get_address(i8*)

define i32 @my_get_xyz() {
; X32-LABEL: my_get_xyz:
; X32: movl $my_emutls_v_xyz, (%esp)
; X32-NEXT: calll my_emutls_get_address
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
; X64-LABEL: my_get_xyz:
; X64: movl $my_emutls_v_xyz, %edi
; X64-NEXT: callq my_emutls_get_address
; X64-NEXT: movl (%rax), %eax
; X64-NEXT: popq %rcx
; X64-NEXT: .cfi_def_cfa_offset 8
; X64-NEXT: retq

entry:
  %call = call i8* @my_emutls_get_address(i8* bitcast (i8** @my_emutls_v_xyz to i8*))
  %0 = bitcast i8* %call to i32*
  %1 = load i32, i32* %0, align 4
  ret i32 %1
}

@i1 = thread_local global i32 15
@i2 = external thread_local global i32
@i3 = internal thread_local global i32 15
@i4 = hidden thread_local global i32 15
@i5 = external hidden thread_local global i32
@s1 = thread_local global i16 15
@b1 = thread_local global i8 0

define i32 @f1() {
; X32-LABEL: f1:
; X32: movl $__emutls_v.i1, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
; X64-LABEL: f1:
; X64: movl $__emutls_v.i1, %edi
; X64-NEXT: callq __emutls_get_address
; X64-NEXT: movl (%rax), %eax
; X64-NEXT: popq %rcx
; X64-NEXT: .cfi_def_cfa_offset 8
; X64-NEXT: retq

entry:
  %tmp1 = load i32, i32* @i1
  ret i32 %tmp1
}

define i32* @f2() {
; X32-LABEL: f2:
; X32: movl $__emutls_v.i1, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl
; X64-LABEL: f2:
; X64: movl $__emutls_v.i1, %edi
; X64-NEXT: callq __emutls_get_address
; X64-NEXT: popq %rcx
; X64-NEXT: .cfi_def_cfa_offset 8
; X64-NEXT: retq

entry:
  ret i32* @i1
}

define i32 @f3() nounwind {
; X32-LABEL: f3:
; X32: movl $__emutls_v.i2, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $12, %esp
; X32-NEXT: retl

entry:
  %tmp1 = load i32, i32* @i2
  ret i32 %tmp1
}

define i32* @f4() {
; X32-LABEL: f4:
; X32: movl $__emutls_v.i2, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl

entry:
  ret i32* @i2
}

define i32 @f5() nounwind {
; X32-LABEL: f5:
; X32: movl $__emutls_v.i3, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $12, %esp
; X32-NEXT: retl

entry:
  %tmp1 = load i32, i32* @i3
  ret i32 %tmp1
}

define i32* @f6() {
; X32-LABEL: f6:
; X32: movl $__emutls_v.i3, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl

entry:
  ret i32* @i3
}

define i32 @f7() {
; X32-LABEL: f7:
; X32: movl $__emutls_v.i4, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl

entry:
  %tmp1 = load i32, i32* @i4
  ret i32 %tmp1
}

define i32* @f8() {
; X32-LABEL: f8:
; X32: movl $__emutls_v.i4, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl

entry:
  ret i32* @i4
}

define i32 @f9() {
; X32-LABEL: f9:
; X32: movl $__emutls_v.i5, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movl (%eax), %eax
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl

entry:
  %tmp1 = load i32, i32* @i5
  ret i32 %tmp1
}

define i32* @f10() {
; X32-LABEL: f10:
; X32: movl $__emutls_v.i5, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl

entry:
  ret i32* @i5
}

define i16 @f11() {
; X32-LABEL: f11:
; X32: movl $__emutls_v.s1, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movzwl (%eax), %eax
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl

entry:
  %tmp1 = load i16, i16* @s1
  ret i16 %tmp1
}

define i32 @f12() {
; X32-LABEL: f12:
; X32: movl $__emutls_v.s1, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movswl (%eax), %eax
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl

entry:
  %tmp1 = load i16, i16* @s1
  %tmp2 = sext i16 %tmp1 to i32
  ret i32 %tmp2
}

define i8 @f13() {
; X32-LABEL: f13:
; X32: movl $__emutls_v.b1, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movb (%eax), %al
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl

entry:
  %tmp1 = load i8, i8* @b1
  ret i8 %tmp1
}

define i32 @f14() {
; X32-LABEL: f14:
; X32: movl $__emutls_v.b1, (%esp)
; X32-NEXT: calll __emutls_get_address
; X32-NEXT: movsbl (%eax), %eax
; X32-NEXT: addl $12, %esp
; X32-NEXT: .cfi_def_cfa_offset 4
; X32-NEXT: retl

entry:
  %tmp1 = load i8, i8* @b1
  %tmp2 = sext i8 %tmp1 to i32
  ret i32 %tmp2
}

;;;;;;;;;;;;;; 32-bit __emutls_v. and __emutls_t.
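
; The data checks below encode the emulated-TLS control record that llc emits
; for each thread_local definition (__emutls_v.<name>) plus an initial-value
; template (__emutls_t.<name>) when the initializer is nonzero. A rough C
; sketch of the record, assuming the compiler-rt emutls runtime (field names
; are descriptive, not something this test checks):
;
;   typedef struct __emutls_control { // emitted as __emutls_v.<name>
;     uintptr_t size;   // byte size of the variable (4, 2 or 1 below)
;     uintptr_t align;  // its alignment
;     uintptr_t object; // per-thread slot bookkeeping, emitted as 0
;     void *value;      // pointer to __emutls_t.<name>, or 0 if zero-init
;   } __emutls_control;
;
; Each field is pointer-sized, hence .long entries for X32 and .quad for X64.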

; X32 .section .data.rel.local,
; X32-LABEL: __emutls_v.i1:
; X32-NEXT: .long 4
; X32-NEXT: .long 4
; X32-NEXT: .long 0
; X32-NEXT: .long __emutls_t.i1

; X32 .section .rodata,
; X32-LABEL: __emutls_t.i1:
; X32-NEXT: .long 15

; X32-NOT: __emutls_v.i2

; X32 .section .data.rel.local,
; X32-LABEL: __emutls_v.i3:
; X32-NEXT: .long 4
; X32-NEXT: .long 4
; X32-NEXT: .long 0
; X32-NEXT: .long __emutls_t.i3

; X32 .section .rodata,
; X32-LABEL: __emutls_t.i3:
; X32-NEXT: .long 15

; X32 .section .data.rel.local,
; X32-LABEL: __emutls_v.i4:
; X32-NEXT: .long 4
; X32-NEXT: .long 4
; X32-NEXT: .long 0
; X32-NEXT: .long __emutls_t.i4

; X32 .section .rodata,
; X32-LABEL: __emutls_t.i4:
; X32-NEXT: .long 15

; X32-NOT: __emutls_v.i5:
; X32 .hidden __emutls_v.i5
; X32-NOT: __emutls_v.i5:

; X32 .section .data.rel.local,
; X32-LABEL: __emutls_v.s1:
; X32-NEXT: .long 2
; X32-NEXT: .long 2
; X32-NEXT: .long 0
; X32-NEXT: .long __emutls_t.s1

; X32 .section .rodata,
; X32-LABEL: __emutls_t.s1:
; X32-NEXT: .short 15

; X32 .section .data.rel.local,
; X32-LABEL: __emutls_v.b1:
; X32-NEXT: .long 1
; X32-NEXT: .long 1
; X32-NEXT: .long 0
; X32-NEXT: .long 0

; X32-NOT: __emutls_t.b1

;;;;;;;;;;;;;; 64-bit __emutls_v. and __emutls_t.

; X64 .section .data.rel.local,
; X64-LABEL: __emutls_v.i1:
; X64-NEXT: .quad 4
; X64-NEXT: .quad 4
; X64-NEXT: .quad 0
; X64-NEXT: .quad __emutls_t.i1

; X64 .section .rodata,
; X64-LABEL: __emutls_t.i1:
; X64-NEXT: .long 15

; X64-NOT: __emutls_v.i2

; X64 .section .data.rel.local,
; X64-LABEL: __emutls_v.i3:
; X64-NEXT: .quad 4
; X64-NEXT: .quad 4
; X64-NEXT: .quad 0
; X64-NEXT: .quad __emutls_t.i3

; X64 .section .rodata,
; X64-LABEL: __emutls_t.i3:
; X64-NEXT: .long 15

; X64 .section .data.rel.local,
; X64-LABEL: __emutls_v.i4:
; X64-NEXT: .quad 4
; X64-NEXT: .quad 4
; X64-NEXT: .quad 0
; X64-NEXT: .quad __emutls_t.i4

; X64 .section .rodata,
; X64-LABEL: __emutls_t.i4:
; X64-NEXT: .long 15

; X64-NOT: __emutls_v.i5:
; X64 .hidden __emutls_v.i5
; X64-NOT: __emutls_v.i5:

; X64 .section .data.rel.local,
; X64-LABEL: __emutls_v.s1:
; X64-NEXT: .quad 2
; X64-NEXT: .quad 2
; X64-NEXT: .quad 0
; X64-NEXT: .quad __emutls_t.s1

; X64 .section .rodata,
; X64-LABEL: __emutls_t.s1:
; X64-NEXT: .short 15

; X64 .section .data.rel.local,
; X64-LABEL: __emutls_v.b1:
; X64-NEXT: .quad 1
; X64-NEXT: .quad 1
; X64-NEXT: .quad 0
; X64-NEXT: .quad 0

; X64-NOT: __emutls_t.b1
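
; For reference: every TLS access checked above is lowered to a call of the
; emulated-TLS runtime entry point. A sketch of its shape, assuming the
; compiler-rt emutls runtime (not something this test checks directly):
;
;   // Returns the address of the calling thread's copy of the variable
;   // described by `control`, allocating it and copying the initializer
;   // from `value` (or zero-filling) on first use.
;   void *__emutls_get_address(__emutls_control *control);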