; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+sse2 -O3 | FileCheck %s --check-prefixes=SSE-X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse2 -O3 | FileCheck %s --check-prefixes=SSE-X64
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx -O3 | FileCheck %s --check-prefixes=AVX-X86,AVX1-X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx -O3 | FileCheck %s --check-prefixes=AVX-X64,AVX1-X64
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512f -mattr=+avx512vl -O3 | FileCheck %s --check-prefixes=AVX-X86,AVX512-X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f -mattr=+avx512vl -O3 | FileCheck %s --check-prefixes=AVX-X64,AVX512-X64
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=-sse -O3 | FileCheck %s --check-prefixes=CHECK,X87
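;
; This file tests the strict (constrained) float-to-integer conversion intrinsics
; @llvm.experimental.constrained.fptosi and @llvm.experimental.constrained.fptoui
; from f32 and f64 to i1/i8/i16/i32/i64, all with "fpexcept.strict" exception
; behavior, as lowered for SSE2, AVX, AVX512F+AVX512VL, and X87-only (-sse) targets.
; In the X87 (CHECK) lowerings, the fnstcw / orl $3072 / fldcw sequence sets the
; control-word rounding-control bits (0xC00, RC = 11b) to round-toward-zero so
; that fistp truncates; the original control word is restored afterwards.
;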
declare i1 @llvm.experimental.constrained.fptosi.i1.f32(float, metadata)
declare i8 @llvm.experimental.constrained.fptosi.i8.f32(float, metadata)
declare i16 @llvm.experimental.constrained.fptosi.i16.f32(float, metadata)
declare i32 @llvm.experimental.constrained.fptosi.i32.f32(float, metadata)
declare i64 @llvm.experimental.constrained.fptosi.i64.f32(float, metadata)
declare i1 @llvm.experimental.constrained.fptoui.i1.f32(float, metadata)
declare i8 @llvm.experimental.constrained.fptoui.i8.f32(float, metadata)
declare i16 @llvm.experimental.constrained.fptoui.i16.f32(float, metadata)
declare i32 @llvm.experimental.constrained.fptoui.i32.f32(float, metadata)
declare i64 @llvm.experimental.constrained.fptoui.i64.f32(float, metadata)

declare i1 @llvm.experimental.constrained.fptosi.i1.f64(double, metadata)
declare i8 @llvm.experimental.constrained.fptosi.i8.f64(double, metadata)
declare i16 @llvm.experimental.constrained.fptosi.i16.f64(double, metadata)
declare i32 @llvm.experimental.constrained.fptosi.i32.f64(double, metadata)
declare i64 @llvm.experimental.constrained.fptosi.i64.f64(double, metadata)
declare i1 @llvm.experimental.constrained.fptoui.i1.f64(double, metadata)
declare i8 @llvm.experimental.constrained.fptoui.i8.f64(double, metadata)
declare i16 @llvm.experimental.constrained.fptoui.i16.f64(double, metadata)
declare i32 @llvm.experimental.constrained.fptoui.i32.f64(double, metadata)
declare i64 @llvm.experimental.constrained.fptoui.i64.f64(double, metadata)

define i1 @fptosi_f32toi1(float %x) #0 {
; SSE-X86-LABEL: fptosi_f32toi1:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttss2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptosi_f32toi1:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttss2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptosi_f32toi1:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttss2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptosi_f32toi1:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttss2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X64-NEXT:    retq
;
; CHECK-LABEL: fptosi_f32toi1:
; CHECK:       # %bb.0:
; CHECK-NEXT:    subl $8, %esp
; CHECK-NEXT:    .cfi_def_cfa_offset 12
; CHECK-NEXT:    flds {{[0-9]+}}(%esp)
; CHECK-NEXT:    wait
; CHECK-NEXT:    fnstcw {{[0-9]+}}(%esp)
; CHECK-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; CHECK-NEXT:    orl $3072, %eax # imm = 0xC00
; CHECK-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; CHECK-NEXT:    fldcw {{[0-9]+}}(%esp)
; CHECK-NEXT:    fistps {{[0-9]+}}(%esp)
; CHECK-NEXT:    fldcw {{[0-9]+}}(%esp)
; CHECK-NEXT:    movb {{[0-9]+}}(%esp), %al
; CHECK-NEXT:    addl $8, %esp
; CHECK-NEXT:    .cfi_def_cfa_offset 4
; CHECK-NEXT:    retl
  %result = call i1 @llvm.experimental.constrained.fptosi.i1.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i1 %result
}

define i8 @fptosi_f32toi8(float %x) #0 {
; SSE-X86-LABEL: fptosi_f32toi8:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttss2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptosi_f32toi8:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttss2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptosi_f32toi8:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttss2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptosi_f32toi8:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttss2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X64-NEXT:    retq
;
; CHECK-LABEL: fptosi_f32toi8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    subl $8, %esp
; CHECK-NEXT:    .cfi_def_cfa_offset 12
; CHECK-NEXT:    flds {{[0-9]+}}(%esp)
; CHECK-NEXT:    wait
; CHECK-NEXT:    fnstcw {{[0-9]+}}(%esp)
; CHECK-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; CHECK-NEXT:    orl $3072, %eax # imm = 0xC00
; CHECK-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; CHECK-NEXT:    fldcw {{[0-9]+}}(%esp)
; CHECK-NEXT:    fistps {{[0-9]+}}(%esp)
; CHECK-NEXT:    fldcw {{[0-9]+}}(%esp)
; CHECK-NEXT:    movb {{[0-9]+}}(%esp), %al
; CHECK-NEXT:    addl $8, %esp
; CHECK-NEXT:    .cfi_def_cfa_offset 4
; CHECK-NEXT:    retl
  %result = call i8 @llvm.experimental.constrained.fptosi.i8.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i8 %result
}
|
|
|
|
define i16 @fptosi_f32toi16(float %x) #0 {
|
|
; SSE-X86-LABEL: fptosi_f32toi16:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: cvttss2si {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptosi_f32toi16:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttss2si %xmm0, %eax
|
|
; SSE-X64-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptosi_f32toi16:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: vcvttss2si {{[0-9]+}}(%esp), %eax
|
|
; AVX-X86-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptosi_f32toi16:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttss2si %xmm0, %eax
|
|
; AVX-X64-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptosi_f32toi16:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: subl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 12
|
|
; CHECK-NEXT: flds {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistps {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: addl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i16 @llvm.experimental.constrained.fptosi.i16.f32(float %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i16 %result
|
|
}
|
|
|
|
define i32 @fptosi_f32toi32(float %x) #0 {
|
|
; SSE-X86-LABEL: fptosi_f32toi32:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: cvttss2si {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptosi_f32toi32:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttss2si %xmm0, %eax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptosi_f32toi32:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: vcvttss2si {{[0-9]+}}(%esp), %eax
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptosi_f32toi32:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttss2si %xmm0, %eax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptosi_f32toi32:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: subl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 12
|
|
; CHECK-NEXT: flds {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw (%esp)
|
|
; CHECK-NEXT: movzwl (%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistpl {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw (%esp)
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: addl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i32 @llvm.experimental.constrained.fptosi.i32.f32(float %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i32 %result
|
|
}
|
|
|
|
define i64 @fptosi_f32toi64(float %x) #0 {
|
|
; SSE-X86-LABEL: fptosi_f32toi64:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: pushl %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; SSE-X86-NEXT: .cfi_offset %ebp, -8
|
|
; SSE-X86-NEXT: movl %esp, %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; SSE-X86-NEXT: andl $-8, %esp
|
|
; SSE-X86-NEXT: subl $16, %esp
|
|
; SSE-X86-NEXT: movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
|
|
; SSE-X86-NEXT: movss %xmm0, {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: flds {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: wait
|
|
; SSE-X86-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; SSE-X86-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fistpll {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: movl {{[0-9]+}}(%esp), %edx
|
|
; SSE-X86-NEXT: movl %ebp, %esp
|
|
; SSE-X86-NEXT: popl %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptosi_f32toi64:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttss2si %xmm0, %rax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptosi_f32toi64:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: pushl %ebp
|
|
; AVX-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; AVX-X86-NEXT: .cfi_offset %ebp, -8
|
|
; AVX-X86-NEXT: movl %esp, %ebp
|
|
; AVX-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; AVX-X86-NEXT: andl $-8, %esp
|
|
; AVX-X86-NEXT: subl $8, %esp
|
|
; AVX-X86-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
|
|
; AVX-X86-NEXT: vmovss %xmm0, (%esp)
|
|
; AVX-X86-NEXT: flds (%esp)
|
|
; AVX-X86-NEXT: fisttpll (%esp)
|
|
; AVX-X86-NEXT: wait
|
|
; AVX-X86-NEXT: movl (%esp), %eax
|
|
; AVX-X86-NEXT: movl {{[0-9]+}}(%esp), %edx
|
|
; AVX-X86-NEXT: movl %ebp, %esp
|
|
; AVX-X86-NEXT: popl %ebp
|
|
; AVX-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptosi_f32toi64:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttss2si %xmm0, %rax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptosi_f32toi64:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: pushl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 8
|
|
; CHECK-NEXT: .cfi_offset %ebp, -8
|
|
; CHECK-NEXT: movl %esp, %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_register %ebp
|
|
; CHECK-NEXT: andl $-8, %esp
|
|
; CHECK-NEXT: subl $16, %esp
|
|
; CHECK-NEXT: flds 8(%ebp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistpll {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edx
|
|
; CHECK-NEXT: movl %ebp, %esp
|
|
; CHECK-NEXT: popl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa %esp, 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i64 @llvm.experimental.constrained.fptosi.i64.f32(float %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i64 %result
|
|
}
|
|
|
|
define i1 @fptoui_f32toi1(float %x) #0 {
|
|
; SSE-X86-LABEL: fptoui_f32toi1:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: cvttss2si {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: # kill: def $al killed $al killed $eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptoui_f32toi1:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttss2si %xmm0, %eax
|
|
; SSE-X64-NEXT: # kill: def $al killed $al killed $eax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptoui_f32toi1:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: vcvttss2si {{[0-9]+}}(%esp), %eax
|
|
; AVX-X86-NEXT: # kill: def $al killed $al killed $eax
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptoui_f32toi1:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttss2si %xmm0, %eax
|
|
; AVX-X64-NEXT: # kill: def $al killed $al killed $eax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptoui_f32toi1:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: subl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 12
|
|
; CHECK-NEXT: flds {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistps {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movb {{[0-9]+}}(%esp), %al
|
|
; CHECK-NEXT: addl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i1 @llvm.experimental.constrained.fptoui.i1.f32(float %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i1 %result
|
|
}
|
|
|
|
define i8 @fptoui_f32toi8(float %x) #0 {
|
|
; SSE-X86-LABEL: fptoui_f32toi8:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: cvttss2si {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: # kill: def $al killed $al killed $eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptoui_f32toi8:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttss2si %xmm0, %eax
|
|
; SSE-X64-NEXT: # kill: def $al killed $al killed $eax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptoui_f32toi8:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: vcvttss2si {{[0-9]+}}(%esp), %eax
|
|
; AVX-X86-NEXT: # kill: def $al killed $al killed $eax
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptoui_f32toi8:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttss2si %xmm0, %eax
|
|
; AVX-X64-NEXT: # kill: def $al killed $al killed $eax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptoui_f32toi8:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: subl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 12
|
|
; CHECK-NEXT: flds {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistps {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movb {{[0-9]+}}(%esp), %al
|
|
; CHECK-NEXT: addl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i8 @llvm.experimental.constrained.fptoui.i8.f32(float %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i8 %result
|
|
}
|
|
|
|
define i16 @fptoui_f32toi16(float %x) #0 {
|
|
; SSE-X86-LABEL: fptoui_f32toi16:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: cvttss2si {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptoui_f32toi16:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttss2si %xmm0, %eax
|
|
; SSE-X64-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptoui_f32toi16:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: vcvttss2si {{[0-9]+}}(%esp), %eax
|
|
; AVX-X86-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptoui_f32toi16:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttss2si %xmm0, %eax
|
|
; AVX-X64-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptoui_f32toi16:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: subl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 12
|
|
; CHECK-NEXT: flds {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw (%esp)
|
|
; CHECK-NEXT: movzwl (%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistpl {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw (%esp)
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; CHECK-NEXT: addl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i16 @llvm.experimental.constrained.fptoui.i16.f32(float %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i16 %result
|
|
}
|
|
|
|
define i32 @fptoui_f32toi32(float %x) #0 {
|
|
; SSE-X86-LABEL: fptoui_f32toi32:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
|
|
; SSE-X86-NEXT: movss {{.*#+}} xmm2 = mem[0],zero,zero,zero
|
|
; SSE-X86-NEXT: comiss %xmm0, %xmm2
|
|
; SSE-X86-NEXT: xorps %xmm1, %xmm1
|
|
; SSE-X86-NEXT: ja .LBB8_2
|
|
; SSE-X86-NEXT: # %bb.1:
|
|
; SSE-X86-NEXT: movaps %xmm2, %xmm1
|
|
; SSE-X86-NEXT: .LBB8_2:
|
|
; SSE-X86-NEXT: setbe %al
|
|
; SSE-X86-NEXT: movzbl %al, %ecx
|
|
; SSE-X86-NEXT: shll $31, %ecx
|
|
; SSE-X86-NEXT: subss %xmm1, %xmm0
|
|
; SSE-X86-NEXT: cvttss2si %xmm0, %eax
|
|
; SSE-X86-NEXT: xorl %ecx, %eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptoui_f32toi32:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttss2si %xmm0, %rax
|
|
; SSE-X64-NEXT: # kill: def $eax killed $eax killed $rax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX1-X86-LABEL: fptoui_f32toi32:
|
|
; AVX1-X86: # %bb.0:
|
|
; AVX1-X86-NEXT: pushl %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; AVX1-X86-NEXT: .cfi_offset %ebp, -8
|
|
; AVX1-X86-NEXT: movl %esp, %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; AVX1-X86-NEXT: andl $-8, %esp
|
|
; AVX1-X86-NEXT: subl $8, %esp
|
|
; AVX1-X86-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
|
|
; AVX1-X86-NEXT: vmovss %xmm0, (%esp)
|
|
; AVX1-X86-NEXT: flds (%esp)
|
|
; AVX1-X86-NEXT: fisttpll (%esp)
|
|
; AVX1-X86-NEXT: wait
|
|
; AVX1-X86-NEXT: movl (%esp), %eax
|
|
; AVX1-X86-NEXT: movl %ebp, %esp
|
|
; AVX1-X86-NEXT: popl %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; AVX1-X86-NEXT: retl
|
|
;
|
|
; AVX1-X64-LABEL: fptoui_f32toi32:
|
|
; AVX1-X64: # %bb.0:
|
|
; AVX1-X64-NEXT: vcvttss2si %xmm0, %rax
|
|
; AVX1-X64-NEXT: # kill: def $eax killed $eax killed $rax
|
|
; AVX1-X64-NEXT: retq
|
|
;
|
|
; AVX512-X86-LABEL: fptoui_f32toi32:
|
|
; AVX512-X86: # %bb.0:
|
|
; AVX512-X86-NEXT: vcvttss2usi {{[0-9]+}}(%esp), %eax
|
|
; AVX512-X86-NEXT: retl
|
|
;
|
|
; AVX512-X64-LABEL: fptoui_f32toi32:
|
|
; AVX512-X64: # %bb.0:
|
|
; AVX512-X64-NEXT: vcvttss2usi %xmm0, %eax
|
|
; AVX512-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptoui_f32toi32:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: pushl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 8
|
|
; CHECK-NEXT: .cfi_offset %ebp, -8
|
|
; CHECK-NEXT: movl %esp, %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_register %ebp
|
|
; CHECK-NEXT: andl $-8, %esp
|
|
; CHECK-NEXT: subl $16, %esp
|
|
; CHECK-NEXT: flds 8(%ebp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistpll {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: movl %ebp, %esp
|
|
; CHECK-NEXT: popl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa %esp, 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i32 @llvm.experimental.constrained.fptoui.i32.f32(float %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i32 %result
|
|
}
|
|
|
|
define i64 @fptoui_f32toi64(float %x) #0 {
|
|
; SSE-X86-LABEL: fptoui_f32toi64:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: pushl %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; SSE-X86-NEXT: .cfi_offset %ebp, -8
|
|
; SSE-X86-NEXT: movl %esp, %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; SSE-X86-NEXT: andl $-8, %esp
|
|
; SSE-X86-NEXT: subl $16, %esp
|
|
; SSE-X86-NEXT: movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
|
|
; SSE-X86-NEXT: movss {{.*#+}} xmm2 = mem[0],zero,zero,zero
|
|
; SSE-X86-NEXT: comiss %xmm0, %xmm2
|
|
; SSE-X86-NEXT: xorps %xmm1, %xmm1
|
|
; SSE-X86-NEXT: ja .LBB9_2
|
|
; SSE-X86-NEXT: # %bb.1:
|
|
; SSE-X86-NEXT: movaps %xmm2, %xmm1
|
|
; SSE-X86-NEXT: .LBB9_2:
|
|
; SSE-X86-NEXT: subss %xmm1, %xmm0
|
|
; SSE-X86-NEXT: movss %xmm0, {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: setbe %al
|
|
; SSE-X86-NEXT: flds {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: wait
|
|
; SSE-X86-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
|
|
; SSE-X86-NEXT: orl $3072, %ecx # imm = 0xC00
|
|
; SSE-X86-NEXT: movw %cx, {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fistpll {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: movzbl %al, %edx
|
|
; SSE-X86-NEXT: shll $31, %edx
|
|
; SSE-X86-NEXT: xorl {{[0-9]+}}(%esp), %edx
|
|
; SSE-X86-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: movl %ebp, %esp
|
|
; SSE-X86-NEXT: popl %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptoui_f32toi64:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: movss {{.*#+}} xmm2 = mem[0],zero,zero,zero
|
|
; SSE-X64-NEXT: comiss %xmm2, %xmm0
|
|
; SSE-X64-NEXT: xorps %xmm1, %xmm1
|
|
; SSE-X64-NEXT: jb .LBB9_2
|
|
; SSE-X64-NEXT: # %bb.1:
|
|
; SSE-X64-NEXT: movaps %xmm2, %xmm1
|
|
; SSE-X64-NEXT: .LBB9_2:
|
|
; SSE-X64-NEXT: subss %xmm1, %xmm0
|
|
; SSE-X64-NEXT: cvttss2si %xmm0, %rcx
|
|
; SSE-X64-NEXT: setae %al
|
|
; SSE-X64-NEXT: movzbl %al, %eax
|
|
; SSE-X64-NEXT: shlq $63, %rax
|
|
; SSE-X64-NEXT: xorq %rcx, %rax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX1-X86-LABEL: fptoui_f32toi64:
|
|
; AVX1-X86: # %bb.0:
|
|
; AVX1-X86-NEXT: pushl %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; AVX1-X86-NEXT: .cfi_offset %ebp, -8
|
|
; AVX1-X86-NEXT: movl %esp, %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; AVX1-X86-NEXT: andl $-8, %esp
|
|
; AVX1-X86-NEXT: subl $8, %esp
|
|
; AVX1-X86-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
|
|
; AVX1-X86-NEXT: vmovss {{.*#+}} xmm1 = mem[0],zero,zero,zero
|
|
; AVX1-X86-NEXT: vcomiss %xmm0, %xmm1
|
|
; AVX1-X86-NEXT: vxorps %xmm2, %xmm2, %xmm2
|
|
; AVX1-X86-NEXT: ja .LBB9_2
|
|
; AVX1-X86-NEXT: # %bb.1:
|
|
; AVX1-X86-NEXT: vmovaps %xmm1, %xmm2
|
|
; AVX1-X86-NEXT: .LBB9_2:
|
|
; AVX1-X86-NEXT: vsubss %xmm2, %xmm0, %xmm0
|
|
; AVX1-X86-NEXT: vmovss %xmm0, (%esp)
|
|
; AVX1-X86-NEXT: flds (%esp)
|
|
; AVX1-X86-NEXT: fisttpll (%esp)
|
|
; AVX1-X86-NEXT: wait
|
|
; AVX1-X86-NEXT: setbe %al
|
|
; AVX1-X86-NEXT: movzbl %al, %edx
|
|
; AVX1-X86-NEXT: shll $31, %edx
|
|
; AVX1-X86-NEXT: xorl {{[0-9]+}}(%esp), %edx
|
|
; AVX1-X86-NEXT: movl (%esp), %eax
|
|
; AVX1-X86-NEXT: movl %ebp, %esp
|
|
; AVX1-X86-NEXT: popl %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; AVX1-X86-NEXT: retl
|
|
;
|
|
; AVX1-X64-LABEL: fptoui_f32toi64:
|
|
; AVX1-X64: # %bb.0:
|
|
; AVX1-X64-NEXT: vmovss {{.*#+}} xmm1 = mem[0],zero,zero,zero
|
|
; AVX1-X64-NEXT: vcomiss %xmm1, %xmm0
|
|
; AVX1-X64-NEXT: vxorps %xmm2, %xmm2, %xmm2
|
|
; AVX1-X64-NEXT: jb .LBB9_2
|
|
; AVX1-X64-NEXT: # %bb.1:
|
|
; AVX1-X64-NEXT: vmovaps %xmm1, %xmm2
|
|
; AVX1-X64-NEXT: .LBB9_2:
|
|
; AVX1-X64-NEXT: vsubss %xmm2, %xmm0, %xmm0
|
|
; AVX1-X64-NEXT: vcvttss2si %xmm0, %rcx
|
|
; AVX1-X64-NEXT: setae %al
|
|
; AVX1-X64-NEXT: movzbl %al, %eax
|
|
; AVX1-X64-NEXT: shlq $63, %rax
|
|
; AVX1-X64-NEXT: xorq %rcx, %rax
|
|
; AVX1-X64-NEXT: retq
|
|
;
|
|
; AVX512-X86-LABEL: fptoui_f32toi64:
|
|
; AVX512-X86: # %bb.0:
|
|
; AVX512-X86-NEXT: pushl %ebp
|
|
; AVX512-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; AVX512-X86-NEXT: .cfi_offset %ebp, -8
|
|
; AVX512-X86-NEXT: movl %esp, %ebp
|
|
; AVX512-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; AVX512-X86-NEXT: andl $-8, %esp
|
|
; AVX512-X86-NEXT: subl $8, %esp
|
|
; AVX512-X86-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
|
|
; AVX512-X86-NEXT: vmovss {{.*#+}} xmm1 = mem[0],zero,zero,zero
|
|
; AVX512-X86-NEXT: xorl %edx, %edx
|
|
; AVX512-X86-NEXT: vcomiss %xmm0, %xmm1
|
|
; AVX512-X86-NEXT: seta %al
|
|
; AVX512-X86-NEXT: kmovw %eax, %k1
|
|
; AVX512-X86-NEXT: vxorps %xmm2, %xmm2, %xmm2
|
|
; AVX512-X86-NEXT: vmovss %xmm2, %xmm1, %xmm1 {%k1}
|
|
; AVX512-X86-NEXT: vsubss %xmm1, %xmm0, %xmm0
|
|
; AVX512-X86-NEXT: vmovss %xmm0, (%esp)
|
|
; AVX512-X86-NEXT: flds (%esp)
|
|
; AVX512-X86-NEXT: fisttpll (%esp)
|
|
; AVX512-X86-NEXT: wait
|
|
; AVX512-X86-NEXT: setbe %dl
|
|
; AVX512-X86-NEXT: shll $31, %edx
|
|
; AVX512-X86-NEXT: xorl {{[0-9]+}}(%esp), %edx
|
|
; AVX512-X86-NEXT: movl (%esp), %eax
|
|
; AVX512-X86-NEXT: movl %ebp, %esp
|
|
; AVX512-X86-NEXT: popl %ebp
|
|
; AVX512-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; AVX512-X86-NEXT: retl
|
|
;
|
|
; AVX512-X64-LABEL: fptoui_f32toi64:
|
|
; AVX512-X64: # %bb.0:
|
|
; AVX512-X64-NEXT: vcvttss2usi %xmm0, %rax
|
|
; AVX512-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptoui_f32toi64:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: pushl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 8
|
|
; CHECK-NEXT: .cfi_offset %ebp, -8
|
|
; CHECK-NEXT: movl %esp, %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_register %ebp
|
|
; CHECK-NEXT: andl $-8, %esp
|
|
; CHECK-NEXT: subl $16, %esp
|
|
; CHECK-NEXT: flds 8(%ebp)
|
|
; CHECK-NEXT: flds {{\.LCPI.*}}
|
|
; CHECK-NEXT: fcom %st(1)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstsw %ax
|
|
; CHECK-NEXT: xorl %edx, %edx
|
|
; CHECK-NEXT: # kill: def $ah killed $ah killed $ax
|
|
; CHECK-NEXT: sahf
|
|
; CHECK-NEXT: setbe %al
|
|
; CHECK-NEXT: fldz
|
|
; CHECK-NEXT: ja .LBB9_2
|
|
; CHECK-NEXT: # %bb.1:
|
|
; CHECK-NEXT: fstp %st(0)
|
|
; CHECK-NEXT: fldz
|
|
; CHECK-NEXT: fxch %st(1)
|
|
; CHECK-NEXT: .LBB9_2:
|
|
; CHECK-NEXT: fstp %st(1)
|
|
; CHECK-NEXT: fsubrp %st, %st(1)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
|
|
; CHECK-NEXT: orl $3072, %ecx # imm = 0xC00
|
|
; CHECK-NEXT: movw %cx, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistpll {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movb %al, %dl
|
|
; CHECK-NEXT: shll $31, %edx
|
|
; CHECK-NEXT: xorl {{[0-9]+}}(%esp), %edx
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: movl %ebp, %esp
|
|
; CHECK-NEXT: popl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa %esp, 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i64 @llvm.experimental.constrained.fptoui.i64.f32(float %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i64 %result
|
|
}
|
|
|
|
define i8 @fptosi_f64toi8(double %x) #0 {
|
|
; SSE-X86-LABEL: fptosi_f64toi8:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: cvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: # kill: def $al killed $al killed $eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptosi_f64toi8:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttsd2si %xmm0, %eax
|
|
; SSE-X64-NEXT: # kill: def $al killed $al killed $eax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptosi_f64toi8:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: vcvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; AVX-X86-NEXT: # kill: def $al killed $al killed $eax
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptosi_f64toi8:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttsd2si %xmm0, %eax
|
|
; AVX-X64-NEXT: # kill: def $al killed $al killed $eax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptosi_f64toi8:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: subl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 12
|
|
; CHECK-NEXT: fldl {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistps {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movb {{[0-9]+}}(%esp), %al
|
|
; CHECK-NEXT: addl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i8 @llvm.experimental.constrained.fptosi.i8.f64(double %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i8 %result
|
|
}
|
|
|
|
define i16 @fptosi_f64toi16(double %x) #0 {
|
|
; SSE-X86-LABEL: fptosi_f64toi16:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: cvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptosi_f64toi16:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttsd2si %xmm0, %eax
|
|
; SSE-X64-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptosi_f64toi16:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: vcvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; AVX-X86-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptosi_f64toi16:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttsd2si %xmm0, %eax
|
|
; AVX-X64-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptosi_f64toi16:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: subl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 12
|
|
; CHECK-NEXT: fldl {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistps {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: addl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i16 @llvm.experimental.constrained.fptosi.i16.f64(double %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i16 %result
|
|
}
|
|
|
|
define i32 @fptosi_f64toi32(double %x) #0 {
|
|
; SSE-X86-LABEL: fptosi_f64toi32:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: cvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptosi_f64toi32:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttsd2si %xmm0, %eax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptosi_f64toi32:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: vcvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptosi_f64toi32:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttsd2si %xmm0, %eax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptosi_f64toi32:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: subl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 12
|
|
; CHECK-NEXT: fldl {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw (%esp)
|
|
; CHECK-NEXT: movzwl (%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistpl {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw (%esp)
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: addl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i32 @llvm.experimental.constrained.fptosi.i32.f64(double %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i32 %result
|
|
}
|
|
|
|
define i64 @fptosi_f64toi64(double %x) #0 {
|
|
; SSE-X86-LABEL: fptosi_f64toi64:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: pushl %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; SSE-X86-NEXT: .cfi_offset %ebp, -8
|
|
; SSE-X86-NEXT: movl %esp, %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; SSE-X86-NEXT: andl $-8, %esp
|
|
; SSE-X86-NEXT: subl $16, %esp
|
|
; SSE-X86-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
|
|
; SSE-X86-NEXT: movsd %xmm0, {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fldl {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: wait
|
|
; SSE-X86-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; SSE-X86-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fistpll {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: movl {{[0-9]+}}(%esp), %edx
|
|
; SSE-X86-NEXT: movl %ebp, %esp
|
|
; SSE-X86-NEXT: popl %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptosi_f64toi64:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttsd2si %xmm0, %rax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptosi_f64toi64:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: pushl %ebp
|
|
; AVX-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; AVX-X86-NEXT: .cfi_offset %ebp, -8
|
|
; AVX-X86-NEXT: movl %esp, %ebp
|
|
; AVX-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; AVX-X86-NEXT: andl $-8, %esp
|
|
; AVX-X86-NEXT: subl $8, %esp
|
|
; AVX-X86-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
|
|
; AVX-X86-NEXT: vmovsd %xmm0, (%esp)
|
|
; AVX-X86-NEXT: fldl (%esp)
|
|
; AVX-X86-NEXT: fisttpll (%esp)
|
|
; AVX-X86-NEXT: wait
|
|
; AVX-X86-NEXT: movl (%esp), %eax
|
|
; AVX-X86-NEXT: movl {{[0-9]+}}(%esp), %edx
|
|
; AVX-X86-NEXT: movl %ebp, %esp
|
|
; AVX-X86-NEXT: popl %ebp
|
|
; AVX-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptosi_f64toi64:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttsd2si %xmm0, %rax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptosi_f64toi64:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: pushl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 8
|
|
; CHECK-NEXT: .cfi_offset %ebp, -8
|
|
; CHECK-NEXT: movl %esp, %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_register %ebp
|
|
; CHECK-NEXT: andl $-8, %esp
|
|
; CHECK-NEXT: subl $16, %esp
|
|
; CHECK-NEXT: fldl 8(%ebp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistpll {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %edx
|
|
; CHECK-NEXT: movl %ebp, %esp
|
|
; CHECK-NEXT: popl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa %esp, 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i64 @llvm.experimental.constrained.fptosi.i64.f64(double %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i64 %result
|
|
}
|
|
|
|
define i1 @fptoui_f64toi1(double %x) #0 {
|
|
; SSE-X86-LABEL: fptoui_f64toi1:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: cvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: # kill: def $al killed $al killed $eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptoui_f64toi1:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttsd2si %xmm0, %eax
|
|
; SSE-X64-NEXT: # kill: def $al killed $al killed $eax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptoui_f64toi1:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: vcvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; AVX-X86-NEXT: # kill: def $al killed $al killed $eax
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptoui_f64toi1:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttsd2si %xmm0, %eax
|
|
; AVX-X64-NEXT: # kill: def $al killed $al killed $eax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptoui_f64toi1:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: subl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 12
|
|
; CHECK-NEXT: fldl {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistps {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movb {{[0-9]+}}(%esp), %al
|
|
; CHECK-NEXT: addl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i1 @llvm.experimental.constrained.fptoui.i1.f64(double %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i1 %result
|
|
}
|
|
|
|
define i8 @fptoui_f64toi8(double %x) #0 {
|
|
; SSE-X86-LABEL: fptoui_f64toi8:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: cvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: # kill: def $al killed $al killed $eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptoui_f64toi8:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttsd2si %xmm0, %eax
|
|
; SSE-X64-NEXT: # kill: def $al killed $al killed $eax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptoui_f64toi8:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: vcvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; AVX-X86-NEXT: # kill: def $al killed $al killed $eax
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptoui_f64toi8:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttsd2si %xmm0, %eax
|
|
; AVX-X64-NEXT: # kill: def $al killed $al killed $eax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptoui_f64toi8:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: subl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 12
|
|
; CHECK-NEXT: fldl {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistps {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movb {{[0-9]+}}(%esp), %al
|
|
; CHECK-NEXT: addl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i8 @llvm.experimental.constrained.fptoui.i8.f64(double %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i8 %result
|
|
}
|
|
|
|
define i16 @fptoui_f64toi16(double %x) #0 {
|
|
; SSE-X86-LABEL: fptoui_f64toi16:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: cvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptoui_f64toi16:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttsd2si %xmm0, %eax
|
|
; SSE-X64-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX-X86-LABEL: fptoui_f64toi16:
|
|
; AVX-X86: # %bb.0:
|
|
; AVX-X86-NEXT: vcvttsd2si {{[0-9]+}}(%esp), %eax
|
|
; AVX-X86-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; AVX-X86-NEXT: retl
|
|
;
|
|
; AVX-X64-LABEL: fptoui_f64toi16:
|
|
; AVX-X64: # %bb.0:
|
|
; AVX-X64-NEXT: vcvttsd2si %xmm0, %eax
|
|
; AVX-X64-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; AVX-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptoui_f64toi16:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: subl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 12
|
|
; CHECK-NEXT: fldl {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw (%esp)
|
|
; CHECK-NEXT: movzwl (%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistpl {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw (%esp)
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: # kill: def $ax killed $ax killed $eax
|
|
; CHECK-NEXT: addl $8, %esp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i16 @llvm.experimental.constrained.fptoui.i16.f64(double %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i16 %result
|
|
}
|
|
|
|
define i32 @fptoui_f64toi32(double %x) #0 {
|
|
; SSE-X86-LABEL: fptoui_f64toi32:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
|
|
; SSE-X86-NEXT: movsd {{.*#+}} xmm2 = mem[0],zero
|
|
; SSE-X86-NEXT: comisd %xmm0, %xmm2
|
|
; SSE-X86-NEXT: xorpd %xmm1, %xmm1
|
|
; SSE-X86-NEXT: ja .LBB17_2
|
|
; SSE-X86-NEXT: # %bb.1:
|
|
; SSE-X86-NEXT: movapd %xmm2, %xmm1
|
|
; SSE-X86-NEXT: .LBB17_2:
|
|
; SSE-X86-NEXT: setbe %al
|
|
; SSE-X86-NEXT: movzbl %al, %ecx
|
|
; SSE-X86-NEXT: shll $31, %ecx
|
|
; SSE-X86-NEXT: subsd %xmm1, %xmm0
|
|
; SSE-X86-NEXT: cvttsd2si %xmm0, %eax
|
|
; SSE-X86-NEXT: xorl %ecx, %eax
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptoui_f64toi32:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: cvttsd2si %xmm0, %rax
|
|
; SSE-X64-NEXT: # kill: def $eax killed $eax killed $rax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX1-X86-LABEL: fptoui_f64toi32:
|
|
; AVX1-X86: # %bb.0:
|
|
; AVX1-X86-NEXT: pushl %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; AVX1-X86-NEXT: .cfi_offset %ebp, -8
|
|
; AVX1-X86-NEXT: movl %esp, %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; AVX1-X86-NEXT: andl $-8, %esp
|
|
; AVX1-X86-NEXT: subl $8, %esp
|
|
; AVX1-X86-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
|
|
; AVX1-X86-NEXT: vmovsd %xmm0, (%esp)
|
|
; AVX1-X86-NEXT: fldl (%esp)
|
|
; AVX1-X86-NEXT: fisttpll (%esp)
|
|
; AVX1-X86-NEXT: wait
|
|
; AVX1-X86-NEXT: movl (%esp), %eax
|
|
; AVX1-X86-NEXT: movl %ebp, %esp
|
|
; AVX1-X86-NEXT: popl %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; AVX1-X86-NEXT: retl
|
|
;
|
|
; AVX1-X64-LABEL: fptoui_f64toi32:
|
|
; AVX1-X64: # %bb.0:
|
|
; AVX1-X64-NEXT: vcvttsd2si %xmm0, %rax
|
|
; AVX1-X64-NEXT: # kill: def $eax killed $eax killed $rax
|
|
; AVX1-X64-NEXT: retq
|
|
;
|
|
; AVX512-X86-LABEL: fptoui_f64toi32:
|
|
; AVX512-X86: # %bb.0:
|
|
; AVX512-X86-NEXT: vcvttsd2usi {{[0-9]+}}(%esp), %eax
|
|
; AVX512-X86-NEXT: retl
|
|
;
|
|
; AVX512-X64-LABEL: fptoui_f64toi32:
|
|
; AVX512-X64: # %bb.0:
|
|
; AVX512-X64-NEXT: vcvttsd2usi %xmm0, %eax
|
|
; AVX512-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptoui_f64toi32:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: pushl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 8
|
|
; CHECK-NEXT: .cfi_offset %ebp, -8
|
|
; CHECK-NEXT: movl %esp, %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_register %ebp
|
|
; CHECK-NEXT: andl $-8, %esp
|
|
; CHECK-NEXT: subl $16, %esp
|
|
; CHECK-NEXT: fldl 8(%ebp)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: orl $3072, %eax # imm = 0xC00
|
|
; CHECK-NEXT: movw %ax, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistpll {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: movl %ebp, %esp
|
|
; CHECK-NEXT: popl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa %esp, 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i32 @llvm.experimental.constrained.fptoui.i32.f64(double %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i32 %result
|
|
}
|
|
|
|
define i64 @fptoui_f64toi64(double %x) #0 {
|
|
; SSE-X86-LABEL: fptoui_f64toi64:
|
|
; SSE-X86: # %bb.0:
|
|
; SSE-X86-NEXT: pushl %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; SSE-X86-NEXT: .cfi_offset %ebp, -8
|
|
; SSE-X86-NEXT: movl %esp, %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; SSE-X86-NEXT: andl $-8, %esp
|
|
; SSE-X86-NEXT: subl $16, %esp
|
|
; SSE-X86-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
|
|
; SSE-X86-NEXT: movsd {{.*#+}} xmm2 = mem[0],zero
|
|
; SSE-X86-NEXT: comisd %xmm0, %xmm2
|
|
; SSE-X86-NEXT: xorpd %xmm1, %xmm1
|
|
; SSE-X86-NEXT: ja .LBB18_2
|
|
; SSE-X86-NEXT: # %bb.1:
|
|
; SSE-X86-NEXT: movapd %xmm2, %xmm1
|
|
; SSE-X86-NEXT: .LBB18_2:
|
|
; SSE-X86-NEXT: subsd %xmm1, %xmm0
|
|
; SSE-X86-NEXT: movsd %xmm0, {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: setbe %al
|
|
; SSE-X86-NEXT: fldl {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: wait
|
|
; SSE-X86-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
|
|
; SSE-X86-NEXT: orl $3072, %ecx # imm = 0xC00
|
|
; SSE-X86-NEXT: movw %cx, {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fistpll {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; SSE-X86-NEXT: movzbl %al, %edx
|
|
; SSE-X86-NEXT: shll $31, %edx
|
|
; SSE-X86-NEXT: xorl {{[0-9]+}}(%esp), %edx
|
|
; SSE-X86-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; SSE-X86-NEXT: movl %ebp, %esp
|
|
; SSE-X86-NEXT: popl %ebp
|
|
; SSE-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; SSE-X86-NEXT: retl
|
|
;
|
|
; SSE-X64-LABEL: fptoui_f64toi64:
|
|
; SSE-X64: # %bb.0:
|
|
; SSE-X64-NEXT: movsd {{.*#+}} xmm2 = mem[0],zero
|
|
; SSE-X64-NEXT: comisd %xmm2, %xmm0
|
|
; SSE-X64-NEXT: xorpd %xmm1, %xmm1
|
|
; SSE-X64-NEXT: jb .LBB18_2
|
|
; SSE-X64-NEXT: # %bb.1:
|
|
; SSE-X64-NEXT: movapd %xmm2, %xmm1
|
|
; SSE-X64-NEXT: .LBB18_2:
|
|
; SSE-X64-NEXT: subsd %xmm1, %xmm0
|
|
; SSE-X64-NEXT: cvttsd2si %xmm0, %rcx
|
|
; SSE-X64-NEXT: setae %al
|
|
; SSE-X64-NEXT: movzbl %al, %eax
|
|
; SSE-X64-NEXT: shlq $63, %rax
|
|
; SSE-X64-NEXT: xorq %rcx, %rax
|
|
; SSE-X64-NEXT: retq
|
|
;
|
|
; AVX1-X86-LABEL: fptoui_f64toi64:
|
|
; AVX1-X86: # %bb.0:
|
|
; AVX1-X86-NEXT: pushl %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; AVX1-X86-NEXT: .cfi_offset %ebp, -8
|
|
; AVX1-X86-NEXT: movl %esp, %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; AVX1-X86-NEXT: andl $-8, %esp
|
|
; AVX1-X86-NEXT: subl $8, %esp
|
|
; AVX1-X86-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
|
|
; AVX1-X86-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
|
|
; AVX1-X86-NEXT: vcomisd %xmm0, %xmm1
|
|
; AVX1-X86-NEXT: vxorpd %xmm2, %xmm2, %xmm2
|
|
; AVX1-X86-NEXT: ja .LBB18_2
|
|
; AVX1-X86-NEXT: # %bb.1:
|
|
; AVX1-X86-NEXT: vmovapd %xmm1, %xmm2
|
|
; AVX1-X86-NEXT: .LBB18_2:
|
|
; AVX1-X86-NEXT: vsubsd %xmm2, %xmm0, %xmm0
|
|
; AVX1-X86-NEXT: vmovsd %xmm0, (%esp)
|
|
; AVX1-X86-NEXT: fldl (%esp)
|
|
; AVX1-X86-NEXT: fisttpll (%esp)
|
|
; AVX1-X86-NEXT: wait
|
|
; AVX1-X86-NEXT: setbe %al
|
|
; AVX1-X86-NEXT: movzbl %al, %edx
|
|
; AVX1-X86-NEXT: shll $31, %edx
|
|
; AVX1-X86-NEXT: xorl {{[0-9]+}}(%esp), %edx
|
|
; AVX1-X86-NEXT: movl (%esp), %eax
|
|
; AVX1-X86-NEXT: movl %ebp, %esp
|
|
; AVX1-X86-NEXT: popl %ebp
|
|
; AVX1-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; AVX1-X86-NEXT: retl
|
|
;
|
|
; AVX1-X64-LABEL: fptoui_f64toi64:
|
|
; AVX1-X64: # %bb.0:
|
|
; AVX1-X64-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
|
|
; AVX1-X64-NEXT: vcomisd %xmm1, %xmm0
|
|
; AVX1-X64-NEXT: vxorpd %xmm2, %xmm2, %xmm2
|
|
; AVX1-X64-NEXT: jb .LBB18_2
|
|
; AVX1-X64-NEXT: # %bb.1:
|
|
; AVX1-X64-NEXT: vmovapd %xmm1, %xmm2
|
|
; AVX1-X64-NEXT: .LBB18_2:
|
|
; AVX1-X64-NEXT: vsubsd %xmm2, %xmm0, %xmm0
|
|
; AVX1-X64-NEXT: vcvttsd2si %xmm0, %rcx
|
|
; AVX1-X64-NEXT: setae %al
|
|
; AVX1-X64-NEXT: movzbl %al, %eax
|
|
; AVX1-X64-NEXT: shlq $63, %rax
|
|
; AVX1-X64-NEXT: xorq %rcx, %rax
|
|
; AVX1-X64-NEXT: retq
|
|
;
|
|
; AVX512-X86-LABEL: fptoui_f64toi64:
|
|
; AVX512-X86: # %bb.0:
|
|
; AVX512-X86-NEXT: pushl %ebp
|
|
; AVX512-X86-NEXT: .cfi_def_cfa_offset 8
|
|
; AVX512-X86-NEXT: .cfi_offset %ebp, -8
|
|
; AVX512-X86-NEXT: movl %esp, %ebp
|
|
; AVX512-X86-NEXT: .cfi_def_cfa_register %ebp
|
|
; AVX512-X86-NEXT: andl $-8, %esp
|
|
; AVX512-X86-NEXT: subl $8, %esp
|
|
; AVX512-X86-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
|
|
; AVX512-X86-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
|
|
; AVX512-X86-NEXT: xorl %edx, %edx
|
|
; AVX512-X86-NEXT: vcomisd %xmm0, %xmm1
|
|
; AVX512-X86-NEXT: seta %al
|
|
; AVX512-X86-NEXT: kmovw %eax, %k1
|
|
; AVX512-X86-NEXT: vxorpd %xmm2, %xmm2, %xmm2
|
|
; AVX512-X86-NEXT: vmovsd %xmm2, %xmm1, %xmm1 {%k1}
|
|
; AVX512-X86-NEXT: vsubsd %xmm1, %xmm0, %xmm0
|
|
; AVX512-X86-NEXT: vmovsd %xmm0, (%esp)
|
|
; AVX512-X86-NEXT: fldl (%esp)
|
|
; AVX512-X86-NEXT: fisttpll (%esp)
|
|
; AVX512-X86-NEXT: wait
|
|
; AVX512-X86-NEXT: setbe %dl
|
|
; AVX512-X86-NEXT: shll $31, %edx
|
|
; AVX512-X86-NEXT: xorl {{[0-9]+}}(%esp), %edx
|
|
; AVX512-X86-NEXT: movl (%esp), %eax
|
|
; AVX512-X86-NEXT: movl %ebp, %esp
|
|
; AVX512-X86-NEXT: popl %ebp
|
|
; AVX512-X86-NEXT: .cfi_def_cfa %esp, 4
|
|
; AVX512-X86-NEXT: retl
|
|
;
|
|
; AVX512-X64-LABEL: fptoui_f64toi64:
|
|
; AVX512-X64: # %bb.0:
|
|
; AVX512-X64-NEXT: vcvttsd2usi %xmm0, %rax
|
|
; AVX512-X64-NEXT: retq
|
|
;
|
|
; CHECK-LABEL: fptoui_f64toi64:
|
|
; CHECK: # %bb.0:
|
|
; CHECK-NEXT: pushl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_offset 8
|
|
; CHECK-NEXT: .cfi_offset %ebp, -8
|
|
; CHECK-NEXT: movl %esp, %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa_register %ebp
|
|
; CHECK-NEXT: andl $-8, %esp
|
|
; CHECK-NEXT: subl $16, %esp
|
|
; CHECK-NEXT: fldl 8(%ebp)
|
|
; CHECK-NEXT: flds {{\.LCPI.*}}
|
|
; CHECK-NEXT: fcom %st(1)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstsw %ax
|
|
; CHECK-NEXT: xorl %edx, %edx
|
|
; CHECK-NEXT: # kill: def $ah killed $ah killed $ax
|
|
; CHECK-NEXT: sahf
|
|
; CHECK-NEXT: setbe %al
|
|
; CHECK-NEXT: fldz
|
|
; CHECK-NEXT: ja .LBB18_2
|
|
; CHECK-NEXT: # %bb.1:
|
|
; CHECK-NEXT: fstp %st(0)
|
|
; CHECK-NEXT: fldz
|
|
; CHECK-NEXT: fxch %st(1)
|
|
; CHECK-NEXT: .LBB18_2:
|
|
; CHECK-NEXT: fstp %st(1)
|
|
; CHECK-NEXT: fsubrp %st, %st(1)
|
|
; CHECK-NEXT: wait
|
|
; CHECK-NEXT: fnstcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
|
|
; CHECK-NEXT: orl $3072, %ecx # imm = 0xC00
|
|
; CHECK-NEXT: movw %cx, {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fistpll {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: fldcw {{[0-9]+}}(%esp)
|
|
; CHECK-NEXT: movb %al, %dl
|
|
; CHECK-NEXT: shll $31, %edx
|
|
; CHECK-NEXT: xorl {{[0-9]+}}(%esp), %edx
|
|
; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
|
|
; CHECK-NEXT: movl %ebp, %esp
|
|
; CHECK-NEXT: popl %ebp
|
|
; CHECK-NEXT: .cfi_def_cfa %esp, 4
|
|
; CHECK-NEXT: retl
|
|
%result = call i64 @llvm.experimental.constrained.fptoui.i64.f64(double %x,
|
|
metadata !"fpexcept.strict") #0
|
|
ret i64 %result
|
|
}

attributes #0 = { strictfp }