# RUN: llc -march=amdgcn -mcpu=gfx700 -verify-machineinstrs -verify-machine-dom-info --run-pass=si-fix-sgpr-copies -o - %s | FileCheck %s --check-prefixes=W64,ADDR64
# RUN: llc -march=amdgcn -mcpu=gfx900 -verify-machineinstrs -verify-machine-dom-info --run-pass=si-fix-sgpr-copies -o - %s | FileCheck %s --check-prefixes=W64,W64-NO-ADDR64
# RUN: llc -march=amdgcn -mcpu=gfx1010 -mattr=-wavefrontsize32,+wavefrontsize64 -verify-machineinstrs -verify-machine-dom-info --run-pass=si-fix-sgpr-copies -o - %s | FileCheck %s --check-prefixes=W64,W64-NO-ADDR64
# RUN: llc -march=amdgcn -mcpu=gfx1010 -mattr=+wavefrontsize32,-wavefrontsize64 -verify-machineinstrs -verify-machine-dom-info --run-pass=si-fix-sgpr-copies -o - %s | FileCheck %s --check-prefixes=W32

# Test that we correctly legalize VGPR Rsrc operands in MUBUF instructions.
#
# On ADDR64 hardware we optimize the _ADDR64 and _OFFSET cases to avoid
# needing a waterfall loop. For all other instruction variants, and when we
# are on non-ADDR64 hardware, we emit a waterfall loop.
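# As an informal sketch (not matched by FileCheck; the exact sequences are
# checked by the W64/W32 patterns below), the waterfall loop serializes a
# divergent Rsrc like this: each iteration reads the four Rsrc dwords of the
# first active lane into SGPRs with V_READFIRSTLANE_B32, compares them
# against the per-lane VGPR Rsrc (V_CMP_EQ_U64 plus S_AND), narrows exec to
# the matching lanes with S_AND_SAVEEXEC, issues the MUBUF instruction with
# the now-uniform SGPR Rsrc, removes the serviced lanes from exec with
# S_XOR_term, and branches back via SI_WATERFALL_LOOP until exec is empty;
# the saved exec mask is then restored.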
# W64-LABEL: name: idxen
# W64-LABEL: bb.0:
# W64-NEXT: successors: %bb.1({{.*}})
# W64: [[VRSRC:%[0-9]+]]:vreg_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
# W64: [[SAVEEXEC:%[0-9]+]]:sreg_64_xexec = S_MOV_B64 $exec
# W64-LABEL: bb.1:
# W64-NEXT: successors: %bb.1({{.*}}), %bb.2({{.*}})
# W64: [[SRSRC0:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub0, implicit $exec
# W64: [[SRSRC1:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub1, implicit $exec
# W64: [[STMP0:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1
# W64: [[CMP0:%[0-9]+]]:sreg_64_xexec = V_CMP_EQ_U64_e64 [[STMP0]], [[VRSRC]].sub0_sub1, implicit $exec
# W64: [[SRSRC2:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub2, implicit $exec
# W64: [[SRSRC3:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub3, implicit $exec
# W64: [[STMP1:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC2]], %subreg.sub0, [[SRSRC3]], %subreg.sub1
# W64: [[CMP1:%[0-9]+]]:sreg_64_xexec = V_CMP_EQ_U64_e64 [[STMP1]], [[VRSRC]].sub2_sub3, implicit $exec
# W64: [[CMP:%[0-9]+]]:sreg_64_xexec = S_AND_B64 [[CMP0]], [[CMP1]], implicit-def $scc
# W64: [[SRSRC:%[0-9]+]]:sgpr_128 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1, [[SRSRC2]], %subreg.sub2, [[SRSRC3]], %subreg.sub3
# W64: [[TMPEXEC:%[0-9]+]]:sreg_64_xexec = S_AND_SAVEEXEC_B64 killed [[CMP]], implicit-def $exec, implicit-def $scc, implicit $exec
# W64: {{[0-9]+}}:vgpr_32 = BUFFER_LOAD_FORMAT_X_IDXEN %4, killed [[SRSRC]], 0, 0, 0, 0, 0, implicit $exec
# W64: $exec = S_XOR_B64_term $exec, [[TMPEXEC]], implicit-def $scc
# W64: SI_WATERFALL_LOOP %bb.1, implicit $exec
# W64-LABEL: bb.2:
# W64: $exec = S_MOV_B64 [[SAVEEXEC]]

# W32-LABEL: name: idxen
# W32-LABEL: bb.0:
# W32-NEXT: successors: %bb.1({{.*}})
# W32: [[VRSRC:%[0-9]+]]:vreg_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
# W32: [[SAVEEXEC:%[0-9]+]]:sreg_32_xm0_xexec = S_MOV_B32 $exec_lo
# W32-LABEL: bb.1:
# W32-NEXT: successors: %bb.1({{.*}}), %bb.2({{.*}})
# W32: [[SRSRC0:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub0, implicit $exec
# W32: [[SRSRC1:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub1, implicit $exec
# W32: [[STMP0:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1
# W32: [[CMP0:%[0-9]+]]:sreg_32_xm0_xexec = V_CMP_EQ_U64_e64 [[STMP0]], [[VRSRC]].sub0_sub1, implicit $exec
# W32: [[SRSRC2:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub2, implicit $exec
# W32: [[SRSRC3:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub3, implicit $exec
# W32: [[STMP1:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC2]], %subreg.sub0, [[SRSRC3]], %subreg.sub1
# W32: [[CMP1:%[0-9]+]]:sreg_32_xm0_xexec = V_CMP_EQ_U64_e64 [[STMP1]], [[VRSRC]].sub2_sub3, implicit $exec
# W32: [[CMP:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 [[CMP0]], [[CMP1]], implicit-def $scc
# W32: [[SRSRC:%[0-9]+]]:sgpr_128 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1, [[SRSRC2]], %subreg.sub2, [[SRSRC3]], %subreg.sub3
# W32: [[TMPEXEC:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_SAVEEXEC_B32 killed [[CMP]], implicit-def $exec, implicit-def $scc, implicit $exec
# W32: {{[0-9]+}}:vgpr_32 = BUFFER_LOAD_FORMAT_X_IDXEN %4, killed [[SRSRC]], 0, 0, 0, 0, 0, implicit $exec
# TODO: S_XOR_B32_term should be `implicit-def $scc`
# W32: $exec_lo = S_XOR_B32_term $exec_lo, [[TMPEXEC]]
# W32: SI_WATERFALL_LOOP %bb.1, implicit $exec
# W32-LABEL: bb.2:
# W32: $exec_lo = S_MOV_B32 [[SAVEEXEC]]
---
name: idxen
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$vgpr1', virtual-reg: '%1' }
  - { reg: '$vgpr2', virtual-reg: '%2' }
  - { reg: '$vgpr3', virtual-reg: '%3' }
  - { reg: '$vgpr4', virtual-reg: '%4' }
  - { reg: '$sgpr30_sgpr31', virtual-reg: '%5' }
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $sgpr30_sgpr31
    %5:sreg_64 = COPY $sgpr30_sgpr31
    %4:vgpr_32 = COPY $vgpr4
    %3:vgpr_32 = COPY $vgpr3
    %2:vgpr_32 = COPY $vgpr2
    %1:vgpr_32 = COPY $vgpr1
    %0:vgpr_32 = COPY $vgpr0
    %6:sgpr_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
    %7:vgpr_32 = BUFFER_LOAD_FORMAT_X_IDXEN %4, killed %6, 0, 0, 0, 0, 0, implicit $exec
    $sgpr30_sgpr31 = COPY %5
    $vgpr0 = COPY %7
    S_SETPC_B64_return $sgpr30_sgpr31, implicit $vgpr0
...
# W64-LABEL: name: offen
# W64-LABEL: bb.0:
# W64-NEXT: successors: %bb.1({{.*}})
# W64: [[VRSRC:%[0-9]+]]:vreg_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
# W64: [[SAVEEXEC:%[0-9]+]]:sreg_64_xexec = S_MOV_B64 $exec
# W64-LABEL: bb.1:
# W64-NEXT: successors: %bb.1({{.*}}), %bb.2({{.*}})
# W64: [[SRSRC0:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub0, implicit $exec
# W64: [[SRSRC1:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub1, implicit $exec
# W64: [[STMP0:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1
# W64: [[CMP0:%[0-9]+]]:sreg_64_xexec = V_CMP_EQ_U64_e64 [[STMP0]], [[VRSRC]].sub0_sub1, implicit $exec
# W64: [[SRSRC2:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub2, implicit $exec
# W64: [[SRSRC3:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub3, implicit $exec
# W64: [[STMP1:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC2]], %subreg.sub0, [[SRSRC3]], %subreg.sub1
# W64: [[CMP1:%[0-9]+]]:sreg_64_xexec = V_CMP_EQ_U64_e64 [[STMP1]], [[VRSRC]].sub2_sub3, implicit $exec
# W64: [[CMP:%[0-9]+]]:sreg_64_xexec = S_AND_B64 [[CMP0]], [[CMP1]], implicit-def $scc
# W64: [[SRSRC:%[0-9]+]]:sgpr_128 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1, [[SRSRC2]], %subreg.sub2, [[SRSRC3]], %subreg.sub3
# W64: [[TMPEXEC:%[0-9]+]]:sreg_64_xexec = S_AND_SAVEEXEC_B64 killed [[CMP]], implicit-def $exec, implicit-def $scc, implicit $exec
# W64: {{[0-9]+}}:vgpr_32 = BUFFER_LOAD_FORMAT_X_OFFEN %4, killed [[SRSRC]], 0, 0, 0, 0, 0, implicit $exec
# W64: $exec = S_XOR_B64_term $exec, [[TMPEXEC]], implicit-def $scc
# W64: SI_WATERFALL_LOOP %bb.1, implicit $exec
# W64-LABEL: bb.2:
# W64: $exec = S_MOV_B64 [[SAVEEXEC]]

# W32-LABEL: name: offen
# W32-LABEL: bb.0:
# W32-NEXT: successors: %bb.1({{.*}})
# W32: [[VRSRC:%[0-9]+]]:vreg_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
# W32: [[SAVEEXEC:%[0-9]+]]:sreg_32_xm0_xexec = S_MOV_B32 $exec_lo
# W32-LABEL: bb.1:
# W32-NEXT: successors: %bb.1({{.*}}), %bb.2({{.*}})
# W32: [[SRSRC0:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub0, implicit $exec
# W32: [[SRSRC1:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub1, implicit $exec
# W32: [[STMP0:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1
# W32: [[CMP0:%[0-9]+]]:sreg_32_xm0_xexec = V_CMP_EQ_U64_e64 [[STMP0]], [[VRSRC]].sub0_sub1, implicit $exec
# W32: [[SRSRC2:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub2, implicit $exec
# W32: [[SRSRC3:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub3, implicit $exec
# W32: [[STMP1:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC2]], %subreg.sub0, [[SRSRC3]], %subreg.sub1
# W32: [[CMP1:%[0-9]+]]:sreg_32_xm0_xexec = V_CMP_EQ_U64_e64 [[STMP1]], [[VRSRC]].sub2_sub3, implicit $exec
# W32: [[CMP:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 [[CMP0]], [[CMP1]], implicit-def $scc
# W32: [[SRSRC:%[0-9]+]]:sgpr_128 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1, [[SRSRC2]], %subreg.sub2, [[SRSRC3]], %subreg.sub3
# W32: [[TMPEXEC:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_SAVEEXEC_B32 killed [[CMP]], implicit-def $exec, implicit-def $scc, implicit $exec
# W32: {{[0-9]+}}:vgpr_32 = BUFFER_LOAD_FORMAT_X_OFFEN %4, killed [[SRSRC]], 0, 0, 0, 0, 0, implicit $exec
# TODO: S_XOR_B32_term should be `implicit-def $scc`
# W32: $exec_lo = S_XOR_B32_term $exec_lo, [[TMPEXEC]]
# W32: SI_WATERFALL_LOOP %bb.1, implicit $exec
# W32-LABEL: bb.2:
# W32: $exec_lo = S_MOV_B32 [[SAVEEXEC]]
---
name: offen
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$vgpr1', virtual-reg: '%1' }
  - { reg: '$vgpr2', virtual-reg: '%2' }
  - { reg: '$vgpr3', virtual-reg: '%3' }
  - { reg: '$vgpr4', virtual-reg: '%4' }
  - { reg: '$sgpr30_sgpr31', virtual-reg: '%5' }
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $sgpr30_sgpr31
    %5:sreg_64 = COPY $sgpr30_sgpr31
    %4:vgpr_32 = COPY $vgpr4
    %3:vgpr_32 = COPY $vgpr3
    %2:vgpr_32 = COPY $vgpr2
    %1:vgpr_32 = COPY $vgpr1
    %0:vgpr_32 = COPY $vgpr0
    %6:sgpr_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
    %7:vgpr_32 = BUFFER_LOAD_FORMAT_X_OFFEN %4, killed %6, 0, 0, 0, 0, 0, implicit $exec
    $sgpr30_sgpr31 = COPY %5
    $vgpr0 = COPY %7
    S_SETPC_B64_return $sgpr30_sgpr31, implicit $vgpr0
...
# W64-LABEL: name: bothen
# W64-LABEL: bb.0:
# W64-NEXT: successors: %bb.1({{.*}})
# W64: [[VRSRC:%[0-9]+]]:vreg_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
# W64: [[SAVEEXEC:%[0-9]+]]:sreg_64_xexec = S_MOV_B64 $exec
# W64-LABEL: bb.1:
# W64-NEXT: successors: %bb.1({{.*}}), %bb.2({{.*}})
# W64: [[SRSRC0:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub0, implicit $exec
# W64: [[SRSRC1:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub1, implicit $exec
# W64: [[STMP0:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1
# W64: [[CMP0:%[0-9]+]]:sreg_64_xexec = V_CMP_EQ_U64_e64 [[STMP0]], [[VRSRC]].sub0_sub1, implicit $exec
# W64: [[SRSRC2:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub2, implicit $exec
# W64: [[SRSRC3:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub3, implicit $exec
# W64: [[STMP1:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC2]], %subreg.sub0, [[SRSRC3]], %subreg.sub1
# W64: [[CMP1:%[0-9]+]]:sreg_64_xexec = V_CMP_EQ_U64_e64 [[STMP1]], [[VRSRC]].sub2_sub3, implicit $exec
# W64: [[CMP:%[0-9]+]]:sreg_64_xexec = S_AND_B64 [[CMP0]], [[CMP1]], implicit-def $scc
# W64: [[SRSRC:%[0-9]+]]:sgpr_128 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1, [[SRSRC2]], %subreg.sub2, [[SRSRC3]], %subreg.sub3
# W64: [[TMPEXEC:%[0-9]+]]:sreg_64_xexec = S_AND_SAVEEXEC_B64 killed [[CMP]], implicit-def $exec, implicit-def $scc, implicit $exec
# W64: {{[0-9]+}}:vgpr_32 = BUFFER_LOAD_FORMAT_X_BOTHEN %4, killed [[SRSRC]], 0, 0, 0, 0, 0, implicit $exec
# W64: $exec = S_XOR_B64_term $exec, [[TMPEXEC]], implicit-def $scc
# W64: SI_WATERFALL_LOOP %bb.1, implicit $exec
# W64-LABEL: bb.2:
# W64: $exec = S_MOV_B64 [[SAVEEXEC]]

# W32-LABEL: name: bothen
# W32-LABEL: bb.0:
# W32-NEXT: successors: %bb.1({{.*}})
# W32: [[VRSRC:%[0-9]+]]:vreg_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
# W32: [[SAVEEXEC:%[0-9]+]]:sreg_32_xm0_xexec = S_MOV_B32 $exec_lo
# W32-LABEL: bb.1:
# W32-NEXT: successors: %bb.1({{.*}}), %bb.2({{.*}})
# W32: [[SRSRC0:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub0, implicit $exec
# W32: [[SRSRC1:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub1, implicit $exec
# W32: [[STMP0:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1
# W32: [[CMP0:%[0-9]+]]:sreg_32_xm0_xexec = V_CMP_EQ_U64_e64 [[STMP0]], [[VRSRC]].sub0_sub1, implicit $exec
# W32: [[SRSRC2:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub2, implicit $exec
# W32: [[SRSRC3:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub3, implicit $exec
# W32: [[STMP1:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC2]], %subreg.sub0, [[SRSRC3]], %subreg.sub1
# W32: [[CMP1:%[0-9]+]]:sreg_32_xm0_xexec = V_CMP_EQ_U64_e64 [[STMP1]], [[VRSRC]].sub2_sub3, implicit $exec
# W32: [[CMP:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 [[CMP0]], [[CMP1]], implicit-def $scc
# W32: [[SRSRC:%[0-9]+]]:sgpr_128 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1, [[SRSRC2]], %subreg.sub2, [[SRSRC3]], %subreg.sub3
# W32: [[TMPEXEC:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_SAVEEXEC_B32 killed [[CMP]], implicit-def $exec, implicit-def $scc, implicit $exec
# W32: {{[0-9]+}}:vgpr_32 = BUFFER_LOAD_FORMAT_X_BOTHEN %4, killed [[SRSRC]], 0, 0, 0, 0, 0, implicit $exec
# TODO: S_XOR_B32_term should be `implicit-def $scc`
# W32: $exec_lo = S_XOR_B32_term $exec_lo, [[TMPEXEC]]
# W32: SI_WATERFALL_LOOP %bb.1, implicit $exec
# W32-LABEL: bb.2:
# W32: $exec_lo = S_MOV_B32 [[SAVEEXEC]]
---
name: bothen
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$vgpr1', virtual-reg: '%1' }
  - { reg: '$vgpr2', virtual-reg: '%2' }
  - { reg: '$vgpr3', virtual-reg: '%3' }
  - { reg: '$vgpr4_vgpr5', virtual-reg: '%4' }
  - { reg: '$sgpr30_sgpr31', virtual-reg: '%5' }
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $sgpr30_sgpr31
    %5:sreg_64 = COPY $sgpr30_sgpr31
    %4:vreg_64 = COPY $vgpr4_vgpr5
    %3:vgpr_32 = COPY $vgpr3
    %2:vgpr_32 = COPY $vgpr2
    %1:vgpr_32 = COPY $vgpr1
    %0:vgpr_32 = COPY $vgpr0
    %6:sgpr_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
    %7:vgpr_32 = BUFFER_LOAD_FORMAT_X_BOTHEN %4, killed %6, 0, 0, 0, 0, 0, implicit $exec
    $sgpr30_sgpr31 = COPY %5
    $vgpr0 = COPY %7
    S_SETPC_B64_return $sgpr30_sgpr31, implicit $vgpr0
...
# ADDR64-LABEL: name: addr64
# ADDR64-LABEL: bb.0:
# ADDR64: %14:vreg_64 = COPY %8.sub0_sub1
# ADDR64: %15:sreg_64 = S_MOV_B64 0
# ADDR64: %16:sgpr_32 = S_MOV_B32 0
# ADDR64: %17:sgpr_32 = S_MOV_B32 61440
# ADDR64: %18:sgpr_128 = REG_SEQUENCE %15, %subreg.sub0_sub1, %16, %subreg.sub2, %17, %subreg.sub3
# ADDR64: %9:vgpr_32, %12:sreg_64_xexec = V_ADD_CO_U32_e64 %14.sub0, %4.sub0, 0, implicit $exec
# ADDR64: %10:vgpr_32, dead %13:sreg_64_xexec = V_ADDC_U32_e64 %14.sub1, %4.sub1, killed %12, 0, implicit $exec
# ADDR64: %11:vreg_64 = REG_SEQUENCE %9, %subreg.sub0, %10, %subreg.sub1
# ADDR64: {{[0-9]+}}:vgpr_32 = BUFFER_LOAD_FORMAT_X_ADDR64 %11, killed %18, 0, 0, 0, 0, 0, implicit $exec
---
name: addr64
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$vgpr1', virtual-reg: '%1' }
  - { reg: '$vgpr2', virtual-reg: '%2' }
  - { reg: '$vgpr3', virtual-reg: '%3' }
  - { reg: '$vgpr4_vgpr5', virtual-reg: '%4' }
  - { reg: '$sgpr30_sgpr31', virtual-reg: '%5' }
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $sgpr30_sgpr31
    %5:sreg_64 = COPY $sgpr30_sgpr31
    %4:vreg_64 = COPY $vgpr4_vgpr5
    %3:vgpr_32 = COPY $vgpr3
    %2:vgpr_32 = COPY $vgpr2
    %1:vgpr_32 = COPY $vgpr1
    %0:vgpr_32 = COPY $vgpr0
    %6:sgpr_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
    %7:vgpr_32 = BUFFER_LOAD_FORMAT_X_ADDR64 %4, killed %6, 0, 0, 0, 0, 0, implicit $exec
    $sgpr30_sgpr31 = COPY %5
    $vgpr0 = COPY %7
    S_SETPC_B64_return $sgpr30_sgpr31, implicit $vgpr0
...
# W64-LABEL: name: offset
# W64-LABEL: bb.0:

# W64-NO-ADDR64: successors: %bb.1({{.*}})
# W64-NO-ADDR64: [[VRSRC:%[0-9]+]]:vreg_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
# W64-NO-ADDR64: [[SAVEEXEC:%[0-9]+]]:sreg_64_xexec = S_MOV_B64 $exec
# W64-NO-ADDR64-LABEL: bb.1:
# W64-NO-ADDR64-NEXT: successors: %bb.1({{.*}}), %bb.2({{.*}})
# W64-NO-ADDR64: [[SRSRC0:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub0, implicit $exec
# W64-NO-ADDR64: [[SRSRC1:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub1, implicit $exec
# W64-NO-ADDR64: [[STMP0:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1
# W64-NO-ADDR64: [[CMP0:%[0-9]+]]:sreg_64_xexec = V_CMP_EQ_U64_e64 [[STMP0]], [[VRSRC]].sub0_sub1, implicit $exec
# W64-NO-ADDR64: [[SRSRC2:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub2, implicit $exec
# W64-NO-ADDR64: [[SRSRC3:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub3, implicit $exec
# W64-NO-ADDR64: [[STMP1:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC2]], %subreg.sub0, [[SRSRC3]], %subreg.sub1
# W64-NO-ADDR64: [[CMP1:%[0-9]+]]:sreg_64_xexec = V_CMP_EQ_U64_e64 [[STMP1]], [[VRSRC]].sub2_sub3, implicit $exec
# W64-NO-ADDR64: [[CMP:%[0-9]+]]:sreg_64_xexec = S_AND_B64 [[CMP0]], [[CMP1]], implicit-def $scc
# W64-NO-ADDR64: [[SRSRC:%[0-9]+]]:sgpr_128 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1, [[SRSRC2]], %subreg.sub2, [[SRSRC3]], %subreg.sub3
# W64-NO-ADDR64: [[TMPEXEC:%[0-9]+]]:sreg_64_xexec = S_AND_SAVEEXEC_B64 killed [[CMP]], implicit-def $exec, implicit-def $scc, implicit $exec
# W64-NO-ADDR64: {{[0-9]+}}:vgpr_32 = BUFFER_LOAD_FORMAT_X_OFFSET killed [[SRSRC]], 0, 0, 0, 0, 0, implicit $exec
# W64-NO-ADDR64: $exec = S_XOR_B64_term $exec, [[TMPEXEC]], implicit-def $scc
# W64-NO-ADDR64: SI_WATERFALL_LOOP %bb.1, implicit $exec
# W64-NO-ADDR64-LABEL: bb.2:
# W64-NO-ADDR64: $exec = S_MOV_B64 [[SAVEEXEC]]

# W32: successors: %bb.1({{.*}})
# W32: [[VRSRC:%[0-9]+]]:vreg_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
# W32: [[SAVEEXEC:%[0-9]+]]:sreg_32_xm0_xexec = S_MOV_B32 $exec_lo
# W32-LABEL: bb.1:
# W32-NEXT: successors: %bb.1({{.*}}), %bb.2({{.*}})
# W32: [[SRSRC0:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub0, implicit $exec
# W32: [[SRSRC1:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub1, implicit $exec
# W32: [[STMP0:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1
# W32: [[CMP0:%[0-9]+]]:sreg_32_xm0_xexec = V_CMP_EQ_U64_e64 [[STMP0]], [[VRSRC]].sub0_sub1, implicit $exec
# W32: [[SRSRC2:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub2, implicit $exec
# W32: [[SRSRC3:%[0-9]+]]:sgpr_32 = V_READFIRSTLANE_B32 [[VRSRC]].sub3, implicit $exec
# W32: [[STMP1:%[0-9]+]]:sgpr_64 = REG_SEQUENCE [[SRSRC2]], %subreg.sub0, [[SRSRC3]], %subreg.sub1
# W32: [[CMP1:%[0-9]+]]:sreg_32_xm0_xexec = V_CMP_EQ_U64_e64 [[STMP1]], [[VRSRC]].sub2_sub3, implicit $exec
# W32: [[CMP:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 [[CMP0]], [[CMP1]], implicit-def $scc
# W32: [[SRSRC:%[0-9]+]]:sgpr_128 = REG_SEQUENCE [[SRSRC0]], %subreg.sub0, [[SRSRC1]], %subreg.sub1, [[SRSRC2]], %subreg.sub2, [[SRSRC3]], %subreg.sub3
# W32: [[TMPEXEC:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_SAVEEXEC_B32 killed [[CMP]], implicit-def $exec, implicit-def $scc, implicit $exec
# W32: {{[0-9]+}}:vgpr_32 = BUFFER_LOAD_FORMAT_X_OFFSET killed [[SRSRC]], 0, 0, 0, 0, 0, implicit $exec
# TODO: S_XOR_B32_term should be `implicit-def $scc`
# W32: $exec_lo = S_XOR_B32_term $exec_lo, [[TMPEXEC]]
# W32: SI_WATERFALL_LOOP %bb.1, implicit $exec
# W32-LABEL: bb.2:
# W32: $exec_lo = S_MOV_B32 [[SAVEEXEC]]

# ADDR64: [[VRSRC:%[0-9]+]]:vreg_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
# ADDR64: [[RSRCPTR:%[0-9]+]]:vreg_64 = COPY [[VRSRC]].sub0_sub1
# ADDR64: [[ZERO64:%[0-9]+]]:sreg_64 = S_MOV_B64 0
# ADDR64: [[RSRCFMTLO:%[0-9]+]]:sgpr_32 = S_MOV_B32 0
# ADDR64: [[RSRCFMTHI:%[0-9]+]]:sgpr_32 = S_MOV_B32 61440
# ADDR64: [[ZERORSRC:%[0-9]+]]:sgpr_128 = REG_SEQUENCE [[ZERO64]], %subreg.sub0_sub1, [[RSRCFMTLO]], %subreg.sub2, [[RSRCFMTHI]], %subreg.sub3
# ADDR64: [[VADDR64:%[0-9]+]]:vreg_64 = REG_SEQUENCE [[RSRCPTR]].sub0, %subreg.sub0, [[RSRCPTR]].sub1, %subreg.sub1
# ADDR64: {{[0-9]+}}:vgpr_32 = BUFFER_LOAD_FORMAT_X_ADDR64 [[VADDR64]], [[ZERORSRC]], 0, 0, 0, 0, 0, implicit $exec

---
name: offset
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$vgpr1', virtual-reg: '%1' }
  - { reg: '$vgpr2', virtual-reg: '%2' }
  - { reg: '$vgpr3', virtual-reg: '%3' }
  - { reg: '$vgpr4_vgpr5', virtual-reg: '%4' }
  - { reg: '$sgpr30_sgpr31', virtual-reg: '%5' }
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $sgpr30_sgpr31
    %5:sreg_64 = COPY $sgpr30_sgpr31
    %4:vreg_64 = COPY $vgpr4_vgpr5
    %3:vgpr_32 = COPY $vgpr3
    %2:vgpr_32 = COPY $vgpr2
    %1:vgpr_32 = COPY $vgpr1
    %0:vgpr_32 = COPY $vgpr0
    %6:sgpr_128 = REG_SEQUENCE %0, %subreg.sub0, %1, %subreg.sub1, %2, %subreg.sub2, %3, %subreg.sub3
    %7:vgpr_32 = BUFFER_LOAD_FORMAT_X_OFFSET killed %6, 0, 0, 0, 0, 0, implicit $exec
    $sgpr30_sgpr31 = COPY %5
    $vgpr0 = COPY %7
    S_SETPC_B64_return $sgpr30_sgpr31, implicit $vgpr0
...