llvm/test/CodeGen/X86/shift-amount-mod.ll

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown   | FileCheck %s --check-prefix=X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefix=X64

;==============================================================================;
; the shift amount is negated (shiftbitwidth - shiftamt)
;==============================================================================;
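;
; These tests check that the explicit (bitwidth - %shamt) computation can be
; folded into a plain negation of %shamt. x86 variable shifts only use the
; low log2(bitwidth) bits of %cl, and the IR shift would be poison for
; amounts of bitwidth or more, so the amount only matters modulo the bit
; width:  32 - a == -a (mod 32),  64 - a == -a (mod 64).
; On x86-64 this is expected to show up as a single `negb %cl`; on i686 the
; i64 shifts are legalized into shld/shrd sequences, which keep the explicit
; `movb $64` + `subb` form, as the checks below show.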

; shift left
;------------------------------------------------------------------------------;

define i32 @reg32_shl_by_negated(i32 %val, i32 %shamt) nounwind {
; X86-LABEL: reg32_shl_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shll %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shll %cl, %eax
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = shl i32 %val, %negshamt
  ret i32 %shifted
}
define i32 @load32_shl_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X86-LABEL: load32_shl_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl (%eax), %eax
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shll %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: load32_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl (%rdi), %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shll %cl, %eax
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = shl i32 %val, %negshamt
  ret i32 %shifted
}
define void @store32_shl_by_negated(i32 %val, ptr %dstptr, i32 %shamt) nounwind {
; X86-LABEL: store32_shl_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shll %cl, %edx
; X86-NEXT:    movl %edx, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: store32_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shll %cl, %edi
; X64-NEXT:    movl %edi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = shl i32 %val, %negshamt
  store i32 %shifted, ptr %dstptr
  ret void
}
define void @modify32_shl_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X86-LABEL: modify32_shl_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movb $32, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    shll %cl, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: modify32_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $32, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    shll %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = shl i32 %val, %negshamt
  store i32 %shifted, ptr %valptr
  ret void
}

define i64 @reg64_shl_by_negated(i64 %val, i64 %shamt) nounwind {
; X86-LABEL: reg64_shl_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %esi, %eax
; X86-NEXT:    shll %cl, %eax
; X86-NEXT:    shldl %cl, %esi, %edx
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB4_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %eax, %edx
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:  .LBB4_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shlq %cl, %rax
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = shl i64 %val, %negshamt
  ret i64 %shifted
}
define i64 @load64_shl_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X86-LABEL: load64_shl_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl (%eax), %esi
; X86-NEXT:    movl 4(%eax), %edx
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %esi, %eax
; X86-NEXT:    shll %cl, %eax
; X86-NEXT:    shldl %cl, %esi, %edx
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB5_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %eax, %edx
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:  .LBB5_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: load64_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq (%rdi), %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shlq %cl, %rax
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = shl i64 %val, %negshamt
  ret i64 %shifted
}
define void @store64_shl_by_negated(i64 %val, ptr %dstptr, i64 %shamt) nounwind {
; X86-LABEL: store64_shl_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %edi, %esi
; X86-NEXT:    shll %cl, %esi
; X86-NEXT:    shldl %cl, %edi, %edx
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB6_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    xorl %esi, %esi
; X86-NEXT:  .LBB6_2:
; X86-NEXT:    movl %edx, 4(%eax)
; X86-NEXT:    movl %esi, (%eax)
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    retl
;
; X64-LABEL: store64_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shlq %cl, %rdi
; X64-NEXT:    movq %rdi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = shl i64 %val, %negshamt
  store i64 %shifted, ptr %dstptr
  ret void
}
define void @modify64_shl_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X86-LABEL: modify64_shl_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl (%eax), %edi
; X86-NEXT:    movl 4(%eax), %edx
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %edi, %esi
; X86-NEXT:    shll %cl, %esi
; X86-NEXT:    shldl %cl, %edi, %edx
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB7_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    xorl %esi, %esi
; X86-NEXT:  .LBB7_2:
; X86-NEXT:    movl %esi, (%eax)
; X86-NEXT:    movl %edx, 4(%eax)
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    retl
;
; X64-LABEL: modify64_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $64, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    shlq %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = shl i64 %val, %negshamt
  store i64 %shifted, ptr %valptr
  ret void
}

; logical shift right
;------------------------------------------------------------------------------;

define i32 @reg32_lshr_by_negated(i32 %val, i32 %shamt) nounwind {
; X86-LABEL: reg32_lshr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = lshr i32 %val, %negshamt
  ret i32 %shifted
}
define i32 @load32_lshr_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X86-LABEL: load32_lshr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl (%eax), %eax
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: load32_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl (%rdi), %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = lshr i32 %val, %negshamt
  ret i32 %shifted
}
define void @store32_lshr_by_negated(i32 %val, ptr %dstptr, i32 %shamt) nounwind {
; X86-LABEL: store32_lshr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    movl %edx, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: store32_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %edi
; X64-NEXT:    movl %edi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = lshr i32 %val, %negshamt
  store i32 %shifted, ptr %dstptr
  ret void
}
define void @modify32_lshr_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X86-LABEL: modify32_lshr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movb $32, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    shrl %cl, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: modify32_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $32, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    shrl %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = lshr i32 %val, %negshamt
  store i32 %shifted, ptr %valptr
  ret void
}

define i64 @reg64_lshr_by_negated(i64 %val, i64 %shamt) nounwind {
; X86-LABEL: reg64_lshr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB12_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB12_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = lshr i64 %val, %negshamt
  ret i64 %shifted
}
define i64 @load64_lshr_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X86-LABEL: load64_lshr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl (%ecx), %eax
; X86-NEXT:    movl 4(%ecx), %esi
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB13_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB13_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: load64_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq (%rdi), %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = lshr i64 %val, %negshamt
  ret i64 %shifted
}
define void @store64_lshr_by_negated(i64 %val, ptr %dstptr, i64 %shamt) nounwind {
; X86-LABEL: store64_lshr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %edi, %esi
; X86-NEXT:    shrl %cl, %esi
; X86-NEXT:    shrdl %cl, %edi, %edx
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB14_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    xorl %esi, %esi
; X86-NEXT:  .LBB14_2:
; X86-NEXT:    movl %esi, 4(%eax)
; X86-NEXT:    movl %edx, (%eax)
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    retl
;
; X64-LABEL: store64_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rdi
; X64-NEXT:    movq %rdi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = lshr i64 %val, %negshamt
  store i64 %shifted, ptr %dstptr
  ret void
}
define void @modify64_lshr_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X86-LABEL: modify64_lshr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl (%eax), %edx
; X86-NEXT:    movl 4(%eax), %edi
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %edi, %esi
; X86-NEXT:    shrl %cl, %esi
; X86-NEXT:    shrdl %cl, %edi, %edx
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB15_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    xorl %esi, %esi
; X86-NEXT:  .LBB15_2:
; X86-NEXT:    movl %edx, (%eax)
; X86-NEXT:    movl %esi, 4(%eax)
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    retl
;
; X64-LABEL: modify64_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $64, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    shrq %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = lshr i64 %val, %negshamt
  store i64 %shifted, ptr %valptr
  ret void
}

; arithmetic shift right
;------------------------------------------------------------------------------;
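; Same patterns as the lshr tests above, but with sar; the only difference in
; the expanded i686 i64 sequences is the extra `sarl $31` that materializes
; the sign-extended high half when the shift amount is 32 or more.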

define i32 @reg32_ashr_by_negated(i32 %val, i32 %shamt) nounwind {
; X86-LABEL: reg32_ashr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    sarl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    sarl %cl, %eax
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = ashr i32 %val, %negshamt
  ret i32 %shifted
}
define i32 @load32_ashr_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X86-LABEL: load32_ashr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl (%eax), %eax
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    sarl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: load32_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl (%rdi), %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    sarl %cl, %eax
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = ashr i32 %val, %negshamt
  ret i32 %shifted
}
define void @store32_ashr_by_negated(i32 %val, ptr %dstptr, i32 %shamt) nounwind {
; X86-LABEL: store32_ashr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    sarl %cl, %edx
; X86-NEXT:    movl %edx, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: store32_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    sarl %cl, %edi
; X64-NEXT:    movl %edi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = ashr i32 %val, %negshamt
  store i32 %shifted, ptr %dstptr
  ret void
}
define void @modify32_ashr_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X86-LABEL: modify32_ashr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movb $32, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    sarl %cl, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: modify32_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $32, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    sarl %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = ashr i32 %val, %negshamt
  store i32 %shifted, ptr %valptr
  ret void
}

define i64 @reg64_ashr_by_negated(i64 %val, i64 %shamt) nounwind {
; X86-LABEL: reg64_ashr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    sarl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB20_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    sarl $31, %esi
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:  .LBB20_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    sarq %cl, %rax
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = ashr i64 %val, %negshamt
  ret i64 %shifted
}
define i64 @load64_ashr_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X86-LABEL: load64_ashr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl (%ecx), %eax
; X86-NEXT:    movl 4(%ecx), %esi
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    sarl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB21_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    sarl $31, %esi
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:  .LBB21_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: load64_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq (%rdi), %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    sarq %cl, %rax
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = ashr i64 %val, %negshamt
  ret i64 %shifted
}
define void @store64_ashr_by_negated(i64 %val, ptr %dstptr, i64 %shamt) nounwind {
; X86-LABEL: store64_ashr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %edi, %esi
; X86-NEXT:    sarl %cl, %esi
; X86-NEXT:    shrdl %cl, %edi, %edx
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB22_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    sarl $31, %edi
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    movl %edi, %esi
; X86-NEXT:  .LBB22_2:
; X86-NEXT:    movl %esi, 4(%eax)
; X86-NEXT:    movl %edx, (%eax)
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    retl
;
; X64-LABEL: store64_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    sarq %cl, %rdi
; X64-NEXT:    movq %rdi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = ashr i64 %val, %negshamt
  store i64 %shifted, ptr %dstptr
  ret void
}
define void @modify64_ashr_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X86-LABEL: modify64_ashr_by_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl (%eax), %edx
; X86-NEXT:    movl 4(%eax), %edi
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %edi, %esi
; X86-NEXT:    sarl %cl, %esi
; X86-NEXT:    shrdl %cl, %edi, %edx
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB23_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    sarl $31, %edi
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    movl %edi, %esi
; X86-NEXT:  .LBB23_2:
; X86-NEXT:    movl %edx, (%eax)
; X86-NEXT:    movl %esi, 4(%eax)
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    retl
;
; X64-LABEL: modify64_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $64, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    sarq %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = ashr i64 %val, %negshamt
  store i64 %shifted, ptr %valptr
  ret void
}

;||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||;
; next, let's only test the simple reg pattern, and only lshr.
;||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||;

;==============================================================================;
; subtraction from negated shift amount
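; Here the shift amount is (bitwidth - %a) - %b. Modulo the bit width this is
; -(%a + %b), so the expected x86-64 lowering is an add/lea of the two
; amounts followed by a single negation of %cl.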

define i32 @reg32_lshr_by_sub_from_negated(i32 %val, i32 %a, i32 %b) nounwind {
; X86-LABEL: reg32_lshr_by_sub_from_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    negb %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_sub_from_negated:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 32, %a
  %negasubb = sub i32 %nega, %b
  %shifted = lshr i32 %val, %negasubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_sub_from_negated(i64 %val, i64 %a, i64 %b) nounwind {
; X86-LABEL: reg64_lshr_by_sub_from_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb %dl, %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB25_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB25_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_sub_from_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 64, %a
  %negasubb = sub i64 %nega, %b
  %shifted = lshr i64 %val, %negasubb
  ret i64 %shifted
}

;==============================================================================;
; subtraction of negated shift amount
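; Here the shift amount is %b - (bitwidth - %a) = %a + %b - bitwidth, which
; is congruent to %a + %b, so no negation should be needed; the x86-64 checks
; below show just an lea of the two amounts feeding the shift.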

define i32 @reg32_lshr_by_sub_of_negated(i32 %val, i32 %a, i32 %b) nounwind {
; X86-LABEL: reg32_lshr_by_sub_of_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_sub_of_negated:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 32, %a
  %negasubb = sub i32 %b, %nega
  %shifted = lshr i32 %val, %negasubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_sub_of_negated(i64 %val, i64 %a, i64 %b) nounwind {
; X86-LABEL: reg64_lshr_by_sub_of_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addb $-64, %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB27_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB27_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_sub_of_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 64, %a
  %negasubb = sub i64 %b, %nega
  %shifted = lshr i64 %val, %negasubb
  ret i64 %shifted
}

;==============================================================================;
; add to negated shift amount
;
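; Here the shift amount is (bitwidth - %a) + %b, which is congruent to
; %b - %a, so a plain subtraction feeding the shift is expected.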

define i32 @reg32_lshr_by_add_to_negated(i32 %val, i32 %a, i32 %b) nounwind {
; X86-LABEL: reg32_lshr_by_add_to_negated:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_add_to_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 32, %a
  %negasubb = add i32 %nega, %b
  %shifted = lshr i32 %val, %negasubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_add_to_negated(i64 %val, i64 %a, i64 %b) nounwind {
; X86-LABEL: reg64_lshr_by_add_to_negated:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addb $64, %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB29_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB29_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_add_to_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 64, %a
  %negasubb = add i64 %nega, %b
  %shifted = lshr i64 %val, %negasubb
  ret i64 %shifted
}

;==============================================================================;
; subtraction of negated shift amounts
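; Here the shift amount is (bitwidth - %a) - (bitwidth - %b), which is
; exactly %b - %a, so only a subtraction is expected.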

define i32 @reg32_lshr_by_sub_of_negated_amts(i32 %val, i32 %a, i32 %b) nounwind {
; X86-LABEL: reg32_lshr_by_sub_of_negated_amts:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_sub_of_negated_amts:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 32, %a
  %negb = sub i32 32, %b
  %negasubnegb = sub i32 %nega, %negb
  %shifted = lshr i32 %val, %negasubnegb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_sub_of_negated_amts(i64 %val, i64 %a, i64 %b) nounwind {
; X86-LABEL: reg64_lshr_by_sub_of_negated_amts:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB31_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB31_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_sub_of_negated_amts:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 64, %a
  %negb = sub i64 64, %b
  %negasubnegb = sub i64 %nega, %negb
  %shifted = lshr i64 %val, %negasubnegb
  ret i64 %shifted
}

;==============================================================================;
; addition of negated shift amounts
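; Here the shift amount is (bitwidth - %a) + (bitwidth - %b), i.e.
; 2*bitwidth - (%a + %b), which is congruent to -(%a + %b); the x86-64 checks
; below show an lea of the two amounts followed by a negation.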

define i32 @reg32_lshr_by_add_of_negated_amts(i32 %val, i32 %a, i32 %b) nounwind {
; X86-LABEL: reg32_lshr_by_add_of_negated_amts:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    negb %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_add_of_negated_amts:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 32, %a
  %negb = sub i32 32, %b
  %negasubnegb = add i32 %nega, %negb
  %shifted = lshr i32 %val, %negasubnegb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_add_of_negated_amts(i64 %val, i64 %a, i64 %b) nounwind {
; X86-LABEL: reg64_lshr_by_add_of_negated_amts:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movb $-128, %cl
; X86-NEXT:    subb %dl, %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB33_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB33_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_add_of_negated_amts:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 64, %a
  %negb = sub i64 64, %b
  %negasubnegb = add i64 %nega, %negb
  %shifted = lshr i64 %val, %negasubnegb
  ret i64 %shifted
}

;==============================================================================;
; and patterns with an actual negation+addition
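; Same folds as above, but with the negation spelled out in the IR as
; (0 - %shamt) plus a separate add of the bit width, so the reassociated form
; should be handled the same way.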

define i32 @reg32_lshr_by_negated_unfolded(i32 %val, i32 %shamt) nounwind {
; X86-LABEL: reg32_lshr_by_negated_unfolded:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %negshamt = sub i32 0, %shamt
  %negaaddbitwidth = add i32 %negshamt, 32
  %shifted = lshr i32 %val, %negaaddbitwidth
  ret i32 %shifted
}
define i64 @reg64_lshr_by_negated_unfolded(i64 %val, i64 %shamt) nounwind {
; X86-LABEL: reg64_lshr_by_negated_unfolded:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB35_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB35_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %negshamt = sub i64 0, %shamt
  %negaaddbitwidth = add i64 %negshamt, 64
  %shifted = lshr i64 %val, %negaaddbitwidth
  ret i64 %shifted
}

define i32 @reg32_lshr_by_negated_unfolded_sub_b(i32 %val, i32 %a, i32 %b) nounwind {
; X86-LABEL: reg32_lshr_by_negated_unfolded_sub_b:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    negb %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_negated_unfolded_sub_b:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = add i32 %nega, 32
  %negaaddbitwidthsubb = sub i32 %negaaddbitwidth, %b
  %shifted = lshr i32 %val, %negaaddbitwidthsubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_negated_unfolded_sub_b(i64 %val, i64 %a, i64 %b) nounwind {
; X86-LABEL: reg64_lshr_by_negated_unfolded_sub_b:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movb $64, %cl
; X86-NEXT:    subb %dl, %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB37_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB37_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_negated_unfolded_sub_b:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = add i64 %nega, 64
  %negaaddbitwidthsubb = sub i64 %negaaddbitwidth, %b
  %shifted = lshr i64 %val, %negaaddbitwidthsubb
  ret i64 %shifted
}

define i32 @reg32_lshr_by_b_sub_negated_unfolded(i32 %val, i32 %a, i32 %b) nounwind {
; X86-LABEL: reg32_lshr_by_b_sub_negated_unfolded:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_b_sub_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = add i32 %nega, 32
  %negaaddbitwidthsubb = sub i32 %b, %negaaddbitwidth
  %shifted = lshr i32 %val, %negaaddbitwidthsubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_b_sub_negated_unfolded(i64 %val, i64 %a, i64 %b) nounwind {
; X86-LABEL: reg64_lshr_by_b_sub_negated_unfolded:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addb $-64, %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB39_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB39_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_b_sub_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = add i64 %nega, 64
  %negaaddbitwidthsubb = sub i64 %b, %negaaddbitwidth
  %shifted = lshr i64 %val, %negaaddbitwidthsubb
  ret i64 %shifted
}

define i32 @reg32_lshr_by_negated_unfolded_add_b(i32 %val, i32 %a, i32 %b) nounwind {
; X86-LABEL: reg32_lshr_by_negated_unfolded_add_b:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_negated_unfolded_add_b:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = add i32 %nega, 32
  %negaaddbitwidthaddb = add i32 %negaaddbitwidth, %b
  %shifted = lshr i32 %val, %negaaddbitwidthaddb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_negated_unfolded_add_b(i64 %val, i64 %a, i64 %b) nounwind {
; X86-LABEL: reg64_lshr_by_negated_unfolded_add_b:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addb $64, %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB41_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB41_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_negated_unfolded_add_b:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = add i64 %nega, 64
  %negaaddbitwidthaddb = add i64 %negaaddbitwidth, %b
  %shifted = lshr i64 %val, %negaaddbitwidthaddb
  ret i64 %shifted
}

;==============================================================================;
; and patterns with an actual negation+mask
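; Here the negated amount is masked with (bitwidth - 1) instead of being
; rebased on the bit width. Since the shift itself only uses those low bits,
; the mask is redundant when it feeds the shift directly, and the first two
; tests below show it being dropped; in the variants where another add/sub
; sits between the mask and the shift, the checks keep the `and`.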

define i32 @reg32_lshr_by_masked_negated_unfolded(i32 %val, i32 %shamt) nounwind {
; X86-LABEL: reg32_lshr_by_masked_negated_unfolded:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_masked_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %negshamt = sub i32 0, %shamt
  %negaaddbitwidth = and i32 %negshamt, 31
  %shifted = lshr i32 %val, %negaaddbitwidth
  ret i32 %shifted
}
define i64 @reg64_lshr_by_masked_negated_unfolded(i64 %val, i64 %shamt) nounwind {
; X86-LABEL: reg64_lshr_by_masked_negated_unfolded:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    subb %dl, %cl
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB43_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB43_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_masked_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %negshamt = sub i64 0, %shamt
  %negaaddbitwidth = and i64 %negshamt, 63
  %shifted = lshr i64 %val, %negaaddbitwidth
  ret i64 %shifted
}

define i32 @reg32_lshr_by_masked_negated_unfolded_sub_b(i32 %val, i32 %a, i32 %b) nounwind {
; X86-LABEL: reg32_lshr_by_masked_negated_unfolded_sub_b:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    andl $31, %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_masked_negated_unfolded_sub_b:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negl %ecx
; X64-NEXT:    andl $31, %ecx
; X64-NEXT:    subl %edx, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = and i32 %nega, 31
  %negaaddbitwidthsubb = sub i32 %negaaddbitwidth, %b
  %shifted = lshr i32 %val, %negaaddbitwidthsubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_masked_negated_unfolded_sub_b(i64 %val, i64 %a, i64 %b) nounwind {
; X86-LABEL: reg64_lshr_by_masked_negated_unfolded_sub_b:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    andl $63, %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB45_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB45_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_masked_negated_unfolded_sub_b:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negl %ecx
; X64-NEXT:    andl $63, %ecx
; X64-NEXT:    subl %edx, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = and i64 %nega, 63
  %negaaddbitwidthsubb = sub i64 %negaaddbitwidth, %b
  %shifted = lshr i64 %val, %negaaddbitwidthsubb
  ret i64 %shifted
}

define i32 @reg32_lshr_by_masked_b_sub_negated_unfolded(i32 %val, i32 %a, i32 %b) nounwind {
; X86-LABEL: reg32_lshr_by_masked_b_sub_negated_unfolded:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    andl $31, %edx
; X86-NEXT:    subl %edx, %ecx
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_masked_b_sub_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negl %esi
; X64-NEXT:    andl $31, %esi
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = and i32 %nega, 31
  %negaaddbitwidthsubb = sub i32 %b, %negaaddbitwidth
  %shifted = lshr i32 %val, %negaaddbitwidthsubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_masked_b_sub_negated_unfolded(i64 %val, i64 %a, i64 %b) nounwind {
; X86-LABEL: reg64_lshr_by_masked_b_sub_negated_unfolded:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    andl $63, %edx
; X86-NEXT:    subl %edx, %ecx
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB47_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB47_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_masked_b_sub_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negl %esi
; X64-NEXT:    andl $63, %esi
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = and i64 %nega, 63
  %negaaddbitwidthsubb = sub i64 %b, %negaaddbitwidth
  %shifted = lshr i64 %val, %negaaddbitwidthsubb
  ret i64 %shifted
}

define i32 @reg32_lshr_by_masked_negated_unfolded_add_b(i32 %val, i32 %a, i32 %b) nounwind {
; X86-LABEL: reg32_lshr_by_masked_negated_unfolded_add_b:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    andl $31, %ecx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_masked_negated_unfolded_add_b:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negl %esi
; X64-NEXT:    andl $31, %esi
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = and i32 %nega, 31
  %negaaddbitwidthaddb = add i32 %negaaddbitwidth, %b
  %shifted = lshr i32 %val, %negaaddbitwidthaddb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_masked_negated_unfolded_add_b(i64 %val, i64 %a, i64 %b) nounwind {
; X86-LABEL: reg64_lshr_by_masked_negated_unfolded_add_b:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    andl $63, %ecx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    shrdl %cl, %esi, %eax
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    je .LBB49_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:  .LBB49_2:
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_masked_negated_unfolded_add_b:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negl %esi
; X64-NEXT:    andl $63, %esi
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = and i64 %nega, 63
  %negaaddbitwidthaddb = add i64 %negaaddbitwidth, %b
  %shifted = lshr i64 %val, %negaaddbitwidthaddb
  ret i64 %shifted
}

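; Shifting right by 24, truncating to i16, and shifting right by 12 again
; reads bits [39:36] of %x, so the two shifts are expected to merge into a
; single wide shift by 36 plus a 4-bit mask (on i686, a shift of the high
; dword by 4).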
define i16 @sh_trunc_sh(i64 %x) {
; X86-LABEL: sh_trunc_sh:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrl $4, %eax
; X86-NEXT:    andl $15, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: sh_trunc_sh:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    shrq $36, %rax
; X64-NEXT:    andl $15, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $rax
; X64-NEXT:    retq
  %s = lshr i64 %x, 24
  %t = trunc i64 %s to i16
  %r = lshr i16 %t, 12
  ret i16 %r
}