Skip to content

Instantly share code, notes, and snippets.

Created December 20, 2017 22:57

Revisions

  1. @invalid-email-address Anonymous created this gist Dec 20, 2017.
    1,160 changes: 1,160 additions & 0 deletions gistfile1.txt
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,1160 @@
    .text
    # -------------------------------------------------------------------
    # alloc::raw_vec::RawVec<T, A>::double  (rustc-generated, mangled)
    # Doubles the capacity of a RawVec laid out as { ptr: +0, cap: +8 }.
    # Element size is 4 bytes (capacity is scaled by 4 below), so
    # presumably a Vec of 32-bit values — TODO confirm against source.
    # ABI: Windows x64 — arg in %rcx, SEH unwind info, 32-byte shadow
    # space included in the 120-byte frame. Diverges via __rust_oom if
    # (re)allocation fails.
    # -------------------------------------------------------------------
    .def _ZN49_$LT$alloc..raw_vec..RawVec$LT$T$C$$u20$A$GT$$GT$6double17h6b735ddf742ce4c7E;
    .scl 3;
    .type 32;
    .endef
    .p2align 4, 0x90
    _ZN49_$LT$alloc..raw_vec..RawVec$LT$T$C$$u20$A$GT$$GT$6double17h6b735ddf742ce4c7E:
    .Lcfi0:
    .seh_proc _ZN49_$LT$alloc..raw_vec..RawVec$LT$T$C$$u20$A$GT$$GT$6double17h6b735ddf742ce4c7E
    pushq %rsi                          # callee-saved on Win64
    .Lcfi1:
    .seh_pushreg 6
    pushq %rdi                          # callee-saved on Win64
    .Lcfi2:
    .seh_pushreg 7
    subq $120, %rsp                     # locals + shadow space for callees
    .Lcfi3:
    .seh_stackalloc 120
    .Lcfi4:
    .seh_endprologue
    movq %rcx, %rsi                     # rsi = &RawVec (self)
    movq 8(%rsi), %rdi                  # rdi = current capacity
    testq %rdi, %rdi
    je .LBB0_1                          # cap == 0: fresh-allocation path
    leaq (,%rdi,4), %rax                # rax = cap * 4 = current size in bytes
    movq $1, 96(%rsp)                   # build a tagged layout value on the stack
    movq %rax, 104(%rsp)
    movl $4, %ecx                       # alignment = 4
    movl $2, %eax
    jmp .LBB0_3
    .LBB0_1:
    # Empty vec: zero tag/word so the realloc branch below is skipped.
    xorl %eax, %eax
    xorl %ecx, %ecx
    .LBB0_3:
    movq %rcx, 96(%rsp,%rax,8)
    cmpq $1, 96(%rsp)
    jne .LBB0_7                         # no live allocation -> __rust_alloc
    # Grow in place: __rust_realloc(ptr, old_size, old_align, new_size = cap*8).
    movq 104(%rsp), %rdx
    movq 112(%rsp), %r8
    leaq (,%rdi,8), %r9                 # new byte size = 2 * cap * 4
    movq (%rsi), %rcx                   # old data pointer
    leaq 56(%rsp), %rax                 # out-param: error/alloc-err slot
    movq %rax, 40(%rsp)
    movq %r8, 32(%rsp)                  # 5th/6th args go on the stack (Win64)
    callq __rust_realloc
    testq %rax, %rax
    je .LBB0_6                          # realloc failed -> OOM path
    addq %rdi, %rdi                     # new capacity = old * 2
    jmp .LBB0_9
    .LBB0_7:
    # First allocation: 16 bytes (4 elements), align 4.
    leaq 56(%rsp), %r8
    movl $16, %ecx
    movl $4, %edx
    callq __rust_alloc
    testq %rax, %rax
    je .LBB0_10                         # alloc failed -> OOM path
    movl $4, %edi                       # initial capacity = 4 elements
    .LBB0_9:
    # Success: publish the new pointer and capacity, then return.
    movq %rax, (%rsi)
    movq %rdi, 8(%rsi)
    addq $120, %rsp
    popq %rdi
    popq %rsi
    retq
    .LBB0_6:
    # Realloc failure: shuffle the error payload into the slot __rust_oom expects.
    movq 56(%rsp), %rax
    vmovups 64(%rsp), %xmm0
    vmovaps %xmm0, 80(%rsp)
    movq %rax, 56(%rsp)
    jmp .LBB0_11
    .LBB0_10:
    # Alloc failure: same payload shuffle, no leading word to preserve.
    vmovups 64(%rsp), %xmm0
    vmovaps %xmm0, 80(%rsp)
    .LBB0_11:
    # Abort through the global OOM handler; never returns.
    vmovaps 80(%rsp), %xmm0
    vmovups %xmm0, 64(%rsp)
    leaq 56(%rsp), %rcx
    callq __rust_oom
    ud2                                 # unreachable
    .seh_handlerdata
    .text
    .Lcfi5:
    .seh_endproc

    # -------------------------------------------------------------------
    # <&'a T as core::fmt::Debug>::fmt — Debug impl for a reference.
    # Dereferences self (%rcx) and tail-calls u64's Debug::fmt; the
    # second argument (the Formatter, in %rdx on Win64) passes through
    # untouched.
    # -------------------------------------------------------------------
    .def _ZN53_$LT$$RF$$u27$a$u20$T$u20$as$u20$core..fmt..Debug$GT$3fmt17h035611eed912fa86E;
    .scl 3;
    .type 32;
    .endef
    .p2align 4, 0x90
    _ZN53_$LT$$RF$$u27$a$u20$T$u20$as$u20$core..fmt..Debug$GT$3fmt17h035611eed912fa86E:
    movq (%rcx), %rcx                   # rcx = *self (strip one level of &)
    jmp _ZN4core3fmt3num50_$LT$impl$u20$core..fmt..Debug$u20$for$u20$u64$GT$3fmt17h7ded8a2555a7d81bE  # tail call

    # =====================================================================
    # test::main — rustc-generated code (Windows x64, AVX2/BMI2).
    # NOTE(review): high-level shape below is reconstructed from the
    # generated code; confirm against the original Rust source (test.rs).
    #   1. Build a Vec<u32> starting with [2], then sieve odd numbers
    #      3..=5000 (71-byte base-prime table + 32768-byte mark buffer)
    #      and push every surviving prime.
    #   2. Sum the primes with an unrolled AVX2 reduction.
    #   3. Allocate (sum+1) zeroed u64 counters, set counter[0] = 1, and
    #      for each prime p apply counter[i+p] += counter[i] (a subset-sum
    #      style DP); every 5th prime, reduce all counters mod 10^16.
    #   4. Sieve a 6k±1 wheel bit array up to the running total and
    #      accumulate counter[q] over prime indices q, mod 10^16.
    #   5. assert_eq!(result, 9275262564250418); on mismatch, panic via
    #      std::panicking::begin_panic_fmt with Debug-formatted values.
    # =====================================================================
    .def _ZN4test4main17h0eddcc39ebf1578fE;
    .scl 3;
    .type 32;
    .endef
    .section .rdata,"dr"
    .p2align 5
    .LCPI2_0:
    # 32 bytes of 0x01 — AVX fill pattern for the base-prime table.
    .zero 32,1
    .LCPI2_1:
    # 0x43300000 / 0x45300000 — classic u64 -> f64 conversion constants.
    .long 1127219200
    .long 1160773632
    .long 0
    .long 0
    .LCPI2_2:
    # 0x4330000000000000 / 0x4530000000000000 (2^52 and 2^84 as doubles).
    .quad 4841369599423283200
    .quad 4985484787499139072
    .LCPI2_3:
    # 0x43E0000000000000 = 2^63 as a double (cvttsd2si overflow threshold).
    .quad 4890909195324358656
    .text
    .p2align 4, 0x90
    _ZN4test4main17h0eddcc39ebf1578fE:
    .Lcfi6:
    .seh_proc _ZN4test4main17h0eddcc39ebf1578fE
    pushq %r15
    .Lcfi7:
    .seh_pushreg 15
    pushq %r14
    .Lcfi8:
    .seh_pushreg 14
    pushq %r13
    .Lcfi9:
    .seh_pushreg 13
    pushq %r12
    .Lcfi10:
    .seh_pushreg 12
    pushq %rsi
    .Lcfi11:
    .seh_pushreg 6
    pushq %rdi
    .Lcfi12:
    .seh_pushreg 7
    pushq %rbp
    .Lcfi13:
    .seh_pushreg 5
    pushq %rbx
    .Lcfi14:
    .seh_pushreg 3
    subq $264, %rsp
    .Lcfi15:
    .seh_stackalloc 264
    vmovdqa %xmm6, 240(%rsp)            # xmm6 is callee-saved on Win64
    .Lcfi16:
    .seh_savexmm 6, 240
    .Lcfi17:
    .seh_endprologue
    # Empty Vec<u32> at rsp+128 (ptr = 4 is the dangling aligned pointer,
    # cap/len = 0); grow it once, then push the prime 2.
    movq $4, 128(%rsp)
    vpxor %xmm0, %xmm0, %xmm0
    vmovdqu %xmm0, 136(%rsp)
    leaq 128(%rsp), %rcx
    callq _ZN49_$LT$alloc..raw_vec..RawVec$LT$T$C$$u20$A$GT$$GT$6double17h6b735ddf742ce4c7E
    movq 128(%rsp), %rax
    movq 144(%rsp), %rcx
    movl $2, (%rax,%rcx,4)              # vec.push(2)
    addq $1, %rcx
    movq %rcx, 144(%rsp)
    # Allocate the 71-byte base-prime table (align 1).
    leaq 40(%rsp), %r8
    movl $71, %ecx
    movl $1, %edx
    callq __rust_alloc
    movq %rax, %r14
    testq %r14, %r14
    je .LBB2_1
    # Fill all 71 bytes with 1 (two ymm stores + one dword covers 0..67;
    # 68..70 are written individually below).
    vmovdqa .LCPI2_0(%rip), %ymm0
    vmovdqu %ymm0, 32(%r14)
    vmovdqu %ymm0, (%r14)
    movl $16843009, 64(%r14)            # 0x01010101 at bytes 64..67
    # Clear the composite indices; the bytes left at 1 are indices 0, 1
    # and the primes <= 70 (a precomputed primality table).
    movb $0, 4(%r14)
    movb $0, 6(%r14)
    movb $0, 8(%r14)
    movb $0, 10(%r14)
    movb $0, 14(%r14)
    movb $0, 16(%r14)
    movb $0, 20(%r14)
    movb $0, 22(%r14)
    movb $0, 38(%r14)
    movb $0, 44(%r14)
    movb $0, 46(%r14)
    movb $0, 68(%r14)
    movb $0, 9(%r14)
    movb $0, 12(%r14)
    movb $0, 15(%r14)
    movb $0, 18(%r14)
    movb $0, 21(%r14)
    movb $0, 39(%r14)
    movb $0, 42(%r14)
    movb $0, 69(%r14)
    movb $0, 30(%r14)
    movb $0, 40(%r14)
    movb $0, 45(%r14)
    movb $0, 60(%r14)
    movb $0, 70(%r14)
    movb $0, 28(%r14)
    movl $0, 24(%r14)
    movb $0, 36(%r14)
    movl $0, 32(%r14)
    movb $0, 52(%r14)
    movl $0, 48(%r14)
    movb $0, 58(%r14)
    movl $0, 54(%r14)
    movb $0, 66(%r14)
    movl $0, 62(%r14)
    # Allocate the 32768-byte sieve mark buffer.
    leaq 40(%rsp), %r8
    movl $32768, %ecx
    movl $1, %edx
    vzeroupper                          # leaving AVX state before extern call
    callq __rust_alloc
    movq %rax, %rbx
    testq %rbx, %rbx
    je .LBB2_1
    # Two more empty Vec<u32>s at rsp+160 and rsp+96 (dangling ptr = 4).
    movq $4, 160(%rsp)
    vpxor %xmm0, %xmm0, %xmm0
    vmovdqu %xmm0, 168(%rsp)
    movq $4, 96(%rsp)
    vmovdqu %xmm0, 104(%rsp)
    movl $1, %edx
    movl $32768, %r8d
    movq %rbx, %rcx
    callq memset                        # memset(mark, 1, 32768)
    # Collect base primes p (odd, p*p <= 5000) via the 71-entry table:
    # p goes into vec@160, p*p into vec@96.
    movb $1, %cl
    movl $9, %ebp                       # 3^2, square of the first candidate
    xorl %r12d, %r12d                   # r12 = len of vec@160
    movl $4, %eax
    movl $5, %edi                       # rdi = candidate + 2
    leaq 96(%rsp), %r13
    movl $4, %r15d                      # r15 = vec@160 data ptr (dangling)
    testb %cl, %cl
    jne .LBB2_40
    jmp .LBB2_45
    .p2align 4, 0x90
    .LBB2_46:
    movzbl (%r14,%rdi), %ecx            # cl = table[candidate] (primality)
    addq $2, %rdi
    testb %cl, %cl
    je .LBB2_45
    .LBB2_40:
    leaq -2(%rdi), %rsi                 # rsi = the prime p itself
    cmpq 168(%rsp), %r12
    je .LBB2_41                         # vec@160 full -> grow
    .LBB2_42:
    movl %esi, (%r15,%r12,4)            # vec@160.push(p)
    addq $1, %r12
    movq %r12, 176(%rsp)
    movq 112(%rsp), %rcx
    cmpq 104(%rsp), %rcx
    je .LBB2_43                         # vec@96 full -> grow
    .LBB2_44:
    movq 96(%rsp), %rax
    movl %ebp, (%rax,%rcx,4)            # vec@96.push(p*p)
    addq $1, %rcx
    movq %rcx, 112(%rsp)
    .LBB2_45:
    movq %rdi, %rbp
    imulq %rbp, %rbp                    # rbp = candidate^2
    cmpq $5000, %rbp
    jbe .LBB2_46                        # keep scanning while candidate^2 <= 5000
    jmp .LBB2_15
    .LBB2_41:
    leaq 160(%rsp), %rcx
    callq _ZN49_$LT$alloc..raw_vec..RawVec$LT$T$C$$u20$A$GT$$GT$6double17h6b735ddf742ce4c7E
    movq 160(%rsp), %r15
    movq 176(%rsp), %r12
    jmp .LBB2_42
    .LBB2_43:
    movq %r13, %rcx
    callq _ZN49_$LT$alloc..raw_vec..RawVec$LT$T$C$$u20$A$GT$$GT$6double17h6b735ddf742ce4c7E
    movq 112(%rsp), %rcx
    jmp .LBB2_44
    .LBB2_15:
    # Cross off composites: for each base prime p, starting at p^2 and
    # stepping 2p, clear mark bytes (loop is unrolled two primes per pass).
    testq %r12, %r12
    je .LBB2_16
    movq 160(%rsp), %rdx                # rdx = primes array
    leaq -4(,%r12,4), %rcx
    movq %rcx, %rdi
    shrq $2, %rdi                       # rdi = pair count
    btl $2, %ecx
    jb .LBB2_23                         # even count: no scalar head
    movl (%rax), %ecx                   # rax = squares array; ecx = p^2
    cmpl $32767, %ecx
    ja .LBB2_27
    movl (%rdx), %ebp
    addl %ebp, %ebp                     # step = 2p
    .p2align 4, 0x90
    .LBB2_26:
    movl %ecx, %esi
    movb $0, (%rbx,%rsi)                # mark[pos] = 0 (composite)
    addl %ebp, %ecx
    cmpl $32768, %ecx
    jb .LBB2_26
    .LBB2_27:
    leaq 4(%rdx), %rbp
    addl $-32768, %ecx
    movl %ecx, (%rax)                   # store overflow position back
    movl $1, %ecx
    testq %rdi, %rdi
    jne .LBB2_29
    jmp .LBB2_16
    .LBB2_23:
    xorl %ecx, %ecx
    movq %rdx, %rbp
    testq %rdi, %rdi
    je .LBB2_16
    .LBB2_29:
    leaq (%rdx,%r12,4), %r8             # end of the primes array
    .p2align 4, 0x90
    .LBB2_30:
    movl (%rax,%rcx,4), %edi            # first prime of the pair
    cmpl $32767, %edi
    ja .LBB2_33
    movl (%rbp), %esi
    addl %esi, %esi                     # step = 2p
    .p2align 4, 0x90
    .LBB2_32:
    movl %edi, %edx
    movb $0, (%rbx,%rdx)
    addl %esi, %edi
    cmpl $32768, %edi
    jb .LBB2_32
    .LBB2_33:
    addl $-32768, %edi
    movl %edi, (%rax,%rcx,4)
    leaq 8(%rbp), %rdi
    movl 4(%rax,%rcx,4), %esi           # second prime of the pair
    cmpl $32768, %esi
    jae .LBB2_36
    movl 4(%rbp), %ebp
    addl %ebp, %ebp
    .p2align 4, 0x90
    .LBB2_35:
    movl %esi, %edx
    movb $0, (%rbx,%rdx)
    addl %ebp, %esi
    cmpl $32768, %esi
    jb .LBB2_35
    .LBB2_36:
    addl $-32768, %esi
    movl %esi, 4(%rax,%rcx,4)
    addq $2, %rcx
    movq %rdi, %rbp
    cmpq %r8, %rdi
    jne .LBB2_30
    .LBB2_16:
    # Push every odd i in 3..=5000 with mark[i] != 0 into vec@128 —
    # vec@128 now holds the primes up to 5000 (2 was pushed earlier).
    movl $3, %esi
    leaq 128(%rsp), %rdi
    jmp .LBB2_17
    .LBB2_19:
    movq %rdi, %rcx
    callq _ZN49_$LT$alloc..raw_vec..RawVec$LT$T$C$$u20$A$GT$$GT$6double17h6b735ddf742ce4c7E
    movq 144(%rsp), %rax
    jmp .LBB2_20
    .p2align 4, 0x90
    .LBB2_17:
    cmpb $0, (%rbx,%rsi)
    je .LBB2_21
    movq 144(%rsp), %rax
    cmpq 136(%rsp), %rax
    je .LBB2_19                         # full -> grow
    .LBB2_20:
    movq 128(%rsp), %rcx
    movl %esi, (%rcx,%rax,4)            # vec@128.push(i)
    addq $1, %rax
    movq %rax, 144(%rsp)
    .LBB2_21:
    addq $2, %rsi
    cmpq $5000, %rsi
    jbe .LBB2_17
    # Drop the two scratch vecs, the mark buffer, and the base-prime table.
    movq 128(%rsp), %rdi                # rdi = primes ptr
    movq 136(%rsp), %r15                # r15 = primes cap
    movq 144(%rsp), %r12                # r12 = primes len
    movq 96(%rsp), %rcx
    movq 104(%rsp), %rax
    testq %rax, %rax
    je .LBB2_4                          # cap == 0: nothing to free
    shlq $2, %rax
    movq $1, 40(%rsp)
    movq %rax, 48(%rsp)
    movl $4, %edx
    movl $2, %eax
    jmp .LBB2_6
    .LBB2_4:
    xorl %eax, %eax
    xorl %edx, %edx
    .LBB2_6:
    movq %rdx, 40(%rsp,%rax,8)
    cmpq $1, 40(%rsp)
    jne .LBB2_8
    movq 48(%rsp), %rdx
    movq 56(%rsp), %r8
    callq __rust_dealloc               # free vec@96 backing store
    .LBB2_8:
    movq 160(%rsp), %rcx
    movq 168(%rsp), %rax
    testq %rax, %rax
    je .LBB2_9
    shlq $2, %rax
    movq $1, 40(%rsp)
    movq %rax, 48(%rsp)
    movl $4, %edx
    movl $2, %eax
    jmp .LBB2_11
    .LBB2_9:
    xorl %eax, %eax
    xorl %edx, %edx
    .LBB2_11:
    movq %rdx, 40(%rsp,%rax,8)
    cmpq $1, 40(%rsp)
    jne .LBB2_13
    movq 48(%rsp), %rdx
    movq 56(%rsp), %r8
    callq __rust_dealloc               # free vec@160 backing store
    .LBB2_13:
    movl $32768, %edx
    movl $1, %r8d
    movq %rbx, %rcx
    callq __rust_dealloc               # free the mark buffer
    movl $71, %edx
    movl $1, %r8d
    movq %r14, %rcx
    callq __rust_dealloc               # free the base-prime table
    # AVX2 horizontal sum of the primes vec (4x-unrolled, 4 accumulators),
    # result ends up in edx.
    leaq (%rdi,%r12,4), %rsi            # rsi = one-past-end
    testq %r12, %r12
    je .LBB2_14
    leaq -4(,%r12,4), %rax
    shrq $2, %rax
    addq $1, %rax                       # rax = element count
    xorl %edx, %edx
    cmpq $32, %rax
    jae .LBB2_49                        # enough elements for the SIMD path
    movq %rdi, %rbp
    jmp .LBB2_59
    .LBB2_14:
    xorl %edx, %edx
    jmp .LBB2_60
    .LBB2_49:
    movabsq $9223372036854775776, %rcx  # mask: round count down to x32
    movq %rdi, %rbp
    andq %rax, %rcx
    je .LBB2_59
    leaq -32(%rcx), %rbx
    movl %ebx, %edx
    shrl $5, %edx
    addl $1, %edx
    andq $3, %rdx                       # iterations not divisible by 4
    je .LBB2_51
    negq %rdx
    vpxor %ymm0, %ymm0, %ymm0
    xorl %ebp, %ebp
    vpxor %ymm1, %ymm1, %ymm1
    vpxor %ymm2, %ymm2, %ymm2
    vpxor %ymm3, %ymm3, %ymm3
    .p2align 4, 0x90
    .LBB2_53:
    vpaddd (%rdi,%rbp,4), %ymm0, %ymm0
    vpaddd 32(%rdi,%rbp,4), %ymm1, %ymm1
    vpaddd 64(%rdi,%rbp,4), %ymm2, %ymm2
    vpaddd 96(%rdi,%rbp,4), %ymm3, %ymm3
    addq $32, %rbp
    addq $1, %rdx
    jne .LBB2_53
    jmp .LBB2_54
    .LBB2_51:
    xorl %ebp, %ebp
    vpxor %ymm0, %ymm0, %ymm0
    vpxor %ymm1, %ymm1, %ymm1
    vpxor %ymm2, %ymm2, %ymm2
    vpxor %ymm3, %ymm3, %ymm3
    .LBB2_54:
    cmpq $96, %rbx
    jb .LBB2_57
    movq %rcx, %rdx
    subq %rbp, %rdx
    leaq 480(%rdi,%rbp,4), %rbp
    .p2align 4, 0x90
    .LBB2_56:
    # Main 4x-unrolled body: 128 dwords per iteration.
    vpaddd -480(%rbp), %ymm0, %ymm0
    vpaddd -448(%rbp), %ymm1, %ymm1
    vpaddd -416(%rbp), %ymm2, %ymm2
    vpaddd -384(%rbp), %ymm3, %ymm3
    vpaddd -352(%rbp), %ymm0, %ymm0
    vpaddd -320(%rbp), %ymm1, %ymm1
    vpaddd -288(%rbp), %ymm2, %ymm2
    vpaddd -256(%rbp), %ymm3, %ymm3
    vpaddd -224(%rbp), %ymm0, %ymm0
    vpaddd -192(%rbp), %ymm1, %ymm1
    vpaddd -160(%rbp), %ymm2, %ymm2
    vpaddd -128(%rbp), %ymm3, %ymm3
    vpaddd -96(%rbp), %ymm0, %ymm0
    vpaddd -64(%rbp), %ymm1, %ymm1
    vpaddd -32(%rbp), %ymm2, %ymm2
    vpaddd (%rbp), %ymm3, %ymm3
    addq $512, %rbp
    addq $-128, %rdx
    jne .LBB2_56
    .LBB2_57:
    # Reduce the 4 accumulators to a single dword in edx.
    vpaddd %ymm2, %ymm0, %ymm0
    vpaddd %ymm3, %ymm1, %ymm1
    vpaddd %ymm1, %ymm0, %ymm0
    vextracti128 $1, %ymm0, %xmm1
    vpaddd %ymm1, %ymm0, %ymm0
    vpshufd $78, %xmm0, %xmm1
    vpaddd %ymm1, %ymm0, %ymm0
    vphaddd %ymm0, %ymm0, %ymm0
    vmovd %xmm0, %edx
    cmpq %rcx, %rax
    je .LBB2_60
    leaq (%rdi,%rcx,4), %rbp
    .p2align 4, 0x90
    .LBB2_59:
    # Scalar tail of the sum.
    addl (%rbp), %edx
    addq $4, %rbp
    cmpq %rbp, %rsi
    jne .LBB2_59
    .LBB2_60:
    # Allocate 8*(sum+1) zeroed bytes: u64 counters indexed 0..=sum,
    # later kept reduced mod 10^16 (r13).
    movq %rsi, 200(%rsp)
    movl %edx, %esi                     # esi = sum of primes
    leaq 8(,%rsi,8), %rbp               # byte size = 8 * (sum + 1)
    leaq 40(%rsp), %r8
    movl $8, %edx
    movq %rbp, %rcx
    vzeroupper
    callq __rust_alloc_zeroed
    movq %rax, %rbx                     # rbx = counters base
    testq %rbx, %rbx
    je .LBB2_63
    movq %rbp, 184(%rsp)
    movq %r15, 192(%rsp)
    movq %rdi, 152(%rsp)
    addq $1, %rsi
    movabsq $10000000000000000, %r13    # r13 = 10^16 modulus
    movq $1, (%rbx)                     # counter[0] = 1 (DP seed)
    testq %r12, %r12
    movq %rsi, %r8
    je .LBB2_62
    movq %r8, 88(%rsp)                  # counter array length (sum + 1)
    leaq 96(%rbx), %rax
    movq %rax, 208(%rsp)
    xorl %r12d, %r12d                   # r12 = running total of primes processed
    movabsq $-3689348814741910323, %r15 # 0xCCCC..CCCD: reciprocal for /5
    movabsq $4611686018427387902, %rsi
    movabsq $4153837486827862103, %r9   # reciprocal constant for /10^16
    xorl %edx, %edx                     # rdx = primes processed so far
    movq 152(%rsp), %rax                # rax = cursor into primes
    movq 200(%rsp), %r10                # r10 = primes end
    .p2align 4, 0x90
    .LBB2_65:
    # Outer loop: one prime per iteration.
    testq %rax, %rax
    je .LBB2_66
    addq $1, %rdx
    leaq 1(%r12), %rcx
    movl (%rax), %r14d                  # r14 = current prime p
    addq $4, %rax
    movq %rax, 224(%rsp)
    movq %rcx, %rax                     # rax = inner repeat count (r12 + 1)
    testq %rax, %rax
    jne .LBB2_95
    jmp .LBB2_94
    .p2align 4, 0x90
    .LBB2_130:
    # DP update: counter[i + p] += counter[i], i sweeping downward.
    movq 48(%rsp), %rdi
    movq (%rbx,%rdi,8), %rbp
    addq %r14, %rdi
    addq %rbp, (%rbx,%rdi,8)
    testq %rax, %rax
    je .LBB2_94
    .LBB2_95:
    addq $-1, %rax
    movq $1, 40(%rsp)
    movl $1, %ebp
    jmp .LBB2_96
    .p2align 4, 0x90
    .LBB2_94:
    xorl %eax, %eax
    xorl %ebp, %ebp
    .LBB2_96:
    movq %rax, 40(%rsp,%rbp,8)          # Some(i)/None iterator encoding
    cmpq $1, 40(%rsp)
    je .LBB2_130
    # Every 5th prime (rdx % 5 == 0, via the multiply-high reciprocal),
    # reduce all live counters mod 10^16 to bound growth.
    mulxq %r15, %rax, %rbp
    shrq $2, %rbp
    andq %rsi, %rbp                     # rbp = rdx / 5
    leaq (%rbp,%rbp,4), %rax            # rax = 5 * (rdx / 5)
    movq %rdx, %r8
    cmpq %rax, %rdx
    jne .LBB2_111                       # rdx % 5 != 0: skip reduction
    cmpq %rcx, 88(%rsp)
    jb .LBB2_131                        # slice index out of range -> panic
    shlq $3, %rcx
    testq %rcx, %rcx
    je .LBB2_111
    movb $61, %al
    bzhiq %rax, %r12, %r11              # r11 = r12 (masked to 61 bits)
    addq $1, %r11                       # number of counters to reduce
    movq %rbx, %rcx
    cmpq $16, %r11
    jb .LBB2_106                        # too few for the SIMD path
    leal 1(%r12), %eax
    andl $15, %eax                      # scalar head so the rest is x16
    subq %rax, %r11
    je .LBB2_102
    movq %rax, 216(%rsp)
    leaq (%rbx,%r11,8), %rcx
    movq 208(%rsp), %r10
    .p2align 4, 0x90
    .LBB2_104:
    # Vectorized reduction pass: for 16 counters per iteration compute
    # c - (c / 10^16) * 10^16 (quotient via mulx with the r9 reciprocal,
    # shifted right by 51), shuttling lanes through GPRs.
    vmovdqu -96(%r10), %ymm1
    vmovdqu -64(%r10), %ymm3
    vextracti128 $1, %ymm1, %xmm4
    vpextrq $1, %xmm4, %rax
    movq %rax, %rdx
    mulxq %r9, %rdx, %rbp
    vmovdqu -32(%r10), %ymm2
    shrq $51, %rbp
    imulq %r13, %rbp
    vmovdqu (%r10), %ymm0
    vmovq %xmm4, %rdx
    mulxq %r9, %rdi, %r15
    subq %rbp, %rax
    shrq $51, %r15
    imulq %r13, %r15
    vmovq %rax, %xmm4
    subq %r15, %rdx
    vmovq %rdx, %xmm5
    vpunpcklqdq %xmm4, %xmm5, %xmm4
    vpextrq $1, %xmm1, %rdx
    mulxq %r9, %rax, %rdi
    shrq $51, %rdi
    imulq %r13, %rdi
    subq %rdi, %rdx
    vmovq %rdx, %xmm5
    vmovq %xmm1, %rdx
    mulxq %r9, %rax, %rdi
    shrq $51, %rdi
    imulq %r13, %rdi
    subq %rdi, %rdx
    vmovq %rdx, %xmm1
    vextracti128 $1, %ymm3, %xmm6
    vpextrq $1, %xmm6, %rax
    movq %rax, %rdx
    mulxq %r9, %rdx, %rdi
    vpunpcklqdq %xmm5, %xmm1, %xmm1
    shrq $51, %rdi
    imulq %r13, %rdi
    vinserti128 $1, %xmm4, %ymm1, %ymm1
    vmovq %xmm6, %rdx
    mulxq %r9, %rbp, %rsi
    subq %rdi, %rax
    shrq $51, %rsi
    imulq %r13, %rsi
    vmovq %rax, %xmm4
    subq %rsi, %rdx
    vmovq %rdx, %xmm5
    vpunpcklqdq %xmm4, %xmm5, %xmm4
    vpextrq $1, %xmm3, %rdx
    mulxq %r9, %rax, %rsi
    shrq $51, %rsi
    imulq %r13, %rsi
    subq %rsi, %rdx
    vmovq %rdx, %xmm5
    vmovq %xmm3, %rdx
    mulxq %r9, %rax, %rsi
    shrq $51, %rsi
    imulq %r13, %rsi
    subq %rsi, %rdx
    vmovq %rdx, %xmm3
    vextracti128 $1, %ymm2, %xmm6
    vpextrq $1, %xmm6, %rax
    movq %rax, %rdx
    mulxq %r9, %rdx, %rsi
    vpunpcklqdq %xmm5, %xmm3, %xmm3
    shrq $51, %rsi
    imulq %r13, %rsi
    vinserti128 $1, %xmm4, %ymm3, %ymm3
    vmovq %xmm6, %rdx
    mulxq %r9, %rdi, %rbp
    subq %rsi, %rax
    shrq $51, %rbp
    imulq %r13, %rbp
    vmovq %rax, %xmm4
    subq %rbp, %rdx
    vmovq %rdx, %xmm5
    vpunpcklqdq %xmm4, %xmm5, %xmm4
    vpextrq $1, %xmm2, %rdx
    mulxq %r9, %rax, %rsi
    shrq $51, %rsi
    imulq %r13, %rsi
    subq %rsi, %rdx
    vmovq %rdx, %xmm5
    vmovq %xmm2, %rdx
    mulxq %r9, %rax, %rsi
    shrq $51, %rsi
    imulq %r13, %rsi
    subq %rsi, %rdx
    vmovq %rdx, %xmm2
    vextracti128 $1, %ymm0, %xmm6
    vpextrq $1, %xmm6, %rax
    movq %rax, %rdx
    mulxq %r9, %rdx, %rsi
    vpunpcklqdq %xmm5, %xmm2, %xmm2
    shrq $51, %rsi
    imulq %r13, %rsi
    subq %rsi, %rax
    vmovq %xmm6, %rdx
    mulxq %r9, %rsi, %rdi
    vinserti128 $1, %xmm4, %ymm2, %ymm2
    shrq $51, %rdi
    imulq %r13, %rdi
    vmovq %rax, %xmm4
    subq %rdi, %rdx
    vmovq %rdx, %xmm5
    vpunpcklqdq %xmm4, %xmm5, %xmm4
    vpextrq $1, %xmm0, %rdx
    mulxq %r9, %rax, %rsi
    shrq $51, %rsi
    imulq %r13, %rsi
    subq %rsi, %rdx
    vmovq %rdx, %xmm5
    vmovq %xmm0, %rdx
    mulxq %r9, %rax, %rsi
    shrq $51, %rsi
    imulq %r13, %rsi
    subq %rsi, %rdx
    vmovq %rdx, %xmm0
    vpunpcklqdq %xmm5, %xmm0, %xmm0
    vinserti128 $1, %xmm4, %ymm0, %ymm0
    vmovdqu %ymm1, -96(%r10)
    vmovdqu %ymm3, -64(%r10)
    vmovdqu %ymm2, -32(%r10)
    vmovdqu %ymm0, (%r10)
    subq $-128, %r10                    # advance 128 bytes (16 counters)
    addq $-16, %r11
    jne .LBB2_104
    # Restore the clobbered loop-invariant registers.
    cmpq $0, 216(%rsp)
    movq 200(%rsp), %r10
    movabsq $-3689348814741910323, %rsi
    movq %rsi, %r15
    movabsq $4611686018427387902, %rsi
    jne .LBB2_106                       # scalar head remains
    jmp .LBB2_111
    .LBB2_102:
    movq %rbx, %rcx
    .LBB2_106:
    # Scalar reduction: counter %= 10^16 (32-bit div fast path when the
    # value fits in 32 bits; or-with-r13 forces the 64-bit path otherwise).
    addq $-8, %rcx
    leaq (%rbx,%r12,8), %rdi
    .p2align 4, 0x90
    .LBB2_107:
    movq 8(%rcx), %rax
    movq %rax, %rdx
    orq %r13, %rdx
    shrq $32, %rdx
    je .LBB2_108
    xorl %edx, %edx                     # zero rdx before 64-bit div
    divq %r13
    jmp .LBB2_110
    .p2align 4, 0x90
    .LBB2_108:
    xorl %edx, %edx
    divl %r13d
    .LBB2_110:
    movq %rdx, 8(%rcx)                  # store the remainder
    addq $8, %rcx
    cmpq %rcx, %rdi
    jne .LBB2_107
    .LBB2_111:
    addq %r14, %r12                     # running total += p
    movq 224(%rsp), %rax
    cmpq %r10, %rax
    movq %r8, %rdx
    jne .LBB2_65
    .LBB2_66:
    cmpq $2, %r12
    jae .LBB2_68
    xorl %r14d, %r14d
    movl $8, %r9d
    jmp .LBB2_82
    .LBB2_62:
    # Degenerate path: primes vec was empty.
    xorl %r12d, %r12d
    movl $8, %r9d
    xorl %r14d, %r14d
    cmpq $2, %r8
    ja .LBB2_84
    jmp .LBB2_129
    .LBB2_68:
    # Second sieve over 0..r12 using a bit array (r15 = 0xAAAA..AAAB,
    # multiply-high reciprocal for /3; the array holds r12/192 + 1 words,
    # consistent with a 6k±1 wheel layout).
    movabsq $-6148914691236517205, %r15
    movq %r12, %rdx
    mulxq %r15, %rax, %r14
    shrq $7, %r14                       # word count - 1
    leaq 8(,%r14,8), %rcx
    leaq 40(%rsp), %r8
    movl $8, %edx
    vzeroupper
    callq __rust_alloc
    movq %rax, %r9                      # r9 = bit-array base
    testq %r9, %r9
    je .LBB2_1
    addq $1, %r14
    movl $1, %ecx
    movq %r9, %rax
    movq 88(%rsp), %r8
    .p2align 4, 0x90
    .LBB2_70:
    # Fill every word with all-ones (everything starts "prime").
    cmpq %r14, %rcx
    jae .LBB2_71
    movq %rcx, %rdx
    addq $1, %rdx
    jae .LBB2_73
    .LBB2_71:
    movq %rcx, %rdx
    xorl %edi, %edi
    xorl %ecx, %ecx
    jmp .LBB2_74
    .p2align 4, 0x90
    .LBB2_73:
    movq $1, 40(%rsp)
    movl $1, %edi
    .LBB2_74:
    movq %rcx, 40(%rsp,%rdi,8)
    cmpq $1, 40(%rsp)
    movq $-1, (%rax)
    leaq 8(%rax), %rax
    movq %rdx, %rcx
    je .LBB2_70
    andb $-2, (%r9)                     # clear bit 0
    # Compute floor(sqrt(r12)) + 1 via f64 (u64 -> f64 with the
    # LCPI2_1/LCPI2_2 constant trick, then branch-free cvttsd2si fixup
    # for values >= 2^63).
    vmovq %r12, %xmm0
    vpunpckldq .LCPI2_1(%rip), %xmm0, %xmm0
    vsubpd .LCPI2_2(%rip), %xmm0, %xmm0
    vhaddpd %xmm0, %xmm0, %xmm0
    vsqrtsd %xmm0, %xmm0, %xmm0
    vmovsd .LCPI2_3(%rip), %xmm1
    vsubsd %xmm1, %xmm0, %xmm2
    vcvttsd2si %xmm2, %rax
    movabsq $-9223372036854775808, %rcx
    xorq %rax, %rcx
    vcvttsd2si %xmm0, %r11
    vucomisd %xmm1, %xmm0
    cmovaeq %rcx, %r11
    addq $1, %r11                       # r11 = sqrt bound
    cmpq $6, %r11
    jb .LBB2_83
    movl $2, %r10d                      # wheel step alternates 2 and 4 (6k±1)
    movl $5, %eax                       # first candidate
    .p2align 4, 0x90
    .LBB2_77:
    # If candidate a is still marked and a*a < r12, clear its multiples.
    # Bit lookup: word = (a/3) >> 7 (wait-free via the r15 reciprocal),
    # bit = low bits of the same quotient.
    movq %rax, %rdx
    mulxq %r15, %rdx, %rcx
    movq %rcx, %rdx
    shrq $7, %rdx
    shrb %cl
    movq (%r9,%rdx,8), %rdx
    movzbl %cl, %ecx
    btq %rcx, %rdx
    jae .LBB2_81                        # already composite
    movq %rax, %rdx
    imulq %rdx, %rdx                    # rdx = a*a
    cmpq %r12, %rdx
    jae .LBB2_81
    movq %r10, %rsi
    .p2align 4, 0x90
    .LBB2_80:
    # Clear bit for the current multiple (andq with a rotated ~1 mask),
    # then advance by a * (alternating 2/4) — the 6k±1 wheel.
    mulxq %r15, %rbp, %rcx
    movq %rcx, %rbp
    shrb %cl
    movq $-2, %rdi
    rolq %cl, %rdi
    shrq $7, %rbp
    andq %rdi, (%r9,%rbp,8)
    movl $6, %ecx
    subq %rsi, %rcx                     # flip step: 2 <-> 4
    imulq %rax, %rsi
    addq %rsi, %rdx
    movq %rcx, %rsi
    cmpq %r12, %rdx
    jb .LBB2_80
    .LBB2_81:
    addq %r10, %rax                     # next wheel candidate
    movl $6, %ecx
    subq %r10, %rcx
    movq %rcx, %r10                     # flip outer step: 2 <-> 4
    cmpq %r11, %rax
    jb .LBB2_77
    .LBB2_82:
    movq 88(%rsp), %r8
    .LBB2_83:
    cmpq $2, %r8
    jbe .LBB2_129                       # array too short to index [2] -> panic
    .LBB2_84:
    # Accumulate the answer: r15 = counter[2], then add counter[q] for
    # every odd q in 3..=r12 that the wheel sieve left marked prime,
    # reducing mod 10^16 after each add.
    movq 16(%rbx), %r15
    cmpq $3, %r12
    jb .LBB2_112
    movl $3, %edi
    movabsq $-6148914691236517205, %rcx # reciprocal for /3 again
    .p2align 4, 0x90
    .LBB2_86:
    movq %rdi, %rax
    andq $-2, %rax
    cmpq $2, %rax
    je .LBB2_90                         # q == 3: always counted
    movq %rdi, %rdx
    mulxq %rcx, %rdx, %rax
    testb $1, %dil
    je .LBB2_127                        # even -> skip
    shrq %rax
    leaq (%rax,%rax,2), %rax
    movq %rdi, %rdx
    subq %rax, %rdx
    je .LBB2_127                        # divisible by 3 -> skip
    movq %rdi, %rdx
    mulxq %rcx, %rdx, %rax
    movq %rax, %rdx
    shrq $7, %rdx
    shrb %al
    movq (%r9,%rdx,8), %rdx
    movzbl %al, %eax
    btq %rax, %rdx
    jae .LBB2_127                       # sieve bit clear -> composite, skip
    .LBB2_90:
    cmpq %rdi, %r8
    jbe .LBB2_91                        # bounds check on counter[q]
    addq (%rbx,%rdi,8), %r15
    movq %r15, %rax
    orq %r13, %rax
    shrq $32, %rax
    je .LBB2_125                        # fits in 32 bits: cheap div
    xorl %edx, %edx
    movq %r15, %rax
    divq %r13
    movq %rdx, %r15                     # r15 %= 10^16
    jmp .LBB2_127
    .p2align 4, 0x90
    .LBB2_125:
    xorl %edx, %edx
    movl %r15d, %eax
    divl %r13d
    movl %edx, %r15d
    .LBB2_127:
    addq $2, %rdi
    cmpq %r12, %rdi
    jbe .LBB2_86
    .LBB2_112:
    # Free the sieve bit array, the counters, and the primes vec.
    testq %r14, %r14
    je .LBB2_113
    shlq $3, %r14
    movq $1, 40(%rsp)
    movq %r14, 48(%rsp)
    movl $8, %ecx
    movl $2, %eax
    jmp .LBB2_115
    .LBB2_113:
    xorl %eax, %eax
    xorl %ecx, %ecx
    .LBB2_115:
    movq 152(%rsp), %rsi
    movq 192(%rsp), %rdi
    movq 184(%rsp), %rbp
    movq %rcx, 40(%rsp,%rax,8)
    cmpq $1, 40(%rsp)
    jne .LBB2_117
    movq 48(%rsp), %rdx
    movq 56(%rsp), %r8
    movq %r9, %rcx
    vzeroupper
    callq __rust_dealloc               # free the bit array
    .LBB2_117:
    movl $8, %r8d
    movq %rbx, %rcx
    movq %rbp, %rdx
    vzeroupper
    callq __rust_dealloc               # free the counters
    testq %rdi, %rdi
    je .LBB2_118
    shlq $2, %rdi
    movq $1, 40(%rsp)
    movq %rdi, 48(%rsp)
    movl $4, %ecx
    movl $2, %eax
    jmp .LBB2_120
    .LBB2_118:
    xorl %eax, %eax
    xorl %ecx, %ecx
    .LBB2_120:
    movq %rcx, 40(%rsp,%rax,8)
    cmpq $1, 40(%rsp)
    jne .LBB2_122
    movq 48(%rsp), %rdx
    movq 56(%rsp), %r8
    movq %rsi, %rcx
    callq __rust_dealloc               # free the primes vec
    .LBB2_122:
    # assert_eq!(result, 9275262564250418): stash &result and &expected
    # for the panic formatter, compare, and return on success.
    movq %r15, 232(%rsp)
    leaq 232(%rsp), %rax
    movq %rax, 128(%rsp)
    leaq .Lref.n(%rip), %rax
    movq %rax, 160(%rsp)
    movabsq $9275262564250418, %rax     # expected value
    cmpq %rax, %r15
    jne .LBB2_123
    # Success: restore callee-saved state and return.
    vmovaps 240(%rsp), %xmm6
    addq $264, %rsp
    popq %rbx
    popq %rbp
    popq %rdi
    popq %rsi
    popq %r12
    popq %r13
    popq %r14
    popq %r15
    retq
    .LBB2_91:
    # counter[q] index out of bounds -> core::panicking::panic_bounds_check.
    leaq .Lpanic_bounds_check_loc.h(%rip), %rcx
    movq %rdi, %rdx
    vzeroupper
    callq _ZN4core9panicking18panic_bounds_check17h063daf87282afab9E
    ud2
    .LBB2_131:
    # Slice range too long during the mod-10^16 pass -> slice_index_len_fail.
    movq 88(%rsp), %rdx
    vzeroupper
    callq _ZN4core5slice20slice_index_len_fail17h1aa8edd253461d33E
    ud2
    .LBB2_1:
    # Allocation failure paths funnel here, then into __rust_oom.
    vmovups 48(%rsp), %xmm0
    vmovaps %xmm0, 96(%rsp)
    jmp .LBB2_2
    .LBB2_63:
    movq 40(%rsp), %rax
    vmovups 48(%rsp), %xmm0
    vmovaps %xmm0, 96(%rsp)
    movq %rax, 40(%rsp)
    .LBB2_2:
    vmovaps 96(%rsp), %xmm0
    vmovups %xmm0, 48(%rsp)
    leaq 40(%rsp), %rcx
    callq __rust_oom
    ud2
    .LBB2_129:
    # counter[2] out of bounds -> panic_bounds_check (index constant 2).
    leaq .Lpanic_bounds_check_loc.h(%rip), %rcx
    movl $2, %edx
    vzeroupper
    callq _ZN4core9panicking18panic_bounds_check17h063daf87282afab9E
    ud2
    .LBB2_123:
    # Assert failed: build the core::fmt::Arguments for
    # "assertion failed: `(left == right)` ..." (pieces in .Lref.r,
    # format spec in .Lref.s, location in .Lref.u, both values formatted
    # through the &u64 Debug shim) and panic. Never returns.
    leaq 128(%rsp), %rax
    movq %rax, 96(%rsp)
    leaq _ZN53_$LT$$RF$$u27$a$u20$T$u20$as$u20$core..fmt..Debug$GT$3fmt17h035611eed912fa86E(%rip), %rax
    movq %rax, 104(%rsp)
    leaq 160(%rsp), %rcx
    movq %rcx, 112(%rsp)
    movq %rax, 120(%rsp)
    leaq .Lref.r(%rip), %rax
    movq %rax, 40(%rsp)
    movq $3, 48(%rsp)
    leaq .Lref.s(%rip), %rax
    movq %rax, 56(%rsp)
    movq $2, 64(%rsp)
    leaq 96(%rsp), %rax
    movq %rax, 72(%rsp)
    movq $2, 80(%rsp)
    leaq .Lref.u(%rip), %rdx
    leaq 40(%rsp), %rcx
    callq _ZN3std9panicking15begin_panic_fmt17h7442690d48df7895E
    ud2
    .seh_handlerdata
    .text
    .Lcfi18:
    .seh_endproc

    # -------------------------------------------------------------------
    # C entry point `main` (Win64). Runs MinGW's __main (static ctors),
    # then tail-calls std::rt::lang_start(test::main as fn(), argc, argv).
    # The pushed registers realign the stack and survive the __main call.
    # -------------------------------------------------------------------
    .def main;
    .scl 2;
    .type 32;
    .endef
    .globl main
    .p2align 4, 0x90
    main:
    pushq %rbp
    pushq %rsi
    pushq %rdi
    pushq %rbx
    subq $40, %rsp                      # shadow space + alignment
    leaq 32(%rsp), %rbp
    movq %rdx, %rsi                     # save argv across __main
    movslq %ecx, %rdi                   # sign-extend argc to 64 bits
    leaq _ZN4test4main17h0eddcc39ebf1578fE(%rip), %rbx
    callq __main
    movq %rbx, %rcx                     # arg1 = &test::main
    movq %rdi, %rdx                     # arg2 = argc
    movq %rsi, %r8                      # arg3 = argv
    addq $40, %rsp
    popq %rbx
    popq %rdi
    popq %rsi
    popq %rbp
    jmp _ZN3std2rt10lang_start17ha873597f13d2ba38E  # tail call; its ret returns to our caller

    .section .rdata,"dr"
    .p2align 4
    # ---------------------------------------------------------------
    # Read-only data: panic source locations, the expected assert
    # value, and the core::fmt metadata used by test::main's
    # assert_eq! failure path.
    # ---------------------------------------------------------------
    str.g:
    .ascii "C:\\projects\\rust\\src\\liballoc\\vec.rs"

    .p2align 3
    # panic::Location for the bounds checks:
    # { file: str.g, file_len: 36, line: 1549, col: 10 }
    .Lpanic_bounds_check_loc.h:
    .quad str.g
    .quad 36
    .long 1549
    .long 10

    str.j:
    .ascii "test.rs"

    .p2align 3
    # The expected (right-hand) value of the assert, referenced by address
    # so Debug::fmt can print it on failure.
    .Lref.n:
    .quad 9275262564250418

    .p2align 4
    str.o:
    .ascii "assertion failed: `(left == right)`\n left: `"

    str.p:
    .ascii "`,\n right: `"

    str.q:
    .byte 96

    .p2align 3
    # &[&str] literal pieces of the assert_eq! message:
    # (str.o, len 45), (str.p, len 12), (str.q, len 1 — the closing '`').
    .Lref.r:
    .quad str.o
    .quad 45
    .quad str.p
    .quad 12
    .quad str.q
    .quad 1

    .p2align 3
    # core::fmt formatting-argument spec for the two Debug placeholders
    # (layout matches this rustc version's fmt::rt::Argument records —
    # NOTE(review): field meanings inferred from context, confirm).
    .Lref.s:
    .quad 1
    .quad 0
    .quad 3
    .zero 8
    .quad 3
    .zero 8
    .long 32
    .long 0
    .byte 3
    .zero 7
    .quad 1
    .quad 1
    .quad 3
    .zero 8
    .quad 3
    .zero 8
    .long 32
    .long 0
    .byte 3
    .zero 7

    .p2align 3
    # panic::Location of the assert itself:
    # { file: "test.rs", file_len: 7, line: 185, col: 4 }
    .Lref.u:
    .quad str.j
    .quad 7
    .long 185
    .long 4