/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/assembler.h, arch/arm/mm/proc-macros.S
 *
 * Copyright (C) 1996-2000 Russell King
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#ifndef __ASM_ASSEMBLER_H
#define __ASM_ASSEMBLER_H

#include <asm-generic/export.h>

#include <asm/alternative.h>
#include <asm/asm-bug.h>
#include <asm/asm-extable.h>
#include <asm/asm-offsets.h>
#include <asm/cpufeature.h>
#include <asm/cputype.h>
#include <asm/debug-monitors.h>
#include <asm/page.h>
#include <asm/pgtable-hwdef.h>
#include <asm/ptrace.h>
#include <asm/thread_info.h>

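	/*
	 * Provide a wxN alias for each wN register, so the 32-bit view of
	 * register xN can be referenced as wxN in register-agnostic macros.
	 */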
	.irp	n,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30
	wx\n	.req	w\n
	.endr

	.macro	save_and_disable_daif, flags
	mrs	\flags, daif
	msr	daifset, #0xf
	.endm

	.macro	disable_daif
	msr	daifset, #0xf
	.endm

	.macro	enable_daif
	msr	daifclr, #0xf
	.endm

	.macro	restore_daif, flags:req
	msr	daif, \flags
	.endm

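	/* IRQ/FIQ are the lowest priority flags, unconditionally unmask the rest. */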
	.macro	enable_da
	msr	daifclr, #(8 | 4)
	.endm

	.macro	save_and_disable_irq, flags
	mrs	\flags, daif
	msr	daifset, #3
	.endm

	.macro	restore_irq, flags
	msr	daif, \flags
	.endm

	.macro	enable_dbg
	msr	daifclr, #8
	.endm

	.macro	disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
	isb	// Synchronise with enable_dbg
9990:
	.endm

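	/* call with daif masked */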
	.macro	enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
9990:
	.endm

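/*
 * RAS Error Synchronization barrier
 */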
	.macro	esb
#ifdef CONFIG_ARM64_RAS_EXTN
	hint	#16
#else
	nop
#endif
	.endm

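/*
 * Value prediction barrier
 */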
	.macro	csdb
	hint	#20
	.endm

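/*
 * Clear Branch History instruction
 */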
	.macro	clearbhb
	hint	#22
	.endm

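/*
 * Speculation barrier
 */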
	.macro	sb
alternative_if_not ARM64_HAS_SB
	dsb	nsh
	isb
alternative_else
	SB_BARRIER_INSN
	nop
alternative_endif
	.endm

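/*
 * NOP sequence
 */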
	.macro	nops, num
	.rept	\num
	nop
	.endr
	.endm

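/*
 * Register aliases.
 */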
lr	.req	x30		// link register

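	/*
	 * Vector entry
	 */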
	.macro	ventry	label
	.align	7
	b	\label
	.endm

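/*
 * Select code when configured for BE.
 */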
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

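/*
 * Select code when configured for LE.
 */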
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif

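/*
 * Define a macro that constructs a 64-bit value by concatenating two
 * 32-bit registers. Note that on big endian systems the order of the
 * registers is swapped.
 */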
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm

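/*
 * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
 * <symbol> is within the range +/- 4 GB of the PC.
 */
	/*
	 * @dst: destination register (64 bit wide)
	 * @sym: name of the symbol
	 */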
	.macro	adr_l, dst, sym
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
	.endm

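	/*
	 * @dst: destination register (32 or 64 bit wide)
	 * @sym: name of the symbol
	 * @tmp: optional 64-bit scratch register to be used if <dst> is a
	 *       32-bit wide register, in which case it cannot be used to hold
	 *       the address
	 */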
	.macro	ldr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
	.endm

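	/*
	 * @src: source register (32 or 64 bit wide)
	 * @sym: name of the symbol
	 * @tmp: mandatory 64-bit scratch register to calculate the address
	 *       while <src> needs to be preserved.
	 */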
	.macro	str_l, src, sym, tmp
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
	.endm

#if defined(__KVM_NVHE_HYPERVISOR__) || defined(__KVM_VHE_HYPERVISOR__)
	.macro	get_this_cpu_offset, dst
	mrs	\dst, tpidr_el2
	.endm
#else
	.macro	get_this_cpu_offset, dst
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	mrs	\dst, tpidr_el1
alternative_else
	mrs	\dst, tpidr_el2
alternative_endif
	.endm

	.macro	set_this_cpu_offset, src
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	msr	tpidr_el1, \src
alternative_else
	msr	tpidr_el2, \src
alternative_endif
	.endm
#endif

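	/*
	 * @dst: Result of per_cpu(sym, smp_processor_id()) (can be SP)
	 * @sym: The name of the per-cpu variable
	 * @tmp: scratch register
	 */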
	.macro	adr_this_cpu, dst, sym, tmp
	adrp	\tmp, \sym
	add	\dst, \tmp, #:lo12:\sym
	get_this_cpu_offset \tmp
	add	\dst, \dst, \tmp
	.endm

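	/*
	 * @dst: Result of READ_ONCE(per_cpu(sym, smp_processor_id()))
	 * @sym: The name of the per-cpu variable
	 * @tmp: scratch register
	 */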
	.macro	ldr_this_cpu dst, sym, tmp
	adr_l	\dst, \sym
	get_this_cpu_offset \tmp
	ldr	\dst, [\dst, \tmp]
	.endm

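/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 */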
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm

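/*
 * read_ctr - read CTR_EL0. If the system has mismatched register fields,
 * provide the system wide safe value from arm64_ftr_reg_ctrel0.sys_val
 */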
	.macro	read_ctr, reg
#ifndef __KVM_NVHE_HYPERVISOR__
alternative_if_not ARM64_MISMATCHED_CACHE_TYPE
	mrs	\reg, ctr_el0			// read CTR
	nop
alternative_else
	ldr_l	\reg, arm64_ftr_reg_ctrel0 + ARM64_FTR_SYSVAL
alternative_endif
#else
alternative_if_not ARM64_KVM_PROTECTED_MODE
	ASM_BUG()
alternative_else_nop_endif
alternative_cb kvm_compute_final_ctr_el0
	movz	\reg, #0
	movk	\reg, #0, lsl #16
	movk	\reg, #0, lsl #32
	movk	\reg, #0, lsl #48
alternative_cb_end
#endif
	.endm

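/*
 * raw_dcache_line_size - get the minimum D-cache line size on this CPU
 * from the CTR register.
 */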
	.macro	raw_dcache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

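/*
 * dcache_line_size - get the safe D-cache line size across all CPUs
 */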
	.macro	dcache_line_size, reg, tmp
	read_ctr	\tmp
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

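/*
 * raw_icache_line_size - get the minimum I-cache line size on this CPU
 * from the CTR register.
 */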
	.macro	raw_icache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	and	\tmp, \tmp, #0xf		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

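/*
 * icache_line_size - get the safe I-cache line size across all CPUs
 */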
	.macro	icache_line_size, reg, tmp
	read_ctr	\tmp
	and	\tmp, \tmp, #0xf		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

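/*
 * tcr_set_t0sz - update TCR.T0SZ so that we can load the ID map
 */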
	.macro	tcr_set_t0sz, valreg, t0sz
	bfi	\valreg, \t0sz, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

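/*
 * tcr_set_t1sz - update TCR.T1SZ
 */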
	.macro	tcr_set_t1sz, valreg, t1sz
	bfi	\valreg, \t1sz, #TCR_T1SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

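/*
 * idmap_get_t0sz - get the T0SZ value needed to cover the ID map
 *
 * Calculate the maximum allowed value for TCR_EL1.T0SZ so that the
 * entire ID map region can be mapped. As T0SZ == (64 - #bits used),
 * this number conveniently equals the number of leading zeroes in
 * the physical address of _end.
 */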
	.macro	idmap_get_t0sz, reg
	adrp	\reg, _end
	orr	\reg, \reg, #(1 << VA_BITS_MIN) - 1
	clz	\reg, \reg
	.endm

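/*
 * tcr_compute_pa_size - set TCR.(I)PS to the highest supported
 * ID_AA64MMFR0_EL1.PARange value
 *
 *	tcr:		register with the TCR_ELx value to be updated
 *	pos:		IPS or PS bitfield position
 *	tmp{0,1}:	temporary registers
 */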
	.macro	tcr_compute_pa_size, tcr, pos, tmp0, tmp1
	mrs	\tmp0, ID_AA64MMFR0_EL1
	// Narrow PARange to fit the PS field in TCR_ELx
	ubfx	\tmp0, \tmp0, #ID_AA64MMFR0_PARANGE_SHIFT, #3
	mov	\tmp1, #ID_AA64MMFR0_PARANGE_MAX
	cmp	\tmp0, \tmp1
	csel	\tmp0, \tmp1, \tmp0, hi
	bfi	\tcr, \tmp0, \pos, #3
	.endm

	.macro	__dcache_op_workaround_clean_cache, op, addr
alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
	dc	\op, \addr
alternative_else
	dc	civac, \addr
alternative_endif
	.endm

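/*
 * Macro to perform a data cache maintenance for the interval
 * [start, end) with dcache line size explicitly provided.
 *
 *	op:		operation passed to dc instruction
 *	domain:		domain used in dsb instruction
 *	start:		starting virtual address of the region
 *	end:		end virtual address of the region
 *	linesz:		dcache line size
 *	fixup:		optional label to branch to on user fault
 *	Corrupts:	start, end, tmp
 */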
	.macro dcache_by_myline_op op, domain, start, end, linesz, tmp, fixup
	sub	\tmp, \linesz, #1
	bic	\start, \start, \tmp
.Ldcache_op\@:
	.ifc	\op, cvau
	__dcache_op_workaround_clean_cache \op, \start
	.else
	.ifc	\op, cvac
	__dcache_op_workaround_clean_cache \op, \start
	.else
	.ifc	\op, cvap
	sys	3, c7, c12, 1, \start	// dc cvap
	.else
	.ifc	\op, cvadp
	sys	3, c7, c13, 1, \start	// dc cvadp
	.else
	dc	\op, \start
	.endif
	.endif
	.endif
	.endif
	add	\start, \start, \linesz
	cmp	\start, \end
	b.lo	.Ldcache_op\@
	dsb	\domain

	_cond_uaccess_extable .Ldcache_op\@, \fixup
	.endm

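/*
 * Macro to perform a data cache maintenance for the interval
 * [start, end)
 *
 *	op:		operation passed to dc instruction
 *	domain:		domain used in dsb instruction
 *	start:		starting virtual address of the region
 *	end:		end virtual address of the region
 *	fixup:		optional label to branch to on user fault
 *	Corrupts:	start, end, tmp1, tmp2
 */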
	.macro	dcache_by_line_op op, domain, start, end, tmp1, tmp2, fixup
	dcache_line_size \tmp1, \tmp2
	dcache_by_myline_op \op, \domain, \start, \end, \tmp1, \tmp2, \fixup
	.endm

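/*
 * Macro to invalidate the instruction cache for the interval [start, end)
 *
 *	start, end:	virtual addresses describing the region
 *	fixup:		optional label to branch to on user fault
 *	Corrupts:	tmp1, tmp2
 */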
	.macro	invalidate_icache_by_line start, end, tmp1, tmp2, fixup
	icache_line_size \tmp1, \tmp2
	sub	\tmp2, \tmp1, #1
	bic	\tmp2, \start, \tmp2
.Licache_op\@:
	ic	ivau, \tmp2			// invalidate I line PoU
	add	\tmp2, \tmp2, \tmp1
	cmp	\tmp2, \end
	b.lo	.Licache_op\@
	dsb	ish
	isb

	_cond_uaccess_extable .Licache_op\@, \fixup
	.endm

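/*
 * load_ttbr1 - install @pgtbl as a TTBR1 page table
 * pgtbl preserved
 * tmp1/tmp2 corrupted, either may overlap with pgtbl
 */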
	.macro	load_ttbr1, pgtbl, tmp1, tmp2
	phys_to_ttbr \tmp1, \pgtbl
	offset_ttbr1 \tmp1, \tmp2
	msr	ttbr1_el1, \tmp1
	isb
	.endm

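/*
 * To prevent the possibility of old and new partial table walks being visible
 * in the tlb, switch the ttbr to a zero page when we invalidate the old
 * records. D4.7.1 'General TLB maintenance requirements' in ARM DDI 0487A.i
 * Even switching to our copied tables will cause a changed output address at
 * each stage of the walk.
 */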
	.macro break_before_make_ttbr_switch zero_page, page_table, tmp, tmp2
	phys_to_ttbr \tmp, \zero_page
	msr	ttbr1_el1, \tmp
	isb
	tlbi	vmalle1
	dsb	nsh
	load_ttbr1 \page_table, \tmp, \tmp2
	.endm

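/*
 * reset_pmuserenr_el0 - reset PMUSERENR_EL0 if PMUv3 present
 */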
	.macro	reset_pmuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64dfr0_el1
	sbfx	\tmpreg, \tmpreg, #ID_AA64DFR0_PMUVER_SHIFT, #4
	cmp	\tmpreg, #1			// Skip if no PMU present
	b.lt	9000f
	msr	pmuserenr_el0, xzr		// Disable PMU access from EL0
9000:
	.endm

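/*
 * reset_amuserenr_el0 - reset AMUSERENR_EL0 if AMUv1 present
 */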
	.macro	reset_amuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64pfr0_el1
	ubfx	\tmpreg, \tmpreg, #ID_AA64PFR0_AMU_SHIFT, #4
	cbz	\tmpreg, .Lskip_\@		// Skip if no AMU present
	msr_s	SYS_AMUSERENR_EL0, xzr		// Disable AMU access from EL0
.Lskip_\@:
	.endm

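/*
 * copy_page - copy src to dest using temp registers t1-t8
 */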
	.macro copy_page dest:req src:req t1:req t2:req t3:req t4:req t5:req t6:req t7:req t8:req
9998:	ldp	\t1, \t2, [\src]
	ldp	\t3, \t4, [\src, #16]
	ldp	\t5, \t6, [\src, #32]
	ldp	\t7, \t8, [\src, #48]
	add	\src, \src, #64
	stnp	\t1, \t2, [\dest]
	stnp	\t3, \t4, [\dest, #16]
	stnp	\t5, \t6, [\dest, #32]
	stnp	\t7, \t8, [\dest, #48]
	add	\dest, \dest, #64
	tst	\src, #(PAGE_SIZE - 1)
	b.ne	9998b
	.endm

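/*
 * Annotate a function as being unsuitable for kprobes.
 */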
#ifdef CONFIG_KPROBES
#define NOKPROBE(x)				\
	.pushsection "_kprobe_blacklist", "aw";	\
	.quad	x;				\
	.popsection;
#else
#define NOKPROBE(x)
#endif

#if defined(CONFIG_KASAN_GENERIC) || defined(CONFIG_KASAN_SW_TAGS)
#define EXPORT_SYMBOL_NOKASAN(name)
#else
#define EXPORT_SYMBOL_NOKASAN(name)	EXPORT_SYMBOL(name)
#endif

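	/*
	 * Emit a 64-bit absolute little endian symbol reference in a way that
	 * ensures that it will be resolved at build time, even when building a
	 * PIE binary. This requires cooperation from the linker script, which
	 * must emit the lo32/hi32 halves individually.
	 */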
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm

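	/*
	 * mov_q - move an immediate constant into a 64-bit register using
	 *         between 2 and 4 movz/movk instructions (depending on the
	 *         magnitude and sign of the operand)
	 */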
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val
	.else
	movz	\reg, :abs_g3:\val
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm

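/*
 * Return the current task_struct.
 */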
	.macro	get_current_task, rd
	mrs	\rd, sp_el0
	.endm

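/*
 * Offset ttbr1 to allow for 48-bit kernel VAs set with 52-bit PTRS_PER_PGD.
 * orr is used as it can cover the immediate value (and is idempotent).
 *	ttbr: Value of ttbr to set, modified.
 */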
	.macro	offset_ttbr1, ttbr, tmp
#ifdef CONFIG_ARM64_VA_BITS_52
	mrs_s	\tmp, SYS_ID_AA64MMFR2_EL1
	and	\tmp, \tmp, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
	cbnz	\tmp, .Lskipoffs_\@
	orr	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
.Lskipoffs_\@:
#endif
	.endm

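/*
 * Perform the reverse of offset_ttbr1.
 * bic is used as it can cover the immediate value and, in future, won't need
 * to be nop'ed out when dealing with 52-bit kernel VAs.
 */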
	.macro	restore_ttbr1, ttbr
#ifdef CONFIG_ARM64_VA_BITS_52
	bic	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
#endif
	.endm

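/*
 * Arrange a physical address in a TTBR register, taking care of 52-bit
 * addresses.
 *
 *	phys:	physical address, preserved
 *	ttbr:	returns value of ttbr to be written, modified
 */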
	.macro	phys_to_ttbr, ttbr, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	orr	\ttbr, \phys, \phys, lsr #46
	and	\ttbr, \ttbr, #TTBR_BADDR_MASK_52
#else
	mov	\ttbr, \phys
#endif
	.endm

	.macro	phys_to_pte, pte, phys
#ifdef CONFIG_ARM64_PA_BITS_52
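	/*
	 * We assume \phys is 64K aligned and this is guaranteed by only
	 * supporting 52-bit PA when 64K pages are enabled.
	 */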
	orr	\pte, \phys, \phys, lsr #36
	and	\pte, \pte, #PTE_ADDR_MASK
#else
	mov	\pte, \phys
#endif
	.endm

	.macro	pte_to_phys, phys, pte
#ifdef CONFIG_ARM64_PA_BITS_52
	ubfiz	\phys, \pte, #(48 - 16 - 12), #16
	bfxil	\phys, \pte, #16, #32
	lsl	\phys, \phys, #16
#else
	and	\phys, \pte, #PTE_ADDR_MASK
#endif
	.endm

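/*
 * tcr_clear_errata_bits - Clear TCR bits that trigger an errata on this CPU.
 */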
	.macro	tcr_clear_errata_bits, tcr, tmp1, tmp2
#ifdef CONFIG_FUJITSU_ERRATUM_010001
	mrs	\tmp1, midr_el1

	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001_MASK
	and	\tmp1, \tmp1, \tmp2
	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001
	cmp	\tmp1, \tmp2
	b.ne	10f

	mov_q	\tmp2, TCR_CLEAR_FUJITSU_ERRATUM_010001
	bic	\tcr, \tcr, \tmp2
10:
#endif /* CONFIG_FUJITSU_ERRATUM_010001 */
	.endm

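/**
 * Errata workaround prior to disable MMU. Insert an ISB immediately prior
 * to executing the MSR that will change SCTLR_ELn[M] from a value of 1 to 0.
 */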
	.macro	pre_disable_mmu_workaround
#ifdef CONFIG_QCOM_FALKOR_ERRATUM_E1041
	isb
#endif
	.endm

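	/*
	 * frame_push - Push @regcount callee saved registers to the stack,
	 *              starting at x19, as well as x29/x30, and set x29 to
	 *              the new value of sp. Add @extra bytes of stack space
	 *              for locals.
	 */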
	.macro	frame_push, regcount:req, extra
	__frame	st, \regcount, \extra
	.endm

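	/*
	 * frame_pop  - Pop the callee saved registers from the stack that were
	 *              pushed in the most recent call to frame_push, as well
	 *              as x29/x30 and any extra stack space that may have been
	 *              allocated.
	 */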
	.macro	frame_pop
	__frame	ld
	.endm

	.macro	__frame_regs, reg1, reg2, op, num
	.if	.Lframe_regcount == \num
	\op\()r	\reg1, [sp, #(\num + 1) * 8]
	.elseif	.Lframe_regcount > \num
	\op\()p	\reg1, \reg2, [sp, #(\num + 1) * 8]
	.endif
	.endm

	.macro	__frame, op, regcount, extra=0
	.ifc	\op, st
	.if	(\regcount) < 0 || (\regcount) > 10
	.error	"regcount should be in the range [0 ... 10]"
	.endif
	.if	((\extra) % 16) != 0
	.error	"extra should be a multiple of 16 bytes"
	.endif
	.ifdef	.Lframe_regcount
	.if	.Lframe_regcount != -1
	.error	"frame_push/frame_pop may not be nested"
	.endif
	.endif
	.set	.Lframe_regcount, \regcount
	.set	.Lframe_extra, \extra
	.set	.Lframe_local_offset, ((\regcount + 3) / 2) * 16
	stp	x29, x30, [sp, #-.Lframe_local_offset - .Lframe_extra]!
	mov	x29, sp
	.endif

	__frame_regs x19, x20, \op, 1
	__frame_regs x21, x22, \op, 3
	__frame_regs x23, x24, \op, 5
	__frame_regs x25, x26, \op, 7
	__frame_regs x27, x28, \op, 9

	.ifc	\op, ld
	.if	.Lframe_regcount == -1
	.error	"frame_push/frame_pop may not be nested"
	.endif
	ldp	x29, x30, [sp], #.Lframe_local_offset + .Lframe_extra
	.set	.Lframe_regcount, -1
	.endif
	.endm

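/*
 * Set SCTLR_ELx to the @reg value, and invalidate the local icache
 * in the process.
 */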
	.macro	set_sctlr, sreg, reg
	msr	\sreg, \reg
	isb
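	/*
	 * Invalidate the local I-cache so that any instructions fetched
	 * speculatively from the PoC are discarded, since they may have
	 * been dynamically patched at the PoU.
	 */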
	ic	iallu
	dsb	nsh
	isb
	.endm

	.macro	set_sctlr_el1, reg
	set_sctlr	sctlr_el1, \reg
	.endm

	.macro	set_sctlr_el2, reg
	set_sctlr	sctlr_el2, \reg
	.endm

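	/*
	 * Check whether preempt/bh-disabled asm code should yield as soon as
	 * it is able. This is the case if we are currently running in task
	 * context, and either a softirq is pending, or the TIF_NEED_RESCHED
	 * flag is set and preemption is enabled.
	 */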
	.macro	cond_yield, lbl:req, tmp:req, tmp2:req
	get_current_task \tmp
	ldr	\tmp, [\tmp, #TSK_TI_PREEMPT]
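	/*
	 * If we are serving a softirq, there is no point in yielding: the
	 * softirq will not be preempted no matter what we do, so we should
	 * run to completion as quickly as we can.
	 */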
	tbnz	\tmp, #SOFTIRQ_SHIFT, .Lnoyield_\@
#ifdef CONFIG_PREEMPTION
	sub	\tmp, \tmp, #PREEMPT_DISABLE_OFFSET
	cbz	\tmp, \lbl
#endif
	adr_l	\tmp, irq_stat + IRQ_CPUSTAT_SOFTIRQ_PENDING
	get_this_cpu_offset	\tmp2
	ldr	w\tmp, [\tmp, \tmp2]
	cbnz	w\tmp, \lbl	// yield on pending softirq in task context
.Lnoyield_\@:
	.endm

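/*
 * Branch Target Identifier (BTI)
 */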
	.macro	bti, targets
	.equ	.L__bti_targets_c, 34
	.equ	.L__bti_targets_j, 36
	.equ	.L__bti_targets_jc, 38
	hint	#.L__bti_targets_\targets
	.endm

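/*
 * This macro emits a program property note section identifying
 * architecture features which require special handling, mainly for
 * use in assembly files included in the VDSO.
 */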
#define NT_GNU_PROPERTY_TYPE_0		5
#define GNU_PROPERTY_AARCH64_FEATURE_1_AND	0xc0000000

#define GNU_PROPERTY_AARCH64_FEATURE_1_BTI	(1U << 0)
#define GNU_PROPERTY_AARCH64_FEATURE_1_PAC	(1U << 1)

#ifdef CONFIG_ARM64_BTI_KERNEL
#define GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT		\
		((GNU_PROPERTY_AARCH64_FEATURE_1_BTI |	\
		  GNU_PROPERTY_AARCH64_FEATURE_1_PAC))
#endif

#ifdef GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT
.macro emit_aarch64_feature_1_and, feat=GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT
	.pushsection .note.gnu.property, "a"
	.align	3
	.long	2f - 1f
	.long	6f - 3f
	.long	NT_GNU_PROPERTY_TYPE_0
1:	.string	"GNU"
2:
	.align	3
3:	.long	GNU_PROPERTY_AARCH64_FEATURE_1_AND
	.long	5f - 4f
4:
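	/*
	 * This is described with an array of char in the Linux API
	 * spec but the text and all other usage (including binutils,
	 * clang and GCC) treat this as a 32 bit value so no swizzling
	 * is required for big endian.
	 */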
	.long	\feat
5:
	.align	3
6:
	.popsection
.endm

#else
.macro emit_aarch64_feature_1_and, feat=0
.endm

#endif

	.macro __mitigate_spectre_bhb_loop	tmp
#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
alternative_cb	spectre_bhb_patch_loop_iter
	mov	\tmp, #32		// Patched to correct the immediate
alternative_cb_end
.Lspectre_bhb_loop\@:
	b	. + 4
	subs	\tmp, \tmp, #1
	b.ne	.Lspectre_bhb_loop\@
	sb
#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
	.endm

	.macro mitigate_spectre_bhb_loop	tmp
#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
alternative_cb	spectre_bhb_patch_loop_mitigation_enable
	b	.L_spectre_bhb_loop_done\@	// Patched to NOP
alternative_cb_end
	__mitigate_spectre_bhb_loop	\tmp
.L_spectre_bhb_loop_done\@:
#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
	.endm

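	/* Save/restores x0-x3 to the stack */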
	.macro __mitigate_spectre_bhb_fw
#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
	stp	x0, x1, [sp, #-16]!
	stp	x2, x3, [sp, #-16]!
	mov	w0, #ARM_SMCCC_ARCH_WORKAROUND_3
alternative_cb	smccc_patch_fw_mitigation_conduit
	nop					// Patched to SMC/HVC #0
alternative_cb_end
	ldp	x2, x3, [sp], #16
	ldp	x0, x1, [sp], #16
#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
	.endm

	.macro mitigate_spectre_bhb_clear_insn
#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
alternative_cb	spectre_bhb_patch_clearbhb
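	/* Patched to NOP when not supported */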
	clearbhb
	isb
alternative_cb_end
#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
	.endm
#endif	/* __ASM_ASSEMBLER_H */