/* SPDX-License-Identifier: (GPL-2.0+ OR MIT) */
/*
 * Copyright (c) 2019 Amlogic, Inc. All rights reserved.
 */

#ifndef __ASM_IRQFLAGS_DEBUG_ARM64_H
#define __ASM_IRQFLAGS_DEBUG_ARM64_H

#ifdef __KERNEL__

/*
 * CPU interrupt mask handling.
 */
#include <asm/ptrace.h>		/* PSR_I_BIT */
#include <linux/amlogic/debug_lockup.h>

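/*
 * DAIF immediate encoding for daifset/daifclr is D=8, A=4, I=2, F=1,
 * so the "#2" used below operates on PSTATE.I (normal IRQs) only.
 *
 * irq_trace_start()/irq_trace_stop() come from the Amlogic debug_lockup
 * facility; judging by where they are hooked below, they timestamp the
 * IRQs-off window so that overly long masked sections can be reported.
 */
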
static inline unsigned long arch_local_irq_save(void)
{
	unsigned long flags;

	asm volatile("mrs	%0, daif	// arch_local_irq_save\n"
		"msr	daifset, #2"
		: "=r" (flags)
		:
		: "memory");
	/* IRQs are masked from here on; open the IRQs-off trace window */
	irq_trace_start(flags);
	return flags;
}

static inline void arch_local_irq_enable(void)
{
	/* Close the IRQs-off trace window before unmasking */
	irq_trace_stop(0);

	asm volatile("msr	daifclr, #2	// arch_local_irq_enable"
		:
		:
		: "memory");
}

static inline void arch_local_irq_disable(void)
{
	/* Route through arch_local_irq_save() so irq_trace_start() runs too */
	arch_local_irq_save();
}

/*
 * Save the current interrupt enable state.
 */
static inline unsigned long arch_local_save_flags(void)
{
	unsigned long flags;

	/* Only reads DAIF; the mask is unchanged, so no trace hook here */
	asm volatile("mrs	%0, daif	// arch_local_save_flags"
		: "=r" (flags)
		:
		: "memory");
	return flags;
}

/*
 * Restore saved IRQ state.
 */
static inline void arch_local_irq_restore(unsigned long flags)
{
	/* Close the trace window before DAIF is rewritten */
	irq_trace_stop(flags);
	asm volatile("msr	daif, %0	// arch_local_irq_restore"
		:
		: "r" (flags)
		: "memory");
}

static inline int arch_irqs_disabled_flags(unsigned long flags)
{
	/* PSR_I_BIT set in a saved DAIF value means IRQs were masked */
	return flags & PSR_I_BIT;
}

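/*
 * Typical pairing of these helpers (a usage sketch; the critical-section
 * body stands for caller code):
 *
 *	unsigned long flags;
 *
 *	flags = arch_local_irq_save();
 *	... critical section with IRQs masked ...
 *	arch_local_irq_restore(flags);
 *
 * In this debug variant the save/restore pair additionally brackets an
 * irq_trace_start()/irq_trace_stop() window for debug_lockup.
 */
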
#endif /* __KERNEL__ */
#endif /* __ASM_IRQFLAGS_DEBUG_ARM64_H */