anolis: preempt/dynamic: Delete code for dynamic preempt full
ANBZ: #7862

We introduced preempt dynamic to switch the preemption mode at runtime
among none, voluntary and full. However, for our own requirements we
disable the dynamic switch from none/voluntary to full. As a result, a
large amount of code introduced for the dynamic full mode can never take
effect, so it can be deleted. Testing shows that removing this unused
code improves basic performance, e.g. in Unixbench. Follow-up commits
will also revert the related code on arm64.

Fixes the range from 1eca80ad8bb1 ("static_call/x86: Add
__static_call_return0()") to 4533f50c8c99 ("anolis: preempt/dynamic:
Disable preempt dynamically change to full mode").

Signed-off-by: zhouzhixin.zzx <zhixin.zhou@linux.alibaba.com>
Reviewed-by: Cruz Zhao <CruzZhao@linux.alibaba.com>
Link: https://gitee.com/anolis/cloud-kernel/pulls/3349
parent 3aad5a2c60
commit 11d06fb443
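For context, the code below is a minimal userspace sketch (not the kernel's implementation) of the policy described above: the preemption mode chosen on the command line maps to one of the preempt_dynamic_* modes, and with the switch to full disabled, requesting "full" is rejected. The identifiers preempt_dynamic_none/voluntary/full mirror kernel/sched/core.c; the helper sched_dynamic_mode_sketch() and the main() driver are illustrative only.

#include <stdio.h>
#include <string.h>

/* Mirrors the kernel's mode names; the parsing below is only a sketch. */
enum preempt_dynamic_mode {
        preempt_dynamic_none,
        preempt_dynamic_voluntary,
        preempt_dynamic_full,   /* kept for reference; not selectable here */
};

static int sched_dynamic_mode_sketch(const char *str)
{
        if (!strcmp(str, "none"))
                return preempt_dynamic_none;
        if (!strcmp(str, "voluntary"))
                return preempt_dynamic_voluntary;
        /* With the none/voluntary to full switch disabled, "full" is rejected. */
        return -1;
}

int main(void)
{
        const char *modes[] = { "none", "voluntary", "full" };

        for (int i = 0; i < 3; i++)
                printf("preempt=%s -> %d\n", modes[i], sched_dynamic_mode_sketch(modes[i]));
        return 0;
}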
@@ -117,34 +117,12 @@ extern asmlinkage void preempt_schedule_notrace_thunk(void);
#define preempt_schedule_notrace_dynamic_enabled preempt_schedule_notrace_thunk
#define preempt_schedule_notrace_dynamic_disabled NULL

#ifdef CONFIG_PREEMPT_DYNAMIC

DECLARE_STATIC_CALL(preempt_schedule, preempt_schedule_dynamic_enabled);

#define __preempt_schedule() \
do { \
        __STATIC_CALL_MOD_ADDRESSABLE(preempt_schedule); \
        asm volatile ("call " STATIC_CALL_TRAMP_STR(preempt_schedule) : ASM_CALL_CONSTRAINT); \
} while (0)

DECLARE_STATIC_CALL(preempt_schedule_notrace, preempt_schedule_notrace_dynamic_enabled);

#define __preempt_schedule_notrace() \
do { \
        __STATIC_CALL_MOD_ADDRESSABLE(preempt_schedule_notrace); \
        asm volatile ("call " STATIC_CALL_TRAMP_STR(preempt_schedule_notrace) : ASM_CALL_CONSTRAINT); \
} while (0)

#else /* PREEMPT_DYNAMIC */

#define __preempt_schedule() \
        asm volatile ("call preempt_schedule_thunk" : ASM_CALL_CONSTRAINT);

#define __preempt_schedule_notrace() \
        asm volatile ("call preempt_schedule_notrace_thunk" : ASM_CALL_CONSTRAINT);

#endif /* PREEMPT_DYNAMIC */

#endif /* PREEMPTION */

#endif /* __ASM_PREEMPT_H */
@@ -405,20 +405,7 @@ irqentry_state_t noinstr irqentry_enter(struct pt_regs *regs);
 * Conditional reschedule with additional sanity checks.
 */
void raw_irqentry_exit_cond_resched(void);
#ifdef CONFIG_PREEMPT_DYNAMIC
#if defined(CONFIG_HAVE_PREEMPT_DYNAMIC_CALL)
#define irqentry_exit_cond_resched_dynamic_enabled raw_irqentry_exit_cond_resched
#define irqentry_exit_cond_resched_dynamic_disabled NULL
DECLARE_STATIC_CALL(irqentry_exit_cond_resched, raw_irqentry_exit_cond_resched);
#define irqentry_exit_cond_resched() static_call(irqentry_exit_cond_resched)()
#elif defined(CONFIG_HAVE_PREEMPT_DYNAMIC_KEY)
DECLARE_STATIC_KEY_TRUE(sk_dynamic_irqentry_exit_cond_resched);
void dynamic_irqentry_exit_cond_resched(void);
#define irqentry_exit_cond_resched() dynamic_irqentry_exit_cond_resched()
#endif
#else /* CONFIG_PREEMPT_DYNAMIC */
#define irqentry_exit_cond_resched() raw_irqentry_exit_cond_resched()
#endif /* CONFIG_PREEMPT_DYNAMIC */

/**
 * irqentry_exit - Handle return from exception that used irqentry_enter()
@@ -97,7 +97,6 @@ config PREEMPT_DYNAMIC
        bool "Preemption behaviour defined on boot"
        depends on HAVE_PREEMPT_DYNAMIC && !PREEMPT_RT
        select JUMP_LABEL if HAVE_PREEMPT_DYNAMIC_KEY
        select PREEMPT_BUILD
        default y if HAVE_PREEMPT_DYNAMIC_CALL
        help
          This option allows to define the preemption model on the kernel
@@ -366,19 +366,6 @@ void raw_irqentry_exit_cond_resched(void)
                        preempt_schedule_irq();
        }
}
#ifdef CONFIG_PREEMPT_DYNAMIC
#if defined(CONFIG_HAVE_PREEMPT_DYNAMIC_CALL)
DEFINE_STATIC_CALL(irqentry_exit_cond_resched, raw_irqentry_exit_cond_resched);
#elif defined(CONFIG_HAVE_PREEMPT_DYNAMIC_KEY)
DEFINE_STATIC_KEY_TRUE(sk_dynamic_irqentry_exit_cond_resched);
void dynamic_irqentry_exit_cond_resched(void)
{
        if (!static_branch_unlikely(&sk_dynamic_irqentry_exit_cond_resched))
                return;
        raw_irqentry_exit_cond_resched();
}
#endif
#endif

noinstr void irqentry_exit(struct pt_regs *regs, irqentry_state_t state)
{
@@ -5890,27 +5890,6 @@ asmlinkage __visible void __sched notrace preempt_schedule(void)
NOKPROBE_SYMBOL(preempt_schedule);
EXPORT_SYMBOL(preempt_schedule);

#ifdef CONFIG_PREEMPT_DYNAMIC
#if defined(CONFIG_HAVE_PREEMPT_DYNAMIC_CALL)
#ifndef preempt_schedule_dynamic_enabled
#define preempt_schedule_dynamic_enabled preempt_schedule
#define preempt_schedule_dynamic_disabled NULL
#endif
DEFINE_STATIC_CALL(preempt_schedule, preempt_schedule_dynamic_enabled);
EXPORT_STATIC_CALL_TRAMP(preempt_schedule);
#elif defined(CONFIG_HAVE_PREEMPT_DYNAMIC_KEY)
static DEFINE_STATIC_KEY_TRUE(sk_dynamic_preempt_schedule);
void __sched notrace dynamic_preempt_schedule(void)
{
        if (!static_branch_unlikely(&sk_dynamic_preempt_schedule))
                return;
        preempt_schedule();
}
NOKPROBE_SYMBOL(dynamic_preempt_schedule);
EXPORT_SYMBOL(dynamic_preempt_schedule);
#endif
#endif

/**
 * preempt_schedule_notrace - preempt_schedule called by tracing
 *
@@ -5963,27 +5942,6 @@ asmlinkage __visible void __sched notrace preempt_schedule_notrace(void)
}
EXPORT_SYMBOL_GPL(preempt_schedule_notrace);

#ifdef CONFIG_PREEMPT_DYNAMIC
#if defined(CONFIG_HAVE_PREEMPT_DYNAMIC_CALL)
#ifndef preempt_schedule_notrace_dynamic_enabled
#define preempt_schedule_notrace_dynamic_enabled preempt_schedule_notrace
#define preempt_schedule_notrace_dynamic_disabled NULL
#endif
DEFINE_STATIC_CALL(preempt_schedule_notrace, preempt_schedule_notrace_dynamic_enabled);
EXPORT_STATIC_CALL_TRAMP(preempt_schedule_notrace);
#elif defined(CONFIG_HAVE_PREEMPT_DYNAMIC_KEY)
static DEFINE_STATIC_KEY_TRUE(sk_dynamic_preempt_schedule_notrace);
void __sched notrace dynamic_preempt_schedule_notrace(void)
{
        if (!static_branch_unlikely(&sk_dynamic_preempt_schedule_notrace))
                return;
        preempt_schedule_notrace();
}
NOKPROBE_SYMBOL(dynamic_preempt_schedule_notrace);
EXPORT_SYMBOL(dynamic_preempt_schedule_notrace);
#endif
#endif

#endif /* CONFIG_PREEMPTION */

/*
@@ -7484,26 +7442,17 @@ void sched_dynamic_update(int mode)

        preempt_dynamic_enable(cond_resched);
        preempt_dynamic_enable(might_resched);
        preempt_dynamic_enable(preempt_schedule);
        preempt_dynamic_enable(preempt_schedule_notrace);
        preempt_dynamic_enable(irqentry_exit_cond_resched);

        switch (mode) {
        case preempt_dynamic_none:
                preempt_dynamic_enable(cond_resched);
                preempt_dynamic_disable(might_resched);
                preempt_dynamic_disable(preempt_schedule);
                preempt_dynamic_disable(preempt_schedule_notrace);
                preempt_dynamic_disable(irqentry_exit_cond_resched);
                pr_info("Dynamic Preempt: none\n");
                break;

        case preempt_dynamic_voluntary:
                preempt_dynamic_enable(cond_resched);
                preempt_dynamic_enable(might_resched);
                preempt_dynamic_disable(preempt_schedule);
                preempt_dynamic_disable(preempt_schedule_notrace);
                preempt_dynamic_disable(irqentry_exit_cond_resched);
                pr_info("Dynamic Preempt: voluntary\n");
                break;
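As a reading aid for the hunk above, here is a self-contained userspace analogue of what sched_dynamic_update() does per mode; plain function pointers stand in for the kernel's static_call()/static-branch machinery (this is not kernel code, and all names below are illustrative). It makes the dead-code argument concrete: with full unreachable, the full-preemption call sites are disabled in every selectable mode.

#include <stdio.h>
#include <stddef.h>

/* Stand-ins for the real call targets. */
static void do_cond_resched(void)  { puts("cond_resched"); }
static void do_might_resched(void) { puts("might_resched"); }

/* Function pointers play the role of the kernel's patched call sites. */
static void (*cond_resched_site)(void);
static void (*might_resched_site)(void);
static void (*preempt_schedule_site)(void);
static void (*irqentry_exit_cond_resched_site)(void);

enum mode { MODE_NONE, MODE_VOLUNTARY };        /* "full" is not selectable */

static void sched_dynamic_update_sketch(enum mode m)
{
        /* cond_resched() is enabled in both remaining modes. */
        cond_resched_site = do_cond_resched;
        /* might_resched() only in voluntary mode. */
        might_resched_site = (m == MODE_VOLUNTARY) ? do_might_resched : NULL;
        /*
         * The full-preemption entry points are disabled in both none and
         * voluntary mode, so the static-call plumbing behind them never
         * runs, which is why this commit can delete it.
         */
        preempt_schedule_site = NULL;
        irqentry_exit_cond_resched_site = NULL;
}

int main(void)
{
        sched_dynamic_update_sketch(MODE_VOLUNTARY);
        if (cond_resched_site)
                cond_resched_site();
        if (might_resched_site)
                might_resched_site();
        if (!preempt_schedule_site && !irqentry_exit_cond_resched_site)
                puts("full-preemption call sites stay disabled");
        return 0;
}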