kprobes: kretprobes simplifications
- consolidate duplicate code in all arch_prepare_kretprobe instances into common code - replace various odd helpers that use hlist_for_each_entry to get the first element of a list with either a hlist_for_each_entry_safe or an opencoded access to the first element in the caller - inline add_rp_inst into its only remaining caller - use kretprobe_inst_table_head instead of opencoding it Signed-off-by: Christoph Hellwig <hch@lst.de> Cc: Prasanna S Panchamukhi <prasanna@in.ibm.com> Acked-by: Ananth N Mavinakayanahalli <ananth@in.ibm.com> Cc: Anil S Keshavamurthy <anil.s.keshavamurthy@intel.com> Signed-off-by: Andrew Morton <akpm@linux-foundation.org> Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
This commit is contained in:
Родитель
6f716acd5f
Коммит
4c4308cb93
|
@ -226,24 +226,15 @@ static void __kprobes prepare_singlestep(struct kprobe *p, struct pt_regs *regs)
|
|||
}
|
||||
|
||||
/* Called with kretprobe_lock held */
|
||||
void __kprobes arch_prepare_kretprobe(struct kretprobe *rp,
|
||||
void __kprobes arch_prepare_kretprobe(struct kretprobe_instance *ri,
|
||||
struct pt_regs *regs)
|
||||
{
|
||||
unsigned long *sara = (unsigned long *)®s->esp;
|
||||
|
||||
struct kretprobe_instance *ri;
|
||||
ri->ret_addr = (kprobe_opcode_t *) *sara;
|
||||
|
||||
if ((ri = get_free_rp_inst(rp)) != NULL) {
|
||||
ri->rp = rp;
|
||||
ri->task = current;
|
||||
ri->ret_addr = (kprobe_opcode_t *) *sara;
|
||||
|
||||
/* Replace the return addr with trampoline addr */
|
||||
*sara = (unsigned long) &kretprobe_trampoline;
|
||||
add_rp_inst(ri);
|
||||
} else {
|
||||
rp->nmissed++;
|
||||
}
|
||||
/* Replace the return addr with trampoline addr */
|
||||
*sara = (unsigned long) &kretprobe_trampoline;
|
||||
}
|
||||
|
||||
/*
|
||||
|
|
|
@ -465,23 +465,13 @@ int __kprobes trampoline_probe_handler(struct kprobe *p, struct pt_regs *regs)
|
|||
}
|
||||
|
||||
/* Called with kretprobe_lock held */
|
||||
void __kprobes arch_prepare_kretprobe(struct kretprobe *rp,
|
||||
void __kprobes arch_prepare_kretprobe(struct kretprobe_instance *ri,
|
||||
struct pt_regs *regs)
|
||||
{
|
||||
struct kretprobe_instance *ri;
|
||||
ri->ret_addr = (kprobe_opcode_t *)regs->b0;
|
||||
|
||||
if ((ri = get_free_rp_inst(rp)) != NULL) {
|
||||
ri->rp = rp;
|
||||
ri->task = current;
|
||||
ri->ret_addr = (kprobe_opcode_t *)regs->b0;
|
||||
|
||||
/* Replace the return addr with trampoline addr */
|
||||
regs->b0 = ((struct fnptr *)kretprobe_trampoline)->ip;
|
||||
|
||||
add_rp_inst(ri);
|
||||
} else {
|
||||
rp->nmissed++;
|
||||
}
|
||||
/* Replace the return addr with trampoline addr */
|
||||
regs->b0 = ((struct fnptr *)kretprobe_trampoline)->ip;
|
||||
}
|
||||
|
||||
int __kprobes arch_prepare_kprobe(struct kprobe *p)
|
||||
|
|
|
@ -126,22 +126,13 @@ static void __kprobes set_current_kprobe(struct kprobe *p, struct pt_regs *regs,
|
|||
}
|
||||
|
||||
/* Called with kretprobe_lock held */
|
||||
void __kprobes arch_prepare_kretprobe(struct kretprobe *rp,
|
||||
void __kprobes arch_prepare_kretprobe(struct kretprobe_instance *ri,
|
||||
struct pt_regs *regs)
|
||||
{
|
||||
struct kretprobe_instance *ri;
|
||||
ri->ret_addr = (kprobe_opcode_t *)regs->link;
|
||||
|
||||
if ((ri = get_free_rp_inst(rp)) != NULL) {
|
||||
ri->rp = rp;
|
||||
ri->task = current;
|
||||
ri->ret_addr = (kprobe_opcode_t *)regs->link;
|
||||
|
||||
/* Replace the return addr with trampoline addr */
|
||||
regs->link = (unsigned long)kretprobe_trampoline;
|
||||
add_rp_inst(ri);
|
||||
} else {
|
||||
rp->nmissed++;
|
||||
}
|
||||
/* Replace the return addr with trampoline addr */
|
||||
regs->link = (unsigned long)kretprobe_trampoline;
|
||||
}
|
||||
|
||||
static int __kprobes kprobe_handler(struct pt_regs *regs)
|
||||
|
|
|
@ -271,23 +271,13 @@ static void __kprobes set_current_kprobe(struct kprobe *p, struct pt_regs *regs,
|
|||
}
|
||||
|
||||
/* Called with kretprobe_lock held */
|
||||
void __kprobes arch_prepare_kretprobe(struct kretprobe *rp,
|
||||
void __kprobes arch_prepare_kretprobe(struct kretprobe_instance *ri,
|
||||
struct pt_regs *regs)
|
||||
{
|
||||
struct kretprobe_instance *ri;
|
||||
ri->ret_addr = (kprobe_opcode_t *) regs->gprs[14];
|
||||
|
||||
if ((ri = get_free_rp_inst(rp)) != NULL) {
|
||||
ri->rp = rp;
|
||||
ri->task = current;
|
||||
ri->ret_addr = (kprobe_opcode_t *) regs->gprs[14];
|
||||
|
||||
/* Replace the return addr with trampoline addr */
|
||||
regs->gprs[14] = (unsigned long)&kretprobe_trampoline;
|
||||
|
||||
add_rp_inst(ri);
|
||||
} else {
|
||||
rp->nmissed++;
|
||||
}
|
||||
/* Replace the return addr with trampoline addr */
|
||||
regs->gprs[14] = (unsigned long)&kretprobe_trampoline;
|
||||
}
|
||||
|
||||
static int __kprobes kprobe_handler(struct pt_regs *regs)
|
||||
|
|
|
@ -266,23 +266,14 @@ static void __kprobes prepare_singlestep(struct kprobe *p, struct pt_regs *regs)
|
|||
}
|
||||
|
||||
/* Called with kretprobe_lock held */
|
||||
void __kprobes arch_prepare_kretprobe(struct kretprobe *rp,
|
||||
void __kprobes arch_prepare_kretprobe(struct kretprobe_instance *ri,
|
||||
struct pt_regs *regs)
|
||||
{
|
||||
unsigned long *sara = (unsigned long *)regs->rsp;
|
||||
struct kretprobe_instance *ri;
|
||||
|
||||
if ((ri = get_free_rp_inst(rp)) != NULL) {
|
||||
ri->rp = rp;
|
||||
ri->task = current;
|
||||
ri->ret_addr = (kprobe_opcode_t *) *sara;
|
||||
|
||||
/* Replace the return addr with trampoline addr */
|
||||
*sara = (unsigned long) &kretprobe_trampoline;
|
||||
add_rp_inst(ri);
|
||||
} else {
|
||||
rp->nmissed++;
|
||||
}
|
||||
ri->ret_addr = (kprobe_opcode_t *) *sara;
|
||||
/* Replace the return addr with trampoline addr */
|
||||
*sara = (unsigned long) &kretprobe_trampoline;
|
||||
}
|
||||
|
||||
int __kprobes kprobe_handler(struct pt_regs *regs)
|
||||
|
|
|
@ -123,7 +123,8 @@ DECLARE_PER_CPU(struct kprobe *, current_kprobe);
|
|||
DECLARE_PER_CPU(struct kprobe_ctlblk, kprobe_ctlblk);
|
||||
|
||||
#ifdef ARCH_SUPPORTS_KRETPROBES
|
||||
extern void arch_prepare_kretprobe(struct kretprobe *rp, struct pt_regs *regs);
|
||||
extern void arch_prepare_kretprobe(struct kretprobe_instance *ri,
|
||||
struct pt_regs *regs);
|
||||
#else /* ARCH_SUPPORTS_KRETPROBES */
|
||||
static inline void arch_prepare_kretprobe(struct kretprobe *rp,
|
||||
struct pt_regs *regs)
|
||||
|
@ -209,8 +210,6 @@ void jprobe_return(void);
|
|||
int register_kretprobe(struct kretprobe *rp);
|
||||
void unregister_kretprobe(struct kretprobe *rp);
|
||||
|
||||
struct kretprobe_instance *get_free_rp_inst(struct kretprobe *rp);
|
||||
void add_rp_inst(struct kretprobe_instance *ri);
|
||||
void kprobe_flush_task(struct task_struct *tk);
|
||||
void recycle_rp_inst(struct kretprobe_instance *ri, struct hlist_head *head);
|
||||
#else /* CONFIG_KPROBES */
|
||||
|
|
|
@ -357,46 +357,6 @@ void __kprobes kprobes_inc_nmissed_count(struct kprobe *p)
|
|||
return;
|
||||
}
|
||||
|
||||
/* Called with kretprobe_lock held */
|
||||
struct kretprobe_instance __kprobes *get_free_rp_inst(struct kretprobe *rp)
|
||||
{
|
||||
struct hlist_node *node;
|
||||
struct kretprobe_instance *ri;
|
||||
hlist_for_each_entry(ri, node, &rp->free_instances, uflist)
|
||||
return ri;
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* Called with kretprobe_lock held */
|
||||
static struct kretprobe_instance __kprobes *get_used_rp_inst(struct kretprobe
|
||||
*rp)
|
||||
{
|
||||
struct hlist_node *node;
|
||||
struct kretprobe_instance *ri;
|
||||
hlist_for_each_entry(ri, node, &rp->used_instances, uflist)
|
||||
return ri;
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* Called with kretprobe_lock held */
|
||||
void __kprobes add_rp_inst(struct kretprobe_instance *ri)
|
||||
{
|
||||
/*
|
||||
* Remove rp inst off the free list -
|
||||
* Add it back when probed function returns
|
||||
*/
|
||||
hlist_del(&ri->uflist);
|
||||
|
||||
/* Add rp inst onto table */
|
||||
INIT_HLIST_NODE(&ri->hlist);
|
||||
hlist_add_head(&ri->hlist,
|
||||
&kretprobe_inst_table[hash_ptr(ri->task, KPROBE_HASH_BITS)]);
|
||||
|
||||
/* Also add this rp inst to the used list. */
|
||||
INIT_HLIST_NODE(&ri->uflist);
|
||||
hlist_add_head(&ri->uflist, &ri->rp->used_instances);
|
||||
}
|
||||
|
||||
/* Called with kretprobe_lock held */
|
||||
void __kprobes recycle_rp_inst(struct kretprobe_instance *ri,
|
||||
struct hlist_head *head)
|
||||
|
@ -450,7 +410,9 @@ void __kprobes kprobe_flush_task(struct task_struct *tk)
|
|||
static inline void free_rp_inst(struct kretprobe *rp)
|
||||
{
|
||||
struct kretprobe_instance *ri;
|
||||
while ((ri = get_free_rp_inst(rp)) != NULL) {
|
||||
struct hlist_node *pos, *next;
|
||||
|
||||
hlist_for_each_entry_safe(ri, pos, next, &rp->free_instances, uflist) {
|
||||
hlist_del(&ri->uflist);
|
||||
kfree(ri);
|
||||
}
|
||||
|
@ -732,7 +694,21 @@ static int __kprobes pre_handler_kretprobe(struct kprobe *p,
|
|||
|
||||
/*TODO: consider to only swap the RA after the last pre_handler fired */
|
||||
spin_lock_irqsave(&kretprobe_lock, flags);
|
||||
arch_prepare_kretprobe(rp, regs);
|
||||
if (!hlist_empty(&rp->free_instances)) {
|
||||
struct kretprobe_instance *ri;
|
||||
|
||||
ri = hlist_entry(rp->free_instances.first,
|
||||
struct kretprobe_instance, uflist);
|
||||
ri->rp = rp;
|
||||
ri->task = current;
|
||||
arch_prepare_kretprobe(ri, regs);
|
||||
|
||||
/* XXX(hch): why is there no hlist_move_head? */
|
||||
hlist_del(&ri->uflist);
|
||||
hlist_add_head(&ri->uflist, &ri->rp->used_instances);
|
||||
hlist_add_head(&ri->hlist, kretprobe_inst_table_head(ri->task));
|
||||
} else
|
||||
rp->nmissed++;
|
||||
spin_unlock_irqrestore(&kretprobe_lock, flags);
|
||||
return 0;
|
||||
}
|
||||
|
@ -795,11 +771,13 @@ void __kprobes unregister_kretprobe(struct kretprobe *rp)
|
|||
{
|
||||
unsigned long flags;
|
||||
struct kretprobe_instance *ri;
|
||||
struct hlist_node *pos, *next;
|
||||
|
||||
unregister_kprobe(&rp->kp);
|
||||
|
||||
/* No race here */
|
||||
spin_lock_irqsave(&kretprobe_lock, flags);
|
||||
while ((ri = get_used_rp_inst(rp)) != NULL) {
|
||||
hlist_for_each_entry_safe(ri, pos, next, &rp->used_instances, uflist) {
|
||||
ri->rp = NULL;
|
||||
hlist_del(&ri->uflist);
|
||||
}
|
||||
|
|
Загрузка…
Ссылка в новой задаче