/*
 * docs: kernel-locking: Convert semaphore references
 * [linux-2.6/verdex.git] / include/linux/proportions.h
 * blob 2c3b3cad92bec050b18605cffe39784f774f09d0
 */
/*
 * Floating proportions
 *
 * Copyright (C) 2007 Red Hat, Inc., Peter Zijlstra <pzijlstr@redhat.com>
 *
 * This file contains the public data structure and API definitions.
 */
9 #ifndef _LINUX_PROPORTIONS_H
10 #define _LINUX_PROPORTIONS_H
12 #include <linux/percpu_counter.h>
13 #include <linux/spinlock.h>
14 #include <linux/mutex.h>
16 struct prop_global {
18 * The period over which we differentiate
20 * period = 2^shift
22 int shift;
24 * The total event counter aka 'time'.
26 * Treated as an unsigned long; the lower 'shift - 1' bits are the
27 * counter bits, the remaining upper bits the period counter.
29 struct percpu_counter events;
33 * global proportion descriptor
35 * this is needed to consitently flip prop_global structures.
37 struct prop_descriptor {
38 int index;
39 struct prop_global pg[2];
40 struct mutex mutex; /* serialize the prop_global switch */
43 int prop_descriptor_init(struct prop_descriptor *pd, int shift);
44 void prop_change_shift(struct prop_descriptor *pd, int new_shift);
47 * ----- PERCPU ------
50 struct prop_local_percpu {
52 * the local events counter
54 struct percpu_counter events;
57 * snapshot of the last seen global state
59 int shift;
60 unsigned long period;
61 spinlock_t lock; /* protect the snapshot state */
int prop_local_init_percpu(struct prop_local_percpu *pl);
void prop_local_destroy_percpu(struct prop_local_percpu *pl);
void __prop_inc_percpu(struct prop_descriptor *pd, struct prop_local_percpu *pl);
void prop_fraction_percpu(struct prop_descriptor *pd, struct prop_local_percpu *pl,
		long *numerator, long *denominator);

/*
 * Count a percpu-local event; IRQ-safe wrapper around __prop_inc_percpu().
 */
static inline
void prop_inc_percpu(struct prop_descriptor *pd, struct prop_local_percpu *pl)
{
	unsigned long flags;

	local_irq_save(flags);
	__prop_inc_percpu(pd, pl);
	local_irq_restore(flags);
}
81 * ----- SINGLE ------
84 struct prop_local_single {
86 * the local events counter
88 unsigned long events;
91 * snapshot of the last seen global state
92 * and a lock protecting this state
94 int shift;
95 unsigned long period;
96 spinlock_t lock; /* protect the snapshot state */
99 #define INIT_PROP_LOCAL_SINGLE(name) \
100 { .lock = __SPIN_LOCK_UNLOCKED(name.lock), \
int prop_local_init_single(struct prop_local_single *pl);
void prop_local_destroy_single(struct prop_local_single *pl);
void __prop_inc_single(struct prop_descriptor *pd, struct prop_local_single *pl);
void prop_fraction_single(struct prop_descriptor *pd, struct prop_local_single *pl,
		long *numerator, long *denominator);

/*
 * Count a local event; IRQ-safe wrapper around __prop_inc_single().
 */
static inline
void prop_inc_single(struct prop_descriptor *pd, struct prop_local_single *pl)
{
	unsigned long flags;

	local_irq_save(flags);
	__prop_inc_single(pd, pl);
	local_irq_restore(flags);
}
119 #endif /* _LINUX_PROPORTIONS_H */