Diffstat (limited to 'drivers/char/random.c')
-rw-r--r--  drivers/char/random.c | 34 ++++++++++++++++++++--------------
1 file changed, 20 insertions(+), 14 deletions(-)
diff --git a/drivers/char/random.c b/drivers/char/random.c
index 4cbc73173701..c033135f2a67 100644
--- a/drivers/char/random.c
+++ b/drivers/char/random.c
@@ -263,6 +263,7 @@
 #include <linux/syscalls.h>
 #include <linux/completion.h>
 #include <linux/uuid.h>
+#include <linux/locallock.h>
 #include <crypto/chacha20.h>
 
 #include <asm/processor.h>
@@ -1087,8 +1088,6 @@ static void add_timer_randomness(struct timer_rand_state *state, unsigned num)
 	} sample;
 	long delta, delta2, delta3;
 
-	preempt_disable();
-
 	sample.jiffies = jiffies;
 	sample.cycles = random_get_entropy();
 	sample.num = num;
@@ -1129,7 +1128,6 @@ static void add_timer_randomness(struct timer_rand_state *state, unsigned num)
 		 */
 		credit_entropy_bits(r, min_t(int, fls(delta>>1), 11));
 	}
-	preempt_enable();
 }
 
 void add_input_randomness(unsigned int type, unsigned int code,
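
The dropped preempt_disable()/preempt_enable() pair used to span the whole sampling path, including mix_pool_bytes() and credit_entropy_bits(), which take the input pool's spinlock; on PREEMPT_RT that spinlock is a sleeping lock and must not be acquired with preemption disabled. For orientation, a rough sketch of how this path is reached, modelled on the mainline add_input_randomness() of this era (details approximate, not part of the patch):

/* Rough sketch of a caller of add_timer_randomness(); modelled on the
 * mainline add_input_randomness() of this era, details approximate.
 * input_timer_state and add_timer_randomness() are the file-local
 * helpers in drivers/char/random.c. */
void add_input_randomness(unsigned int type, unsigned int code,
			  unsigned int value)
{
	static unsigned char last_value;

	/* Ignore autorepeat and the like. */
	if (value == last_value)
		return;

	last_value = value;
	/* Mixes the event into the input pool; takes the pool's spinlock. */
	add_timer_randomness(&input_timer_state,
			     (type << 4) ^ code ^ (code >> 4) ^ value);
}
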
@@ -1186,28 +1184,27 @@ static __u32 get_reg(struct fast_pool *f, struct pt_regs *regs)
 	return *ptr;
 }
 
-void add_interrupt_randomness(int irq, int irq_flags)
+void add_interrupt_randomness(int irq, int irq_flags, __u64 ip)
 {
 	struct entropy_store	*r;
 	struct fast_pool	*fast_pool = this_cpu_ptr(&irq_randomness);
-	struct pt_regs		*regs = get_irq_regs();
 	unsigned long		now = jiffies;
 	cycles_t		cycles = random_get_entropy();
 	__u32			c_high, j_high;
-	__u64			ip;
 	unsigned long		seed;
 	int			credit = 0;
 
 	if (cycles == 0)
-		cycles = get_reg(fast_pool, regs);
+		cycles = get_reg(fast_pool, NULL);
 	c_high = (sizeof(cycles) > 4) ? cycles >> 32 : 0;
 	j_high = (sizeof(now) > 4) ? now >> 32 : 0;
 	fast_pool->pool[0] ^= cycles ^ j_high ^ irq;
 	fast_pool->pool[1] ^= now ^ c_high;
-	ip = regs ? instruction_pointer(regs) : _RET_IP_;
+	if (!ip)
+		ip = _RET_IP_;
 	fast_pool->pool[2] ^= ip;
 	fast_pool->pool[3] ^= (sizeof(ip) > 4) ? ip >> 32 :
-		get_reg(fast_pool, regs);
+		get_reg(fast_pool, NULL);
 
 	fast_mix(fast_pool);
 	add_interrupt_bench(cycles);
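
With the new __u64 ip argument, the interrupted instruction pointer is supplied by the caller instead of being read here via get_irq_regs(); passing 0 makes the function fall back to _RET_IP_. A minimal, hypothetical caller-side sketch (the helper name is made up; only add_interrupt_randomness(), get_irq_regs() and instruction_pointer() are existing interfaces):

/* Hypothetical caller: capture the interrupted instruction pointer while
 * still in hard interrupt context and hand it to add_interrupt_randomness();
 * a zero ip makes it fall back to _RET_IP_. */
#include <linux/random.h>
#include <linux/ptrace.h>
#include <asm/irq_regs.h>

static void example_feed_irq_entropy(int irq)	/* illustrative name */
{
	struct pt_regs *regs = get_irq_regs();	/* only valid in hardirq */
	__u64 ip = regs ? instruction_pointer(regs) : 0;

	add_interrupt_randomness(irq, 0, ip);
}
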
@@ -1668,7 +1665,7 @@ void get_random_bytes_arch(void *buf, int nbytes)
 
 		if (!arch_get_random_long(&v))
 			break;
-	
+
 		memcpy(p, &v, chunk);
 		p += chunk;
 		nbytes -= chunk;
@@ -2122,18 +2119,22 @@ struct batched_entropy {
  * the goal of being quite fast and not depleting entropy.
  */
 static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_long);
+static DEFINE_LOCAL_IRQ_LOCK(batched_entropy_long_lock);
 unsigned long get_random_long(void)
 {
 	unsigned long ret;
 	struct batched_entropy *batch;
 
-	batch = &get_cpu_var(batched_entropy_long);
+	if (arch_get_random_long(&ret))
+		return ret;
+
+	batch = &get_locked_var(batched_entropy_long_lock, batched_entropy_long);
 	if (batch->position % ARRAY_SIZE(batch->entropy_long) == 0) {
 		extract_crng((u8 *)batch->entropy_long);
 		batch->position = 0;
 	}
 	ret = batch->entropy_long[batch->position++];
-	put_cpu_var(batched_entropy_long);
+	put_locked_var(batched_entropy_long_lock, batched_entropy_long);
 	return ret;
 }
 EXPORT_SYMBOL(get_random_long);
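
DEFINE_LOCAL_IRQ_LOCK(), get_locked_var() and put_locked_var() come from the PREEMPT_RT patchset's <linux/locallock.h>: on a non-RT configuration they behave essentially like get_cpu_var()/put_cpu_var(), while on PREEMPT_RT they take a per-CPU (sleeping) lock so the section stays preemptible and extract_crng() can safely take its own spinlock inside it. A generic sketch of the pattern, assuming the RT locallock API; every other name is made up:

/* Generic sketch of the RT locallock pattern used above. Requires the
 * PREEMPT_RT patchset's <linux/locallock.h>; all names other than the
 * locallock/per-CPU API are made up for illustration. */
#include <linux/kernel.h>
#include <linux/percpu.h>
#include <linux/locallock.h>

struct example_batch {			/* hypothetical per-CPU state */
	unsigned long words[16];
	unsigned int pos;
};

static DEFINE_PER_CPU(struct example_batch, example_batch);
static DEFINE_LOCAL_IRQ_LOCK(example_batch_lock);

static unsigned long example_take_word(void)
{
	struct example_batch *b;
	unsigned long ret;

	/* Lock this CPU's instance: a preempt-disabled section on non-RT,
	 * a per-CPU sleeping lock on PREEMPT_RT. */
	b = &get_locked_var(example_batch_lock, example_batch);
	ret = b->words[b->pos++ % ARRAY_SIZE(b->words)];
	put_locked_var(example_batch_lock, example_batch);

	return ret;
}
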
@@ -2145,18 +2146,23 @@ unsigned int get_random_int(void)
 }
 #else
 static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_int);
+static DEFINE_LOCAL_IRQ_LOCK(batched_entropy_int_lock);
+
 unsigned int get_random_int(void)
 {
 	unsigned int ret;
 	struct batched_entropy *batch;
 
-	batch = &get_cpu_var(batched_entropy_int);
+	if (arch_get_random_int(&ret))
+		return ret;
+
+	batch = &get_locked_var(batched_entropy_int_lock, batched_entropy_int);
 	if (batch->position % ARRAY_SIZE(batch->entropy_int) == 0) {
 		extract_crng((u8 *)batch->entropy_int);
 		batch->position = 0;
 	}
 	ret = batch->entropy_int[batch->position++];
-	put_cpu_var(batched_entropy_int);
+	put_locked_var(batched_entropy_int_lock, batched_entropy_int);
 	return ret;
 }
 #endif
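
Note that the arch_get_random_long()/arch_get_random_int() checks now run before the local lock is taken, so a working hardware RNG bypasses the batched path entirely, and on PREEMPT_RT the batched ChaCha20 refill no longer happens inside a preempt-disabled region (where the crng's spinlock_t may sleep). Callers keep using the interface unchanged; a tiny hypothetical consumer, with get_random_long() the only real interface:

/* Hypothetical consumer sketch: get_random_long() tries the hardware RNG
 * first and only then falls back to the per-CPU batch guarded by
 * batched_entropy_long_lock. */
#include <linux/random.h>

static unsigned long example_pick_offset(unsigned long range)	/* made up */
{
	return get_random_long() % range;
}
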