div_u64   65 include/linux/math64.h  #ifndef div_u64
div_u64 1405 kernel/sched_fair.c     rem_load = div_u64(rem_load, busiest_h_load + 1);
div_u64 1415 kernel/sched_fair.c     moved_load = div_u64(moved_load, busiest_weight + 1);
div_u64  270 kernel/sched_rt.c       diff = div_u64((u64)diff, weight);
div_u64  596 kernel/time.c           return div_u64((u64)x * TICK_NSEC, NSEC_PER_SEC / USER_HZ);
div_u64  613 kernel/time.c           return div_u64((u64)x * HZ, USER_HZ);
div_u64  622 kernel/time.c           x = div_u64(x * USER_HZ, HZ);
div_u64  624 kernel/time.c           x = div_u64(x, HZ / USER_HZ);
div_u64  634 kernel/time.c           x = div_u64(x * TICK_NSEC, (NSEC_PER_SEC / USER_HZ));
div_u64  643 kernel/time.c           return div_u64(x, NSEC_PER_SEC / USER_HZ);
div_u64  645 kernel/time.c           return div_u64(x * USER_HZ / 512, NSEC_PER_SEC / 512);
div_u64  652 kernel/time.c           return div_u64(x * 9, (9ull * NSEC_PER_SEC + (USER_HZ / 2)) / USER_HZ);
div_u64   61 kernel/time/ntp.c       tick_nsec = div_u64(second_length, HZ) >> NTP_SCALE_SHIFT;
div_u64   62 kernel/time/ntp.c       tick_length_base = div_u64(tick_length_base, NTP_INTERVAL_FREQ);
div_u64   95 lib/div64.c             return div_u64(dividend, d);
div_u64 3609 mm/slub.c               (long)div_u64(l->sum_time, l->count),
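
Most of these call sites follow the same pattern: widen a value to u64, multiply, then divide by a 32-bit divisor through div_u64(). The helper exists because a plain 64-by-32 division would compile to a libgcc call (__udivdi3) on 32-bit architectures, which the kernel does not link against; on 64-bit builds it reduces to ordinary division. The sketch below is a minimal userspace illustration of that pattern, assuming the div_u64(u64 dividend, u32 divisor) signature from include/linux/math64.h; the HZ, USER_HZ and NSEC_PER_SEC values and the *_sketch function names are illustrative stand-ins, not taken from any particular kernel configuration.

    /*
     * Userspace sketch of the div_u64() usage pattern from the listing above.
     * Assumption: div_u64(u64 dividend, u32 divisor), as in include/linux/math64.h.
     */
    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t u64;
    typedef uint32_t u32;

    #define HZ            1000u            /* illustrative scheduler tick rate */
    #define USER_HZ       100u             /* illustrative userspace tick rate */
    #define NSEC_PER_SEC  1000000000ull

    /* On a 64-bit host this is just plain division, as in math64.h. */
    static inline u64 div_u64(u64 dividend, u32 divisor)
    {
            return dividend / divisor;
    }

    /* Mirrors the kernel/time.c pattern: widen, multiply, then div_u64(). */
    static u64 clock_t_to_jiffies_sketch(unsigned long x)
    {
            return div_u64((u64)x * HZ, USER_HZ);
    }

    static u64 nsec_to_clock_t_sketch(u64 x)
    {
            return div_u64(x, NSEC_PER_SEC / USER_HZ);
    }

    int main(void)
    {
            printf("250 clock_t -> %llu jiffies\n",
                   (unsigned long long)clock_t_to_jiffies_sketch(250));
            printf("3000000000 ns -> %llu clock_t\n",
                   (unsigned long long)nsec_to_clock_t_sketch(3000000000ull));
            return 0;
    }

The widening cast before the multiply (as at kernel/time.c:596 and :613) matters: without it the product would be computed in 32 bits and could overflow before div_u64() ever sees it.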