hlock_class       234 kernel/lockdep.c 	stats = get_lock_stats(hlock_class(hlock));
hlock_class       530 kernel/lockdep.c 	print_lock_name(hlock_class(hlock));
hlock_class       960 kernel/lockdep.c 	this.class = hlock_class(check_source);
hlock_class      1069 kernel/lockdep.c 		if (entry->class == hlock_class(check_target))
hlock_class      1206 kernel/lockdep.c 	print_lock_name(hlock_class(prev));
hlock_class      1208 kernel/lockdep.c 	print_lock_name(hlock_class(next));
hlock_class      1249 kernel/lockdep.c 	ret = find_usage_backwards(hlock_class(prev), 0);
hlock_class      1254 kernel/lockdep.c 	ret = find_usage_forwards(hlock_class(next), 0);
hlock_class      1384 kernel/lockdep.c 		if (hlock_class(prev) != hlock_class(next))
hlock_class      1446 kernel/lockdep.c 	if (!(check_noncircular(hlock_class(next), 0)))
hlock_class      1470 kernel/lockdep.c 	list_for_each_entry(entry, &hlock_class(prev)->locks_after, entry) {
hlock_class      1471 kernel/lockdep.c 		if (entry->class == hlock_class(next)) {
hlock_class      1482 kernel/lockdep.c 	ret = add_lock_to_list(hlock_class(prev), hlock_class(next),
hlock_class      1483 kernel/lockdep.c 			       &hlock_class(prev)->locks_after,
hlock_class      1489 kernel/lockdep.c 	ret = add_lock_to_list(hlock_class(next), hlock_class(prev),
hlock_class      1490 kernel/lockdep.c 			       &hlock_class(next)->locks_before,
hlock_class      1498 kernel/lockdep.c 	if (verbose(hlock_class(prev)) || verbose(hlock_class(next))) {
hlock_class      1501 kernel/lockdep.c 		print_lock_name(hlock_class(prev));
hlock_class      1503 kernel/lockdep.c 		print_lock_name(hlock_class(next));
hlock_class      1600 kernel/lockdep.c 	struct lock_class *class = hlock_class(hlock);
hlock_class      1812 kernel/lockdep.c 	print_stack_trace(hlock_class(this)->usage_traces + prev_bit, 1);
hlock_class      1831 kernel/lockdep.c 	if (unlikely(hlock_class(this)->usage_mask & (1 << bad_bit)))
hlock_class      1870 kernel/lockdep.c 	print_lock_dependencies(hlock_class(this), 0);
hlock_class      1893 kernel/lockdep.c 	ret = find_usage_forwards(hlock_class(this), 0);
hlock_class      1912 kernel/lockdep.c 	ret = find_usage_backwards(hlock_class(this), 0);
hlock_class      1978 kernel/lockdep.c 		if (hardirq_verbose(hlock_class(this)))
hlock_class      2003 kernel/lockdep.c 		if (softirq_verbose(hlock_class(this)))
hlock_class      2016 kernel/lockdep.c 		if (hardirq_verbose(hlock_class(this)))
hlock_class      2029 kernel/lockdep.c 		if (softirq_verbose(hlock_class(this)))
hlock_class      2055 kernel/lockdep.c 		if (hardirq_verbose(hlock_class(this)))
hlock_class      2081 kernel/lockdep.c 		if (softirq_verbose(hlock_class(this)))
hlock_class      2096 kernel/lockdep.c 		if (hardirq_verbose(hlock_class(this)))
hlock_class      2111 kernel/lockdep.c 		if (softirq_verbose(hlock_class(this)))
hlock_class      2427 kernel/lockdep.c 	if (likely(hlock_class(this)->usage_mask & new_mask))
hlock_class      2435 kernel/lockdep.c 	if (unlikely(hlock_class(this)->usage_mask & new_mask)) {
hlock_class      2440 kernel/lockdep.c 	hlock_class(this)->usage_mask |= new_mask;
hlock_class      2442 kernel/lockdep.c 	if (!save_trace(hlock_class(this)->usage_traces + new_bit))
hlock_class      2731 kernel/lockdep.c 			hlock_class(hlock)->subclass, hlock->trylock,
hlock_class      2792 kernel/lockdep.c 			hlock_class(hlock)->subclass, hlock->trylock,
hlock_class      3028 kernel/lockdep.c 	point = lock_contention_point(hlock_class(hlock), ip);
hlock_class      3030 kernel/lockdep.c 	stats = get_lock_stats(hlock_class(hlock));
hlock_class      3076 kernel/lockdep.c 	stats = get_lock_stats(hlock_class(hlock));
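
Every call site above resolves a struct held_lock to its struct lock_class through the same small helper. For reference, here is a minimal sketch of hlock_class() consistent with these call sites; this is an assumption based on the lockdep code of this era, where held_lock records a 1-based index into the static lock_classes[] array rather than a direct class pointer:

	static inline struct lock_class *hlock_class(struct held_lock *hlock)
	{
		/* class_idx 0 means no class was recorded for this held lock */
		if (!hlock->class_idx) {
			DEBUG_LOCKS_WARN_ON(1);
			return NULL;
		}
		/* class_idx is stored 1-based; subtract one to index lock_classes[] */
		return lock_classes + hlock->class_idx - 1;
	}

Callers such as get_lock_stats(hlock_class(hlock)) and print_lock_name(hlock_class(hlock)) therefore always operate on the shared per-class state (usage_mask, usage_traces, locks_before/locks_after, contention statistics) rather than on the per-task held_lock entry itself.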