/*
 * Bookkeeping for the dependency-graph walk: count one more recursion of
 * the cyclic-dependency check, and remember the deepest recursion depth
 * seen so far in the max_recursion_depth stats counter.
 *
 * NOTE(review): the original chunk contained these three statements twice
 * in a row (an extraction/merge duplication), which would double-count
 * the recursion statistics; deduplicated here.
 */
debug_atomic_inc(&nr_cyclic_check_recursions);
if (depth > max_recursion_depth)
	max_recursion_depth = depth;
/*
 * Register a lock's class in the hash-table, if the class is not present
 * yet. Otherwise we look it up. We cache the result in the lock object
 * itself, so actual lookup of the hash should be once per lock object.
 *
 * NOTE: the class-key must be unique. For dynamic locks, a static
 * lock_class_key variable is passed in through the mutex_init()
 * (or spin_lock_init()) call - which acts as the key. For static
 * locks we use the lock object itself as the key.
 */
static inline struct lock_class *
/*
 * Initialize a lock instance's lock-class mapping info:
 */
void lockdep_init_map(struct lockdep_map *lock, const char *name,