Home
last modified time | relevance | path

Searched refs:lck_mtx_t (Results 1 – 25 of 150) sorted by relevance

123456

/xnu-12377.81.4/osfmk/kern/
H A Dlock_mtx.h74 } lck_mtx_t; typedef
86 #define decl_lck_mtx_data(class, name) class lck_mtx_t name
88 extern lck_mtx_t *lck_mtx_alloc_init(
93 lck_mtx_t *lck,
97 lck_mtx_t *lck);
100 lck_mtx_t *lck);
103 lck_mtx_t *lck,
107 lck_mtx_t *lck,
111 lck_mtx_t *lck,
117 lck_mtx_t *lck,
[all …]
H A Dlock_mtx.c106 #define LCK_EVENT_TO_MUTEX(e) __container_of((uint32_t *)(e), lck_mtx_t, lck_mtx.data)
119 KALLOC_TYPE_DEFINE(KT_LCK_MTX, lck_mtx_t, KT_PRIV_ACCT);
155 #pragma mark lck_mtx_t: validation
159 __lck_mtx_invalid_panic(lck_mtx_t *lck) in __lck_mtx_invalid_panic()
170 __lck_mtx_not_owned_panic(lck_mtx_t *lock, thread_t thread) in __lck_mtx_not_owned_panic()
178 __lck_mtx_not_locked_spin(lck_mtx_t *lock, thread_t thread) in __lck_mtx_not_locked_spin()
187 __lck_mtx_owned_panic(lck_mtx_t *lock, thread_t thread) in __lck_mtx_owned_panic()
194 __lck_mtx_lock_is_sleepable_panic(lck_mtx_t *lck) in __lck_mtx_lock_is_sleepable_panic()
205 __lck_mtx_preemption_disabled_panic(lck_mtx_t *lck, int expected) in __lck_mtx_preemption_disabled_panic()
213 __lck_mtx_at_irq_panic(lck_mtx_t *lck) in __lck_mtx_at_irq_panic()
[all …]
H A Dlocks.h305 lck_mtx_t *lock,
946 extern void lck_mtx_gate_init(lck_mtx_t *lock, gate_t *gate);
958 extern void lck_mtx_gate_destroy(lck_mtx_t *lock, gate_t *gate);
971 extern gate_t* lck_mtx_gate_alloc_init(lck_mtx_t *lock);
985 extern void lck_mtx_gate_free(lck_mtx_t *lock, gate_t *gate);
1021 extern kern_return_t lck_mtx_gate_try_close(lck_mtx_t *lock, gate_t *gate);
1039 extern void lck_mtx_gate_close(lck_mtx_t *lock, gate_t *gate);
1054 extern void lck_mtx_gate_open(lck_mtx_t *lock, gate_t *gate);
1081 extern kern_return_t lck_mtx_gate_handoff(lck_mtx_t *lock, gate_t *gate, gate_handoff_flags_t flags);
1102 extern void lck_mtx_gate_steal(lck_mtx_t *lock, gate_t *gate);
[all …]
H A Dlock_stat.h221 lck_mtx_t *mtx, in lck_mtx_prof_probe()
256 lck_mtx_t *mtx,
268 LCK_MTX_PROF_MISS(lck_mtx_t *mtx, uint32_t grp_attr_id, int *first_miss) in LCK_MTX_PROF_MISS()
284 lck_mtx_t *mtx, in LCK_MTX_PROF_WAIT()
/xnu-12377.81.4/osfmk/i386/
H A Dlocks.h65 } lck_mtx_t; typedef
87 } lck_mtx_t; typedef
96 typedef struct __lck_mtx_t__ lck_mtx_t; typedef
129 extern lck_mtx_spinwait_ret_type_t lck_mtx_lock_spinwait_x86(lck_mtx_t *mutex);
131 extern void lck_mtx_lock_wait_x86(lck_mtx_t *mutex, struct turnstile **ts);
132 extern void lck_mtx_lock_acquire_x86(lck_mtx_t *mutex);
134 extern void lck_mtx_lock_slow(lck_mtx_t *lock);
135 extern boolean_t lck_mtx_try_lock_slow(lck_mtx_t *lock);
136 extern void lck_mtx_unlock_slow(lck_mtx_t *lock);
137 extern void lck_mtx_lock_spin_slow(lck_mtx_t *lock);
[all …]
H A Dlocks_i386_inlines.h51 void lck_mtx_owner_check_panic(lck_mtx_t *mutex) __abortlike;
57 lck_mtx_t *mutex, in lck_mtx_ilk_unlock_inline()
69 lck_mtx_t *mutex, in lck_mtx_lock_finish_inline()
84 lck_mtx_t *mutex, in lck_mtx_lock_finish_inline_with_cleanup()
101 lck_mtx_t *mutex, in lck_mtx_try_lock_finish_inline()
113 lck_mtx_t *mutex, in lck_mtx_convert_spin_finish_inline()
127 lck_mtx_t *mutex, in lck_mtx_unlock_finish_inline()
H A Dlocks_i386.c147 KALLOC_TYPE_DEFINE(KT_LCK_MTX, lck_mtx_t, KT_PRIV_ACCT);
226 static void lck_mtx_unlock_wakeup_tail(lck_mtx_t *mutex, uint32_t state);
227 static void lck_mtx_interlock_lock(lck_mtx_t *mutex, uint32_t *new_state);
228 static void lck_mtx_interlock_lock_clear_flags(lck_mtx_t *mutex, uint32_t and_flags, uint32_t *new_…
229 static int lck_mtx_interlock_try_lock_set_flags(lck_mtx_t *mutex, uint32_t or_flags, uint32_t *new_…
230 static boolean_t lck_mtx_lock_wait_interlock_to_clear(lck_mtx_t *lock, uint32_t *new_state);
231 static boolean_t lck_mtx_try_lock_wait_interlock_to_clear(lck_mtx_t *lock, uint32_t *new_state);
1017 lck_mtx_t *
1022 lck_mtx_t *lck; in lck_mtx_alloc_init()
1034 lck_mtx_t *lck, in lck_mtx_free()
[all …]
H A Dlocks_i386_opt.c123 lck_mtx_t *lock) in lck_mtx_lock()
173 lck_mtx_t *lock) in lck_mtx_try_lock()
229 lck_mtx_t *lock) in lck_mtx_lock_spin_always()
288 lck_mtx_t *lock) in lck_mtx_lock_spin()
312 lck_mtx_t *lock) in lck_mtx_try_lock_spin_always()
368 lck_mtx_t *lock) in lck_mtx_try_lock_spin()
387 lck_mtx_t *lock) in lck_mtx_unlock()
/xnu-12377.81.4/bsd/sys/
H A Dfasttrap_impl.h83 lck_mtx_t ftpc_mtx; /* lock on all but acount */
94 lck_mtx_t ftp_mtx; /* provider lock */
95 lck_mtx_t ftp_cmtx; /* lock on creating probes */
151 lck_mtx_t ftb_mtx; /* bucket lock */
154 uint8_t ftb_pad[64 - sizeof (lck_mtx_t) - sizeof (void *)];
H A Dkernel.h86 extern lck_mtx_t hostname_lock;
88 extern lck_mtx_t domainname_lock;
H A Ddtrace_glue.h98 extern lck_mtx_t cpu_lock;
99 extern lck_mtx_t cyc_lock;
100 extern lck_mtx_t mod_lock;
136 lck_mtx_t cpuc_pid_lock; /* DTrace pid provider lock */
139 …uint8_t cpuc_pad[CPU_CACHE_COHERENCE_SIZE - sizeof(uint64_t) - sizeof(lck_mtx_t) - sizeof(…
H A Ddomain.h119 lck_mtx_t *dom_mtx; /* domain global mutex */
137 lck_mtx_t *dom_mtx; /* domain global mutex */
H A Dfiledesc.h122 lck_mtx_t fd_lock; /* (L) lock to protect fdesc */
153 lck_mtx_t fd_kqhashlock; /* (Q) lock for dynamic kqueue hash */
157 lck_mtx_t fd_knhashlock; /* (N) lock for hash table for attached knotes */
H A Dproc_internal.h140 lck_mtx_t s_mlock; /* session lock */
195 lck_mtx_t pg_mlock; /* process group lock (PGL) */
289 lck_mtx_t p_mlock; /* mutex lock for proc */
308 lck_mtx_t p_ucred_mlock; /* mutex lock to protect p_ucred */
310 lck_mtx_t p_audit_mlock; /* mutex lock to protect audit sessions */
369 lck_mtx_t p_dtrace_sprlock; /* sun proc lock emulation */
758 extern lck_mtx_t proc_list_mlock;
999 extern lck_mtx_t * pthread_list_mlock;
H A Dmount_internal.h111 lck_mtx_t mnt_mlock; /* mutex that protects mount point */
145 lck_mtx_t mnt_renamelock; /* mutex that serializes renames that change shape of tree */
201 lck_mtx_t mnt_iter_lock; /* mutex that protects iteration of vnodes */
H A Dubc_internal.h85 lck_mtx_t cl_lockr;
92 lck_mtx_t cl_lockw;
/xnu-12377.81.4/bsd/kern/
H A Dkern_synch.c162 lck_mtx_t *mtx) in _sleep()
331 return _sleep((caddr_t)chan, pri, (char *)NULL, 0, (int (*)(int))0, (lck_mtx_t *)0); in sleep()
337 lck_mtx_t *mtx, in msleep0()
355 lck_mtx_t *mtx, in msleep()
373 lck_mtx_t *mtx, in msleep1()
393 return _sleep((caddr_t)chan, pri, wmsg, abstime, (int (*)(int))0, (lck_mtx_t *)0); in tsleep()
409 return _sleep((caddr_t)chan, pri, wmsg, abstime, continuation, (lck_mtx_t *)0); in tsleep0()
420 return _sleep((caddr_t)chan, pri, wmsg, abstime, continuation, (lck_mtx_t *)0); in tsleep1()
/xnu-12377.81.4/bsd/security/audit/
H A Daudit_bsd.h169 lck_mtx_t *mtx_lock;
189 lck_mtx_t *sl_mtx;
201 lck_mtx_t *rl_mtx;
216 void _audit_cv_wait(struct cv *cvp, lck_mtx_t *mp, const char *desc);
217 int _audit_cv_wait_sig(struct cv *cvp, lck_mtx_t *mp, const char *desc);
218 int _audit_cv_wait_continuation(struct cv *cvp, lck_mtx_t *mp,
/xnu-12377.81.4/bsd/miscfs/nullfs/
H A Dnullfs.h97 lck_mtx_t nullm_lock; /* lock to protect rootvp and secondvp above */
140 void nullfs_init_lck(lck_mtx_t * lck);
141 void nullfs_destroy_lck(lck_mtx_t * lck);
/xnu-12377.81.4/bsd/net/
H A Draw_usrreq.c172 lck_mtx_t * mutex_held; in raw_uabort()
225 lck_mtx_t * mutex_held; in raw_udetach()
284 lck_mtx_t * mutex_held; in raw_usend()
343 lck_mtx_t * mutex_held; in raw_ushutdown()
/xnu-12377.81.4/osfmk/arm/
H A Dlocks.h72 } lck_mtx_t; typedef
81 typedef struct __lck_mtx_t__ lck_mtx_t; typedef
/xnu-12377.81.4/iokit/Kernel/
H A DIOLocks.cpp77 lck_mtx_t *
80 return (lck_mtx_t *)lock; in IOLockGetMachLock()
162 lck_mtx_t mutex;
206 lck_mtx_t *
/xnu-12377.81.4/iokit/IOKit/
H A DIOLocks.h67 typedef lck_mtx_t IOLock;
92 lck_mtx_t * IOLockGetMachLock( IOLock * lock);
235 lck_mtx_t * IORecursiveLockGetMachLock( IORecursiveLock * lock);
/xnu-12377.81.4/bsd/miscfs/devfs/
H A Ddevfsdefs.h178 extern lck_mtx_t devfs_mutex;
179 extern lck_mtx_t devfs_attr_mutex;
/xnu-12377.81.4/bsd/miscfs/mockfs/
H A Dmockfs.h58 lck_mtx_t mockfs_mnt_mtx; /* Mount-wide (and tree-wide) mutex */

123456