Home
last modified time | relevance | path

Searched refs:SCHED (Results 1 – 18 of 18) sorted by relevance

/xnu-12377.81.4/tests/sched/sched_test_harness/
H A Dsched_clutch_harness_impl.c150 SCHED(init)(); in clutch_impl_init_topology()
151 SCHED(pset_init)(sched_boot_pset); in clutch_impl_init_topology()
152 SCHED(rt_init_pset)(sched_boot_pset); in clutch_impl_init_topology()
153 SCHED(processor_init)(master_processor); in clutch_impl_init_topology()
178 if (SCHED(cpu_init_completed) != NULL) { in clutch_impl_init_topology()
179 SCHED(cpu_init_completed)(); in clutch_impl_init_topology()
181 SCHED(rt_init_completed)(); in clutch_impl_init_topology()
437 return SCHED(ipi_policy)(dst, thread, (dst->active_thread == dst->idle_thread), event); in sched_ipi_action()
H A Dsched_edge_harness.c250 SCHED(update_pset_load_average)(cpus[cpu_id]->processor_set, 0); in impl_cpu_enqueue_thread()
/xnu-12377.81.4/osfmk/kern/
H A Dsched_prim.c447 kprintf("Scheduler: Default of %s\n", SCHED(sched_name)); in sched_init()
464 strlcpy(sched_string, SCHED(sched_name), sizeof(sched_string)); in sched_init()
470 SCHED(init)(); in sched_init()
472 SCHED(pset_init)(sched_boot_pset); in sched_init()
473 SCHED(rt_init_pset)(sched_boot_pset); in sched_init()
474 SCHED(processor_init)(master_processor); in sched_init()
515 SCHED(timebase_init)(); in sched_timebase_init()
550 const uint32_t quantum_size = SCHED(initial_quantum_size)(THREAD_NULL); in sched_set_max_unsafe_rt_quanta()
572 const uint32_t quantum_size = SCHED(initial_quantum_size)(THREAD_NULL); in sched_set_max_unsafe_fixed_quanta()
593 uint32_t quantum = SCHED(initial_quantum_size)(THREAD_NULL); in sched_get_quantum_us()
[all …]
H A Dsyscall_subr.c112 result = SCHED(thread_should_yield)(myprocessor, current_thread()); in swtch_continue()
129 if (!SCHED(thread_should_yield)(myprocessor, current_thread())) { in swtch()
149 result = SCHED(thread_should_yield)(myprocessor, current_thread()); in swtch_pri_continue()
166 if (!SCHED(thread_should_yield)(myprocessor, current_thread())) { in swtch_pri()
312 bool should_yield = SCHED(thread_should_yield)(current_processor(), current_thread()); in thread_switch()
617 if (!SCHED(thread_should_yield)(myprocessor, self)) { in thread_yield_internal()
H A Dpriority.c141SCHED(update_pset_avg_execution_time)(processor->processor_set, thread->quantum_remaining, ctime, … in thread_quantum_expire()
200 if (SCHED(can_update_priority)(thread)) { in thread_quantum_expire()
201 SCHED(update_priority)(thread); in thread_quantum_expire()
203 SCHED(lightweight_update_priority)(thread); in thread_quantum_expire()
207 SCHED(quantum_expire)(thread); in thread_quantum_expire()
326 SCHED(update_thread_bucket)(thread); in sched_set_thread_base_priority()
391 priority = (int16_t)SCHED(compute_timeshare_priority)(thread); in thread_recompute_sched_pri()
966 SCHED(update_thread_bucket)(thread); in sched_set_thread_mode()
H A Dsched_prim.h850 #define SCHED(f) (sched_edge_dispatch.f) macro
853 #define SCHED(f) (sched_amp_dispatch.f) macro
860 #define SCHED(f) (sched_clutch_dispatch.f) macro
863 #define SCHED(f) (sched_dualq_dispatch.f) macro
H A Dprocessor.c353 SCHED(processor_init)(processor); in processor_init()
579 SCHED(update_pset_load_average)(processor->processor_set, 0); in processor_state_update_idle()
609 SCHED(update_pset_load_average)(processor->processor_set, 0); in processor_state_update_from_thread()
650 if (SCHED(multiple_psets_enabled) == FALSE) { in pset_create()
745 SCHED(pset_init)(pset); in pset_init()
746 SCHED(rt_init_pset)(pset); in pset_init()
915 SCHED(update_pset_load_average)(pset, 0); in pset_update_processor_state()
H A Dsched_rt.c332 lowest_count = SCHED(processor_runq_count)(processor); in sched_rtlocal_choose_processor_smt()
606 if (SCHED(rt_steal_thread) != NULL) { in sched_rt_choose_thread()
609 thread_t new_thread = SCHED(rt_steal_thread)(pset); in sched_rt_choose_thread()
660 SCHED(update_pset_load_average)(pset, 0); in sched_rt_queue_shutdown()
H A Dhost.c263 quantum_time = SCHED(initial_quantum_size)(THREAD_NULL); in host_info()
1076 out->ps_runq_count_sum = SCHED(processor_runq_stats_count_sum)(processor); in get_sched_statistics()
1091 out->ps_runq_count_sum = SCHED(rt_runq_count_sum)(); in get_sched_statistics()
H A Dthread.c1504 new_thread->sched_mode = SCHED(initial_thread_sched_mode)(parent_task); in thread_create_internal()
2006 if (SCHED(can_update_priority)(thread)) { in retrieve_thread_basic_info()
2007 SCHED(update_priority)(thread); in retrieve_thread_basic_info()
2187 quantum_time = SCHED(initial_quantum_size)(THREAD_NULL); in thread_info_internal()
3748 return (uint64_t) (SCHED(initial_quantum_size)(thread) / 2); in thread_workq_quantum_size()
H A Dstartup.c1047 SCHED(run_count_incr)(thread); in load_context()
H A Dsched_amp.c512 SCHED(update_pset_load_average)(pset, 0); in sched_amp_steal_thread()
H A Dsched_clutch.c1263 if (SCHED(priority_is_urgent)(thread->sched_pri)) { in sched_clutch_root_urgency_inc()
1283 if (SCHED(priority_is_urgent)(thread->sched_pri)) { in sched_clutch_root_urgency_dec()
4758 SCHED(update_pset_load_average)(target_pset, ctime); in sched_edge_foreign_runnable_thread_remove()
4853 SCHED(update_pset_load_average)(steal_from_pset, current_timestamp); in sched_edge_steal_thread()
H A Dthread_group.c1502 SCHED(thread_group_recommendation_change)(tg, new_recommendation); in sched_perfcontrol_thread_group_recommend()
H A Dthread_policy.c1267 sched_mode_t newmode = SCHED(initial_thread_sched_mode)(get_threadtask(thread)); in thread_policy_reset()
H A Dtask.c5778 quantum_time = SCHED(initial_quantum_size)(THREAD_NULL); in task_info()
/xnu-12377.81.4/tests/sched/sched_test_harness/shadow_headers/
H A Dsched_prim.c141 if (SCHED(priority_is_urgent)(rq->highq)) { in run_queue_dequeue()
184 if (SCHED(priority_is_urgent)(thread->sched_pri)) { in run_queue_enqueue()
210 if (SCHED(priority_is_urgent)(thread->sched_pri)) { in run_queue_remove()
/xnu-12377.81.4/doc/scheduler/
H A Dsched_clutch_edge.md248 `SCHED(steal_thread)` scheduler callout is invoked when the processor does not find any thread for …
264 …turn THREAD_NULL for the steal callout and perform rebalancing as part of SCHED(processor_balance)…
271 If `SCHED(steal_thread)` did not return a thread for the processor, it indicates that the processor…