This source file includes following definitions.
- _compute_minimum_sleep
- sim_idle_ms_sleep
- sim_os_set_thread_priority
- sim_os_set_thread_priority
- sim_os_msec
- sim_os_sleep
- sim_timer_exit
- sim_os_ms_sleep_init
- sim_os_ms_sleep
- sim_os_msec
- sim_os_sleep
- sim_os_ms_sleep_init
- sim_os_ms_sleep
- sim_timespec_diff
- sim_rtcn_init_all
- sim_rtcn_init
- sim_rtcn_init_unit
- sim_rtcn_calb
- sim_timer_init
- sim_show_timers
- sim_show_clock_queues
- sim_timer_clr_catchup
- sim_timer_set_catchup
- sim_timer_show_catchup
- sim_timer_tick_svc
- win32_usleep
- sim_usleep
- _timespec_to_double
- _double_to_timespec
- sim_timer_clock_tick_svc
- _rtcn_configure_calibrated_clock
- sim_timer_clock_reset
- sim_start_timer_services
- sim_stop_timer_services
- sim_timer_inst_per_sec
- sim_timer_activate
- sim_timer_activate_after
- sim_register_clock_unit_tmr
- _sim_coschedule_cancel
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53 #include "sim_defs.h"
54 #include <ctype.h>
55 #include <math.h>
56
57 #define SIM_INTERNAL_CLK (SIM_NTIMERS+(1<<30))
58 #define SIM_INTERNAL_UNIT sim_internal_timer_unit
59 #ifndef MIN
60 # define MIN(a,b) (((a) < (b)) ? (a) : (b))
61 #endif
62 #ifndef MAX
63 # define MAX(a,b) (((a) > (b)) ? (a) : (b))
64 #endif
65
uint32 sim_idle_ms_sleep (unsigned int msec);

/* Currently calibrated timer index (-1 = none), plus the last timer and
   instruction rate seen before timer services were stopped, so rate
   queries remain meaningful after a stop. */
static int32 sim_calb_tmr = -1;
static int32 sim_calb_tmr_last = -1;
static double sim_inst_per_sec_last = 0;

/* Host OS sleep/clock characteristics measured at startup */
static uint32 sim_idle_rate_ms = 0;             /* minimum achievable sleep (ms), 0 = unusable */
static uint32 sim_os_sleep_min_ms = 0;          /* average duration of a 1ms sleep request */
static uint32 sim_os_sleep_inc_ms = 0;          /* extra duration per additional requested ms */
static uint32 sim_os_clock_resoluton_ms = 0;    /* smallest observed clock step (ms) */
static uint32 sim_os_tick_hz = 0;               /* effective host tick rate */
static uint32 sim_idle_calib_pct = 0;           /* idle threshold (%) above which calibration is skipped */
static UNIT *sim_clock_unit[SIM_NTIMERS+1] = {NULL};        /* registered clock unit per timer */
UNIT * volatile sim_clock_cosched_queue[SIM_NTIMERS+1] = {NULL};  /* co-scheduled units per timer */
static int32 sim_cosched_interval[SIM_NTIMERS+1];           /* ticks until next co-scheduled release */
static t_bool sim_catchup_ticks = FALSE;        /* deliver catch-up ticks after idle/stall */

#define sleep1Samples 10                        /* samples averaged when measuring sleep time */
84
85 static uint32 _compute_minimum_sleep (void)
86 {
87 uint32 i, tot, tim;
88
89 sim_os_set_thread_priority (PRIORITY_ABOVE_NORMAL);
90 sim_idle_ms_sleep (1);
91 for (i = 0, tot = 0; i < sleep1Samples; i++)
92 tot += sim_idle_ms_sleep (1);
93 tim = tot / sleep1Samples;
94 sim_os_sleep_min_ms = tim;
95 sim_idle_ms_sleep (1);
96 for (i = 0, tot = 0; i < sleep1Samples; i++)
97 tot += sim_idle_ms_sleep (sim_os_sleep_min_ms + 1);
98 tim = tot / sleep1Samples;
99 sim_os_sleep_inc_ms = tim - sim_os_sleep_min_ms;
100 sim_os_set_thread_priority (PRIORITY_NORMAL);
101 return sim_os_sleep_min_ms;
102 }
103
/* Idle-time sleep: sleep for msec milliseconds and return the actual
   elapsed wall-clock milliseconds (thin wrapper over the OS sleep). */
uint32 sim_idle_ms_sleep (unsigned int msec)
{
return sim_os_ms_sleep (msec);
}
108
109 #if defined(_WIN32)
110
111
112
113
/* Windows: map -1/0/+1 onto below/normal/above-normal thread priority.
   Returns SCPE_ARG for values outside [-1, 1]. */
t_stat sim_os_set_thread_priority (int below_normal_above)
{
const static int val[3] = {THREAD_PRIORITY_BELOW_NORMAL, THREAD_PRIORITY_NORMAL, THREAD_PRIORITY_ABOVE_NORMAL};

if ((below_normal_above < -1) || (below_normal_above > 1))
    return SCPE_ARG;
SetThreadPriority (GetCurrentThread(), val[1 + below_normal_above]);
return SCPE_OK;
}
123 #else
124
/* POSIX: map -1/0/+1 onto the min/mid/max priority of the thread's current
   scheduling policy.  A no-op on GNU/Hurd (always returns SCPE_OK there). */
t_stat sim_os_set_thread_priority (int below_normal_above)
{
int sched_policy, min_prio, max_prio;
struct sched_param sched_priority;

# ifndef __gnu_hurd__
if ((below_normal_above < -1) || (below_normal_above > 1))
    return SCPE_ARG;

/* Read current policy so min/max are queried for the right scheduler.
   NOTE(review): return values of pthread_getschedparam/setschedparam are
   not checked; failures are silently ignored. */
pthread_getschedparam (pthread_self(), &sched_policy, &sched_priority);
min_prio = sched_get_priority_min(sched_policy);
max_prio = sched_get_priority_max(sched_policy);
switch (below_normal_above) {
    case PRIORITY_BELOW_NORMAL:
        sched_priority.sched_priority = min_prio;
        break;
    case PRIORITY_NORMAL:
        sched_priority.sched_priority = (max_prio + min_prio) / 2;
        break;
    case PRIORITY_ABOVE_NORMAL:
        sched_priority.sched_priority = max_prio;
        break;
    }
pthread_setschedparam (pthread_self(), sched_policy, &sched_priority);
# endif
return SCPE_OK;
}
152 #endif
153
154
155
156 #if defined (_WIN32)
157
158
159
160 const t_bool rtc_avail = TRUE;
161
/* Windows: millisecond wall-clock counter (wraps per timeGetTime semantics). */
uint32 sim_os_msec (void)
{
return timeGetTime ();
}
166
/* Windows: sleep for sec whole seconds. */
void sim_os_sleep (unsigned int sec)
{
Sleep (sec * 1000);
return;
}
172
/* atexit handler: undo the timeBeginPeriod() made in sim_os_ms_sleep_init,
   restoring the system timer resolution. */
void sim_timer_exit (void)
{
timeEndPeriod (sim_idle_rate_ms);
return;
}
178
/* Windows: raise the multimedia timer resolution to its minimum period,
   register cleanup, and measure the achievable sleep time.
   Returns 0 if high-resolution timing is unavailable. */
uint32 sim_os_ms_sleep_init (void)
{
TIMECAPS timers;

if (timeGetDevCaps (&timers, sizeof (timers)) != TIMERR_NOERROR)
    return 0;
if (timers.wPeriodMin == 0)
    return 0;
if (timeBeginPeriod (timers.wPeriodMin) != TIMERR_NOERROR)
    return 0;
atexit (sim_timer_exit);        /* ensure timeEndPeriod() runs at exit */

return _compute_minimum_sleep ();
}
193
/* Windows: sleep for msec milliseconds; return actual elapsed milliseconds. */
uint32 sim_os_ms_sleep (unsigned int msec)
{
uint32 stime = sim_os_msec();

Sleep (msec);
return sim_os_msec () - stime;
}
201
202 #else
203
204
205
206 # include <time.h>
207 # include <sys/time.h>
208 # include <signal.h>
209 # include <unistd.h>
210 # define NANOS_PER_MILLI 1000000
211 # define MILLIS_PER_SEC 1000
212
213 const t_bool rtc_avail = TRUE;
214
/* POSIX: millisecond wall-clock counter derived from gettimeofday().
   Aborts the process (optionally raising SIGUSR2 first for backtrace
   capture) if gettimeofday fails, since timing is unrecoverable then.
   NOTE(review): the uint32 result wraps; callers rely on unsigned
   subtraction for interval math. */
uint32 sim_os_msec (void)
{
struct timeval cur;
struct timezone foo;
int st1ret;
uint32 msec;

st1ret = gettimeofday (&cur, &foo);
if (st1ret != 0)
  {
    fprintf (stderr, "\rFATAL: gettimeofday failure! Aborting at %s[%s:%d]\r\n",
             __func__, __FILE__, __LINE__);
# if defined(USE_BACKTRACE)
#  ifdef SIGUSR2
    (void)raise(SIGUSR2);
#  endif
# endif
    abort();
  }
msec = (((uint32) cur.tv_sec) * 1000UL) + (((uint32) cur.tv_usec) / 1000UL);
return msec;
}
238
/* POSIX: sleep for sec whole seconds. */
void sim_os_sleep (unsigned int sec)
{
sleep (sec);
return;
}
244
/* POSIX: no special setup needed; just measure the achievable sleep time. */
uint32 sim_os_ms_sleep_init (void)
{
return _compute_minimum_sleep ();
}
249
250 uint32 sim_os_ms_sleep (unsigned int milliseconds)
251 {
252 uint32 stime = sim_os_msec ();
253 struct timespec treq;
254
255 treq.tv_sec = milliseconds / MILLIS_PER_SEC;
256 treq.tv_nsec = (milliseconds % MILLIS_PER_SEC) * NANOS_PER_MILLI;
257 (void) nanosleep (&treq, NULL);
258 return sim_os_msec () - stime;
259 }
260
261 #endif
262
263
/* Compute *diff = *min - *sub (timespec subtraction).
   The minuend is borrowed from until the nanosecond subtraction cannot
   underflow, then the result is normalized so 0 <= tv_nsec < 1e9.
   Fix: the final normalization loop used '>' which could leave
   tv_nsec == 1000000000 (an invalid timespec) for unnormalized inputs. */
void
sim_timespec_diff (struct timespec *diff, const struct timespec *min, struct timespec *sub)
{
/* Start with the minuend */
*diff = *min;

/* Borrow whole seconds so the nanosecond field cannot go negative */
while (sub->tv_nsec > diff->tv_nsec) {
    --diff->tv_sec;
    diff->tv_nsec += 1000000000;
    }
diff->tv_nsec -= sub->tv_nsec;
diff->tv_sec -= sub->tv_sec;

/* Normalize: carry whole seconds out of the nanosecond field */
while (diff->tv_nsec >= 1000000000) {
    ++diff->tv_sec;
    diff->tv_nsec -= 1000000000;
    }
}
282
283
284
285 static double _timespec_to_double (struct timespec *time);
286 static void _double_to_timespec (struct timespec *time, double dtime);
287 static void _rtcn_configure_calibrated_clock (int32 newtmr);
288 static void _sim_coschedule_cancel(UNIT *uptr);
289
290
291
/* Per-timer calibration state.  Index [SIM_NTIMERS] is the internal
   calibrated timer; [0..SIM_NTIMERS-1] are simulator-defined timers. */
static int32 rtc_ticks[SIM_NTIMERS+1] = { 0 };              /* ticks seen in current second */
static uint32 rtc_hz[SIM_NTIMERS+1] = { 0 };                /* nominal tick rate (0 = inactive) */
static uint32 rtc_rtime[SIM_NTIMERS+1] = { 0 };             /* real time at last calibration (ms) */
static uint32 rtc_vtime[SIM_NTIMERS+1] = { 0 };             /* virtual time (ms) */
static double rtc_gtime[SIM_NTIMERS+1] = { 0 };             /* instruction time at last calibration */
static uint32 rtc_nxintv[SIM_NTIMERS+1] = { 0 };            /* next expected interval (ms) */
static int32 rtc_based[SIM_NTIMERS+1] = { 0 };              /* base instructions-per-tick delay */
static int32 rtc_currd[SIM_NTIMERS+1] = { 0 };              /* current instructions-per-tick delay */
static int32 rtc_initd[SIM_NTIMERS+1] = { 0 };              /* initial delay (0 = never initialized) */
static uint32 rtc_elapsed[SIM_NTIMERS+1] = { 0 };           /* seconds the timer has been running */
static uint32 rtc_calibrations[SIM_NTIMERS+1] = { 0 };      /* calibration opportunities seen */
static double rtc_clock_skew_max[SIM_NTIMERS+1] = { 0 };    /* worst observed skew (s; <0 = fast) */
static double rtc_clock_start_gtime[SIM_NTIMERS+1] = { 0 }; /* instruction time when clock started */
static double rtc_clock_tick_size[SIM_NTIMERS+1] = { 0 };   /* seconds per tick (1/hz) */
static uint32 rtc_calib_initializations[SIM_NTIMERS+1] = { 0 }; /* times sim_rtcn_init_unit ran */
static double rtc_calib_tick_time[SIM_NTIMERS+1] = { 0 };   /* accumulated tick time this run (s) */
static double rtc_calib_tick_time_tot[SIM_NTIMERS+1] = { 0 }; /* tick time from prior runs (s) */
static uint32 rtc_calib_ticks_acked[SIM_NTIMERS+1] = { 0 }; /* ticks acknowledged this run */
static uint32 rtc_calib_ticks_acked_tot[SIM_NTIMERS+1] = { 0 }; /* acks from prior runs */
static uint32 rtc_clock_ticks[SIM_NTIMERS+1] = { 0 };       /* ticks delivered this run */
static uint32 rtc_clock_ticks_tot[SIM_NTIMERS+1] = { 0 };   /* ticks from prior runs */
static double rtc_clock_catchup_base_time[SIM_NTIMERS+1] = { 0 }; /* wall time catchup started */
static uint32 rtc_clock_catchup_ticks[SIM_NTIMERS+1] = { 0 };     /* catchup ticks this run */
static uint32 rtc_clock_catchup_ticks_tot[SIM_NTIMERS+1] = { 0 }; /* catchup ticks, prior runs */
static t_bool rtc_clock_catchup_pending[SIM_NTIMERS+1] = { 0 };   /* catchup tick scheduled */
static t_bool rtc_clock_catchup_eligible[SIM_NTIMERS+1] = { 0 };  /* catchup mode engaged */
static uint32 rtc_clock_time_idled[SIM_NTIMERS+1] = { 0 };        /* total idle time (ms) */
static uint32 rtc_clock_calib_skip_idle[SIM_NTIMERS+1] = { 0 };   /* calibrations skipped: idle */
static uint32 rtc_clock_calib_gap2big[SIM_NTIMERS+1] = { 0 };     /* skipped: gap too large */
static uint32 rtc_clock_calib_backwards[SIM_NTIMERS+1] = { 0 };   /* skipped: time went backwards */

/* One service unit per timer, plus the internal calibrated timer's unit */
UNIT sim_timer_units[SIM_NTIMERS+1];

UNIT sim_internal_timer_unit;
UNIT sim_throttle_unit;

t_stat sim_throt_svc (UNIT *uptr);
t_stat sim_timer_tick_svc (UNIT *uptr);

/* Debug flag bits and table for the TIMER pseudo-device */
#define DBG_TRC 0x008
#define DBG_CAL 0x010
#define DBG_TIM 0x020
#define DBG_ACK 0x080
DEBTAB sim_timer_debug[] = {
{"TRACE", DBG_TRC, "Trace routine calls"},
{"IACK", DBG_ACK, "interrupt acknowledgement activities"},
{"CALIB", DBG_CAL, "Calibration activities"},
{"TIME", DBG_TIM, "Activation and scheduling activities"},
{0}
};

extern DEVICE sim_timer_dev;
extern DEVICE sim_throttle_dev;
346
347 void sim_rtcn_init_all (void)
348 {
349 int32 tmr;
350
351 for (tmr = 0; tmr <= SIM_NTIMERS; tmr++)
352 if (rtc_initd[tmr] != 0)
353 sim_rtcn_init (rtc_initd[tmr], tmr);
354 return;
355 }
356
/* Initialize timer tmr with initial delay 'time'; no unit association. */
int32 sim_rtcn_init (int32 time, int32 tmr)
{
return sim_rtcn_init_unit (NULL, time, tmr);
}
361
/* Initialize (or re-initialize) calibrated timer 'tmr' for unit 'uptr'.
   'time' is the initial instructions-per-tick delay; returns the delay in
   effect (the current calibrated value when one already exists).
   Rolls per-run counters into their *_tot accumulators and resets them,
   then reconsiders which timer should drive calibration. */
int32 sim_rtcn_init_unit (UNIT *uptr, int32 time, int32 tmr)
{
if (time == 0)
    time = 1;                           /* never allow a zero delay */
if (tmr == SIM_INTERNAL_CLK)
    tmr = SIM_NTIMERS;                  /* internal clock lives in the last slot */
else {
    if ((tmr < 0) || (tmr >= SIM_NTIMERS))
        return time;                    /* invalid timer: hand back the request */
    }

/* Prefer the already-calibrated delay over the caller's initial guess */
if (rtc_currd[tmr])
    time = rtc_currd[tmr];
if (!uptr)
    uptr = sim_clock_unit[tmr];
sim_debug (DBG_CAL, &sim_timer_dev, "_sim_rtcn_init_unit(unit=%s, time=%d, tmr=%d)\n", sim_uname(uptr), time, tmr);
if (uptr) {
    if (!sim_clock_unit[tmr])
        sim_register_clock_unit_tmr (uptr, tmr);
    }
/* Reset calibration state; accumulate per-run stats into totals first */
rtc_clock_start_gtime[tmr] = sim_gtime();
rtc_rtime[tmr] = sim_os_msec ();
rtc_vtime[tmr] = rtc_rtime[tmr];
rtc_nxintv[tmr] = 1000;
rtc_ticks[tmr] = 0;
rtc_hz[tmr] = 0;
rtc_based[tmr] = time;
rtc_currd[tmr] = time;
rtc_initd[tmr] = time;
rtc_elapsed[tmr] = 0;
rtc_calibrations[tmr] = 0;
rtc_clock_ticks_tot[tmr] += rtc_clock_ticks[tmr];
rtc_clock_ticks[tmr] = 0;
rtc_calib_tick_time_tot[tmr] += rtc_calib_tick_time[tmr];
rtc_calib_tick_time[tmr] = 0;
rtc_clock_catchup_pending[tmr] = FALSE;
rtc_clock_catchup_eligible[tmr] = FALSE;
rtc_clock_catchup_ticks_tot[tmr] += rtc_clock_catchup_ticks[tmr];
rtc_clock_catchup_ticks[tmr] = 0;
rtc_calib_ticks_acked_tot[tmr] += rtc_calib_ticks_acked[tmr];
rtc_calib_ticks_acked[tmr] = 0;
++rtc_calib_initializations[tmr];
_rtcn_configure_calibrated_clock (tmr);
return time;
}
411
412 int32 sim_rtcn_calb (uint32 ticksper, int32 tmr)
413 {
414
415 if (tmr == SIM_INTERNAL_CLK)
416 tmr = SIM_NTIMERS;
417 else {
418 if ((tmr < 0) || (tmr >= SIM_NTIMERS))
419 return 10000;
420 }
421 if (rtc_hz[tmr] != ticksper) {
422 rtc_hz[tmr] = ticksper;
423 rtc_clock_tick_size[tmr] = 1.0/ticksper;
424 _rtcn_configure_calibrated_clock (tmr);
425 rtc_currd[tmr] = (int32)(sim_timer_inst_per_sec()/ticksper);
426 }
427 if (sim_clock_unit[tmr] == NULL) {
428 rtc_clock_ticks[tmr] += 1;
429 rtc_calib_tick_time[tmr] += rtc_clock_tick_size[tmr];
430 }
431 if (rtc_clock_catchup_pending[tmr]) {
432 ++rtc_clock_catchup_ticks[tmr];
433 rtc_clock_catchup_pending[tmr] = FALSE;
434 }
435 return rtc_currd[tmr];
436 }
437
438
439
440 t_bool sim_timer_init (void)
441 {
442 int tmr;
443 uint32 clock_start, clock_last, clock_now;
444
445 sim_debug (DBG_TRC, &sim_timer_dev, "sim_timer_init()\n");
446 for (tmr=0; tmr<=SIM_NTIMERS; tmr++) {
447 sim_timer_units[tmr].action = &sim_timer_tick_svc;
448 sim_timer_units[tmr].flags = UNIT_DIS | UNIT_IDLE;
449 }
450 SIM_INTERNAL_UNIT.flags = UNIT_DIS | UNIT_IDLE;
451 sim_register_internal_device (&sim_timer_dev);
452 sim_register_clock_unit_tmr (&SIM_INTERNAL_UNIT, SIM_INTERNAL_CLK);
453 sim_idle_rate_ms = sim_os_ms_sleep_init ();
454
455 clock_last = clock_start = sim_os_msec ();
456 sim_os_clock_resoluton_ms = 1000;
457 do {
458 uint32 clock_diff;
459
460 clock_now = sim_os_msec ();
461 clock_diff = clock_now - clock_last;
462 if ((clock_diff > 0) && (clock_diff < sim_os_clock_resoluton_ms))
463 sim_os_clock_resoluton_ms = clock_diff;
464 clock_last = clock_now;
465 } while (clock_now < clock_start + 100);
466 sim_os_tick_hz = 1000/(sim_os_clock_resoluton_ms * (sim_idle_rate_ms/sim_os_clock_resoluton_ms));
467 return (sim_idle_rate_ms != 0);
468 }
469
470
/* SHOW command handler: report the state and statistics of every timer
   that has ever been initialized, plus wall-clock/catchup details for the
   currently (or most recently) calibrated timer. */
t_stat sim_show_timers (FILE* st, DEVICE *dptr, UNIT* uptr, int32 val, CONST char* desc)
{
int tmr, clocks;
struct timespec now;
time_t time_t_now;
/* After a stop, sim_calb_tmr is -1; fall back to the last calibrated timer */
int32 calb_tmr = (sim_calb_tmr == -1) ? sim_calb_tmr_last : sim_calb_tmr;

for (tmr=clocks=0; tmr<=SIM_NTIMERS; ++tmr) {
    if (0 == rtc_initd[tmr])
        continue;                       /* never initialized: nothing to show */

    if (sim_clock_unit[tmr]) {
        ++clocks;
        fprintf (st, "%s clock device is %s%s%s\n", sim_name,
                 (tmr == SIM_NTIMERS) ? "Internal Calibrated Timer(" : "",
                 sim_uname(sim_clock_unit[tmr]),
                 (tmr == SIM_NTIMERS) ? ")" : "");
        }

    fprintf (st, "%s%sTimer %d:\n", "", rtc_hz[tmr] ? "Calibrated " : "Uncalibrated ", tmr);
    if (rtc_hz[tmr]) {
        fprintf (st, " Running at: %lu Hz\n", (unsigned long)rtc_hz[tmr]);
        fprintf (st, " Tick Size: %s\n", sim_fmt_secs (rtc_clock_tick_size[tmr]));
        fprintf (st, " Ticks in current second: %lu\n", (unsigned long)rtc_ticks[tmr]);
        }
    fprintf (st, " Seconds Running: %lu (%s)\n", (unsigned long)rtc_elapsed[tmr], sim_fmt_secs ((double)rtc_elapsed[tmr]));
    if (tmr == calb_tmr) {
        /* Calibration detail only applies to the calibrating timer */
        fprintf (st, " Calibration Opportunities: %lu\n", (unsigned long)rtc_calibrations[tmr]);
        if (sim_idle_calib_pct)
            fprintf (st, " Calib Skip Idle Thresh %%: %lu\n", (unsigned long)sim_idle_calib_pct);
        if (rtc_clock_calib_skip_idle[tmr])
            fprintf (st, " Calibs Skip While Idle: %lu\n", (unsigned long)rtc_clock_calib_skip_idle[tmr]);
        if (rtc_clock_calib_backwards[tmr])
            fprintf (st, " Calibs Skip Backwards: %lu\n", (unsigned long)rtc_clock_calib_backwards[tmr]);
        if (rtc_clock_calib_gap2big[tmr])
            fprintf (st, " Calibs Skip Gap Too Big: %lu\n", (unsigned long)rtc_clock_calib_gap2big[tmr]);
        }
    if (rtc_gtime[tmr])
        fprintf (st, " Instruction Time: %.0f\n", rtc_gtime[tmr]);
    fprintf (st, " Current Insts Per Tick: %lu\n", (unsigned long)rtc_currd[tmr]);
    fprintf (st, " Initializations: %lu\n", (unsigned long)rtc_calib_initializations[tmr]);
    fprintf (st, " Total Ticks: %lu\n", (unsigned long)rtc_clock_ticks_tot[tmr]+(unsigned long)rtc_clock_ticks[tmr]);
    if (rtc_clock_skew_max[tmr] != 0.0)
        fprintf (st, " Peak Clock Skew: %s%s\n", sim_fmt_secs (fabs(rtc_clock_skew_max[tmr])), (rtc_clock_skew_max[tmr] < 0) ? " fast" : " slow");
    if (rtc_calib_ticks_acked[tmr])
        fprintf (st, " Ticks Acked: %lu\n", (unsigned long)rtc_calib_ticks_acked[tmr]);
    if (rtc_calib_ticks_acked_tot[tmr]+rtc_calib_ticks_acked[tmr] != rtc_calib_ticks_acked[tmr])
        fprintf (st, " Total Ticks Acked: %lu\n", (unsigned long)rtc_calib_ticks_acked_tot[tmr]+(unsigned long)rtc_calib_ticks_acked[tmr]);
    if (rtc_calib_tick_time[tmr])
        fprintf (st, " Tick Time: %s\n", sim_fmt_secs (rtc_calib_tick_time[tmr]));
    if (rtc_calib_tick_time_tot[tmr]+rtc_calib_tick_time[tmr] != rtc_calib_tick_time[tmr])
        fprintf (st, " Total Tick Time: %s\n", sim_fmt_secs (rtc_calib_tick_time_tot[tmr]+rtc_calib_tick_time[tmr]));
    if (rtc_clock_catchup_ticks[tmr])
        fprintf (st, " Catchup Ticks Sched: %lu\n", (unsigned long)rtc_clock_catchup_ticks[tmr]);
    if (rtc_clock_catchup_ticks_tot[tmr]+rtc_clock_catchup_ticks[tmr] != rtc_clock_catchup_ticks[tmr])
        fprintf (st, " Total Catchup Ticks Sched: %lu\n", (unsigned long)rtc_clock_catchup_ticks_tot[tmr]+(unsigned long)rtc_clock_catchup_ticks[tmr]);
    clock_gettime (CLOCK_REALTIME, &now);
    time_t_now = (time_t)now.tv_sec;
    /* "11+ctime(...)" skips the "Day Mon dd " prefix to print HH:MM:SS */
    fprintf (st, " Wall Clock Time Now: %8.8s.%03d\n", 11+ctime(&time_t_now), (int)(now.tv_nsec/1000000));
    if (rtc_clock_catchup_eligible[tmr]) {
        _double_to_timespec (&now, rtc_clock_catchup_base_time[tmr]+rtc_calib_tick_time[tmr]);
        time_t_now = (time_t)now.tv_sec;
        fprintf (st, " Catchup Tick Time: %8.8s.%03d\n", 11+ctime(&time_t_now), (int)(now.tv_nsec/1000000));
        _double_to_timespec (&now, rtc_clock_catchup_base_time[tmr]);
        time_t_now = (time_t)now.tv_sec;
        fprintf (st, " Catchup Base Time: %8.8s.%03d\n", 11+ctime(&time_t_now), (int)(now.tv_nsec/1000000));
        }
    if (rtc_clock_time_idled[tmr])
        fprintf (st, " Total Time Idled: %s\n", sim_fmt_secs (rtc_clock_time_idled[tmr]/1000.0));
    }
if (clocks == 0)
    fprintf (st, "%s clock device is not specified, co-scheduling is unavailable\n", sim_name);
return SCPE_OK;
}
545
/* SHOW command handler: dump each clock's co-scheduled event queue with the
   cumulative tick delay before each unit fires. */
t_stat sim_show_clock_queues (FILE *st, DEVICE *dptr, UNIT *uptr, int32 flag, CONST char *cptr)
{
int tmr;

for (tmr=0; tmr<=SIM_NTIMERS; ++tmr) {
    if (sim_clock_unit[tmr] == NULL)
        continue;                       /* no clock registered for this timer */
    if (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) {
        int32 accum;                    /* running tick total down the queue */

        fprintf (st, "%s clock (%s) co-schedule event queue status\n",
                 sim_name, sim_uname(sim_clock_unit[tmr]));
        accum = 0;
        for (uptr = sim_clock_cosched_queue[tmr]; uptr != QUEUE_LIST_END; uptr = uptr->next) {
            if ((dptr = find_dev_from_unit (uptr)) != NULL) {
                fprintf (st, " %s", sim_dname (dptr));
                if (dptr->numunits > 1)
                    fprintf (st, " unit %d", (int32) (uptr - dptr->units));
                }
            else
                fprintf (st, " Unknown");
            if (accum > 0)
                fprintf (st, " after %d ticks", accum);
            fprintf (st, "\n");
            accum = accum + uptr->time; /* queue stores relative delays */
            }
        }
    }
return SCPE_OK;
}
576
/* The TIMER pseudo-device exposes no registers */
REG sim_timer_reg[] = {
{ NULL }
};
580
581
582
583 t_stat sim_timer_clr_catchup (UNIT *uptr, int32 val, CONST char *cptr, void *desc)
584 {
585 if (sim_catchup_ticks)
586 sim_catchup_ticks = FALSE;
587 return SCPE_OK;
588 }
589
590 t_stat sim_timer_set_catchup (UNIT *uptr, int32 val, CONST char *cptr, void *desc)
591 {
592 if (!sim_catchup_ticks)
593 sim_catchup_ticks = TRUE;
594 return SCPE_OK;
595 }
596
597 t_stat sim_timer_show_catchup (FILE *st, UNIT *uptr, int32 val, CONST void *desc)
598 {
599 fprintf (st, "Calibrated Ticks%s", sim_catchup_ticks ? " with Catchup Ticks" : "");
600 return SCPE_OK;
601 }
602
/* Modifier table wiring SET/SHOW CATCHUP and SET NOCATCHUP to the handlers above */
MTAB sim_timer_mod[] = {
{ MTAB_VDV, MTAB_VDV, "CATCHUP", "CATCHUP", &sim_timer_set_catchup, &sim_timer_show_catchup, NULL, "Enables/Displays Clock Tick catchup mode" },
{ MTAB_VDV, 0, NULL, "NOCATCHUP", &sim_timer_clr_catchup, NULL, NULL, "Disables Clock Tick catchup mode" },
{ 0 },
};

static t_stat sim_timer_clock_reset (DEVICE *dptr);

/* Internal TIMER pseudo-device: one unit per timer, debug-capable, not saved */
DEVICE sim_timer_dev = {
"TIMER", sim_timer_units, sim_timer_reg, sim_timer_mod,
SIM_NTIMERS+1, 0, 0, 0, 0, 0,
NULL, NULL, &sim_timer_clock_reset, NULL, NULL, NULL,
NULL, DEV_DEBUG | DEV_NOSAVE, 0, sim_timer_debug};
616
617
618 t_stat sim_timer_tick_svc (UNIT *uptr)
619 {
620 int tmr = (int)(uptr-sim_timer_units);
621 t_stat stat;
622
623 rtc_clock_ticks[tmr] += 1;
624 rtc_calib_tick_time[tmr] += rtc_clock_tick_size[tmr];
625
626
627
628
629
630
631
632
633 if (sim_clock_unit[tmr]->action == NULL)
634 return SCPE_IERR;
635 stat = sim_clock_unit[tmr]->action (sim_clock_unit[tmr]);
636 --sim_cosched_interval[tmr];
637 if (stat == SCPE_OK) {
638 if (rtc_clock_catchup_eligible[tmr]) {
639 struct timespec now;
640 double skew;
641
642 clock_gettime(CLOCK_REALTIME, &now);
643 skew = (_timespec_to_double(&now) - (rtc_calib_tick_time[tmr]+rtc_clock_catchup_base_time[tmr]));
644
645 if (fabs(skew) > fabs(rtc_clock_skew_max[tmr]))
646 rtc_clock_skew_max[tmr] = skew;
647 }
648 while ((sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) &&
649 (sim_cosched_interval[tmr] < sim_clock_cosched_queue[tmr]->time)) {
650 UNIT *cptr = sim_clock_cosched_queue[tmr];
651 sim_clock_cosched_queue[tmr] = cptr->next;
652 cptr->next = NULL;
653 cptr->cancel = NULL;
654 _sim_activate (cptr, 0);
655 }
656 if (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END)
657 sim_cosched_interval[tmr] = sim_clock_cosched_queue[tmr]->time;
658 else
659 sim_cosched_interval[tmr] = 0;
660 }
661 sim_timer_activate_after (uptr, 1000000/rtc_hz[tmr]);
662 return stat;
663 }
664
#if !defined(__CYGWIN__) && \
    ( defined(_WIN32) || defined(__MINGW32__) || defined(__MINGW64__) || \
      defined(CROSS_MINGW32) || defined(CROSS_MINGW64) )
/* Windows sub-millisecond sleep using a one-shot waitable timer.
   The due time is expressed in 100ns units; a negative value means
   relative to now.  NOTE(review): the timer handle is not checked for
   NULL before use — confirm failure handling is acceptable here. */
void win32_usleep(__int64 usec)
{
HANDLE timer;
LARGE_INTEGER ft;

ft.QuadPart = -(10*usec);       /* relative due time in 100ns units */

timer = CreateWaitableTimer(NULL, TRUE, NULL);
SetWaitableTimer(timer, &ft, 0, NULL, NULL, 0);
WaitForSingleObject(timer, INFINITE);
CloseHandle(timer);
}
#endif
681
682
683
/* Sleep for tusleep microseconds using the best primitive the platform
   offers: a waitable timer on Windows, clock_nanosleep(CLOCK_MONOTONIC)
   on most POSIX hosts, nanosleep on macOS, and usleep on OpenBSD.
   Returns 0 on success; on POSIX paths returns the sleep call's result. */
int
sim_usleep(useconds_t tusleep)
{
#if ( !defined(__APPLE__) && !defined(__OpenBSD__) )
# if !defined(__CYGWIN__) && \
    ( defined(_WIN32) || defined(__MINGW32__) || defined(__MINGW64__) || \
      defined(CROSS_MINGW32) || defined(CROSS_MINGW64) )
win32_usleep(tusleep);

return 0;
# else
struct timespec rqt;
rqt.tv_sec = tusleep / 1000000;
rqt.tv_nsec = (tusleep % 1000000) * 1000;

/* CLOCK_MONOTONIC is unaffected by wall-clock adjustments */
return clock_nanosleep(CLOCK_MONOTONIC, 0, &rqt, NULL);
# endif
#else
# ifdef __APPLE__
struct timespec rqt;
rqt.tv_sec = tusleep / 1000000;
rqt.tv_nsec = (tusleep % 1000000) * 1000;
return nanosleep(&rqt, NULL);
# else
return usleep(tusleep);
# endif
#endif
}
714
/* Convert a timespec to seconds expressed as a double. */
static double _timespec_to_double (struct timespec *time)
{
double seconds = (double)time->tv_sec;

return seconds + ((double)time->tv_nsec) / 1000000000.0;
}
719
720 static void _double_to_timespec (struct timespec *time, double dtime)
721 {
722 time->tv_sec = (time_t)floor(dtime);
723 time->tv_nsec = (long)((dtime-floor(dtime))*1000000000.0);
724 }
725
726 #define CLK_TPS 10
727 #define CLK_INIT (SIM_INITIAL_IPS/CLK_TPS)
728 static int32 sim_int_clk_tps;
729
/* Service routine for the internal calibrated clock: calibrate at the
   internal tick rate and reschedule for the next tick. */
static t_stat sim_timer_clock_tick_svc (UNIT *uptr)
{
sim_rtcn_calb (sim_int_clk_tps, SIM_INTERNAL_CLK);
sim_activate_after (uptr, 1000000/sim_int_clk_tps);
return SCPE_OK;
}
736
737 static void _rtcn_configure_calibrated_clock (int32 newtmr)
738 {
739 int32 tmr;
740
741
742 sim_int_clk_tps = MIN(CLK_TPS, sim_os_tick_hz);
743 for (tmr=0; tmr<SIM_NTIMERS; tmr++) {
744 if ((rtc_hz[tmr]) &&
745 (rtc_hz[tmr] <= (uint32)sim_os_tick_hz))
746 break;
747 }
748 if (tmr == SIM_NTIMERS) {
749 if ((tmr != newtmr) && (!sim_is_active (&SIM_INTERNAL_UNIT))) {
750
751 sim_calb_tmr = SIM_NTIMERS;
752 sim_debug (DBG_CAL, &sim_timer_dev, "_rtcn_configure_calibrated_clock() - Starting Internal Calibrated Timer at %dHz\n", sim_int_clk_tps);
753 SIM_INTERNAL_UNIT.action = &sim_timer_clock_tick_svc;
754 SIM_INTERNAL_UNIT.flags = UNIT_DIS | UNIT_IDLE;
755 sim_activate_abs (&SIM_INTERNAL_UNIT, 0);
756 sim_rtcn_init_unit (&SIM_INTERNAL_UNIT, (CLK_INIT*CLK_TPS)/sim_int_clk_tps, SIM_INTERNAL_CLK);
757 }
758 return;
759 }
760 if ((tmr == newtmr) &&
761 (sim_calb_tmr == newtmr))
762 return;
763 if (sim_calb_tmr == SIM_NTIMERS) {
764 sim_debug (DBG_CAL, &sim_timer_dev, "_rtcn_configure_calibrated_clock() - Stopping Internal Calibrated Timer, New Timer = %d (%dHz)\n", tmr, rtc_hz[tmr]);
765 rtc_initd[SIM_NTIMERS] = 0;
766 rtc_hz[SIM_NTIMERS] = 0;
767 sim_cancel (&SIM_INTERNAL_UNIT);
768
769 while (sim_clock_cosched_queue[SIM_NTIMERS] != QUEUE_LIST_END) {
770 UNIT *uptr = sim_clock_cosched_queue[SIM_NTIMERS];
771 _sim_coschedule_cancel (uptr);
772 _sim_activate (uptr, 1);
773 }
774 }
775 else {
776 sim_debug (DBG_CAL, &sim_timer_dev, "_rtcn_configure_calibrated_clock() - Changing Calibrated Timer from %d (%dHz) to %d (%dHz)\n", sim_calb_tmr, rtc_hz[sim_calb_tmr], tmr, rtc_hz[tmr]);
777 sim_calb_tmr = tmr;
778 }
779 sim_calb_tmr = tmr;
780 }
781
/* TIMER device reset: re-evaluate the calibrated clock choice.  On a
   power-on reset (-P switch) also stop the internal timer and forget the
   calibrated timer selection. */
static t_stat sim_timer_clock_reset (DEVICE *dptr)
{
sim_debug (DBG_TRC, &sim_timer_dev, "sim_timer_clock_reset()\n");
_rtcn_configure_calibrated_clock (sim_calb_tmr);
if (sim_switches & SWMASK ('P')) {
    sim_cancel (&SIM_INTERNAL_UNIT);
    sim_calb_tmr = -1;
    }
return SCPE_OK;
}
792
/* Called when simulation (re)starts: pick/refresh the calibrated clock. */
void sim_start_timer_services (void)
{
sim_debug (DBG_TRC, &sim_timer_dev, "sim_start_timer_services()\n");
_rtcn_configure_calibrated_clock (sim_calb_tmr);
}
798
/* Called when simulation stops: move each timer's tick unit off the timer
   queue back onto the ordinary event queue, drain co-scheduled units onto
   the event queue (preserving their relative order via accumulated delays),
   and remember the calibration state for later rate queries. */
void sim_stop_timer_services (void)
{
int tmr;

sim_debug (DBG_TRC, &sim_timer_dev, "sim_stop_timer_services()\n");

for (tmr=0; tmr<=SIM_NTIMERS; tmr++) {
    int32 accum;

    if (sim_clock_unit[tmr]) {
        /* Replace the timer-unit tick with a normal event activation */
        sim_cancel (&sim_timer_units[tmr]);
        if (rtc_hz[tmr])
            sim_activate (sim_clock_unit[tmr], rtc_currd[tmr]);

        /* Drain the co-schedule queue, spacing units by their tick delays */
        accum = 1;
        while (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) {
            UNIT *cptr = sim_clock_cosched_queue[tmr];

            sim_clock_cosched_queue[tmr] = cptr->next;
            cptr->next = NULL;
            cptr->cancel = NULL;

            accum += cptr->time;
            _sim_activate (cptr, accum*rtc_currd[tmr]);
            }
        }
    }
sim_cancel (&SIM_INTERNAL_UNIT);
/* Preserve calibration context so sim_timer_inst_per_sec() stays useful */
sim_calb_tmr_last = sim_calb_tmr;
sim_inst_per_sec_last = sim_timer_inst_per_sec ();
sim_calb_tmr = -1;
}
832
833
834
835 double sim_timer_inst_per_sec (void)
836 {
837 double inst_per_sec = SIM_INITIAL_IPS;
838
839 if (sim_calb_tmr == -1)
840 return inst_per_sec;
841 inst_per_sec = ((double)rtc_currd[sim_calb_tmr])*rtc_hz[sim_calb_tmr];
842 if (0 == inst_per_sec)
843 inst_per_sec = ((double)rtc_currd[sim_calb_tmr])*sim_int_clk_tps;
844 return inst_per_sec;
845 }
846
/* Activate uptr after 'interval' instructions, converted to microseconds
   at the current estimated instruction rate. */
t_stat sim_timer_activate (UNIT *uptr, int32 interval)
{
return sim_timer_activate_after (uptr, (uint32)((interval * 1000000.0) / sim_timer_inst_per_sec ()));
}
851
/* Activate uptr after usec_delay microseconds of wall time, converted to
   an instruction-count delay at the current estimated rate.  When uptr is
   a registered clock unit, its timer tick unit is scheduled instead.
   Already-active units are left untouched. */
t_stat sim_timer_activate_after (UNIT *uptr, uint32 usec_delay)
{
int inst_delay, tmr;
double inst_delay_d, inst_per_sec;

/* Substitute the timer's tick unit for a registered clock unit */
for (tmr=0; tmr<=SIM_NTIMERS; tmr++)
    if (sim_clock_unit[tmr] == uptr) {
        uptr = &sim_timer_units[tmr];
        break;
        }
if (sim_is_active (uptr))               /* already scheduled: leave as-is */
    return SCPE_OK;
inst_per_sec = sim_timer_inst_per_sec ();
inst_delay_d = ((inst_per_sec*usec_delay)/1000000.0);

/* Clamp to int32 range before conversion */
if (inst_delay_d > (double)0x7fffffff)
    inst_delay_d = (double)0x7fffffff;
inst_delay = (int32)inst_delay_d;
if ((inst_delay == 0) && (usec_delay != 0))
    inst_delay = 1;                     /* nonzero delay must schedule at least 1 */
sim_debug (DBG_TIM, &sim_timer_dev, "sim_timer_activate_after() - queue addition %s at %d (%d usecs)\n",
           sim_uname(uptr), inst_delay, usec_delay);
return _sim_activate (uptr, inst_delay);
}
878
/* Register (uptr != NULL) or deregister (uptr == NULL) the clock unit for
   timer 'tmr'.  Deregistration flushes the timer's co-schedule queue by
   activating each queued unit.  Returns SCPE_IERR for a bad timer index. */
t_stat sim_register_clock_unit_tmr (UNIT *uptr, int32 tmr)
{
if (tmr == SIM_INTERNAL_CLK)
    tmr = SIM_NTIMERS;
else {
    if ((tmr < 0) || (tmr >= SIM_NTIMERS))
        return SCPE_IERR;
    }
if (NULL == uptr) {
    /* Deregister: release every co-scheduled unit first */
    while (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) {
        UNIT *uptr = sim_clock_cosched_queue[tmr];

        _sim_coschedule_cancel (uptr);
        _sim_activate (uptr, 1);
        }
    sim_clock_unit[tmr] = NULL;
    return SCPE_OK;
    }
if (NULL == sim_clock_unit[tmr])
    sim_clock_cosched_queue[tmr] = QUEUE_LIST_END;  /* fresh, empty queue */
sim_clock_unit[tmr] = uptr;
uptr->dynflags |= UNIT_TMR_UNIT;
sim_timer_units[tmr].flags = UNIT_DIS | (sim_clock_unit[tmr] ? UNIT_IDLE : 0);
return SCPE_OK;
}
904
905
/* Remove uptr from whichever clock co-schedule queue contains it (a unit
   with a NULL next pointer is not queued).  Clears the unit's cancel hook
   once unlinked.
   NOTE(review): the loop runs tmr < SIM_NTIMERS and so never scans the
   internal timer's queue at index SIM_NTIMERS — confirm that is intended. */
static void _sim_coschedule_cancel (UNIT *uptr)
{
if (uptr->next) {                       /* queued somewhere? */
    int tmr;

    for (tmr=0; tmr<SIM_NTIMERS; tmr++) {
        if (uptr == sim_clock_cosched_queue[tmr]) {
            /* Unit is at the head of this queue */
            sim_clock_cosched_queue[tmr] = uptr->next;
            uptr->next = NULL;
            }
        else {
            /* Search the interior of this queue */
            UNIT *cptr;
            for (cptr = sim_clock_cosched_queue[tmr];
                 (cptr != QUEUE_LIST_END);
                 cptr = cptr->next)
                if (cptr->next == (uptr)) {
                    cptr->next = (uptr)->next;
                    uptr->next = NULL;
                    break;
                    }
            }
        if (uptr->next == NULL) {       /* found and unlinked */
            uptr->cancel = NULL;
            sim_debug (SIM_DBG_EVENT, &sim_timer_dev, "Canceled Clock Coscheduled Event for %s\n", sim_uname(uptr));
            return;
            }
        }
    }
}
935