This source file includes the following definitions:
- _compute_minimum_sleep
- sim_idle_ms_sleep
- sim_os_set_thread_priority
- sim_os_set_thread_priority
- sim_os_msec
- sim_os_sleep
- sim_timer_exit
- sim_os_ms_sleep_init
- sim_os_ms_sleep
- sim_os_msec
- sim_os_sleep
- sim_os_ms_sleep_init
- sim_os_ms_sleep
- sim_timespec_diff
- sim_rtcn_init_all
- sim_rtcn_init
- sim_rtcn_init_unit
- sim_rtcn_calb
- sim_timer_init
- sim_show_timers
- sim_show_clock_queues
- sim_timer_clr_catchup
- sim_timer_set_catchup
- sim_timer_show_catchup
- sim_timer_tick_svc
- win32_usleep
- sim_usleep
- _timespec_to_double
- _double_to_timespec
- sim_timer_clock_tick_svc
- _rtcn_configure_calibrated_clock
- sim_timer_clock_reset
- sim_start_timer_services
- sim_stop_timer_services
- sim_timer_inst_per_sec
- sim_timer_activate
- sim_timer_activate_after
- sim_register_clock_unit_tmr
- _sim_coschedule_cancel
#include "sim_defs.h"
#include <ctype.h>
#include <math.h>
#include "../dps8/dps8_sir.h"

#if defined(__QNX__)
# include <qh/time.h>
#endif

#define SIM_INTERNAL_CLK (SIM_NTIMERS+(1<<30))
#define SIM_INTERNAL_UNIT sim_internal_timer_unit

#if defined(MIN)
# undef MIN
#endif
#define MIN(a,b) (((a) < (b)) ? (a) : (b))

#if defined(MAX)
# undef MAX
#endif
#define MAX(a,b) (((a) > (b)) ? (a) : (b))

uint32 sim_idle_ms_sleep (unsigned int msec);

static int32 sim_calb_tmr = -1;
static int32 sim_calb_tmr_last = -1;
static double sim_inst_per_sec_last = 0;

static uint32 sim_idle_rate_ms = 0;
static uint32 sim_os_sleep_min_ms = 0;
static uint32 sim_os_sleep_inc_ms = 0;
static uint32 sim_os_clock_resoluton_ms = 0;
static uint32 sim_os_tick_hz = 0;
static uint32 sim_idle_calib_pct = 0;
static UNIT *sim_clock_unit[SIM_NTIMERS+1] = {NULL};
UNIT * volatile sim_clock_cosched_queue[SIM_NTIMERS+1] = {NULL};
static int32 sim_cosched_interval[SIM_NTIMERS+1];
static t_bool sim_catchup_ticks = FALSE;

#define sleep1Samples 10

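/* Measure the host's minimum achievable sleep duration by averaging
   sleep1Samples one-millisecond sleeps, then estimate the incremental
   cost of sleeping one millisecond longer than that minimum.  The
   results are cached in sim_os_sleep_min_ms and sim_os_sleep_inc_ms,
   and the minimum is returned. */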
static uint32 _compute_minimum_sleep (void)
{
uint32 i, tot, tim;

sim_idle_ms_sleep (1);
for (i = 0, tot = 0; i < sleep1Samples; i++)
    tot += sim_idle_ms_sleep (1);
tim = tot / sleep1Samples;
sim_os_sleep_min_ms = tim;
sim_idle_ms_sleep (1);
for (i = 0, tot = 0; i < sleep1Samples; i++)
    tot += sim_idle_ms_sleep (sim_os_sleep_min_ms + 1);
tim = tot / sleep1Samples;
sim_os_sleep_inc_ms = tim - sim_os_sleep_min_ms;
return sim_os_sleep_min_ms;
}

uint32 sim_idle_ms_sleep (unsigned int msec)
{
return sim_os_ms_sleep (msec);
}

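/* sim_os_set_thread_priority moves the calling thread's scheduling
   priority one step below, at, or above "normal".  The Windows build
   maps the request onto SetThreadPriority(); the POSIX build maps it
   onto the minimum, midpoint, or maximum priority of the thread's
   current scheduling policy via pthread_setschedparam().  Failures
   are not treated as fatal. */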
#if defined(_WIN32)

t_stat sim_os_set_thread_priority (int below_normal_above)
{
const static int val[3] = {THREAD_PRIORITY_BELOW_NORMAL, THREAD_PRIORITY_NORMAL, THREAD_PRIORITY_ABOVE_NORMAL};

if ((below_normal_above < -1) || (below_normal_above > 1))
    return SCPE_ARG;
SetThreadPriority (GetCurrentThread(), val[1 + below_normal_above]);
return SCPE_OK;
}
#else

t_stat sim_os_set_thread_priority (int below_normal_above)
{
int sched_policy = 0, min_prio = 0, max_prio = 0;
struct sched_param sched_param;

if ((below_normal_above < -1) || (below_normal_above > 1))
    return SCPE_ARG;
if (pthread_getschedparam (pthread_self(), &sched_policy, &sched_param) != 0) {
# if defined(TESTING)
    (void)sir_debug("pthread_getschedparam failed, not changing priority");
# endif
    return SCPE_OK;
    }
# if !defined(__PASE__)
min_prio = sched_get_priority_min(sched_policy);
max_prio = sched_get_priority_max(sched_policy);
# else
min_prio = 1;
max_prio = 127;
# endif
switch (below_normal_above) {
    case PRIORITY_BELOW_NORMAL:
        sched_param.sched_priority = min_prio;
        break;
    case PRIORITY_NORMAL:
        sched_param.sched_priority = (max_prio + min_prio) / 2;
        break;
    case PRIORITY_ABOVE_NORMAL:
        sched_param.sched_priority = max_prio;
        break;
    }
if (pthread_setschedparam (pthread_self(), sched_policy, &sched_param) != 0) {
# if defined(TESTING)
    (void)sir_debug("pthread_setschedparam failed changing priority to %d", sched_param.sched_priority);
# endif
    }
return SCPE_OK;
}
#endif

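/* OS-dependent wall clock and millisecond sleep primitives.
   sim_os_msec() returns a millisecond timestamp used for interval
   measurement, sim_os_sleep() sleeps for whole seconds,
   sim_os_ms_sleep() sleeps the requested number of milliseconds and
   returns the milliseconds that actually elapsed, and
   sim_os_ms_sleep_init() performs one-time setup and returns the
   minimum usable sleep interval. */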
#if defined (_WIN32)

const t_bool rtc_avail = TRUE;

uint32 sim_os_msec (void)
{
return timeGetTime ();
}

void sim_os_sleep (unsigned int sec)
{
Sleep (sec * 1000);
return;
}

void sim_timer_exit (void)
{
timeEndPeriod (sim_idle_rate_ms);
return;
}

uint32 sim_os_ms_sleep_init (void)
{
TIMECAPS timers;

if (timeGetDevCaps (&timers, sizeof (timers)) != TIMERR_NOERROR)
    return 0;
if (timers.wPeriodMin == 0)
    return 0;
if (timeBeginPeriod (timers.wPeriodMin) != TIMERR_NOERROR)
    return 0;
atexit (sim_timer_exit);

return _compute_minimum_sleep ();
}

uint32 sim_os_ms_sleep (unsigned int msec)
{
uint32 stime = sim_os_msec();

Sleep (msec);
return sim_os_msec () - stime;
}

#else

# include <time.h>
# include <sys/time.h>
# include <signal.h>
# include <unistd.h>
# define NANOS_PER_MILLI 1000000
# define MILLIS_PER_SEC 1000

const t_bool rtc_avail = TRUE;

uint32 sim_os_msec (void)
{
struct timeval cur;
struct timezone foo;
int st1ret;
uint32 msec;

st1ret = gettimeofday (&cur, &foo);
if (st1ret != 0)
    {
      fprintf (stderr, "\rFATAL: gettimeofday failure! Aborting at %s[%s:%d]\r\n",
               __func__, __FILE__, __LINE__);
# if defined(USE_BACKTRACE)
#  if defined(SIGUSR2)
      (void)raise(SIGUSR2);
#  endif
# endif
      abort();
    }
msec = (((uint32) cur.tv_sec) * 1000UL) + (((uint32) cur.tv_usec) / 1000UL);
return msec;
}

void sim_os_sleep (unsigned int sec)
{
sleep (sec);
return;
}

uint32 sim_os_ms_sleep_init (void)
{
return _compute_minimum_sleep ();
}

uint32 sim_os_ms_sleep (unsigned int milliseconds)
{
uint32 stime = sim_os_msec ();
struct timespec treq;

treq.tv_sec = milliseconds / MILLIS_PER_SEC;
treq.tv_nsec = (milliseconds % MILLIS_PER_SEC) * NANOS_PER_MILLI;
(void) nanosleep (&treq, NULL);
return sim_os_msec () - stime;
}

#endif

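/* sim_timespec_diff computes *diff = *min - *sub, borrowing from
   tv_sec whenever the nanosecond field of the subtrahend exceeds that
   of the minuend, and then renormalizes tv_nsec into [0, 1e9).
   For example, (2s, 100ns) - (1s, 200ns) yields (0s, 999999900ns). */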
void
sim_timespec_diff (struct timespec *diff, const struct timespec *min, struct timespec *sub)
{
*diff = *min;

while (sub->tv_nsec > diff->tv_nsec) {
    --diff->tv_sec;
    diff->tv_nsec += 1000000000L;
    }
diff->tv_nsec -= sub->tv_nsec;
diff->tv_sec -= sub->tv_sec;

while (diff->tv_nsec > 1000000000L) {
    ++diff->tv_sec;
    diff->tv_nsec -= 1000000000L;
    }
}

static double _timespec_to_double (struct timespec *time);
static void _double_to_timespec (struct timespec *time, double dtime);
static void _rtcn_configure_calibrated_clock (int32 newtmr);
static void _sim_coschedule_cancel(UNIT *uptr);

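/* Per-timer calibration and accounting state.  Each array is indexed
   by timer number 0..SIM_NTIMERS, with slot SIM_NTIMERS reserved for
   the internal calibrated timer.  The *_tot variants accumulate totals
   across re-initializations of a timer. */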
static int32 rtc_ticks[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_hz[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_rtime[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_vtime[SIM_NTIMERS+1] = { 0 };
static double rtc_gtime[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_nxintv[SIM_NTIMERS+1] = { 0 };
static int32 rtc_based[SIM_NTIMERS+1] = { 0 };
static int32 rtc_currd[SIM_NTIMERS+1] = { 0 };
static int32 rtc_initd[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_elapsed[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_calibrations[SIM_NTIMERS+1] = { 0 };
static double rtc_clock_skew_max[SIM_NTIMERS+1] = { 0 };
static double rtc_clock_start_gtime[SIM_NTIMERS+1] = { 0 };
static double rtc_clock_tick_size[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_calib_initializations[SIM_NTIMERS+1] = { 0 };
static double rtc_calib_tick_time[SIM_NTIMERS+1] = { 0 };
static double rtc_calib_tick_time_tot[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_calib_ticks_acked[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_calib_ticks_acked_tot[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_ticks[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_ticks_tot[SIM_NTIMERS+1] = { 0 };
static double rtc_clock_catchup_base_time[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_catchup_ticks[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_catchup_ticks_tot[SIM_NTIMERS+1] = { 0 };
static t_bool rtc_clock_catchup_pending[SIM_NTIMERS+1] = { 0 };
static t_bool rtc_clock_catchup_eligible[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_time_idled[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_calib_skip_idle[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_calib_gap2big[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_calib_backwards[SIM_NTIMERS+1] = { 0 };

UNIT sim_timer_units[SIM_NTIMERS+1];

UNIT sim_internal_timer_unit;
UNIT sim_throttle_unit;

t_stat sim_timer_tick_svc (UNIT *uptr);

#define DBG_TRC 0x008
#define DBG_CAL 0x010
#define DBG_TIM 0x020
#define DBG_ACK 0x080
DEBTAB sim_timer_debug[] = {
  {"TRACE", DBG_TRC, "Trace routine calls"},
  {"IACK",  DBG_ACK, "interrupt acknowledgement activities"},
  {"CALIB", DBG_CAL, "Calibration activities"},
  {"TIME",  DBG_TIM, "Activation and scheduling activities"},
  {0}
};

extern DEVICE sim_timer_dev;
extern DEVICE sim_throttle_dev;

void sim_rtcn_init_all (void)
{
int32 tmr;

for (tmr = 0; tmr <= SIM_NTIMERS; tmr++)
    if (rtc_initd[tmr] != 0)
        sim_rtcn_init (rtc_initd[tmr], tmr);
return;
}

int32 sim_rtcn_init (int32 time, int32 tmr)
{
return sim_rtcn_init_unit (NULL, time, tmr);
}

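/* sim_rtcn_init_unit (re)initializes calibrated timer 'tmr' with an
   initial instructions-per-tick value of 'time', registering 'uptr'
   as the timer's clock unit if none is registered yet.  The timer's
   running counters are rolled into their *_tot accumulators and then
   cleared before the calibrated clock source is (re)selected via
   _rtcn_configure_calibrated_clock(). */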
int32 sim_rtcn_init_unit (UNIT *uptr, int32 time, int32 tmr)
{
if (time == 0)
    time = 1;
if (tmr == SIM_INTERNAL_CLK)
    tmr = SIM_NTIMERS;
else {
    if ((tmr < 0) || (tmr >= SIM_NTIMERS))
        return time;
    }

if (rtc_currd[tmr])
    time = rtc_currd[tmr];
if (!uptr)
    uptr = sim_clock_unit[tmr];
sim_debug (DBG_CAL, &sim_timer_dev, "_sim_rtcn_init_unit(unit=%s, time=%d, tmr=%d)\n", sim_uname(uptr), time, tmr);
if (uptr) {
    if (!sim_clock_unit[tmr])
        sim_register_clock_unit_tmr (uptr, tmr);
    }
rtc_clock_start_gtime[tmr] = sim_gtime();
rtc_rtime[tmr] = sim_os_msec ();
rtc_vtime[tmr] = rtc_rtime[tmr];
rtc_nxintv[tmr] = 1000;
rtc_ticks[tmr] = 0;
rtc_hz[tmr] = 0;
rtc_based[tmr] = time;
rtc_currd[tmr] = time;
rtc_initd[tmr] = time;
rtc_elapsed[tmr] = 0;
rtc_calibrations[tmr] = 0;
rtc_clock_ticks_tot[tmr] += rtc_clock_ticks[tmr];
rtc_clock_ticks[tmr] = 0;
rtc_calib_tick_time_tot[tmr] += rtc_calib_tick_time[tmr];
rtc_calib_tick_time[tmr] = 0;
rtc_clock_catchup_pending[tmr] = FALSE;
rtc_clock_catchup_eligible[tmr] = FALSE;
rtc_clock_catchup_ticks_tot[tmr] += rtc_clock_catchup_ticks[tmr];
rtc_clock_catchup_ticks[tmr] = 0;
rtc_calib_ticks_acked_tot[tmr] += rtc_calib_ticks_acked[tmr];
rtc_calib_ticks_acked[tmr] = 0;
++rtc_calib_initializations[tmr];
_rtcn_configure_calibrated_clock (tmr);
return time;
}

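/* sim_rtcn_calb is called once per simulated clock tick.  It maintains
   tick and catchup-tick accounting and recomputes the instructions per
   tick from sim_timer_inst_per_sec() only when the requested tick rate
   changes, returning the current instructions-per-tick value. */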
int32 sim_rtcn_calb (uint32 ticksper, int32 tmr)
{
if (tmr == SIM_INTERNAL_CLK)
    tmr = SIM_NTIMERS;
else {
    if ((tmr < 0) || (tmr >= SIM_NTIMERS))
        return 10000;
    }
if (rtc_hz[tmr] != ticksper) {
    rtc_hz[tmr] = ticksper;
    rtc_clock_tick_size[tmr] = 1.0/ticksper;
    _rtcn_configure_calibrated_clock (tmr);
    rtc_currd[tmr] = (int32)(sim_timer_inst_per_sec()/ticksper);
    }
if (sim_clock_unit[tmr] == NULL) {
    rtc_clock_ticks[tmr] += 1;
    rtc_calib_tick_time[tmr] += rtc_clock_tick_size[tmr];
    }
if (rtc_clock_catchup_pending[tmr]) {
    ++rtc_clock_catchup_ticks[tmr];
    rtc_clock_catchup_pending[tmr] = FALSE;
    }
return rtc_currd[tmr];
}

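/* sim_timer_init sets up the per-timer tick service units, registers
   the TIMER device and the internal calibrated clock unit, determines
   the minimum host sleep interval, and then samples sim_os_msec() for
   roughly 100ms to estimate the host clock's resolution and derive
   sim_os_tick_hz.  Returns TRUE if a usable idle sleep rate exists. */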
t_bool sim_timer_init (void)
{
int tmr;
uint32 clock_start, clock_last, clock_now;

sim_debug (DBG_TRC, &sim_timer_dev, "sim_timer_init()\n");
for (tmr=0; tmr<=SIM_NTIMERS; tmr++) {
    sim_timer_units[tmr].action = &sim_timer_tick_svc;
    sim_timer_units[tmr].flags = UNIT_DIS | UNIT_IDLE;
    }
SIM_INTERNAL_UNIT.flags = UNIT_DIS | UNIT_IDLE;
sim_register_internal_device (&sim_timer_dev);
sim_register_clock_unit_tmr (&SIM_INTERNAL_UNIT, SIM_INTERNAL_CLK);
sim_idle_rate_ms = sim_os_ms_sleep_init ();

clock_last = clock_start = sim_os_msec ();
sim_os_clock_resoluton_ms = 1000;
do {
    uint32 clock_diff;

    clock_now = sim_os_msec ();
    clock_diff = clock_now - clock_last;
    if ((clock_diff > 0) && (clock_diff < sim_os_clock_resoluton_ms))
        sim_os_clock_resoluton_ms = clock_diff;
    clock_last = clock_now;
    } while (clock_now < clock_start + 100);
sim_os_tick_hz = 1000/(sim_os_clock_resoluton_ms * (sim_idle_rate_ms/sim_os_clock_resoluton_ms));
return (sim_idle_rate_ms != 0);
}

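/* SHOW TIMERS helper: prints the state of every initialized timer,
   including tick rate, elapsed time, calibration statistics, catchup
   tick information, and the current wall clock time. */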
t_stat sim_show_timers (FILE* st, DEVICE *dptr, UNIT* uptr, int32 val, CONST char* desc)
{
int tmr, clocks;
struct timespec now;
time_t time_t_now;
int32 calb_tmr = (sim_calb_tmr == -1) ? sim_calb_tmr_last : sim_calb_tmr;

for (tmr=clocks=0; tmr<=SIM_NTIMERS; ++tmr) {
    if (0 == rtc_initd[tmr])
        continue;

    if (sim_clock_unit[tmr]) {
        ++clocks;
        fprintf (st, "%s clock device is %s%s%s\n",
                 sim_name,
                 (tmr == SIM_NTIMERS) ? "Internal Calibrated Timer(" : "",
                 sim_uname(sim_clock_unit[tmr]),
                 (tmr == SIM_NTIMERS) ? ")" : "");
        }

    fprintf (st, "%s%sTimer %d:\n", "",
             rtc_hz[tmr] ? "Calibrated " : "Uncalibrated ",
             tmr);
    if (rtc_hz[tmr]) {
        fprintf (st, " Running at: %lu Hz\n",
                 (unsigned long)rtc_hz[tmr]);
        fprintf (st, " Tick Size: %s\n",
                 sim_fmt_secs (rtc_clock_tick_size[tmr]));
        fprintf (st, " Ticks in current second: %lu\n",
                 (unsigned long)rtc_ticks[tmr]);
        }
    fprintf (st, " Seconds Running: %lu (%s)\n",
             (unsigned long)rtc_elapsed[tmr],
             sim_fmt_secs ((double)rtc_elapsed[tmr]));
    if (tmr == calb_tmr) {
        fprintf (st, " Calibration Opportunities: %lu\n",
                 (unsigned long)rtc_calibrations[tmr]);
        if (sim_idle_calib_pct)
            fprintf (st, " Calib Skip Idle Thresh %%: %lu\n",
                     (unsigned long)sim_idle_calib_pct);
        if (rtc_clock_calib_skip_idle[tmr])
            fprintf (st, " Calibs Skip While Idle: %lu\n",
                     (unsigned long)rtc_clock_calib_skip_idle[tmr]);
        if (rtc_clock_calib_backwards[tmr])
            fprintf (st, " Calibs Skip Backwards: %lu\n",
                     (unsigned long)rtc_clock_calib_backwards[tmr]);
        if (rtc_clock_calib_gap2big[tmr])
            fprintf (st, " Calibs Skip Gap Too Big: %lu\n",
                     (unsigned long)rtc_clock_calib_gap2big[tmr]);
        }
    if (rtc_gtime[tmr])
        fprintf (st, " Instruction Time: %.0f\n",
                 rtc_gtime[tmr]);
    fprintf (st, " Current Insts Per Tick: %lu\n",
             (unsigned long)rtc_currd[tmr]);
    fprintf (st, " Initializations: %lu\n",
             (unsigned long)rtc_calib_initializations[tmr]);
    fprintf (st, " Total Ticks: %lu\n",
             (unsigned long)rtc_clock_ticks_tot[tmr]+(unsigned long)rtc_clock_ticks[tmr]);
    if (rtc_clock_skew_max[tmr] != 0.0)
        fprintf (st, " Peak Clock Skew: %s%s\n",
                 sim_fmt_secs (fabs(rtc_clock_skew_max[tmr])),
                 (rtc_clock_skew_max[tmr] < 0) ? " fast" : " slow");
    if (rtc_calib_ticks_acked[tmr])
        fprintf (st, " Ticks Acked: %lu\n",
                 (unsigned long)rtc_calib_ticks_acked[tmr]);
    if (rtc_calib_ticks_acked_tot[tmr]+rtc_calib_ticks_acked[tmr] != rtc_calib_ticks_acked[tmr])
        fprintf (st, " Total Ticks Acked: %lu\n",
                 (unsigned long)rtc_calib_ticks_acked_tot[tmr]+(unsigned long)rtc_calib_ticks_acked[tmr]);
    if (rtc_calib_tick_time[tmr])
        fprintf (st, " Tick Time: %s\n",
                 sim_fmt_secs (rtc_calib_tick_time[tmr]));
    if (rtc_calib_tick_time_tot[tmr]+rtc_calib_tick_time[tmr] != rtc_calib_tick_time[tmr])
        fprintf (st, " Total Tick Time: %s\n",
                 sim_fmt_secs (rtc_calib_tick_time_tot[tmr]+rtc_calib_tick_time[tmr]));
    if (rtc_clock_catchup_ticks[tmr])
        fprintf (st, " Catchup Ticks Sched: %lu\n",
                 (unsigned long)rtc_clock_catchup_ticks[tmr]);
    if (rtc_clock_catchup_ticks_tot[tmr]+rtc_clock_catchup_ticks[tmr] != rtc_clock_catchup_ticks[tmr])
        fprintf (st, " Total Catchup Ticks Sched: %lu\n",
                 (unsigned long)rtc_clock_catchup_ticks_tot[tmr]+(unsigned long)rtc_clock_catchup_ticks[tmr]);
    clock_gettime (CLOCK_REALTIME, &now);
    time_t_now = (time_t)now.tv_sec;
    fprintf (st, " Wall Clock Time Now: %8.8s.%03d\n", 11+ctime(&time_t_now), (int)(now.tv_nsec/1000000));
    if (rtc_clock_catchup_eligible[tmr]) {
        _double_to_timespec (&now, rtc_clock_catchup_base_time[tmr]+rtc_calib_tick_time[tmr]);
        time_t_now = (time_t)now.tv_sec;
        fprintf (st, " Catchup Tick Time: %8.8s.%03d\n", 11+ctime(&time_t_now), (int)(now.tv_nsec/1000000));
        _double_to_timespec (&now, rtc_clock_catchup_base_time[tmr]);
        time_t_now = (time_t)now.tv_sec;
        fprintf (st, " Catchup Base Time: %8.8s.%03d\n", 11+ctime(&time_t_now), (int)(now.tv_nsec/1000000));
        }
    if (rtc_clock_time_idled[tmr])
        fprintf (st, " Total Time Idled: %s\n", sim_fmt_secs (rtc_clock_time_idled[tmr]/1000.0));
    }
if (clocks == 0)
    fprintf (st, "%s clock device is not specified, co-scheduling is unavailable\n", sim_name);
return SCPE_OK;
}

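/* SHOW CLOCKS helper: walks each clock's co-schedule queue and lists
   the units waiting on future clock ticks, with their accumulated
   tick offsets. */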
t_stat sim_show_clock_queues (FILE *st, DEVICE *dptr, UNIT *uptr, int32 flag, CONST char *cptr)
{
int tmr;

for (tmr=0; tmr<=SIM_NTIMERS; ++tmr) {
    if (sim_clock_unit[tmr] == NULL)
        continue;
    if (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) {
        int32 accum;

        fprintf (st, "%s clock (%s) co-schedule event queue status\n",
                 sim_name, sim_uname(sim_clock_unit[tmr]));
        accum = 0;
        for (uptr = sim_clock_cosched_queue[tmr]; uptr != QUEUE_LIST_END; uptr = uptr->next) {
            if ((dptr = find_dev_from_unit (uptr)) != NULL) {
                fprintf (st, " %s", sim_dname (dptr));
                if (dptr->numunits > 1)
                    fprintf (st, " unit %d", (int32) (uptr - dptr->units));
                }
            else
                fprintf (st, " Unknown");
            if (accum > 0)
                fprintf (st, " after %d ticks", accum);
            fprintf (st, "\n");
            accum = accum + uptr->time;
            }
        }
    }
return SCPE_OK;
}

REG sim_timer_reg[] = {
    { NULL }
    };

t_stat sim_timer_clr_catchup (UNIT *uptr, int32 val, CONST char *cptr, void *desc)
{
if (sim_catchup_ticks)
    sim_catchup_ticks = FALSE;
return SCPE_OK;
}

t_stat sim_timer_set_catchup (UNIT *uptr, int32 val, CONST char *cptr, void *desc)
{
if (!sim_catchup_ticks)
    sim_catchup_ticks = TRUE;
return SCPE_OK;
}

t_stat sim_timer_show_catchup (FILE *st, UNIT *uptr, int32 val, CONST void *desc)
{
fprintf (st, "Calibrated Ticks%s", sim_catchup_ticks ? " with Catchup Ticks" : "");
return SCPE_OK;
}

MTAB sim_timer_mod[] = {
    { MTAB_VDV, MTAB_VDV, "CATCHUP", "CATCHUP", \
      &sim_timer_set_catchup, &sim_timer_show_catchup, NULL, "Enables/Displays Clock Tick catchup mode" },
    { MTAB_VDV, 0, NULL, "NOCATCHUP", \
      &sim_timer_clr_catchup, NULL, NULL, "Disables Clock Tick catchup mode" },
    { 0 },
};

static t_stat sim_timer_clock_reset (DEVICE *dptr);

DEVICE sim_timer_dev = {
    "TIMER", sim_timer_units, sim_timer_reg, sim_timer_mod,
    SIM_NTIMERS+1, 0, 0, 0, 0, 0,
    NULL, NULL, &sim_timer_clock_reset, NULL, NULL, NULL,
    NULL, DEV_DEBUG | DEV_NOSAVE, 0, sim_timer_debug};

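/* sim_timer_tick_svc is the event service routine for the per-timer
   tick units.  It accounts for the tick, invokes the registered clock
   unit's own service routine, tracks peak wall-clock skew while
   catchup ticks are eligible, releases any co-scheduled units whose
   intervals have expired, and reschedules itself one tick
   (1000000/rtc_hz[tmr] microseconds) into the future. */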
t_stat sim_timer_tick_svc (UNIT *uptr)
{
int tmr = (int)(uptr-sim_timer_units);
t_stat stat;

rtc_clock_ticks[tmr] += 1;
rtc_calib_tick_time[tmr] += rtc_clock_tick_size[tmr];

if (sim_clock_unit[tmr]->action == NULL)
    return SCPE_IERR;
stat = sim_clock_unit[tmr]->action (sim_clock_unit[tmr]);
--sim_cosched_interval[tmr];
if (stat == SCPE_OK) {
    if (rtc_clock_catchup_eligible[tmr]) {
        struct timespec now;
        double skew;

        clock_gettime(CLOCK_REALTIME, &now);
        skew = (_timespec_to_double(&now) - (rtc_calib_tick_time[tmr]+rtc_clock_catchup_base_time[tmr]));

        if (fabs(skew) > fabs(rtc_clock_skew_max[tmr]))
            rtc_clock_skew_max[tmr] = skew;
        }
    while ((sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) &&
           (sim_cosched_interval[tmr] < sim_clock_cosched_queue[tmr]->time)) {
        UNIT *cptr = sim_clock_cosched_queue[tmr];
        sim_clock_cosched_queue[tmr] = cptr->next;
        cptr->next = NULL;
        cptr->cancel = NULL;
        _sim_activate (cptr, 0);
        }
    if (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END)
        sim_cosched_interval[tmr] = sim_clock_cosched_queue[tmr]->time;
    else
        sim_cosched_interval[tmr] = 0;
    }
sim_timer_activate_after (uptr, 1000000/rtc_hz[tmr]);
return stat;
}

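/* On native Windows builds (but not Cygwin), emulate usleep() with a
   waitable timer.  The due time is expressed in 100-nanosecond units,
   and a negative value requests a relative delay. */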
#if !defined(__CYGWIN__) && \
    ( defined(_WIN32) || defined(__MINGW32__) || defined(__MINGW64__) || \
      defined(CROSS_MINGW32) || defined(CROSS_MINGW64) )
void win32_usleep(__int64 usec)
{
HANDLE timer;
LARGE_INTEGER ft;

ft.QuadPart = -(10*usec);

timer = CreateWaitableTimer(NULL, TRUE, NULL);
SetWaitableTimer(timer, &ft, 0, NULL, NULL, 0);
WaitForSingleObject(timer, INFINITE);
CloseHandle(timer);
}
#endif

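/* sim_usleep provides a portable microsecond sleep.  Native Windows
   builds use win32_usleep(), most POSIX hosts use clock_nanosleep()
   against CLOCK_MONOTONIC (or a QNX-specific sleep when available),
   and macOS, OpenBSD, and PASE hosts fall back to nanosleep() or
   usleep(). */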
int
sim_usleep(useconds_t tusleep)
{
#if ( !defined(__APPLE__) && !defined(__OpenBSD__) )
# if !defined(__CYGWIN__) && \
     ( defined(_WIN32) || defined(__MINGW32__) || defined(__MINGW64__) || \
       defined(CROSS_MINGW32) || defined(CROSS_MINGW64) )
win32_usleep(tusleep);

return 0;
# else
#  if !defined(__PASE__)
#   if defined(__QNX__) && defined(QNX_NSSLEEP)
return qh_nssleep((uint64_t)tusleep * 1000UL, CLOCK_MONOTONIC, (uint64_t)tusleep * 1000UL);
#   else
struct timespec rqt;
rqt.tv_sec = tusleep / 1000000L;
rqt.tv_nsec = (tusleep % 1000000L) * 1000L;

return clock_nanosleep(CLOCK_MONOTONIC, 0, &rqt, NULL);
#   endif
#  else
return usleep(tusleep);
#  endif
# endif
#else
# if defined(__APPLE__)
struct timespec rqt;
rqt.tv_sec = tusleep / 1000000L;
rqt.tv_nsec = (tusleep % 1000000L) * 1000L;
return nanosleep(&rqt, NULL);
# else
return usleep(tusleep);
# endif
#endif
}

static double _timespec_to_double (struct timespec *time)
{
return ((double)time->tv_sec)+(double)(time->tv_nsec)/1000000000.0;
}

static void _double_to_timespec (struct timespec *time, double dtime)
{
time->tv_sec = (time_t)floor(dtime);
time->tv_nsec = (long)((dtime-floor(dtime))*1000000000.0);
}

#define CLK_TPS 10
#define CLK_INIT (SIM_INITIAL_IPS/CLK_TPS)
static int32 sim_int_clk_tps;

static t_stat sim_timer_clock_tick_svc (UNIT *uptr)
{
sim_rtcn_calb (sim_int_clk_tps, SIM_INTERNAL_CLK);
sim_activate_after (uptr, 1000000/sim_int_clk_tps);
return SCPE_OK;
}

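/* _rtcn_configure_calibrated_clock picks the clock source used for
   calibration.  The first registered timer whose tick rate does not
   exceed the host's tick rate becomes the calibrated timer; if no such
   timer exists, an internal 10Hz (or slower) timer is started instead.
   When switching away from the internal timer, its co-scheduled units
   are released and the internal unit is cancelled. */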
static void _rtcn_configure_calibrated_clock (int32 newtmr)
{
int32 tmr;

sim_int_clk_tps = MIN(CLK_TPS, sim_os_tick_hz);
for (tmr=0; tmr<SIM_NTIMERS; tmr++) {
    if ((rtc_hz[tmr]) &&
        (rtc_hz[tmr] <= (uint32)sim_os_tick_hz))
        break;
    }
if (tmr == SIM_NTIMERS) {
    if ((tmr != newtmr) && (!sim_is_active (&SIM_INTERNAL_UNIT))) {
        sim_calb_tmr = SIM_NTIMERS;
        sim_debug (DBG_CAL, &sim_timer_dev,
                   "_rtcn_configure_calibrated_clock() - Starting Internal Calibrated Timer at %dHz\n",
                   sim_int_clk_tps);
        SIM_INTERNAL_UNIT.action = &sim_timer_clock_tick_svc;
        SIM_INTERNAL_UNIT.flags = UNIT_DIS | UNIT_IDLE;
        sim_activate_abs (&SIM_INTERNAL_UNIT, 0);
        sim_rtcn_init_unit (&SIM_INTERNAL_UNIT, (CLK_INIT*CLK_TPS)/sim_int_clk_tps, SIM_INTERNAL_CLK);
        }
    return;
    }
if ((tmr == newtmr) &&
    (sim_calb_tmr == newtmr))
    return;
if (sim_calb_tmr == SIM_NTIMERS) {
    sim_debug (DBG_CAL, &sim_timer_dev,
               "_rtcn_configure_calibrated_clock() - Stopping Internal Calibrated Timer, New Timer = %d (%dHz)\n",
               tmr, rtc_hz[tmr]);
    rtc_initd[SIM_NTIMERS] = 0;
    rtc_hz[SIM_NTIMERS] = 0;
    sim_cancel (&SIM_INTERNAL_UNIT);

    while (sim_clock_cosched_queue[SIM_NTIMERS] != QUEUE_LIST_END) {
        UNIT *uptr = sim_clock_cosched_queue[SIM_NTIMERS];
        _sim_coschedule_cancel (uptr);
        _sim_activate (uptr, 1);
        }
    }
else {
    sim_debug (DBG_CAL, &sim_timer_dev,
               "_rtcn_configure_calibrated_clock() - Changing Calibrated Timer from %d (%dHz) to %d (%dHz)\n",
               sim_calb_tmr, rtc_hz[sim_calb_tmr], tmr, rtc_hz[tmr]);
    sim_calb_tmr = tmr;
    }
sim_calb_tmr = tmr;
}

static t_stat sim_timer_clock_reset (DEVICE *dptr)
{
sim_debug (DBG_TRC, &sim_timer_dev, "sim_timer_clock_reset()\n");
_rtcn_configure_calibrated_clock (sim_calb_tmr);
if (sim_switches & SWMASK ('P')) {
    sim_cancel (&SIM_INTERNAL_UNIT);
    sim_calb_tmr = -1;
    }
return SCPE_OK;
}

void sim_start_timer_services (void)
{
sim_debug (DBG_TRC, &sim_timer_dev, "sim_start_timer_services()\n");
_rtcn_configure_calibrated_clock (sim_calb_tmr);
}

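/* sim_stop_timer_services is called when instruction execution stops.
   Pending tick service events are cancelled, the registered clock
   units are rescheduled directly, co-scheduled units are converted to
   ordinary event queue entries, and the identity and speed of the
   calibrated timer are saved for use while stopped. */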
void sim_stop_timer_services (void)
{
int tmr;

sim_debug (DBG_TRC, &sim_timer_dev, "sim_stop_timer_services()\n");

for (tmr=0; tmr<=SIM_NTIMERS; tmr++) {
    int32 accum;

    if (sim_clock_unit[tmr]) {
        sim_cancel (&sim_timer_units[tmr]);
        if (rtc_hz[tmr])
            sim_activate (sim_clock_unit[tmr], rtc_currd[tmr]);

        accum = 1;
        while (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) {
            UNIT *cptr = sim_clock_cosched_queue[tmr];

            sim_clock_cosched_queue[tmr] = cptr->next;
            cptr->next = NULL;
            cptr->cancel = NULL;

            accum += cptr->time;
            _sim_activate (cptr, accum*rtc_currd[tmr]);
            }
        }
    }
sim_cancel (&SIM_INTERNAL_UNIT);
sim_calb_tmr_last = sim_calb_tmr;
sim_inst_per_sec_last = sim_timer_inst_per_sec ();
sim_calb_tmr = -1;
}

double sim_timer_inst_per_sec (void)
{
double inst_per_sec = SIM_INITIAL_IPS;

if (sim_calb_tmr == -1)
    return inst_per_sec;
inst_per_sec = ((double)rtc_currd[sim_calb_tmr])*rtc_hz[sim_calb_tmr];
if (0 == inst_per_sec)
    inst_per_sec = ((double)rtc_currd[sim_calb_tmr])*sim_int_clk_tps;
return inst_per_sec;
}

t_stat sim_timer_activate (UNIT *uptr, int32 interval)
{
return sim_timer_activate_after (uptr, (uint32)((interval * 1000000.0) / sim_timer_inst_per_sec ()));
}

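/* sim_timer_activate_after schedules 'uptr' usec_delay microseconds in
   the future by converting the wall-clock delay into an instruction
   count using the current instructions-per-second estimate.  Requests
   for a registered clock unit are redirected to that timer's tick
   service unit, and a nonzero microsecond delay always yields at least
   a one-instruction delay. */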
t_stat sim_timer_activate_after (UNIT *uptr, uint32 usec_delay)
{
int inst_delay, tmr;
double inst_delay_d, inst_per_sec;

for (tmr=0; tmr<=SIM_NTIMERS; tmr++)
    if (sim_clock_unit[tmr] == uptr) {
        uptr = &sim_timer_units[tmr];
        break;
        }
if (sim_is_active (uptr))
    return SCPE_OK;
inst_per_sec = sim_timer_inst_per_sec ();
inst_delay_d = ((inst_per_sec*usec_delay)/1000000.0);

if (inst_delay_d > (double)0x7fffffff)
    inst_delay_d = (double)0x7fffffff;
inst_delay = (int32)inst_delay_d;
if ((inst_delay == 0) && (usec_delay != 0))
    inst_delay = 1;
sim_debug (DBG_TIM, &sim_timer_dev, "sim_timer_activate_after() - queue addition %s at %d (%d usecs)\n",
           sim_uname(uptr), inst_delay, usec_delay);
return _sim_activate (uptr, inst_delay);
}

t_stat sim_register_clock_unit_tmr (UNIT *uptr, int32 tmr)
{
if (tmr == SIM_INTERNAL_CLK)
    tmr = SIM_NTIMERS;
else {
    if ((tmr < 0) || (tmr >= SIM_NTIMERS))
        return SCPE_IERR;
    }
if (NULL == uptr) {
    while (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) {
        UNIT *uptr = sim_clock_cosched_queue[tmr];

        _sim_coschedule_cancel (uptr);
        _sim_activate (uptr, 1);
        }
    sim_clock_unit[tmr] = NULL;
    return SCPE_OK;
    }
if (NULL == sim_clock_unit[tmr])
    sim_clock_cosched_queue[tmr] = QUEUE_LIST_END;
sim_clock_unit[tmr] = uptr;
uptr->dynflags |= UNIT_TMR_UNIT;
sim_timer_units[tmr].flags = UNIT_DIS | (sim_clock_unit[tmr] ? UNIT_IDLE : 0);
return SCPE_OK;
}

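/* _sim_coschedule_cancel removes a unit from whichever clock
   co-schedule queue it is linked on, clearing its next and cancel
   pointers once it has been unlinked. */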
static void _sim_coschedule_cancel (UNIT *uptr)
{
if (uptr->next) {
    int tmr;

    for (tmr=0; tmr<SIM_NTIMERS; tmr++) {
        if (uptr == sim_clock_cosched_queue[tmr]) {
            sim_clock_cosched_queue[tmr] = uptr->next;
            uptr->next = NULL;
            }
        else {
            UNIT *cptr;
            for (cptr = sim_clock_cosched_queue[tmr];
                 (cptr != QUEUE_LIST_END);
                 cptr = cptr->next)
                if (cptr->next == (uptr)) {
                    cptr->next = (uptr)->next;
                    uptr->next = NULL;
                    break;
                    }
            }
        if (uptr->next == NULL) {
            uptr->cancel = NULL;
            sim_debug (SIM_DBG_EVENT, &sim_timer_dev, "Canceled Clock Coscheduled Event for %s\n", sim_uname(uptr));
            return;
            }
        }
    }
}