This source file includes the following definitions:
- _compute_minimum_sleep
- sim_idle_ms_sleep
- sim_os_set_thread_priority
- sim_os_set_thread_priority
- sim_os_msec
- sim_os_sleep
- sim_timer_exit
- sim_os_ms_sleep_init
- sim_os_ms_sleep
- sim_os_msec
- sim_os_sleep
- sim_os_ms_sleep_init
- sim_os_ms_sleep
- sim_timespec_diff
- sim_rtcn_init_all
- sim_rtcn_init
- sim_rtcn_init_unit
- sim_rtcn_calb
- sim_timer_init
- sim_show_timers
- sim_show_clock_queues
- sim_timer_clr_catchup
- sim_timer_set_catchup
- sim_timer_show_catchup
- sim_timer_tick_svc
- win32_usleep
- sim_usleep
- _timespec_to_double
- _double_to_timespec
- sim_timer_clock_tick_svc
- _rtcn_configure_calibrated_clock
- sim_timer_clock_reset
- sim_start_timer_services
- sim_stop_timer_services
- sim_timer_inst_per_sec
- sim_timer_activate
- sim_timer_activate_after
- sim_register_clock_unit_tmr
- _sim_coschedule_cancel
#include "sim_defs.h"
#include <ctype.h>
#include <math.h>

#define SIM_INTERNAL_CLK (SIM_NTIMERS+(1<<30))
#define SIM_INTERNAL_UNIT sim_internal_timer_unit

#if defined(MIN)
# undef MIN
#endif
#define MIN(a,b) (((a) < (b)) ? (a) : (b))

#if defined(MAX)
# undef MAX
#endif
#define MAX(a,b) (((a) > (b)) ? (a) : (b))

uint32 sim_idle_ms_sleep (unsigned int msec);

static int32 sim_calb_tmr = -1;
static int32 sim_calb_tmr_last = -1;
static double sim_inst_per_sec_last = 0;

static uint32 sim_idle_rate_ms = 0;
static uint32 sim_os_sleep_min_ms = 0;
static uint32 sim_os_sleep_inc_ms = 0;
static uint32 sim_os_clock_resoluton_ms = 0;
static uint32 sim_os_tick_hz = 0;
static uint32 sim_idle_calib_pct = 0;
static UNIT *sim_clock_unit[SIM_NTIMERS+1] = {NULL};
UNIT * volatile sim_clock_cosched_queue[SIM_NTIMERS+1] = {NULL};
static int32 sim_cosched_interval[SIM_NTIMERS+1];
static t_bool sim_catchup_ticks = FALSE;

#define sleep1Samples 10

static uint32 _compute_minimum_sleep (void)
{
uint32 i, tot, tim;

sim_os_set_thread_priority (PRIORITY_ABOVE_NORMAL);
sim_idle_ms_sleep (1);
for (i = 0, tot = 0; i < sleep1Samples; i++)
    tot += sim_idle_ms_sleep (1);
tim = tot / sleep1Samples;
sim_os_sleep_min_ms = tim;
sim_idle_ms_sleep (1);
for (i = 0, tot = 0; i < sleep1Samples; i++)
    tot += sim_idle_ms_sleep (sim_os_sleep_min_ms + 1);
tim = tot / sleep1Samples;
sim_os_sleep_inc_ms = tim - sim_os_sleep_min_ms;
sim_os_set_thread_priority (PRIORITY_NORMAL);
return sim_os_sleep_min_ms;
}
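
/*
 * Illustrative note (not part of the original source): _compute_minimum_sleep()
 * measures how long the host actually sleeps.  On a host whose scheduler tick
 * is roughly 15.6 ms, a requested 1 ms sleep typically completes in about
 * 16 ms, so sim_os_sleep_min_ms lands near 16; sleeping for
 * sim_os_sleep_min_ms + 1 then takes about one tick longer, making
 * sim_os_sleep_inc_ms roughly another 15-16 ms.  The compiled-out sketch
 * below (report_sleep_granularity is a hypothetical helper) shows how the
 * measured values could be examined after sim_timer_init() has run.
 */
#if 0
static void report_sleep_granularity (void)
{
fprintf (stdout, "minimum sleep %u ms, increment %u ms\n",
         (unsigned int)sim_os_sleep_min_ms, (unsigned int)sim_os_sleep_inc_ms);
}
#endif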

uint32 sim_idle_ms_sleep (unsigned int msec)
{
return sim_os_ms_sleep (msec);
}

#if defined(_WIN32)

t_stat sim_os_set_thread_priority (int below_normal_above)
{
const static int val[3] = {THREAD_PRIORITY_BELOW_NORMAL, THREAD_PRIORITY_NORMAL, THREAD_PRIORITY_ABOVE_NORMAL};

if ((below_normal_above < -1) || (below_normal_above > 1))
    return SCPE_ARG;
SetThreadPriority (GetCurrentThread(), val[1 + below_normal_above]);
return SCPE_OK;
}
#else

t_stat sim_os_set_thread_priority (int below_normal_above)
{
int sched_policy, min_prio, max_prio;
struct sched_param sched_priority;

# if !defined(__gnu_hurd__)
if ((below_normal_above < -1) || (below_normal_above > 1))
    return SCPE_ARG;

pthread_getschedparam (pthread_self(), &sched_policy, &sched_priority);
#  if !defined(__PASE__)
min_prio = sched_get_priority_min(sched_policy);
max_prio = sched_get_priority_max(sched_policy);
#  else
min_prio = 1;
max_prio = 127;
#  endif
switch (below_normal_above) {
    case PRIORITY_BELOW_NORMAL:
        sched_priority.sched_priority = min_prio;
        break;
    case PRIORITY_NORMAL:
        sched_priority.sched_priority = (max_prio + min_prio) / 2;
        break;
    case PRIORITY_ABOVE_NORMAL:
        sched_priority.sched_priority = max_prio;
        break;
    }
pthread_setschedparam (pthread_self(), sched_policy, &sched_priority);
# endif
return SCPE_OK;
}
#endif

#if defined (_WIN32)

const t_bool rtc_avail = TRUE;

uint32 sim_os_msec (void)
{
return timeGetTime ();
}

void sim_os_sleep (unsigned int sec)
{
Sleep (sec * 1000);
return;
}

void sim_timer_exit (void)
{
timeEndPeriod (sim_idle_rate_ms);
return;
}

uint32 sim_os_ms_sleep_init (void)
{
TIMECAPS timers;

if (timeGetDevCaps (&timers, sizeof (timers)) != TIMERR_NOERROR)
    return 0;
if (timers.wPeriodMin == 0)
    return 0;
if (timeBeginPeriod (timers.wPeriodMin) != TIMERR_NOERROR)
    return 0;
atexit (sim_timer_exit);

return _compute_minimum_sleep ();
}

uint32 sim_os_ms_sleep (unsigned int msec)
{
uint32 stime = sim_os_msec();

Sleep (msec);
return sim_os_msec () - stime;
}

#else

# include <time.h>
# include <sys/time.h>
# include <signal.h>
# include <unistd.h>
# define NANOS_PER_MILLI 1000000
# define MILLIS_PER_SEC 1000

const t_bool rtc_avail = TRUE;

uint32 sim_os_msec (void)
{
struct timeval cur;
struct timezone foo;
int st1ret;
uint32 msec;

st1ret = gettimeofday (&cur, &foo);
if (st1ret != 0)
    {
    fprintf (stderr, "\rFATAL: gettimeofday failure! Aborting at %s[%s:%d]\r\n",
             __func__, __FILE__, __LINE__);
# if defined(USE_BACKTRACE)
#  if defined(SIGUSR2)
    (void)raise(SIGUSR2);
#  endif
# endif
    abort();
    }
msec = (((uint32) cur.tv_sec) * 1000UL) + (((uint32) cur.tv_usec) / 1000UL);
return msec;
}
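
/*
 * Illustrative note (not part of the original source): sim_os_msec() returns a
 * 32-bit millisecond counter, so it wraps roughly every 49.7 days.  Elapsed
 * intervals should therefore be computed with unsigned subtraction, exactly as
 * sim_os_ms_sleep() does below.  Compiled-out sketch; measure_elapsed_ms is a
 * hypothetical helper.
 */
#if 0
static uint32 measure_elapsed_ms (void)
{
uint32 start_ms = sim_os_msec ();
/* ... do some work ... */
return sim_os_msec () - start_ms;           /* correct even across a wrap */
}
#endif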

void sim_os_sleep (unsigned int sec)
{
sleep (sec);
return;
}

uint32 sim_os_ms_sleep_init (void)
{
return _compute_minimum_sleep ();
}

uint32 sim_os_ms_sleep (unsigned int milliseconds)
{
uint32 stime = sim_os_msec ();
struct timespec treq;

treq.tv_sec = milliseconds / MILLIS_PER_SEC;
treq.tv_nsec = (milliseconds % MILLIS_PER_SEC) * NANOS_PER_MILLI;
(void) nanosleep (&treq, NULL);
return sim_os_msec () - stime;
}

#endif

void
sim_timespec_diff (struct timespec *diff, const struct timespec *min, struct timespec *sub)
{
*diff = *min;

while (sub->tv_nsec > diff->tv_nsec) {
    --diff->tv_sec;
    diff->tv_nsec += 1000000000L;
    }
diff->tv_nsec -= sub->tv_nsec;
diff->tv_sec -= sub->tv_sec;

while (diff->tv_nsec > 1000000000L) {
    ++diff->tv_sec;
    diff->tv_nsec -= 1000000000L;
    }
}
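
/*
 * Worked example (not part of the original source): subtracting 2.7 s from
 * 5.2 s.  The first loop borrows one second because 700000000 ns exceeds
 * 200000000 ns, leaving {4, 1200000000}; the subtraction then yields
 * {2, 500000000}, i.e. 2.5 s, and the final loop has nothing left to
 * normalize.  Compiled-out sketch; timespec_diff_example is hypothetical.
 */
#if 0
static void timespec_diff_example (void)
{
struct timespec a = { 5, 200000000L };
struct timespec b = { 2, 700000000L };
struct timespec d;

sim_timespec_diff (&d, &a, &b);             /* d.tv_sec == 2, d.tv_nsec == 500000000 */
}
#endif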

static double _timespec_to_double (struct timespec *time);
static void _double_to_timespec (struct timespec *time, double dtime);
static void _rtcn_configure_calibrated_clock (int32 newtmr);
static void _sim_coschedule_cancel(UNIT *uptr);

static int32 rtc_ticks[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_hz[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_rtime[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_vtime[SIM_NTIMERS+1] = { 0 };
static double rtc_gtime[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_nxintv[SIM_NTIMERS+1] = { 0 };
static int32 rtc_based[SIM_NTIMERS+1] = { 0 };
static int32 rtc_currd[SIM_NTIMERS+1] = { 0 };
static int32 rtc_initd[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_elapsed[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_calibrations[SIM_NTIMERS+1] = { 0 };
static double rtc_clock_skew_max[SIM_NTIMERS+1] = { 0 };
static double rtc_clock_start_gtime[SIM_NTIMERS+1] = { 0 };
static double rtc_clock_tick_size[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_calib_initializations[SIM_NTIMERS+1] = { 0 };
static double rtc_calib_tick_time[SIM_NTIMERS+1] = { 0 };
static double rtc_calib_tick_time_tot[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_calib_ticks_acked[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_calib_ticks_acked_tot[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_ticks[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_ticks_tot[SIM_NTIMERS+1] = { 0 };
static double rtc_clock_catchup_base_time[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_catchup_ticks[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_catchup_ticks_tot[SIM_NTIMERS+1] = { 0 };
static t_bool rtc_clock_catchup_pending[SIM_NTIMERS+1] = { 0 };
static t_bool rtc_clock_catchup_eligible[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_time_idled[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_calib_skip_idle[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_calib_gap2big[SIM_NTIMERS+1] = { 0 };
static uint32 rtc_clock_calib_backwards[SIM_NTIMERS+1] = { 0 };

UNIT sim_timer_units[SIM_NTIMERS+1];

UNIT sim_internal_timer_unit;
UNIT sim_throttle_unit;

t_stat sim_throt_svc (UNIT *uptr);
t_stat sim_timer_tick_svc (UNIT *uptr);

#define DBG_TRC 0x008
#define DBG_CAL 0x010
#define DBG_TIM 0x020
#define DBG_ACK 0x080
DEBTAB sim_timer_debug[] = {
  {"TRACE", DBG_TRC, "Trace routine calls"},
  {"IACK", DBG_ACK, "interrupt acknowledgement activities"},
  {"CALIB", DBG_CAL, "Calibration activities"},
  {"TIME", DBG_TIM, "Activation and scheduling activities"},
  {0}
};

extern DEVICE sim_timer_dev;
extern DEVICE sim_throttle_dev;

void sim_rtcn_init_all (void)
{
int32 tmr;

for (tmr = 0; tmr <= SIM_NTIMERS; tmr++)
    if (rtc_initd[tmr] != 0)
        sim_rtcn_init (rtc_initd[tmr], tmr);
return;
}

int32 sim_rtcn_init (int32 time, int32 tmr)
{
return sim_rtcn_init_unit (NULL, time, tmr);
}

int32 sim_rtcn_init_unit (UNIT *uptr, int32 time, int32 tmr)
{
if (time == 0)
    time = 1;
if (tmr == SIM_INTERNAL_CLK)
    tmr = SIM_NTIMERS;
else {
    if ((tmr < 0) || (tmr >= SIM_NTIMERS))
        return time;
    }

if (rtc_currd[tmr])
    time = rtc_currd[tmr];
if (!uptr)
    uptr = sim_clock_unit[tmr];
sim_debug (DBG_CAL, &sim_timer_dev, "_sim_rtcn_init_unit(unit=%s, time=%d, tmr=%d)\n", sim_uname(uptr), time, tmr);
if (uptr) {
    if (!sim_clock_unit[tmr])
        sim_register_clock_unit_tmr (uptr, tmr);
    }
rtc_clock_start_gtime[tmr] = sim_gtime();
rtc_rtime[tmr] = sim_os_msec ();
rtc_vtime[tmr] = rtc_rtime[tmr];
rtc_nxintv[tmr] = 1000;
rtc_ticks[tmr] = 0;
rtc_hz[tmr] = 0;
rtc_based[tmr] = time;
rtc_currd[tmr] = time;
rtc_initd[tmr] = time;
rtc_elapsed[tmr] = 0;
rtc_calibrations[tmr] = 0;
rtc_clock_ticks_tot[tmr] += rtc_clock_ticks[tmr];
rtc_clock_ticks[tmr] = 0;
rtc_calib_tick_time_tot[tmr] += rtc_calib_tick_time[tmr];
rtc_calib_tick_time[tmr] = 0;
rtc_clock_catchup_pending[tmr] = FALSE;
rtc_clock_catchup_eligible[tmr] = FALSE;
rtc_clock_catchup_ticks_tot[tmr] += rtc_clock_catchup_ticks[tmr];
rtc_clock_catchup_ticks[tmr] = 0;
rtc_calib_ticks_acked_tot[tmr] += rtc_calib_ticks_acked[tmr];
rtc_calib_ticks_acked[tmr] = 0;
++rtc_calib_initializations[tmr];
_rtcn_configure_calibrated_clock (tmr);
return time;
}
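
/*
 * Usage sketch (not part of the original source, and only an approximation of
 * how a simulator's clock device typically uses this interface): a device
 * reset routine seeds the calibration for a timer slot with an initial
 * instructions-per-tick guess and schedules the first tick.  The names
 * clk_unit, clk_reset and CLK_HZ are hypothetical.  Compiled out.
 */
#if 0
#define CLK_HZ 60                               /* simulated line clock rate */
static UNIT clk_unit;                           /* would live in the clock DEVICE's unit table */

static t_stat clk_reset (DEVICE *dptr)
{
int32 t;

t = sim_rtcn_init_unit (&clk_unit, 5000, 0);    /* timer slot 0, ~5000 insts/tick to start */
sim_activate (&clk_unit, t);                    /* schedule the first tick */
return SCPE_OK;
}
#endif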

int32 sim_rtcn_calb (uint32 ticksper, int32 tmr)
{
if (tmr == SIM_INTERNAL_CLK)
    tmr = SIM_NTIMERS;
else {
    if ((tmr < 0) || (tmr >= SIM_NTIMERS))
        return 10000;
    }
if (rtc_hz[tmr] != ticksper) {
    rtc_hz[tmr] = ticksper;
    rtc_clock_tick_size[tmr] = 1.0/ticksper;
    _rtcn_configure_calibrated_clock (tmr);
    rtc_currd[tmr] = (int32)(sim_timer_inst_per_sec()/ticksper);
    }
if (sim_clock_unit[tmr] == NULL) {
    rtc_clock_ticks[tmr] += 1;
    rtc_calib_tick_time[tmr] += rtc_clock_tick_size[tmr];
    }
if (rtc_clock_catchup_pending[tmr]) {
    ++rtc_clock_catchup_ticks[tmr];
    rtc_clock_catchup_pending[tmr] = FALSE;
    }
return rtc_currd[tmr];
}
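
/*
 * Usage sketch (not part of the original source): a clock unit's service
 * routine calls sim_rtcn_calb() once per simulated tick and uses the returned
 * instruction count to schedule the next tick, which is what keeps the
 * simulated clock tracking wall-clock time.  clk_svc, clk_unit and CLK_HZ are
 * hypothetical names continuing the sketch above.  Compiled out.
 */
#if 0
static t_stat clk_svc (UNIT *uptr)
{
int32 t;

t = sim_rtcn_calb (CLK_HZ, 0);                  /* recalibrate timer slot 0 at CLK_HZ */
sim_activate (uptr, t);                         /* next tick after t instructions */
/* ... assert the simulated clock interrupt here ... */
return SCPE_OK;
}
#endif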

t_bool sim_timer_init (void)
{
int tmr;
uint32 clock_start, clock_last, clock_now;

sim_debug (DBG_TRC, &sim_timer_dev, "sim_timer_init()\n");
for (tmr=0; tmr<=SIM_NTIMERS; tmr++) {
    sim_timer_units[tmr].action = &sim_timer_tick_svc;
    sim_timer_units[tmr].flags = UNIT_DIS | UNIT_IDLE;
    }
SIM_INTERNAL_UNIT.flags = UNIT_DIS | UNIT_IDLE;
sim_register_internal_device (&sim_timer_dev);
sim_register_clock_unit_tmr (&SIM_INTERNAL_UNIT, SIM_INTERNAL_CLK);
sim_idle_rate_ms = sim_os_ms_sleep_init ();

clock_last = clock_start = sim_os_msec ();
sim_os_clock_resoluton_ms = 1000;
do {
    uint32 clock_diff;

    clock_now = sim_os_msec ();
    clock_diff = clock_now - clock_last;
    if ((clock_diff > 0) && (clock_diff < sim_os_clock_resoluton_ms))
        sim_os_clock_resoluton_ms = clock_diff;
    clock_last = clock_now;
    } while (clock_now < clock_start + 100);
sim_os_tick_hz = 1000/(sim_os_clock_resoluton_ms * (sim_idle_rate_ms/sim_os_clock_resoluton_ms));
return (sim_idle_rate_ms != 0);
}
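
/*
 * Worked example (not part of the original source): the loop above samples
 * sim_os_msec() for about 100 ms and keeps the smallest non-zero difference as
 * the host clock resolution.  If that resolution is 1 ms and the minimum sleep
 * measured earlier is 10 ms, then
 *     sim_os_tick_hz = 1000 / (1 * (10 / 1)) = 100 Hz,
 * i.e. the fastest rate at which timed sleeps can actually complete.  With a
 * coarser 16 ms resolution and a 16 ms minimum sleep the result is
 *     1000 / (16 * (16 / 16)) = 62 Hz (integer division throughout).
 */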

t_stat sim_show_timers (FILE* st, DEVICE *dptr, UNIT* uptr, int32 val, CONST char* desc)
{
int tmr, clocks;
struct timespec now;
time_t time_t_now;
int32 calb_tmr = (sim_calb_tmr == -1) ? sim_calb_tmr_last : sim_calb_tmr;

for (tmr=clocks=0; tmr<=SIM_NTIMERS; ++tmr) {
    if (0 == rtc_initd[tmr])
        continue;

    if (sim_clock_unit[tmr]) {
        ++clocks;
        fprintf (st, "%s clock device is %s%s%s\n",
                 sim_name,
                 (tmr == SIM_NTIMERS) ? "Internal Calibrated Timer(" : "",
                 sim_uname(sim_clock_unit[tmr]),
                 (tmr == SIM_NTIMERS) ? ")" : "");
        }

    fprintf (st, "%s%sTimer %d:\n", "",
             rtc_hz[tmr] ? "Calibrated " : "Uncalibrated ",
             tmr);
    if (rtc_hz[tmr]) {
        fprintf (st, " Running at: %lu Hz\n",
                 (unsigned long)rtc_hz[tmr]);
        fprintf (st, " Tick Size: %s\n",
                 sim_fmt_secs (rtc_clock_tick_size[tmr]));
        fprintf (st, " Ticks in current second: %lu\n",
                 (unsigned long)rtc_ticks[tmr]);
        }
    fprintf (st, " Seconds Running: %lu (%s)\n",
             (unsigned long)rtc_elapsed[tmr],
             sim_fmt_secs ((double)rtc_elapsed[tmr]));
    if (tmr == calb_tmr) {
        fprintf (st, " Calibration Opportunities: %lu\n",
                 (unsigned long)rtc_calibrations[tmr]);
        if (sim_idle_calib_pct)
            fprintf (st, " Calib Skip Idle Thresh %%: %lu\n",
                     (unsigned long)sim_idle_calib_pct);
        if (rtc_clock_calib_skip_idle[tmr])
            fprintf (st, " Calibs Skip While Idle: %lu\n",
                     (unsigned long)rtc_clock_calib_skip_idle[tmr]);
        if (rtc_clock_calib_backwards[tmr])
            fprintf (st, " Calibs Skip Backwards: %lu\n",
                     (unsigned long)rtc_clock_calib_backwards[tmr]);
        if (rtc_clock_calib_gap2big[tmr])
            fprintf (st, " Calibs Skip Gap Too Big: %lu\n",
                     (unsigned long)rtc_clock_calib_gap2big[tmr]);
        }
    if (rtc_gtime[tmr])
        fprintf (st, " Instruction Time: %.0f\n",
                 rtc_gtime[tmr]);
    fprintf (st, " Current Insts Per Tick: %lu\n",
             (unsigned long)rtc_currd[tmr]);
    fprintf (st, " Initializations: %lu\n",
             (unsigned long)rtc_calib_initializations[tmr]);
    fprintf (st, " Total Ticks: %lu\n",
             (unsigned long)rtc_clock_ticks_tot[tmr]+(unsigned long)rtc_clock_ticks[tmr]);
    if (rtc_clock_skew_max[tmr] != 0.0)
        fprintf (st, " Peak Clock Skew: %s%s\n",
                 sim_fmt_secs (fabs(rtc_clock_skew_max[tmr])),
                 (rtc_clock_skew_max[tmr] < 0) ? " fast" : " slow");
    if (rtc_calib_ticks_acked[tmr])
        fprintf (st, " Ticks Acked: %lu\n",
                 (unsigned long)rtc_calib_ticks_acked[tmr]);
    if (rtc_calib_ticks_acked_tot[tmr]+rtc_calib_ticks_acked[tmr] != rtc_calib_ticks_acked[tmr])
        fprintf (st, " Total Ticks Acked: %lu\n",
                 (unsigned long)rtc_calib_ticks_acked_tot[tmr]+(unsigned long)rtc_calib_ticks_acked[tmr]);
    if (rtc_calib_tick_time[tmr])
        fprintf (st, " Tick Time: %s\n",
                 sim_fmt_secs (rtc_calib_tick_time[tmr]));
    if (rtc_calib_tick_time_tot[tmr]+rtc_calib_tick_time[tmr] != rtc_calib_tick_time[tmr])
        fprintf (st, " Total Tick Time: %s\n",
                 sim_fmt_secs (rtc_calib_tick_time_tot[tmr]+rtc_calib_tick_time[tmr]));
    if (rtc_clock_catchup_ticks[tmr])
        fprintf (st, " Catchup Ticks Sched: %lu\n",
                 (unsigned long)rtc_clock_catchup_ticks[tmr]);
    if (rtc_clock_catchup_ticks_tot[tmr]+rtc_clock_catchup_ticks[tmr] != rtc_clock_catchup_ticks[tmr])
        fprintf (st, " Total Catchup Ticks Sched: %lu\n",
                 (unsigned long)rtc_clock_catchup_ticks_tot[tmr]+(unsigned long)rtc_clock_catchup_ticks[tmr]);
    clock_gettime (CLOCK_REALTIME, &now);
    time_t_now = (time_t)now.tv_sec;
    fprintf (st, " Wall Clock Time Now: %8.8s.%03d\n", 11+ctime(&time_t_now), (int)(now.tv_nsec/1000000));
    if (rtc_clock_catchup_eligible[tmr]) {
        _double_to_timespec (&now, rtc_clock_catchup_base_time[tmr]+rtc_calib_tick_time[tmr]);
        time_t_now = (time_t)now.tv_sec;
        fprintf (st, " Catchup Tick Time: %8.8s.%03d\n", 11+ctime(&time_t_now), (int)(now.tv_nsec/1000000));
        _double_to_timespec (&now, rtc_clock_catchup_base_time[tmr]);
        time_t_now = (time_t)now.tv_sec;
        fprintf (st, " Catchup Base Time: %8.8s.%03d\n", 11+ctime(&time_t_now), (int)(now.tv_nsec/1000000));
        }
    if (rtc_clock_time_idled[tmr])
        fprintf (st, " Total Time Idled: %s\n", sim_fmt_secs (rtc_clock_time_idled[tmr]/1000.0));
    }
if (clocks == 0)
    fprintf (st, "%s clock device is not specified, co-scheduling is unavailable\n", sim_name);
return SCPE_OK;
}

t_stat sim_show_clock_queues (FILE *st, DEVICE *dptr, UNIT *uptr, int32 flag, CONST char *cptr)
{
int tmr;

for (tmr=0; tmr<=SIM_NTIMERS; ++tmr) {
    if (sim_clock_unit[tmr] == NULL)
        continue;
    if (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) {
        int32 accum;

        fprintf (st, "%s clock (%s) co-schedule event queue status\n",
                 sim_name, sim_uname(sim_clock_unit[tmr]));
        accum = 0;
        for (uptr = sim_clock_cosched_queue[tmr]; uptr != QUEUE_LIST_END; uptr = uptr->next) {
            if ((dptr = find_dev_from_unit (uptr)) != NULL) {
                fprintf (st, " %s", sim_dname (dptr));
                if (dptr->numunits > 1)
                    fprintf (st, " unit %d", (int32) (uptr - dptr->units));
                }
            else
                fprintf (st, " Unknown");
            if (accum > 0)
                fprintf (st, " after %d ticks", accum);
            fprintf (st, "\n");
            accum = accum + uptr->time;
            }
        }
    }
return SCPE_OK;
}

REG sim_timer_reg[] = {
    { NULL }
    };

t_stat sim_timer_clr_catchup (UNIT *uptr, int32 val, CONST char *cptr, void *desc)
{
if (sim_catchup_ticks)
    sim_catchup_ticks = FALSE;
return SCPE_OK;
}

t_stat sim_timer_set_catchup (UNIT *uptr, int32 val, CONST char *cptr, void *desc)
{
if (!sim_catchup_ticks)
    sim_catchup_ticks = TRUE;
return SCPE_OK;
}

t_stat sim_timer_show_catchup (FILE *st, UNIT *uptr, int32 val, CONST void *desc)
{
fprintf (st, "Calibrated Ticks%s", sim_catchup_ticks ? " with Catchup Ticks" : "");
return SCPE_OK;
}

MTAB sim_timer_mod[] = {
  { MTAB_VDV, MTAB_VDV, "CATCHUP", "CATCHUP",
    &sim_timer_set_catchup, &sim_timer_show_catchup, NULL, "Enables/Displays Clock Tick catchup mode" },
  { MTAB_VDV, 0, NULL, "NOCATCHUP",
    &sim_timer_clr_catchup, NULL, NULL, "Disables Clock Tick catchup mode" },
  { 0 },
};

static t_stat sim_timer_clock_reset (DEVICE *dptr);

DEVICE sim_timer_dev = {
    "TIMER", sim_timer_units, sim_timer_reg, sim_timer_mod,
    SIM_NTIMERS+1, 0, 0, 0, 0, 0,
    NULL, NULL, &sim_timer_clock_reset, NULL, NULL, NULL,
    NULL, DEV_DEBUG | DEV_NOSAVE, 0, sim_timer_debug};

t_stat sim_timer_tick_svc (UNIT *uptr)
{
int tmr = (int)(uptr-sim_timer_units);
t_stat stat;

rtc_clock_ticks[tmr] += 1;
rtc_calib_tick_time[tmr] += rtc_clock_tick_size[tmr];

if (sim_clock_unit[tmr]->action == NULL)
    return SCPE_IERR;
stat = sim_clock_unit[tmr]->action (sim_clock_unit[tmr]);
--sim_cosched_interval[tmr];
if (stat == SCPE_OK) {
    if (rtc_clock_catchup_eligible[tmr]) {
        struct timespec now;
        double skew;

        clock_gettime(CLOCK_REALTIME, &now);
        skew = (_timespec_to_double(&now) - (rtc_calib_tick_time[tmr]+rtc_clock_catchup_base_time[tmr]));

        if (fabs(skew) > fabs(rtc_clock_skew_max[tmr]))
            rtc_clock_skew_max[tmr] = skew;
        }
    while ((sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) &&
           (sim_cosched_interval[tmr] < sim_clock_cosched_queue[tmr]->time)) {
        UNIT *cptr = sim_clock_cosched_queue[tmr];
        sim_clock_cosched_queue[tmr] = cptr->next;
        cptr->next = NULL;
        cptr->cancel = NULL;
        _sim_activate (cptr, 0);
        }
    if (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END)
        sim_cosched_interval[tmr] = sim_clock_cosched_queue[tmr]->time;
    else
        sim_cosched_interval[tmr] = 0;
    }
sim_timer_activate_after (uptr, 1000000/rtc_hz[tmr]);
return stat;
}
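
/*
 * Note (not part of the original source): sim_timer_tick_svc() is the service
 * routine for the TIMER device's own units.  Each call accounts one tick of
 * wall-clock time (rtc_calib_tick_time), invokes the registered clock unit's
 * own action routine, and then releases any co-scheduled units whose tick
 * counts have expired.  The skew computed above is wall-clock time minus the
 * accumulated tick time, so a positive peak skew is reported as "slow" and a
 * negative one as "fast" by sim_show_timers().
 */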

#if !defined(__CYGWIN__) && \
    ( defined(_WIN32) || defined(__MINGW32__) || defined(__MINGW64__) || \
      defined(CROSS_MINGW32) || defined(CROSS_MINGW64) )
void win32_usleep(__int64 usec)
{
HANDLE timer;
LARGE_INTEGER ft;

ft.QuadPart = -(10*usec);

timer = CreateWaitableTimer(NULL, TRUE, NULL);
SetWaitableTimer(timer, &ft, 0, NULL, NULL, 0);
WaitForSingleObject(timer, INFINITE);
CloseHandle(timer);
}
#endif

int
sim_usleep(useconds_t tusleep)
{
#if ( !defined(__APPLE__) && !defined(__OpenBSD__) )
# if !defined(__CYGWIN__) && \
     ( defined(_WIN32) || defined(__MINGW32__) || defined(__MINGW64__) || \
       defined(CROSS_MINGW32) || defined(CROSS_MINGW64) )
win32_usleep(tusleep);

return 0;
# else
#  if !defined(__PASE__)
struct timespec rqt;
rqt.tv_sec = tusleep / 1000000L;
rqt.tv_nsec = (tusleep % 1000000L) * 1000L;

return clock_nanosleep(CLOCK_MONOTONIC, 0, &rqt, NULL);
#  else
return usleep(tusleep);
#  endif
# endif
#else
# if defined(__APPLE__)
struct timespec rqt;
rqt.tv_sec = tusleep / 1000000L;
rqt.tv_nsec = (tusleep % 1000000L) * 1000L;
return nanosleep(&rqt, NULL);
# else
return usleep(tusleep);
# endif
#endif
}
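
/*
 * Usage sketch (not part of the original source): sim_usleep() is the
 * portable sub-millisecond sleep; the platform branches above select a
 * waitable timer on Windows, clock_nanosleep() where it is available, and
 * nanosleep()/usleep() otherwise.  Compiled out; half_millisecond_pause is a
 * hypothetical helper.
 */
#if 0
static void half_millisecond_pause (void)
{
(void) sim_usleep (500);                        /* sleep roughly half a millisecond */
}
#endif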

static double _timespec_to_double (struct timespec *time)
{
return ((double)time->tv_sec)+(double)(time->tv_nsec)/1000000000.0;
}

static void _double_to_timespec (struct timespec *time, double dtime)
{
time->tv_sec = (time_t)floor(dtime);
time->tv_nsec = (long)((dtime-floor(dtime))*1000000000.0);
}
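
/*
 * Worked example (not part of the original source): these helpers convert
 * between struct timespec and a double count of seconds.
 * _double_to_timespec(&ts, 1234.5) yields ts = { 1234, 500000000 }, and
 * _timespec_to_double(&ts) turns that back into 1234.5.  Sub-nanosecond
 * precision is lost in the round trip, which is harmless at the millisecond
 * granularity used here.
 */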

#define CLK_TPS 10
#define CLK_INIT (SIM_INITIAL_IPS/CLK_TPS)
static int32 sim_int_clk_tps;

static t_stat sim_timer_clock_tick_svc (UNIT *uptr)
{
sim_rtcn_calb (sim_int_clk_tps, SIM_INTERNAL_CLK);
sim_activate_after (uptr, 1000000/sim_int_clk_tps);
return SCPE_OK;
}

static void _rtcn_configure_calibrated_clock (int32 newtmr)
{
int32 tmr;

sim_int_clk_tps = MIN(CLK_TPS, sim_os_tick_hz);
for (tmr=0; tmr<SIM_NTIMERS; tmr++) {
    if ((rtc_hz[tmr]) &&
        (rtc_hz[tmr] <= (uint32)sim_os_tick_hz))
        break;
    }
if (tmr == SIM_NTIMERS) {
    if ((tmr != newtmr) && (!sim_is_active (&SIM_INTERNAL_UNIT))) {
        sim_calb_tmr = SIM_NTIMERS;
        sim_debug (DBG_CAL, &sim_timer_dev,
                   "_rtcn_configure_calibrated_clock() - Starting Internal Calibrated Timer at %dHz\n",
                   sim_int_clk_tps);
        SIM_INTERNAL_UNIT.action = &sim_timer_clock_tick_svc;
        SIM_INTERNAL_UNIT.flags = UNIT_DIS | UNIT_IDLE;
        sim_activate_abs (&SIM_INTERNAL_UNIT, 0);
        sim_rtcn_init_unit (&SIM_INTERNAL_UNIT, (CLK_INIT*CLK_TPS)/sim_int_clk_tps, SIM_INTERNAL_CLK);
        }
    return;
    }
if ((tmr == newtmr) &&
    (sim_calb_tmr == newtmr))
    return;
if (sim_calb_tmr == SIM_NTIMERS) {
    sim_debug (DBG_CAL, &sim_timer_dev,
               "_rtcn_configure_calibrated_clock() - Stopping Internal Calibrated Timer, New Timer = %d (%dHz)\n",
               tmr, rtc_hz[tmr]);
    rtc_initd[SIM_NTIMERS] = 0;
    rtc_hz[SIM_NTIMERS] = 0;
    sim_cancel (&SIM_INTERNAL_UNIT);

    while (sim_clock_cosched_queue[SIM_NTIMERS] != QUEUE_LIST_END) {
        UNIT *uptr = sim_clock_cosched_queue[SIM_NTIMERS];
        _sim_coschedule_cancel (uptr);
        _sim_activate (uptr, 1);
        }
    }
else {
    sim_debug (DBG_CAL, &sim_timer_dev,
               "_rtcn_configure_calibrated_clock() - Changing Calibrated Timer from %d (%dHz) to %d (%dHz)\n",
               sim_calb_tmr, rtc_hz[sim_calb_tmr], tmr, rtc_hz[tmr]);
    sim_calb_tmr = tmr;
    }
sim_calb_tmr = tmr;
}

static t_stat sim_timer_clock_reset (DEVICE *dptr)
{
sim_debug (DBG_TRC, &sim_timer_dev, "sim_timer_clock_reset()\n");
_rtcn_configure_calibrated_clock (sim_calb_tmr);
if (sim_switches & SWMASK ('P')) {
    sim_cancel (&SIM_INTERNAL_UNIT);
    sim_calb_tmr = -1;
    }
return SCPE_OK;
}

void sim_start_timer_services (void)
{
sim_debug (DBG_TRC, &sim_timer_dev, "sim_start_timer_services()\n");
_rtcn_configure_calibrated_clock (sim_calb_tmr);
}

void sim_stop_timer_services (void)
{
int tmr;

sim_debug (DBG_TRC, &sim_timer_dev, "sim_stop_timer_services()\n");

for (tmr=0; tmr<=SIM_NTIMERS; tmr++) {
    int32 accum;

    if (sim_clock_unit[tmr]) {
        sim_cancel (&sim_timer_units[tmr]);
        if (rtc_hz[tmr])
            sim_activate (sim_clock_unit[tmr], rtc_currd[tmr]);

        accum = 1;
        while (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) {
            UNIT *cptr = sim_clock_cosched_queue[tmr];

            sim_clock_cosched_queue[tmr] = cptr->next;
            cptr->next = NULL;
            cptr->cancel = NULL;

            accum += cptr->time;
            _sim_activate (cptr, accum*rtc_currd[tmr]);
            }
        }
    }
sim_cancel (&SIM_INTERNAL_UNIT);
sim_calb_tmr_last = sim_calb_tmr;
sim_inst_per_sec_last = sim_timer_inst_per_sec ();
sim_calb_tmr = -1;
}

double sim_timer_inst_per_sec (void)
{
double inst_per_sec = SIM_INITIAL_IPS;

if (sim_calb_tmr == -1)
    return inst_per_sec;
inst_per_sec = ((double)rtc_currd[sim_calb_tmr])*rtc_hz[sim_calb_tmr];
if (0 == inst_per_sec)
    inst_per_sec = ((double)rtc_currd[sim_calb_tmr])*sim_int_clk_tps;
return inst_per_sec;
}
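
/*
 * Worked example (not part of the original source): with a calibrated timer
 * running at 60 Hz and rtc_currd at 8000 instructions per tick,
 * sim_timer_inst_per_sec() reports 8000 * 60 = 480000 instructions per
 * second.  Before any timer has calibrated (sim_calb_tmr == -1) the
 * SIM_INITIAL_IPS compile-time estimate is returned instead.
 */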

t_stat sim_timer_activate (UNIT *uptr, int32 interval)
{
return sim_timer_activate_after (uptr, (uint32)((interval * 1000000.0) / sim_timer_inst_per_sec ()));
}

t_stat sim_timer_activate_after (UNIT *uptr, uint32 usec_delay)
{
int inst_delay, tmr;
double inst_delay_d, inst_per_sec;

for (tmr=0; tmr<=SIM_NTIMERS; tmr++)
    if (sim_clock_unit[tmr] == uptr) {
        uptr = &sim_timer_units[tmr];
        break;
        }
if (sim_is_active (uptr))
    return SCPE_OK;
inst_per_sec = sim_timer_inst_per_sec ();
inst_delay_d = ((inst_per_sec*usec_delay)/1000000.0);

if (inst_delay_d > (double)0x7fffffff)
    inst_delay_d = (double)0x7fffffff;
inst_delay = (int32)inst_delay_d;
if ((inst_delay == 0) && (usec_delay != 0))
    inst_delay = 1;
sim_debug (DBG_TIM, &sim_timer_dev, "sim_timer_activate_after() - queue addition %s at %d (%d usecs)\n",
           sim_uname(uptr), inst_delay, usec_delay);
return _sim_activate (uptr, inst_delay);
}
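
/*
 * Worked example (not part of the original source): sim_timer_activate_after()
 * converts a wall-clock delay into an instruction-count delay.  At 480000
 * instructions per second, a 2500 usec delay becomes
 *     480000 * 2500 / 1000000 = 1200 instructions,
 * clamped to 0x7fffffff and rounded up to at least 1 instruction for any
 * non-zero request.
 */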

t_stat sim_register_clock_unit_tmr (UNIT *uptr, int32 tmr)
{
if (tmr == SIM_INTERNAL_CLK)
    tmr = SIM_NTIMERS;
else {
    if ((tmr < 0) || (tmr >= SIM_NTIMERS))
        return SCPE_IERR;
    }
if (NULL == uptr) {
    while (sim_clock_cosched_queue[tmr] != QUEUE_LIST_END) {
        UNIT *uptr = sim_clock_cosched_queue[tmr];

        _sim_coschedule_cancel (uptr);
        _sim_activate (uptr, 1);
        }
    sim_clock_unit[tmr] = NULL;
    return SCPE_OK;
    }
if (NULL == sim_clock_unit[tmr])
    sim_clock_cosched_queue[tmr] = QUEUE_LIST_END;
sim_clock_unit[tmr] = uptr;
uptr->dynflags |= UNIT_TMR_UNIT;
sim_timer_units[tmr].flags = UNIT_DIS | (sim_clock_unit[tmr] ? UNIT_IDLE : 0);
return SCPE_OK;
}
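
/*
 * Usage sketch (not part of the original source): passing NULL deregisters the
 * clock unit for a timer slot; any units still on that slot's co-schedule
 * queue are taken off the queue and activated one instruction later so no
 * pending event is lost.  Compiled out; detach_clock_example is hypothetical.
 */
#if 0
static void detach_clock_example (void)
{
(void) sim_register_clock_unit_tmr (NULL, 0);   /* detach the timer slot 0 clock */
}
#endif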

static void _sim_coschedule_cancel (UNIT *uptr)
{
if (uptr->next) {
    int tmr;

    for (tmr=0; tmr<SIM_NTIMERS; tmr++) {
        if (uptr == sim_clock_cosched_queue[tmr]) {
            sim_clock_cosched_queue[tmr] = uptr->next;
            uptr->next = NULL;
            }
        else {
            UNIT *cptr;
            for (cptr = sim_clock_cosched_queue[tmr];
                 (cptr != QUEUE_LIST_END);
                 cptr = cptr->next)
                if (cptr->next == (uptr)) {
                    cptr->next = (uptr)->next;
                    uptr->next = NULL;
                    break;
                    }
            }
        if (uptr->next == NULL) {
            uptr->cancel = NULL;
            sim_debug (SIM_DBG_EVENT, &sim_timer_dev, "Canceled Clock Coscheduled Event for %s\n", sim_uname(uptr));
            return;
            }
        }
    }
}