// This source file includes the following definitions:
//   - doAppendCycleInstructionFetch
word24 doAppendCycleInstructionFetch (cpu_state_t * cpup, word36 * data, uint nWords) {

  static int evcnt = 0;
  DCDstruct * i = & cpu.currentInstruction;
  (void)evcnt;
  DBGAPP ("doAppendCycleInstructionFetch(Entry) thisCycle=INSTRUCTION_FETCH\n");
  DBGAPP ("doAppendCycleInstructionFetch(Entry) lastCycle=%s\n", str_pct (cpu.apu.lastCycle));
  DBGAPP ("doAppendCycleInstructionFetch(Entry) CA %06o\n", cpu.TPR.CA);
  DBGAPP ("doAppendCycleInstructionFetch(Entry) n=%2u\n", nWords);
  DBGAPP ("doAppendCycleInstructionFetch(Entry) PPR.PRR=%o PPR.PSR=%05o\n", cpu.PPR.PRR, cpu.PPR.PSR);
  DBGAPP ("doAppendCycleInstructionFetch(Entry) TPR.TRR=%o TPR.TSR=%05o\n", cpu.TPR.TRR, cpu.TPR.TSR);

  if (i->b29) {
    DBGAPP ("doAppendCycleInstructionFetch(Entry) isb29 PRNO %o\n", GET_PRN (IWB_IRODD));
  }

  uint this = UC_INSTRUCTION_FETCH;

  word24 finalAddress = 0;
  word24 pageAddress = 0;
  word3 RSDWH_R1 = 0;
  word14 bound = 0;
  word1 p = 0;
  bool paged = false;
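  // Micro-cache (ucache) fast path: if an earlier fetch already resolved this
  // (TSR, CA) pair, the full append cycle below can be bypassed.  When
  // TEST_UCACHE is defined the cache is only consulted; the full cycle still
  // runs and the cached values are verified at HI.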

#if defined(TEST_UCACHE)
  bool cacheHit;
  cacheHit = false;
#endif
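  // Cases that must take the full append cycle: RTCD (opcode 0610), a nonzero
  // ring alarm register, and the opcode groups that need a second PTW fetch
  // (see do_ptw2 below).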

  if (i->opcode == 0610 && ! i->opcodeX) {
    goto skip_ucache;
  }

  if (cpu.rRALR) {
    goto skip_ucache;
  }

  if (i->opcodeX && ((i->opcode & 0770) == 0200 || (i->opcode & 0770) == 0220
      || (i->opcode & 0770) == 020 || (i->opcode & 0770) == 0300)) {
    goto skip_ucache;
  }
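  // Consult the ucache.  Under TEST_UCACHE, record whether it hit but always
  // fall through to the full cycle so the cached values can be checked later.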
#if defined(TEST_UCACHE)
  word24 cachedAddress;
  word3 cachedR1;
  word14 cachedBound;
  word1 cachedP;
  bool cachedPaged;
  cacheHit =
    ucCacheCheck (cpup, this, cpu.TPR.TSR, cpu.TPR.CA, & cachedBound, & cachedP, & cachedAddress, & cachedR1, & cachedPaged);
  goto miss_ucache;
#else
  if (! ucCacheCheck (cpup, this, cpu.TPR.TSR, cpu.TPR.CA, & bound, & p, & pageAddress, & RSDWH_R1, & paged))
    goto miss_ucache;
#endif
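  // Cache hit: rebuild the final address from the cached page address and the
  // offset in CA, then jump straight to the memory access.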
  if (paged) {
    finalAddress = pageAddress + (cpu.TPR.CA & OS18MASK);
  } else {
    finalAddress = pageAddress + cpu.TPR.CA;
  }
  cpu.RSDWH_R1 = RSDWH_R1;

  cpu.apu.lastCycle = INSTRUCTION_FETCH;
  goto HI;
skip_ucache:;

#if defined(UCACHE_STATS)
  cpu.uCache.skips[this] ++;
#endif

miss_ucache:;
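  // When the WAMs are enabled, force a "no match" on the SDWAM/PTWAM lookups
  // below for the opcodes listed here; when the WAMs are disabled, the
  // associative memories are bypassed for every access.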
  bool nomatch = true;
  if (cpu.tweaks.enable_wam) {
    nomatch = ((i->opcode == 0232 || i->opcode == 0254 ||
                i->opcode == 0154 || i->opcode == 0173) &&
                i->opcodeX ) ||
              ((i->opcode == 0557 || i->opcode == 0257) &&
               ! i->opcodeX);
  }

  processor_cycle_type lastCycle = cpu.apu.lastCycle;
  cpu.apu.lastCycle = INSTRUCTION_FETCH;

  DBGAPP ("doAppendCycleInstructionFetch(Entry) XSF %o\n", cpu.cu.XSF);

  PNL (L68_ (cpu.apu.state = 0;))

  cpu.RSDWH_R1 = 0;

  cpu.acvFaults = 0;

#define FMSG(x)
  FMSG (char * acvFaultsMsg = "<unknown>";)

  PNL (cpu.APUMemAddr = cpu.TPR.CA;)
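  // A: Fetch the SDW for TPR.TSR, from the SDWAM if possible, otherwise by
  // walking the descriptor segment (paged or unpaged, per DSBR.U).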
  DBGAPP ("doAppendCycleInstructionFetch(A)\n");

  if (nomatch || ! fetch_sdw_from_sdwam (cpup, cpu.TPR.TSR)) {
    DBGAPP ("doAppendCycleInstructionFetch(A):SDW for segment %05o not in SDWAM\n", cpu.TPR.TSR);
    DBGAPP ("doAppendCycleInstructionFetch(A):DSBR.U=%o\n", cpu.DSBR.U);

    if (cpu.DSBR.U == 0) {
      fetch_dsptw (cpup, cpu.TPR.TSR);

      if (! cpu.PTW0.DF)
        doFault (FAULT_DF0 + cpu.PTW0.FC, fst_zero, "doAppendCycleInstructionFetch(A): PTW0.F == 0");

      if (! cpu.PTW0.U)
        modify_dsptw (cpup, cpu.TPR.TSR);

      fetch_psdw (cpup, cpu.TPR.TSR);
    } else
      fetch_nsdw (cpup, cpu.TPR.TSR);

    if (cpu.SDW0.DF == 0) {
      DBGAPP ("doAppendCycleInstructionFetch(A): SDW0.F == 0! " "Initiating directed fault\n");
      doFault (FAULT_DF0 + cpu.SDW0.FC, fst_zero, "SDW0.F == 0");
    }

    load_sdwam (cpup, cpu.TPR.TSR, nomatch);
  }
  DBGAPP ("doAppendCycleInstructionFetch(A) R1 %o R2 %o R3 %o E %o\n", cpu.SDW->R1, cpu.SDW->R2, cpu.SDW->R3, cpu.SDW->E);

  RSDWH_R1 = cpu.RSDWH_R1 = cpu.SDW->R1;
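  // B: Check that the ring brackets are well ordered (R1 <= R2 <= R3), then
  // route: RTCD goes through the checks at C, everything else through F.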
  DBGAPP ("doAppendCycleInstructionFetch(B)\n");

  if (! (cpu.SDW->R1 <= cpu.SDW->R2 && cpu.SDW->R2 <= cpu.SDW->R3)) {
    cpu.acvFaults |= ACV0;
    PNL (L68_ (cpu.apu.state |= apu_FLT;))
    FMSG (acvFaultsMsg = "acvFaults(B) C(SDW.R1) <= C(SDW.R2) <= " "C(SDW .R3)";)
  }

  if (i->opcode == 0610 && ! i->opcodeX)
    goto C;

  if (lastCycle == RTCD_OPERAND_FETCH)
    sim_warn ("%s: lastCycle == RTCD_OPERAND_FETCH opcode %0#o\n", __func__, i->opcode);

  goto F;
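  // C: RTCD access checks: TPR.TRR must lie within the execute bracket
  // [R1, R2] (ACV1), the segment must be executable (ACV2), and the target
  // ring may not be more privileged than PPR.PRR (ACV11).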
C:;
  DBGAPP ("doAppendCycleInstructionFetch(C)\n");

  if (cpu.TPR.TRR < cpu.SDW->R1 || cpu.TPR.TRR > cpu.SDW->R2) {
    DBGAPP ("ACV1 c\n");
    DBGAPP ("acvFaults(C) ACV1 ! ( C(SDW .R1) %o <= C(TPR.TRR) %o <= C(SDW .R2) %o )\n", cpu.SDW->R1, cpu.TPR.TRR, cpu.SDW->R2);
    cpu.acvFaults |= ACV1;
    PNL (L68_ (cpu.apu.state |= apu_FLT;))
    FMSG (acvFaultsMsg = "acvFaults(C) C(SDW.R1 > C(TPR.TRR) > C(SDW.R2)";)
  }

  if (! cpu.SDW->E) {
    DBGAPP ("ACV2 a\n");
    DBGAPP ("doAppendCycleInstructionFetch(C) ACV2\n");
    cpu.acvFaults |= ACV2;
    PNL (L68_ (cpu.apu.state |= apu_FLT;))
    FMSG (acvFaultsMsg = "acvFaults(C) SDW.E";)
  }

  if (cpu.TPR.TRR > cpu.PPR.PRR)
    sim_warn ("rtcd: outbound call cpu.TPR.TRR %d cpu.PPR.PRR %d\n", cpu.TPR.TRR, cpu.PPR.PRR);

  if (cpu.TPR.TRR < cpu.PPR.PRR) {
    DBGAPP ("ACV11\n");
    DBGAPP ("doAppendCycleInstructionFetch(C) ACV11\n");
    cpu.acvFaults |= ACV11;
    PNL (L68_ (cpu.apu.state |= apu_FLT;))
    FMSG (acvFaultsMsg = "acvFaults(C) TRR>=PRR";)
  }
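  // D: Ring alarm check: if RALR is nonzero, PPR.PRR must be below it,
  // otherwise ACV13.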
D:;
  DBGAPP ("doAppendCycleInstructionFetch(D)\n");

  if (cpu.rRALR == 0)
    goto G;

  if (! (cpu.PPR.PRR < cpu.rRALR)) {
    DBGAPP ("ACV13\n");
    DBGAPP ("acvFaults(D) C(PPR.PRR) %o < RALR %o\n", cpu.PPR.PRR, cpu.rRALR);
    cpu.acvFaults |= ACV13;
    PNL (L68_ (cpu.apu.state |= apu_FLT;))
    FMSG (acvFaultsMsg = "acvFaults(D) C(PPR.PRR) < RALR";)
  }

  goto G;
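  // F: Transfer or instruction fetch checks: TPR.TRR must be within the
  // execute bracket (ACV1), the segment must be executable (ACV2), and
  // TPR.TRR must equal PPR.PRR (ACV12); then fall into D.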
F:;
  PNL (L68_ (cpu.apu.state |= apu_PIAU;))
  DBGAPP ("doAppendCycleInstructionFetch(F): transfer or instruction fetch\n");

  if (cpu.TPR.TRR < cpu.SDW->R1 || cpu.TPR.TRR > cpu.SDW->R2) {
    DBGAPP ("ACV1 a/b\n");
    DBGAPP ("acvFaults(F) ACV1 !( C(SDW .R1) %o <= C(TPR.TRR) %o <= C(SDW .R2) %o )\n", cpu.SDW->R1, cpu.TPR.TRR, cpu.SDW->R2);
    cpu.acvFaults |= ACV1;
    PNL (L68_ (cpu.apu.state |= apu_FLT;))
    FMSG (acvFaultsMsg = "acvFaults(F) C(TPR.TRR) < C(SDW .R1)";)
  }

  if (! cpu.SDW->E) {
    DBGAPP ("ACV2 c \n");
    DBGAPP ("doAppendCycleInstructionFetch(F) ACV2\n");
    cpu.acvFaults |= ACV2;
    PNL (L68_ (cpu.apu.state |= apu_FLT;))
    FMSG (acvFaultsMsg = "acvFaults(F) SDW .E set OFF";)
  }

  if (cpu.PPR.PRR != cpu.TPR.TRR) {
    DBGAPP ("ACV12\n");
    DBGAPP ("doAppendCycleInstructionFetch(F) ACV12\n");
    cpu.acvFaults |= ACV12;
    PNL (L68_ (cpu.apu.state |= apu_FLT;))
    FMSG (acvFaultsMsg = "acvFaults(F) C(PPR.PRR) != C(TPR.TRR)";)
  }

  goto D;
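  // G: Bound check (ACV15) and delivery of any accumulated access violations.
  // Unpaged segments go to H; paged segments fetch the PTW (and a second PTW
  // for the opcode groups noted above) and go to I.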
G:;

  DBGAPP ("doAppendCycleInstructionFetch(G)\n");

  if (((cpu.TPR.CA >> 4) & 037777) > cpu.SDW->BOUND) {
    DBGAPP ("ACV15\n");
    DBGAPP ("doAppendCycleInstructionFetch(G) ACV15\n");
    cpu.acvFaults |= ACV15;
    PNL (L68_ (cpu.apu.state |= apu_FLT;))
    FMSG (acvFaultsMsg = "acvFaults(G) C(TPR.CA)0,13 > SDW.BOUND";)
    DBGAPP ("acvFaults(G) C(TPR.CA)0,13 > SDW.BOUND\n" " CA %06o CA>>4 & 037777 %06o SDW->BOUND %06o",
            cpu.TPR.CA, ((cpu.TPR.CA >> 4) & 037777), cpu.SDW->BOUND);
  }
  bound = cpu.SDW->BOUND;
  p = cpu.SDW->P;

  if (cpu.acvFaults) {
    DBGAPP ("doAppendCycleInstructionFetch(G) acvFaults\n");
    PNL (L68_ (cpu.apu.state |= apu_FLT;))
    doFault (FAULT_ACV, (_fault_subtype) {.fault_acv_subtype=cpu.acvFaults}, "ACV fault");
  }

  if (cpu.SDW->U)
    goto H;

  DBGAPP ("doAppendCycleInstructionFetch(G) CA %06o\n", cpu.TPR.CA);
  if (nomatch ||
      ! fetch_ptw_from_ptwam (cpup, cpu.SDW->POINTER, cpu.TPR.CA)) {
    fetch_ptw (cpup, cpu.SDW, cpu.TPR.CA);
    if (! cpu.PTW0.DF) {
      doFault (FAULT_DF0 + cpu.PTW0.FC, (_fault_subtype) {.bits=0}, "PTW0.F == 0");
    }
    loadPTWAM (cpup, cpu.SDW->POINTER, cpu.TPR.CA, nomatch);
  }

  if (i->opcodeX && ((i->opcode & 0770) == 0200 || (i->opcode & 0770) == 0220
      || (i->opcode & 0770) == 020 || (i->opcode & 0770) == 0300)) {
    do_ptw2 (cpup, cpu.SDW, cpu.TPR.CA);
  }
  goto I;
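  // H: Final address, unpaged segment: SDW.ADDR plus the computed address.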
H:;
  DBGAPP ("doAppendCycleInstructionFetch(H): FANP\n");

  paged = false;

  PNL (L68_ (cpu.apu.state |= apu_FANP;))

  set_apu_status (cpup, apuStatus_FANP);

  DBGAPP ("doAppendCycleInstructionFetch(H): SDW->ADDR=%08o CA=%06o \n", cpu.SDW->ADDR, cpu.TPR.CA);

  pageAddress = (cpu.SDW->ADDR & 077777760);
  finalAddress = (cpu.SDW->ADDR & 077777760) + cpu.TPR.CA;
  finalAddress &= 0xffffff;
  PNL (cpu.APUMemAddr = finalAddress;)

  DBGAPP ("doAppendCycleInstructionFetch(H:FANP): (%05o:%06o) finalAddress=%08o\n", cpu.TPR.TSR, cpu.TPR.CA, finalAddress);

  goto HI;
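  // I: Final address, paged segment: PTW.ADDR (page frame) plus the offset
  // within the page.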
I:;

  DBGAPP ("doAppendCycleInstructionFetch(I): FAP\n");

  paged = true;

  set_apu_status (cpup, apuStatus_FAP);
  PNL (L68_ (cpu.apu.state |= apu_FAP;))

  word24 y2 = cpu.TPR.CA % 1024;

  pageAddress = (((word24)cpu.PTW->ADDR & 0777760) << 6);

  finalAddress = (((word24)cpu.PTW->ADDR & 0777760) << 6) + y2;
  finalAddress &= 0xffffff;
  PNL (cpu.APUMemAddr = finalAddress;)

#if defined(L68)
  if (cpu.MR_cache.emr && cpu.MR_cache.ihr)
    add_APU_history (APUH_FAP);
#endif
  DBGAPP ("doAppendCycleInstructionFetch(I:FAP): (%05o:%06o) finalAddress=%08o\n", cpu.TPR.TSR, cpu.TPR.CA, finalAddress);

HI:
  DBGAPP ("doAppendCycleInstructionFetch(HI)\n");
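  // Under TEST_UCACHE, compare the translation the cache returned earlier
  // against the values just produced by the full append cycle.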
#if defined(TEST_UCACHE)
  if (cacheHit) {
    bool err = false;
    if (cachedAddress != pageAddress) {
      sim_printf ("cachedAddress %08o != pageAddress %08o\r\n", cachedAddress, pageAddress);
      err = true;
    }
    if (cachedR1 != RSDWH_R1) {
      sim_printf ("cachedR1 %01o != RSDWH_R1 %01o\r\n", cachedR1, RSDWH_R1);
      err = true;
    }
    if (cachedBound != bound) {
      sim_printf ("cachedBound %01o != bound %01o\r\n", cachedBound, bound);
      err = true;
    }
    if (cachedPaged != paged) {
      sim_printf ("cachedPaged %01o != paged %01o\r\n", cachedPaged, paged);
      err = true;
    }
    if (err) {
# if defined(HDBG)
      HDBGPrint ();
# endif
      sim_printf ("ins fetch err %d %05o:%06o\r\n", evcnt, cpu.TPR.TSR, cpu.TPR.CA);
      exit (1);
    }

# if defined(HDBG)
    hdbgNote ("doAppendCycleInstructionFetch.h", "test hit %d %05o:%06o\r\n", evcnt, cpu.TPR.TSR, cpu.TPR.CA);
# endif
  } else {
# if defined(HDBG)
    hdbgNote ("doAppendCycleInstructionFetch.h", "test miss %d %05o:%06o\r\n", evcnt, cpu.TPR.TSR, cpu.TPR.CA);
# endif
  }
#endif
#if defined(TEST_UCACHE)
  if (cacheHit) {
    if (cachedPaged != paged)
      sim_printf ("cachedPaged %01o != paged %01o\r\n", cachedPaged, paged);
  } else {
  }
#endif
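  // Record this translation in the ucache, then perform the actual read of
  // the instruction word(s).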

  ucCacheSave (cpup, this, cpu.TPR.TSR, cpu.TPR.CA, bound, p, pageAddress, RSDWH_R1, paged);
  evcnt ++;

  cpu.cu.XSF = 1;
  sim_debug (DBG_TRACEEXT, & cpu_dev, "loading of cpu.TPR.TSR sets XSF to 1\n");

  core_readN (cpup, finalAddress, data, nWords, "INSTRUCTION_FETCH");
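  // L: For RTCD, all pointer register ring numbers (PRn.RNR) are set to the
  // new ring, TPR.TRR.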
  DBGAPP ("doAppendCycleInstructionFetch(L)\n");

  if (i->opcode == 0610 && ! i->opcodeX) {
    CPTUR (cptUsePRn + 0);
    CPTUR (cptUsePRn + 1);
    CPTUR (cptUsePRn + 2);
    CPTUR (cptUsePRn + 3);
    CPTUR (cptUsePRn + 4);
    CPTUR (cptUsePRn + 5);
    CPTUR (cptUsePRn + 6);
    CPTUR (cptUsePRn + 7);
    cpu.PR[0].RNR =
    cpu.PR[1].RNR =
    cpu.PR[2].RNR =
    cpu.PR[3].RNR =
    cpu.PR[4].RNR =
    cpu.PR[5].RNR =
    cpu.PR[6].RNR =
    cpu.PR[7].RNR = cpu.TPR.TRR;
#if defined(TESTING)
    HDBGRegPRW (0, "app rtcd");
    HDBGRegPRW (1, "app rtcd");
    HDBGRegPRW (2, "app rtcd");
    HDBGRegPRW (3, "app rtcd");
    HDBGRegPRW (4, "app rtcd");
    HDBGRegPRW (5, "app rtcd");
    HDBGRegPRW (6, "app rtcd");
    HDBGRegPRW (7, "app rtcd");
#endif
  }
  goto KL;
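  // KL, M: Load PPR from TPR; PPR.P is set from SDW.P only when entering
  // ring 0, otherwise it is cleared.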
KL:
  DBGAPP ("doAppendCycleInstructionFetch(KL)\n");

  cpu.PPR.PSR = cpu.TPR.TSR;
  cpu.PPR.IC = cpu.TPR.CA;

  goto M;

M:
  DBGAPP ("doAppendCycleInstructionFetch(M)\n");

  if (cpu.TPR.TRR == 0) {
    cpu.PPR.P = p;
  } else {
    cpu.PPR.P = 0;
  }

  PNL (cpu.APUDataBusOffset = cpu.TPR.CA;)
  PNL (cpu.APUDataBusAddr = finalAddress;)

  PNL (L68_ (cpu.apu.state |= apu_FA;))

  DBGAPP ("doAppendCycleInstructionFetch (Exit) PRR %o PSR %05o P %o IC %06o\n",
          cpu.PPR.PRR, cpu.PPR.PSR, cpu.PPR.P, cpu.PPR.IC);
  DBGAPP ("doAppendCycleInstructionFetch (Exit) TRR %o TSR %05o TBR %02o CA %06o\n",
          cpu.TPR.TRR, cpu.TPR.TSR, cpu.TPR.TBR, cpu.TPR.CA);

  return finalAddress;
}
#undef TEST_UCACHE