Plan 9 from Bell Labs’s /usr/web/sources/extra/9hist/mpc/l.s

Copyright © 2021 Plan 9 Foundation.
Distributed under the MIT License.


## diffname mpc/l.s 1999/0121
## diff -e /dev/null /n/emeliedump/1999/0121/sys/src/brazil/mpc/l.s
0a
#include	"mem.h"

/*
 * common ppc special purpose registers
 */
#define DSISR	18
#define DAR	19	/* Data Address Register */
#define DEC	22	/* Decrementer */
#define SRR0	26	/* Saved Registers (exception) */
#define SRR1	27
#define SPRG0	272	/* Supervisor Private Registers */
#define SPRG1	273
#define SPRG2	274
#define SPRG3	275
#define TBRU	269	/* Time base Upper/Lower (Reading) */
#define TBRL	268
#define TBWU	285	/* Time base Upper/Lower (Writing) */
#define TBWL	284
#define PVR	287	/* Processor Version */

/*
 * mpc8xx-specific special purpose registers of interest here
 */
#define EIE		80
#define EID		81
#define NRI		82
#define IMMR		638
#define IC_CST		560
#define IC_ADR		561
#define IC_DAT		562
#define DC_CST		568
#define DC_ADR		569
#define DC_DAT		570
#define MI_CTR		784
#define MI_AP		786
#define MI_EPN		787
#define MI_TWC		789
#define MI_RPN		790
#define MI_DBCAM	816
#define MI_DBRAM0	817
#define MI_DBRAM1	818
#define MD_CTR		792
#define M_CASID		793
#define MD_AP		794
#define MD_EPN		795
#define M_TWB		796
#define MD_TWC		797
#define MD_RPN		798
#define	M_TW		799
#define	MD_DBCAM	824
#define	MD_DBRAM0	825
#define	MD_DBRAM1	826

/* use of SPRG registers in save/restore */
#define	SAVER0	SPRG0
#define	SAVER1	SPRG1
#define	SAVELR	SPRG2
#define	SAVEXX	SPRG3

/* special instruction definitions */
#define	BDNZ	BC	16,0,
#define	BDNE	BC	0,2,
#define	TLBIA	WORD	$(31<<26)
#define	MFTB(tbr,d)	WORD	$((31<<26)|((d)<<21)|((tbr&0x1f)<<16)|(((tbr>>5)&0x1f)<<11)|(371<<1))
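/*
 * MFTB builds the mftb instruction word by hand (the assembler apparently
 * has no mnemonic for it); the TBR number is placed in split 5-bit halves,
 * low half at bit 16, high half at bit 11.  worked example, MFTB(TBRL, 3):
 * TBRL = 268 = (8<<5)|12, giving (31<<26)|(3<<21)|(12<<16)|(8<<11)|(371<<1).
 */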

/* on some models mtmsr doesn't synchronise enough (eg, 603e) */
#define	MSRSYNC	SYNC; ISYNC

#define	UREGSPACE	(UREGSIZE+8)

	TEXT start(SB), $-4

	/* turn off interrupts but enable traps */
	MOVW	MSR, R3
	MOVW	$~(MSR_EE|MSR_FP), R4
	AND	R4, R3
	OR	$(MSR_IP|MSR_ME), R3
	ISYNC
	MOVW	R3, MSR
	MSRSYNC

	MOVW	$0, R0	/* except during trap handling, R0 is zero from now on */
	MOVW	$setSB(SB), R2

/*
 * reset the caches and disable them for now
 */
	MOVW	SPR(IC_CST), R4	/* read and clear */
	MOVW	$(5<<25), R4
	MOVW	R4, SPR(IC_CST)	/* unlock all */
	ISYNC
	MOVW	$(6<<25), R4
	MOVW	R4, SPR(IC_CST)	/* invalidate all */
	ISYNC
	MOVW	$(2<<25), R4
	MOVW	R4, SPR(IC_CST)	/* disable i-cache */
	ISYNC

	SYNC
	MOVW	SPR(DC_CST), R4	/* read and clear */
	MOVW	$(10<<24), R4
	SYNC
	MOVW	R4, SPR(DC_CST)	/* unlock all */
	ISYNC
	MOVW	$(12<<24), R4
	SYNC
	MOVW	R4, SPR(DC_CST)	/* invalidate all */
	ISYNC
	MOVW	$(4<<24), R4
	SYNC
	MOVW	R4, SPR(DC_CST)	/* disable d-cache */
	ISYNC

	MOVW	$7, R4
	MOVW	R4, SPR(158)		/* cancel `show cycle' for normal instruction execution */
	ISYNC

/*
 * set other system configuration values
 */
	MOVW	$INTMEM, R4
	MOVW	R4, SPR(IMMR)		/* set internal memory base */

	MOVW	$mach0(SB), R(MACH)
	ADD	$(MACHSIZE-8), R(MACH), R1
	SUB	$4, R(MACH), R3
	ADD	$4, R1, R4
clrmach:
	MOVWU	R0, 4(R3)
	CMP	R3, R4
	BNE	clrmach

	MOVW	R0, R(USER)
	MOVW	R0, 0(R(MACH))

	MOVW	$edata(SB), R3
	MOVW	$end(SB), R4
	ADD	$4, R4
	SUB	$4, R3
clrbss:
	MOVWU	R0, 4(R3)
	CMP	R3, R4
	BNE	clrbss

	BL	main(SB)
	BR	0(PC)

TEXT	kernelmmu(SB), $0
	TLBIA
	ISYNC

	MOVW	$0, R4
	MOVW	R4, SPR(M_CASID)	/* set supervisor space */
	MOVW	$(0<<29), R4		/* allow i-cache when IR=0 */
	MOVW	R4, SPR(MI_CTR)	/* i-mmu control */
	ISYNC
	MOVW	$((1<<29)|(1<<28)), R4	/* cache inhibit when DR=0, write-through */
	SYNC
	MOVW	R4, SPR(MD_CTR)	/* d-mmu control */
	ISYNC
	TLBIA

	/* map various things 1:1 */
	MOVW	$tlbtab-KZERO(SB), R4
	MOVW	$tlbtabe-KZERO(SB), R5
	SUB	R4, R5
	MOVW	$(3*4), R6
	DIVW	R6, R5
	SUB	$4, R4
	MOVW	R5, CTR
ltlb:
	MOVWU	4(R4), R5
	MOVW	R5, SPR(MD_EPN)
	MOVW	R5, SPR(MI_EPN)
	MOVWU	4(R4), R5
	MOVW	R5, SPR(MI_TWC)
	MOVW	R5, SPR(MD_TWC)
	MOVWU	4(R4), R5
	MOVW	R5, SPR(MD_RPN)
	MOVW	R5, SPR(MI_RPN)
	BDNZ	ltlb
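
/*
 * the loop above walks tlbtab, a table of (epn, twc, rpn) word triples,
 * loading each entry into both MMUs.  a rough C rendering, with a
 * hypothetical mtspr() helper:
 *
 *	for(p = tlbtab; p < tlbtabe; p += 3){
 *		mtspr(MD_EPN, p[0]); mtspr(MI_EPN, p[0]);
 *		mtspr(MI_TWC, p[1]); mtspr(MD_TWC, p[1]);
 *		mtspr(MD_RPN, p[2]); mtspr(MI_RPN, p[2]);
 *	}
 */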

	MOVW	$(1<<25), R4
	MOVW	R4, SPR(IC_CST)	/* enable i-cache */
	ISYNC

	MOVW	$(1<<24), R4
	SYNC
	MOVW	R4, SPR(DC_CST)	/* force write through mode */
	MOVW	$(1<<25), R4
	SYNC
	MOVW	R4, SPR(DC_CST)	/* enable d-cache */
	ISYNC

	/* enable MMU and set kernel PC to virtual space */
	MOVW	$((0<<29)|(1<<28)), R4	/* cache when DR=0, write-through */
	SYNC
	MOVW	R4, SPR(MD_CTR)	/* d-mmu control */
	MOVW	LR, R3
	OR	$KZERO, R3
	MOVW	R3, SPR(SRR0)
	MOVW	MSR, R4
	OR	$(MSR_ME|MSR_IR|MSR_DR), R4	/* had ME|FPE|FE0|FE1 */
	MOVW	R4, SPR(SRR1)
	RFI	/* resume in kernel mode in caller */

TEXT	splhi(SB), $0
	MOVW	MSR, R3
	RLWNM	$0, R3, $~MSR_EE, R4
	SYNC
	MOVW	R4, MSR
	MSRSYNC
	MOVW	LR, R31
	MOVW	R31, 4(R(MACH))	/* save PC in m->splpc */
	RETURN

TEXT	splx(SB), $0
	/* fall through */

TEXT	splxpc(SB), $0
	MOVW	MSR, R4
	RLWMI	$0, R3, $MSR_EE, R4
	RLWNMCC	$0, R3, $MSR_EE, R5
	BNE	splx0
	MOVW	LR, R31
	MOVW	R31, 4(R(MACH))	/* save PC in m->splpc */
splx0:
	SYNC
	MOVW	R4, MSR
	MSRSYNC
	RETURN

TEXT	spllo(SB), $0
	MFTB(TBRL, 3)
	MOVW	R3, spltbl(SB)
	MOVW	MSR, R3
	OR	$MSR_EE, R3, R4
	SYNC
	MOVW	R4, MSR
	MSRSYNC
	RETURN

TEXT	spldone(SB), $0
	RETURN

TEXT	islo(SB), $0
	MOVW	MSR, R3
	RLWNM	$0, R3, $MSR_EE, R3
	RETURN
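
/*
 * the spl family above manipulates only MSR_EE.  a sketch of the
 * contract in C, with hypothetical msr()/putmsr() accessors:
 *
 *	int splhi(void) { int s = msr(); putmsr(s & ~MSR_EE); return s; }
 *	void splx(int s) { putmsr((msr() & ~MSR_EE) | (s & MSR_EE)); }
 *	int islo(void)   { return msr() & MSR_EE; }
 *
 * splhi, and splx when it restores the interrupts-off state, also
 * record the caller's PC in m->splpc.
 */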

TEXT	setlabel(SB), $-4
	MOVW	LR, R31
	MOVW	R1, 0(R3)
	MOVW	R31, 4(R3)
	MOVW	$0, R3
	RETURN

TEXT	gotolabel(SB), $-4
	MOVW	4(R3), R31
	MOVW	R31, LR
	MOVW	0(R3), R1
	MOVW	$1, R3
	RETURN
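
/*
 * a Label is just {SP, PC}: setlabel/gotolabel form a minimal
 * setjmp/longjmp.  setlabel() returns 0 from the direct call;
 * gotolabel() reloads the saved SP and LR so control reappears
 * at setlabel's return point, this time returning 1.
 */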

/*
 * enter with stack set and mapped.
 * on return, SB (R2) has been set, and R3 has the Ureg*,
 * the MMU has been re-enabled, kernel text and PC are in KSEG,
 * R(MACH) has been set, and R0 contains 0.
 *
 * this can be simplified in the Inferno regime
 */
TEXT	saveureg(SB), $-4
/*
 * save state
 */
	MOVMW	R2, 48(R1)	/* r2:r31 */
	MOVW	$setSB(SB), R2
	MOVW	SPR(SAVER1), R4
	MOVW	R4, 44(R1)
	MOVW	SPR(SAVER0), R5
	MOVW	R5, 40(R1)
	MOVW	CTR, R6
	MOVW	R6, 36(R1)
	MOVW	XER, R4
	MOVW	R4, 32(R1)
	MOVW	CR, R5
	MOVW	R5, 28(R1)
	MOVW	SPR(SAVELR), R6	/* LR */
	MOVW	R6, 24(R1)
	/* pad at 20(R1) */
	/* old PC(16) and status(12) saved earlier */
	MOVW	SPR(SAVEXX), R0
	MOVW	R0, 8(R1)	/* cause/vector */
	ADD	$8, R1, R3	/* Ureg* */
	STWCCC	R3, (R1)	/* break any pending reservations */
	MOVW	$0, R0	/* compiler/linker expect R0 to be zero */

	MOVW	MSR, R5
	OR	$(MSR_IR|MSR_DR), R5	/* enable MMU */
	MOVW	R5, SPR(SRR1)
	MOVW	LR, R31
	OR	$KZERO, R31	/* return PC in KSEG0 */
	MOVW	R31, SPR(SRR0)
	SYNC
	ISYNC
	RFI	/* returns to trap handler */

TEXT	icflush(SB), $-4	/* icflush(virtaddr, count) */
	MOVW	n+4(FP), R4
	RLWNM	$0, R3, $~(CACHELINESZ-1), R5
	SUB	R5, R3
	ADD	R3, R4
	ADD		$(CACHELINESZ-1), R4
	SRAW	$CACHELINELOG, R4
	MOVW	R4, CTR
icf0:	ICBI	(R5)
	ADD	$CACHELINESZ, R5
	BDNZ	icf0
	ISYNC
	RETURN
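
/*
 * the arithmetic above rounds the start down to a cache line and turns
 * the (possibly unaligned) byte count into a line count:
 *
 *	lines = (n + (vaddr & (CACHELINESZ-1)) + CACHELINESZ-1) >> CACHELINELOG;
 *
 * e.g. with 16-byte lines, 5 bytes starting 14 bytes into a line gives
 * (5+14+15)>>4 = 2 lines.  dcflush below uses the same computation.
 */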

TEXT	dcflush(SB), $-4	/* dcflush(virtaddr, count) */
	SYNC
	MOVW	n+4(FP), R4
	RLWNM	$0, R3, $~(CACHELINESZ-1), R5
	CMP	R4, $0
	BLE	dcf1
	SUB	R5, R3
	ADD	R3, R4
	ADD		$(CACHELINESZ-1), R4
	SRAW	$CACHELINELOG, R4
	MOVW	R4, CTR
dcf0:	DCBF	(R5)
	ADD	$CACHELINESZ, R5
	BDNZ	dcf0
	SYNC
	ISYNC
dcf1:
	RETURN

TEXT	tas(SB), $0
	SYNC
	MOVW	R3, R4
	MOVW	$0xdeaddead,R5
tas1:
	DCBF	(R4)	/* fix for 603x bug */
	LWAR	(R4), R3
	CMP	R3, $0
	BNE	tas0
	STWCCC	R5, (R4)
	BNE	tas1
tas0:
	SYNC
	ISYNC
	RETURN
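
/*
 * tas() is the usual load-reserved/store-conditional exchange; roughly,
 * with hypothetical lwarx()/stwcx() intrinsics:
 *
 *	do {
 *		old = lwarx(p);
 *		if(old != 0)
 *			break;			already held
 *	} while(!stwcx(p, 0xdeaddead));		retry if reservation was lost
 *	return old;
 *
 * the DCBF works around the 603x bug noted above.
 */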

TEXT	gettbl(SB), $0
	MFTB(TBRL, 3)
	RETURN

TEXT	gettbu(SB), $0
	MOVW	SPR(TBRU), R3
	RETURN

TEXT	getpvr(SB), $0
	MOVW	SPR(PVR), R3
	RETURN

TEXT	getimmr(SB), $0
	MOVW	SPR(IMMR), R3
	RETURN

TEXT	getdec(SB), $0
	MOVW	SPR(DEC), R3
	RETURN

TEXT	putdec(SB), $0
	MOVW	R3, SPR(DEC)
	RETURN

TEXT	getcallerpc(SB), $-4
	MOVW	0(R1), R3
	RETURN

TEXT getdar(SB), $0
	MOVW	SPR(DAR), R3
	RETURN

TEXT getdsisr(SB), $0
	MOVW	SPR(DSISR), R3
	RETURN

TEXT	getdepn(SB), $0
	MOVW	SPR(MD_EPN), R3
	RETURN

TEXT	getmsr(SB), $0
	MOVW	MSR, R3
	RETURN

TEXT	putmsr(SB), $0
	SYNC
	MOVW	R3, MSR
	MSRSYNC
	RETURN

TEXT	eieio(SB), $0
	EIEIO
	RETURN

TEXT	gotopc(SB), $0
	MOVW	R3, CTR
	MOVW	LR, R31	/* for trace back */
	BR	(CTR)

TEXT	firmware(SB), $0
	MOVW	MSR, R3
	MOVW	$(MSR_EE|MSR_ME), R4
	ANDN	R4, R3
	OR	$(MSR_IP), R3
	ISYNC
	MOVW	R3, MSR	/* turn off interrupts and machine checks */
	MSRSYNC
	MOVW	$(MSR_RI|MSR_IR|MSR_DR|MSR_ME), R4
	ANDN	R4, R3
	MOVW	R3, SPR(SRR1)
	MOVW	$(0xFF00<<16), R4
	MOVW	R4, SPR(IMMR)
	MOVW	$(0x0800<<16), R4
	MOVW	R4, SPR(SRR0)	/* force bad address */
	MOVW	R0, SPR(149)	/* ensure checkstop on machine check */
	MOVW	R0, R1
	MOVW	R0, R2
	EIEIO
	ISYNC
	RFI
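
/*
 * firmware() does not return: it clears translation and the
 * recoverable-interrupt bit, arranges for machine checks to
 * checkstop, and RFIs to a deliberately bad address, apparently
 * to drop the board back into its resident monitor.
 */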

/*
 * byte swapping of arrays of long and short;
 * could possibly be avoided with more changes to drivers
 */
TEXT	swabl(SB), $0
	MOVW	v+4(FP), R4
	MOVW	n+8(FP), R5
	SRAW	$2, R5, R5
	MOVW	R5, CTR
	SUB	$4, R4
	SUB	$4, R3
swabl1:
	ADD	$4, R3
	MOVWU	4(R4), R7
	MOVWBR	R7, (R3)
	BDNZ	swabl1
	RETURN

TEXT	swabs(SB), $0
	MOVW	v+4(FP), R4
	MOVW	n+8(FP), R5
	SRAW	$1, R5, R5
	MOVW	R5, CTR
	SUB	$2, R4
	SUB	$2, R3
swabs1:
	ADD	$2, R3
	MOVHZU	2(R4), R7
	MOVHBR	R7, (R3)
	BDNZ	swabs1
	RETURN

TEXT	legetl(SB), $0
	MOVWBR	(R3), R3
	RETURN

TEXT	lesetl(SB), $0
	MOVW	v+4(FP), R4
	MOVWBR	R4, (R3)
	RETURN

TEXT	legets(SB), $0
	MOVHBR	(R3), R3
	RETURN

TEXT	lesets(SB), $0
	MOVW	v+4(FP), R4
	MOVHBR	R4, (R3)
	RETURN
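
/*
 * legetl/lesetl and legets/lesets give the big-endian kernel
 * single-instruction little-endian accesses (MOVWBR/MOVHBR are the
 * byte-reversed load/store forms).  a portable C equivalent of
 * legetl, for illustration:
 *
 *	ulong legetl(uchar *p) { return p[0] | (p[1]<<8) | (p[2]<<16) | (p[3]<<24); }
 */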

/*
 * ITLB miss
 *	avoid references that might need the right SB value;
 *	IR and DR are off.
 */
TEXT	itlbmiss(SB), $-4
	MOVW	R1, SPR(M_TW)
	MOVW	SPR(SRR0), R1	/* instruction miss address */
	MOVW	R1, SPR(MD_EPN)
	MOVW	SPR(M_TWB), R1	/* level one pointer */
	MOVW	(R1), R1
	MOVW	R1, SPR(MI_TWC)	/* save level one attributes */
	MOVW	R1, SPR(MD_TWC)	/* save base and attributes */
	MOVW	SPR(MD_TWC), R1	/* level two pointer */
	MOVW	(R1), R1	/* level two entry */
	MOVW	R1, SPR(MI_RPN)	/* write TLB */
	MOVW	SPR(M_TW), R1
	RFI

/*
 * DTLB miss
 *	avoid references that might need the right SB value;
 *	IR and DR are off.
 */
TEXT	dtlbmiss(SB), $-4
	MOVW	R1, SPR(M_TW)
	MOVW	SPR(M_TWB), R1	/* level one pointer */
	MOVW	(R1), R1	/* level one entry */
	MOVW	R1, SPR(MD_TWC)	/* save base and attributes */
	MOVW	SPR(MD_TWC), R1	/* level two pointer */
	MOVW	(R1), R1	/* level two entry */
	MOVW	R1, SPR(MD_RPN)	/* write TLB */
	MOVW	SPR(M_TW), R1
	RFI
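
/*
 * both miss handlers above do the 8xx software table walk: M_TWB points
 * at the level-one entry for the faulting address, whose contents,
 * written to MD_TWC, yield the level-two pointer; the level-two entry
 * written to MI_RPN/MD_RPN loads the TLB.  only R1 is used, parked in
 * M_TW, so no stack is needed.
 */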

/*
 * traps force memory mapping off.
 * this code goes to too much effort (for the Inferno environment) to restore it.
 */
TEXT	trapvec(SB), $-4
traps:
	MOVW	LR, R0

pagefault:

/*
 * map data virtually and make space to save
 */
	MOVW	R0, SPR(SAVEXX)	/* vector */
	MOVW	R1, SPR(SAVER1)
	SYNC
	ISYNC
	MOVW	MSR, R0
	OR	$(MSR_DR|MSR_ME), R0		/* make data space usable */
	SYNC
	MOVW	R0, MSR
	MSRSYNC
	SUB	$UREGSPACE, R1

	MOVW	SPR(SRR0), R0	/* save SRR0/SRR1 now, since DTLB might be missing stack page */
	MOVW	R0, LR
	MOVW	SPR(SRR1), R0
	MOVW	R0, 12(R1)	/* save status: could take DTLB miss here */
	MOVW	LR, R0
	MOVW	R0, 16(R1)	/* old PC */
	BL	saveureg(SB)
	BL	trap(SB)
	BR	restoreureg

TEXT	intrvec(SB), $-4
	MOVW	LR, R0

/*
 * map data virtually and make space to save
 */
	MOVW	R0, SPR(SAVEXX)	/* vector */
	MOVW	R1, SPR(SAVER1)
	SYNC
	ISYNC
	MOVW	MSR, R0
	OR	$MSR_DR, R0		/* make data space usable */
	SYNC
	MOVW	R0, MSR
	MSRSYNC
	SUB	$UREGSPACE, R1

	MFTB(TBRL, 0)
	MOVW	R0, intrtbl(SB)

	MOVW	SPR(SRR0), R0
	MOVW	R0, LR
	MOVW	SPR(SRR1), R0
	MOVW	R0, 12(R1)
	MOVW	LR, R0
	MOVW	R0, 16(R1)
	BL	saveureg(SB)

	MFTB(TBRL, 5)
	MOVW	R5, isavetbl(SB)

	BL	intr(SB)

/*
 * restore state from Ureg and return from trap/interrupt
 */
restoreureg:
	MOVMW	48(R1), R2	/* r2:r31 */
	/* defer R1 */
	MOVW	40(R1), R0
	MOVW	R0, SPR(SAVER0)
	MOVW	36(R1), R0
	MOVW	R0, CTR
	MOVW	32(R1), R0
	MOVW	R0, XER
	MOVW	28(R1), R0
	MOVW	R0, CR	/* CR */
	MOVW	24(R1), R0
	MOVW	R0, SPR(SAVELR)	/* LR */
	/* pad, skip */
	MOVW	16(R1), R0
	MOVW	R0, SPR(SRR0)	/* old PC */
	MOVW	12(R1), R0
	MOVW	R0, SPR(SRR1)	/* old MSR */
	/* cause, skip */
	MOVW	44(R1), R1	/* old SP */
	MOVW	SPR(SAVELR), R0
	MOVW	R0, LR
	MOVW	SPR(SAVER0), R0
	RFI
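
/*
 * Ureg layout implied by the save/restore offsets (from R1; the Ureg
 * proper starts at 8): 8 cause, 12 status, 16 pc, 20 pad, 24 lr,
 * 28 cr, 32 xer, 36 ctr, 40 r0, 44 sp, 48..164 r2-r31.
 */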

TEXT	powerdownled(SB), $0

	MOVW	$0x10002200,R11
	MOVW	$0,R7
	MOVW	R7,0(R11)
	RETURN

TEXT	powerupled(SB), $0

	MOVW	$0x10002200,R11
	MOVW	$2,R7
	MOVW	R7,0(R11)
	RETURN

TEXT	reset(SB), $-4
	MOVW	$0,R4
	MOVW	0(R4), R5
loop:
	BR	loop


GLOBL	mach0+0(SB), $MACHSIZE
GLOBL	spltbl+0(SB), $4
GLOBL	intrtbl+0(SB), $4
GLOBL	isavetbl+0(SB), $4

/*
 * TLB prototype entries, loaded once-for-all at startup,
 * remaining unchanged thereafter.
 * Limit the table to at most 8 entries to ensure
 * it works on the 823 (other 8xx processors allow up to 32 TLB entries).
 */
#define	TLBE(epn,rpn,twc)	WORD	$(epn);  WORD	$(twc); WORD	$(rpn)
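/* the words are emitted in the order the ltlb loop consumes them: epn, twc, rpn */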

TEXT	tlbtab(SB), $-4

	/* epn, rpn, twc */
/*	TLBE(KZERO|DRAMMEM|TLBVALID, DRAMMEM|PTEWRITE|PTELPS|PTESH|PTEVALID, PTE8MB|PTEWT|PTEVALID)	/* DRAM, 8M */
/*	TLBE(KZERO|BCSRMEM|TLBVALID, BCSRMEM|PTEWRITE|PTESH|PTECI|PTEVALID, PTE4K|PTEWT|PTEVALID)	/* Board CSR, 4K */
/*	TLBE(KZERO|INTMEM|TLBVALID, INTMEM|PTEWRITE|PTELPS|PTESH|PTECI|PTEVALID, PTE4K|PTEWT|PTEVALID)	/* IMMR, 16K */
/*	TLBE(KZERO|FLASHMEM|TLBVALID, FLASHMEM|PTEWRITE|PTELPS|PTESH|PTECI|PTEVALID, PTE8MB|PTEWT|PTEVALID)	/* Flash, 8M */
/*	TLBE(KZERO|SDRAMMEM|TLBVALID, SDRAMMEM|PTEWRITE|PTELPS|PTESH|PTEVALID, PTE8MB|PTEWT|PTEVALID)	/* SDRAM, 8M */
/*	TLBE(KZERO|PCMCIAMEM|TLBVALID, PCMCIAMEM|PTEWRITE|PTELPS|PTESH|PTECI|PTEVALID, PTE8MB|PTEWT|PTEVALID)	/* PCMCIA, 8M */
TEXT	tlbtabe(SB), $-4
	RETURN
.
## diffname mpc/l.s 1999/0122
## diff -e /n/emeliedump/1999/0121/sys/src/brazil/mpc/l.s /n/emeliedump/1999/0122/sys/src/brazil/mpc/l.s
632,638d
133,135d
127a

.
125c
	ADD	$(MACHSIZE-8), R(MACH), R1	/* set stack */
.
## diffname mpc/l.s 1999/0123
## diff -e /n/emeliedump/1999/0122/sys/src/brazil/mpc/l.s /n/emeliedump/1999/0123/sys/src/brazil/mpc/l.s
616,629d
545d
537,543d
530,534d
301c
/*	OR	$KZERO, R31	/* return PC in KSEG0 */
.
298c
/*	OR	$(MSR_IR|MSR_DR), R5	/* enable MMU */
.
133a
	MOVW	R0, R(USER)
	MOVW	R0, 0(R(MACH))

.
## diffname mpc/l.s 1999/0126
## diff -e /n/emeliedump/1999/0123/sys/src/brazil/mpc/l.s /n/emeliedump/1999/0126/sys/src/brazil/mpc/l.s
623,628c
	TLBE(DRAMMEM|MMUEV, MMUPS8M|MMUWT|MMUV, DRAMMEM|MMUPP|MMUSPS|MMUSH|MMUCI|MMUV)	/* DRAM, 8M */
	TLBE((DRAMMEM+8*(1<<20))|MMUEV, MMUPS8M|MMUWT|MMUV, (DRAMMEM+8*(1<<20))|MMUPP|MMUSPS|MMUSH|MMUCI|MMUV)	/* DRAM, second 8M */
	TLBE(INTMEM|MMUEV, MMUPS8M|MMUWT|MMUV, INTMEM|MMUPP|MMUSPS|MMUSH|MMUCI|MMUV)	/* IO space 8M */
.
621d
618c
#define	TLBE(epn,twc,rpn)	WORD	$(epn);	WORD	$(twc);	WORD	$(rpn)
.
615,616c
 * Limit the table to at most 4 entries
.
611a
	RETURN

.
412a
TEXT	flushmmu(SB), $0
	TLBIA
	RETURN

TEXT	putmmu(SB), $0
	RETURN

.
391c
TEXT	getdsisr(SB), $0
.
387c
TEXT	getdar(SB), $0
.
196,207d
188,194c
	RETURN
.
184,185c
	/* enable MMU */
	MOVW	MSR, R4
	OR	$(MSR_IR|MSR_DR), R4
	MOVW	R4, MSR
.
165,166c
	MOVW	$tlbtab(SB), R4
	MOVW	$tlbtabe(SB), R5
.
162d
158,159c
	MOVW	$(MMUCIDEF|MMUWTDEF|(31<<8)), R4
.
154,155c
	MOVW	R4, SPR(M_CASID)
	
	/* set Ks = 0 Kp = 1 for all access groups */
	MOVW	$0x55555555, R4
	MOVW	R4, SPR(MI_AP)
	MOVW	R4, SPR(MD_AP)

	/*
	 * set:
	 *  PowerPC mode
	 *  Page protection mode - no 1K pages
	 *  CI when MMU is disabled - this will change
	 *  WT when DMMU is disabled - this will change
	 *  disable protected TLB for the moment
	 *  ignore user/supervisor state when looking for TLB
	 *  set first tlb entry to 28 - first lockable entry
	 */
	MOVW	$(MMUCIDEF|(31<<8)), R4
.
152a
	/* don't use CASID yet - set to zero for now */
.
150,151d
148a

.
145a
	/* off to main */
.
## diffname mpc/l.s 1999/0127
## diff -e /n/emeliedump/1999/0126/sys/src/brazil/mpc/l.s /n/emeliedump/1999/0127/sys/src/brazil/mpc/l.s
543a
	MOVW	SPR(SRR1), R0
	ANDCC	$MSR_PR, R0
	BEQ	ktrap
	
	/* switch to kernel stack */
	MOVW	R1, CR
	MOVW	R2, R0
	MOVW	$setSB(SB), R2
	MOVW	$mach0(SB), R1	/* m-> */
	MOVW	R0, R2
	MOVW	12(R1), R1	/* m->proc */
	MOVW	8(R1), R1	/* m->proc->kstack */
	ADD	$(KSTACK-UREGSIZE), R1
	BR	trap1
ktrap:
	MOVW	R1, CR
	MOVW	SPR(SAVER1), R1
	SUB	$UREGSPACE, R1
trap1:
.
541c
	MOVW	CR, R1
	MOVW	MSR, R0
	OR	$(MSR_DR|MSR_IR), R0		/* make data space usable */
	MOVW	R0, MSR
.
533d
416c
TEXT	_putmmu(SB), $0
	MOVW	MSR, R7
	MOVW	$~(MSR_DR|MSR_IR), R8
	AND	R7, R8
	MOVW	R8, MSR
	OR	$MMUEV, R3
	MOVW	R3, SPR(MD_EPN)
	MOVW	R3, SPR(MI_EPN)
	MOVW	$(MMUWT|MMUV), R5
	MOVW	R5, SPR(MI_TWC)
	MOVW	R5, SPR(MD_TWC)
	MOVW	4(FP), R6
	MOVW	R6, SPR(MD_RPN)
	MOVW	R6, SPR(MI_RPN)
	MOVW	SPR(MD_CTR), R3
	MOVW	R7, MSR
.
278a
	MOVW	$mach0(SB), R(MACH)
	MOVW	12(R(MACH)), R(USER)
.
264a
TEXT	touser(SB), $-4
	MOVW	$(UTZERO+32), R5	/* header appears in text */
	MOVW	$(MSR_EE|MSR_PR|MSR_ME|MSR_IP|MSR_IR|MSR_DR|MSR_RI), R4
	MOVW	R4, SPR(SRR1)
	MOVW	R3, R1
	MOVW	R5, SPR(SRR0)
	RFI

.
198a
	/* lock kernel entries in tlb - also reset tlb index */
	MOVW	$(MMUCIDEF|MMURSV4), R4
	MOVW	R4, SPR(MI_CTR)	/* i-mmu control */
	ISYNC
	MOVW	$(MMUCIDEF|MMUWTDEF|MMURSV4), R4
	MOVW	R4, SPR(MD_CTR)	/* d-mmu control */
	ISYNC

.
## diffname mpc/l.s 1999/0128
## diff -e /n/emeliedump/1999/0127/sys/src/brazil/mpc/l.s /n/emeliedump/1999/0128/sys/src/brazil/mpc/l.s
666a
	
.
641a
TEXT	forkret(SB), $0
	BR	restoreureg

.
430c
TEXT	_flushmmu(SB), $0
.
63c
#define	TLBIA	WORD	$((31<<26)|(370<<1))
.
## diffname mpc/l.s 1999/0608
## diff -e /n/emeliedump/1999/0128/sys/src/brazil/mpc/l.s /n/emeliedump/1999/0608/sys/src/brazil/mpc/l.s
687,688c
	TLBE(DRAMMEM|MMUEV, MMUPS8M|MMUWT|MMUV, DRAMMEM|MMUPP|MMUSPS|MMUSH|MMUV)	/* DRAM, 8M */
	TLBE((DRAMMEM+8*(1<<20))|MMUEV, MMUPS8M|MMUWT|MMUV, (DRAMMEM+8*(1<<20))|MMUPP|MMUSPS|MMUSH|MMUV)	/* DRAM, second 8M */
.
207c
	/* enable i-cache */
	MOVW	$(1<<25), R4
	MOVW	R4, SPR(IC_CST)
	ISYNC

 	/* enable d-cache 	*/
	MOVW	$(1<<25), R4
	MOVW	R4, SPR(DC_CST)
	ISYNC

 	/* enable MMU */
.
203c
	MOVW	$(MMURSV4), R4
.
200c
	MOVW	$(MMURSV4), R4
.
175c
	MOVW	$((31<<8)), R4
.
172c
	MOVW	$((31<<8)), R4
.
166c
	 *  ~CI when MMU is disabled
.
## diffname mpc/l.s 1999/0609
## diff -e /n/emeliedump/1999/0608/sys/src/brazil/mpc/l.s /n/emeliedump/1999/0609/sys/src/brazil/mpc/l.s
697,698c
	TLBE(FLASHMEM|MMUEV, MMUPS8M|MMUWT|MMUV, FLASHMEM|MMUPP|MMUSPS|MMUSH|MMUCI|MMUV)	/* FLASH, 8M */
	TLBE(DRAMMEM|MMUEV, MMUPS8M|MMUWT|MMUV, DRAMMEM|MMUPP|MMUSPS|MMUSH|MMUV)	/* DRAM, second 8M */
.
## diffname mpc/l.s 1999/0714
## diff -e /n/emeliedump/1999/0609/sys/src/brazil/mpc/l.s /n/emeliedump/1999/0714/sys/src/brazil/mpc/l.s
410,413d
## diffname mpc/l.s 2000/0204
## diff -e /n/emeliedump/1999/0714/sys/src/brazil/mpc/l.s /n/emeliedump/2000/0204/sys/src/9/mpc/l.s
115c
	MOVW	R4, SPR(158)	/* cancel `show cycle' for normal instruction execution */
.
## diffname mpc/l.s 2000/0516
## diff -e /n/emeliedump/2000/0204/sys/src/9/mpc/l.s /n/emeliedump/2000/0516/sys/src/9/mpc/l.s
677d
675a
TEXT	flash(SB), $-4
flash0:
	BL	powerupled(SB);
	MOVW	$0x100000, R5
	MOVW	R5, CTR
delay0:
	BDNZ	delay0
	BL	powerdownled(SB);
	MOVW	$0x100000, R5
	MOVW	R5, CTR
delay1:
	BDNZ	delay1
	BR		flash0

TEXT	powerupled(SB), $0

	MOVW	$INTMEM,R11
	MOVH	0x970(R11), R7
	MOVW	$0x100,R8
	OR	R8,R7
	MOVH	R7,0x970(R11)
	MOVH	0x976(R11), R7
	ANDN	R8,R7
	MOVH	R7,0x976(R11)
	RETURN

TEXT	powerdownled(SB), $0

	MOVW	$INTMEM,R11
	MOVH	0x970(R11), R7
	MOVW	$0x100,R8
	OR	R8,R7
	MOVH	R7,0x970(R11)
	MOVH	0x976(R11), R7
	OR	R8,R7
	MOVH	R7,0x976(R11)
	RETURN
.
671,672d
663c
	MOVW	R0, LR
.
644a
	MOVW	MSR, R5
	OR	$(MSR_IR|MSR_DR|MSR_RI), R5	/* enable MMU */
	MOVW	R5, SPR(SRR1)
	MOVW	LR, R31
	OR	$KZERO, R31	/* return PC in KSEG0 */
	MOVW	R31, SPR(SRR0)
	OR	$KZERO, R1	/* fix stack pointer */
	RFI	/* returns to trap handler */

.
643a
/*
 * enter with stack set and mapped.
 * on return, SB (R2) has been set, and R3 has the Ureg*,
 * the MMU has been re-enabled, kernel text and PC are in KSEG,
 * R(MACH) has been set, and R0 contains 0.
 *
 */
TEXT	saveureg(SB), $-4
/*
 * save state
 */
	MOVMW	R2, 48(R1)	/* r2:r31 */
	MOVW	$setSB(SB), R2
	MOVW	$(MACHADDR&~KZERO), R(MACH)
	MOVW	12(R(MACH)), R(USER)
	MOVW	$MACHADDR, R(MACH)
	MOVW	SPR(SAVER1), R4
	MOVW	R4, 44(R1)
	MOVW	SPR(SAVER0), R5
	MOVW	R5, 40(R1)
	MOVW	CTR, R6
	MOVW	R6, 36(R1)
	MOVW	XER, R4
	MOVW	R4, 32(R1)
	MOVW	CR, R5
	MOVW	R5, 28(R1)
	MOVW	SPR(SAVELR), R6	/* LR */
	MOVW	R6, 24(R1)
	/* pad at 20(R1) */
	MOVW	SPR(SRR0), R0
	MOVW	R0, 16(R1)				/* old PC */
	MOVW	SPR(SRR1), R0
	MOVW	R0, 12(R1)				/* old status */
	MOVW	SPR(SAVEXX), R0
	MOVW	R0, 8(R1)	/* cause/vector */
	ADD	$8, R1, R3	/* Ureg* */
	OR	$KZERO, R3	/* fix ureg */
	STWCCC	R3, (R1)	/* break any pending reservations */
	MOVW	$0, R0	/* compiler/linker expect R0 to be zero */
.
614a
	BR	0(PC)

.
603,607d
601a
	RLWNM	$0, R1, $~KZERO, R1		/* PADDR(m->proc->kstack) */
.
598c
	BL	saveureg(SB)
	BL	trap(SB)
	BR	restoreureg
.
591,596c
	MOVW	$(MACHADDR&~KZERO), R1	/* PADDR(m->) */
	MOVW	12(R1), R1				/* m->proc  */
	RLWNM	$0, R1, $~KZERO, R1		/* PADDR(m->proc) */
	MOVW	8(R1), R1				/* m->proc->kstack */
	RLWNM	$0, R1, $~KZERO, R1		/* PADDR(m->proc->kstack) */
.
586,587c
	MOVW	CR, R1
	MOVW	R0, CR
	BC	4,17,ktrap
.
579,584c
traps:
	MOVW	R0, SPR(SAVEXX)	/* vector */

/*
	to enable hardware break points
	MOVW	MSR, R1
	OR		$(MSR_RI), R1
	MOVW	R1, MSR
	ISYNC
*/	

	/* did we come from user space */
.
576,577d
574d
571a
 * the following code has been executed at the exception
 * vector location
 *	MOVW R0, SPR(SAVER0)
 *	MOVW LR, R0
 *	MOVW R0, SPR(SAVELR) 
 *	bl	trapvec(SB)
.
569a
	/*
	 * R1 = 20 bits of addr | 8 bits of junk | 4 bits of asid
	 * calculate ((x>>9)^(x>>21)^(x<<11)) & (0xfff<<3)
	 * note (x>>21)^(x<<11) = rotate left 11
	 */
	RLWNM	$(32-9), R1, $(0xfff<<3), R3
	RLWNM	$11, R1, $(0xfff<<3), R1
	XOR		R1, R3, R1
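	/*
	 * equivalently, in C: h = ((x>>9) ^ (x>>21) ^ (x<<11)) & (0xfff<<3);
	 * the result indexes 4096 two-word software-tlb slots (hence the <<3).
	 */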
	MOVW	$(MACHADDR&~KZERO), R3	/* m-> */
	MOVW	16(R3), R3				/* m->stb */
	RLWNM	$0, R3, $~KZERO, R3		/* PADDR(m->stb) */
	ADD	R1,R3,R3
	MOVW	4(R3), R0
	MOVW	0(R3), R3
	MOVW	SPR(MD_EPN), R1
	RLWNM	$20, R1, $0xffffff, R1
	RLWNM	$12, R1, $~0, R1
	CMP		R3, R1
	BNE		dtlbtrap
	MOVW	$(MMUV), R1
	MOVW	R1, SPR(MD_TWC)
	MOVW	R0, SPR(MD_RPN)
	MOVW	R2, CR
	MOVW	SPR(SAVEXX), R3
	MOVW	SPR(M_TW), R2
	MOVW	SPR(SAVER1), R1
	MOVW	SPR(SAVER0), R0
	RFI
dtlbtrap:
	MOVW	R2, CR

	MOVW	$(MACHADDR&~KZERO), R2	/* m-> */
	MOVW	R1, 32(R2)
	MOVW	R3, 36(R2)

	MOVW	SPR(MD_EPN), R1
	MOVW	$(MACHADDR&~KZERO), R3	/* m-> */
	MOVW	R1, 24(R3)				/* save dar */
	MOVW	SPR(SAVEXX), R3
	MOVW	SPR(M_TW), R2
	MOVW	LR, R0
	MOVW	R0, SPR(SAVELR)
	MOVW	$0x1200, R0
	BR	traps

TEXT	dtlberror(SB), $-4
	MOVW	R0, SPR(SAVER0)
	MOVW	R1, SPR(SAVER1)
	MOVW	LR, R0
	MOVW	R0, SPR(SAVELR)
	MOVW	$(MACHADDR&~KZERO), R1		/* m-> */
	MOVW	SPR(DAR), R0
	MOVW	R0, 24(R1)				/* save dar */
	MOVW	SPR(DSISR), R0
	MOVW	R0, 28(R1)				/* save dsisr */
	MOVW	$0x1400, R0
	BR	traps

.
568a
dtlbio:
	/*
	 * map io
	 */
	MOVW	R2, CR
	MOVW	$(MMUPS8M|MMUWT|MMUV), R2
	MOVW	R2, SPR(MD_TWC)
	RLWNM	$0, R1, $0xff800000, R1
	OR	$(MMUPP|MMUSPS|MMUSH|MMUCI|MMUV), R1
	MOVW	R1, SPR(MD_RPN)
	MOVW	SPR(M_TW), R2
	MOVW	SPR(SAVER1), R1
	RFI
dtlblookup:
	MOVW	R0, SPR(SAVER0)
	MOVW	R3, SPR(SAVEXX)
.
560,567c

	MOVW	R1, SPR(SAVER1)
	MOVW	R2, SPR(M_TW)
/*
	to enable hardware break points
	MOVW	MSR, R1
	OR		$(MSR_RI), R1
	MOVW	R1, MSR
	ISYNC
*/	

	/* m->tlbfault++ */
	MOVW	$(MACHADDR&~KZERO), R1		/* m-> */
	MOVW	20(R1), R2	
	ADD		$1, R2
	MOVW	R2, 20(R1)

	MOVW	CR, R2
	MOVW	SPR(MD_EPN), R1
	MOVW	R1, CR
	BC	4,0,dtlblookup
	BC	12,1,dtlbio
	/*
	 * map ram
	 */
	MOVW	R2, CR
	MOVW	$(MMUPS8M|MMUV), R2
	MOVW	R2, SPR(MD_TWC)
	RLWNM	$0, R1, $0x7f800000, R1
	OR	$(MMUPP|MMUSPS|MMUSH|MMUV), R1
	MOVW	R1, SPR(MD_RPN)
	MOVW	SPR(M_TW), R2
	MOVW	SPR(SAVER1), R1
.
555,558c
	to enable hardware break points
	MOVW	MSR, R1
	OR		$(MSR_RI), R1
	MOVW	R1, MSR
	ISYNC	
*/

	/* m->tlbfault++ */
	MOVW	$(MACHADDR&~KZERO), R1		/* m-> */
	MOVW	20(R1), R2	
	ADD		$1, R2
	MOVW	R2, 20(R1)

	MOVW	CR, R2
	MOVW	SPR(MI_EPN), R1
	MOVW	R1, CR
	BC	4,0,itlblookup
	/*
	 * map ram
	 */
	MOVW	R2, CR
	MOVW	$(MMUPS8M|MMUV), R2
	MOVW	R2, SPR(MI_TWC)
	RLWNM	$0, R1, $0x7f800000, R1
	OR	$(MMUPP|MMUSPS|MMUSH|MMUV), R1
	MOVW	R1, SPR(MI_RPN)
	MOVW	SPR(M_TW), R2
	MOVW	SPR(SAVER1), R1
	RFI

itlblookup:
	MOVW	R0, SPR(SAVER0)
	MOVW	R3, SPR(SAVEXX)

	/*
	 * R1 = 20 bits of addr | 8 bits of junk | 4 bits of asid
	 * calculate ((x>>9)^(x>>21)^(x<<11)) & (0xfff<<3)
	 * note (x>>21)^(x<<11) = rotate left 11
	 */
	RLWNM	$(32-9), R1, $(0xfff<<3), R3
	RLWNM	$11, R1, $(0xfff<<3), R1
	XOR		R1, R3, R1
	MOVW	$(MACHADDR&~KZERO), R3	/* m-> */
	MOVW	16(R3), R3				/* m->stb */
	RLWNM	$0, R3, $~KZERO, R3		/* PADDR(m->stb) */
	ADD		R1, R3
	MOVW	4(R3), R0
	MOVW	0(R3), R3
	MOVW	SPR(MI_EPN), R1
	RLWNM	$20, R1, $0xffffff, R1
	RLWNM	$12, R1, $~0, R1
	CMP		R3, R1
	BNE		itlbtrap
	MOVW	R2, CR
	MOVW	$(MMUV), R1
	MOVW	R1, SPR(MI_TWC)
	MOVW	R0, SPR(MI_RPN)
	MOVW	SPR(SAVEXX), R3
	MOVW	SPR(M_TW), R2
	MOVW	SPR(SAVER1), R1
	MOVW	SPR(SAVER0), R0
	RFI
itlbtrap:
	MOVW	R2, CR

	MOVW	$(MACHADDR&~KZERO), R2	/* m-> */
	MOVW	R1, 32(R2)
	MOVW	R3, 36(R2)

	MOVW	SPR(SAVEXX), R3
	MOVW	SPR(M_TW), R2
	MOVW	LR, R0
	MOVW	R0, SPR(SAVELR)
	MOVW	$0x1100, R0
	BR	traps


.
541,552c
	MOVW	R1, SPR(SAVER1)
	MOVW	R2, SPR(M_TW)
.
535,539c

.
463,484d
457a
TEXT	putcasid(SB), $-4
	MOVW	LR, R4
	MOVW	MSR, R5
	BL	nommu(SB)
	MOVW	R3,	SPR(M_CASID)
	MOVW	R4, SPR(SRR0)
	MOVW	R5, SPR(SRR1)
	RFI

TEXT	nommu(SB), $-4
	MOVW	LR, R6
	RLWNM	$0, R6, $~KZERO, R6
	MOVW	$(MSR_DR|MSR_IR), R8
	MOVW	R5, R7
	ANDN	R8, R7
	MOVW	R6, SPR(SRR0)
	MOVW	R7, SPR(SRR1)
	RFI

.
440,455c
TEXT	tlbflush(SB), $0
	TLBIE	R3
.
436c
TEXT	tlbflushall(SB), $0
.
425a
TEXT	putder(SB), $0
	MOVW	R3, SPR(DER)
	RETURN

TEXT	getder(SB), $0
	MOVW	SPR(DER), R3
	RETURN

.
373c
	MOVW	$0xdead,R5
.
365,366d
352d
327,336d
291,325d
285c
	MOVW	$(MSR_EE|MSR_PR|MSR_ME|MSR_IR|MSR_DR|MSR_RI), R4
.
220,221c
	MOVW	R4, SPR(SRR1)
	RFI	/* resume in kernel mode in caller */
.
207,217c
	/* enable MMU */
	MOVW	LR, R3
	OR	$KZERO, R3
	MOVW	R3, SPR(SRR0)
.
199,205c
	/*
	 * map 8mb IO at INTMEM->INTMEM, cache inhibit, shared
	 */
	MOVW	$(INTMEM|MMUEV), R4
	MOVW	R4, SPR(MD_EPN)
	MOVW	$(MMUPS8M|MMUWT|MMUV), R4
	MOVW	R4, SPR(MD_TWC)
	MOVW	$(INTMEM|MMUPP|MMUSPS|MMUSH|MMUCI|MMUV), R4
	MOVW	R4, SPR(MD_RPN)
.
179,197c
	/*
	 * map 8Mb of ram at 0 -> KZERO, cached, writeback, shared
	 */
	MOVW	$(KZERO|MMUEV), R4
	MOVW	R4, SPR(MD_EPN)
	MOVW	R4, SPR(MI_EPN)
	MOVW	$(MMUPS8M|MMUV), R4		/* |MMUWT */
	MOVW	R4, SPR(MD_TWC)
	MOVW	R4, SPR(MI_TWC)
	MOVW	$(0|MMUPP|MMUSPS|MMUSH|MMUV), R4
	MOVW	R4, SPR(MD_RPN)
	MOVW	R4, SPR(MI_RPN)
.
177d
174,175c
	MOVW	$0, R4
.
172c
	MOVW	$0, R4
.
164,170c
	 *  GPM = 0: PowerPC mode
	 *  PPM = 0: Page protection mode - no 1K pages
	 *  CIDEF = 0: cache enable when MMU disabled
	 *  WTDEF = 0: write back when MMU is disabled
	 *  RSV2 = 0: no reserved entries
	 *	TWAM = 0: don't use table walk assist
	 *  PPCS = 0: not used in PowerPC mode
	 *	INDX = 0: start at first entry
.
157c
	/* set Ks = 0 Kp = 1 for all access groups */
.
151,152d
149a
/*
 * on return from this function we will be running in virtual mode.
 * We set up two TLB entries:
 * 1) map the first 8Mb of RAM to KZERO
 * 2) map the region that contains the IMMR
 */
TEXT	mmuinit0(SB), $0
	/* reset all the tlbs */
	TLBIA
.
143,144c
	BDNZ	zero
skipz:
.
141c
zero:
.
139c
	SUBCC	R3, R4
	BLE	skipz
	SRAW	$2, R4
	MOVW	R4, CTR
.
136a
	/* zero bss */
.
132d
128d
124c

/*
 * set up mach
 */
	MOVW	$MACHADDR, R(MACH)
.
121,122d
117a
	/* running with MMU on!! */

	/* set R2 to its correct value */
	MOVW	$setSB(SB), R2

	/* enable i-cache */
	MOVW	$(1<<25), R4
	MOVW	R4, SPR(IC_CST)
	ISYNC

 	/* enable d-cache 	*/
	MOVW	$(2<<24), R4
	MOVW	R4, SPR(DC_CST)
	ISYNC


.
116a
	MOVW	R4, SPR(DER)
	ISYNC
	BL	mmuinit0(SB)
.
115c
	MOVW	R4, SPR(ICTRL)		/* cancel `show cycle' for normal instruction execution */
.
83a
	MOVW	$KZERO, R3
	ANDN	R3, R2
.
82c
	/* set internal memory base */
	MOVW	$INTMEM, R4
	MOVW	R4, SPR(IMMR)

	/* except during trap handling, R0 is zero from now on */
	MOVW	$0, R0

	/* set up SB for pre-mmu execution */
.
75,77c
	MOVW	$(MSR_EE|MSR_IP), R4
	ANDN	R4, R3
	OR		$(MSR_ME), R3
.
73c
	/*
	 * set up MSR
	 * turn off interrupts
	 * use 0x000 as exception prefix
	 * enable machine check
	 */
.
68a
/*
 * The following code assumes that the MPC8xx processor has been
 * configured to a certain extent.  In particular, we assume the
 * following registers have been set up.
 *
 * SIU
 *		SIUMCR
 *		SYPCR
 *
 */

.
53a
/*
 * mpc8xx specific debug-level SPRs
 */ 
#define CMPA		144
#define CMPB		145
#define CMPC		146
#define CMPD		147
#define ICR			148
#define DER			149
#define COUNTA		150
#define COUNTB		151
#define CMPE		152
#define CMPF		153
#define CMPG		154
#define CMPH		155
#define LCTRL1		156
#define LCTRL2		157
#define ICTRL		158
#define BAR			159
#define DPDR		630

.
7,8c
#define DAR		19	/* Data Address Register */
#define DEC		22	/* Decrementer */
.
## diffname mpc/l.s 2001/0527 # deleted
## diff -e /n/emeliedump/2000/0516/sys/src/9/mpc/l.s /n/emeliedump/2001/0527/sys/src/9/mpc/l.s
1,952d
