[Vm-dev] VM Maker: VMMaker.oscog-eem.2124.mcz

commits at source.squeak.org commits at source.squeak.org
Thu Feb 9 01:37:35 UTC 2017


Eliot Miranda uploaded a new version of VMMaker to project VM Maker:
http://source.squeak.org/VMMaker/VMMaker.oscog-eem.2124.mcz

==================== Summary ====================

Name: VMMaker.oscog-eem.2124
Author: eem
Time: 8 February 2017, 5:36:03.25071 pm
UUID: 194b7409-a53c-4d94-9488-7c573d6d418e
Ancestors: VMMaker.oscog-cb.2117, VMMaker.oscogSPC-eem.2124

Merge with VMMaker.oscogSPC-eem.2124

=============== Diff against VMMaker.oscog-cb.2117 ===============

Item was changed:
  ----- Method: CCodeGenerator>>localizeGlobalVariables (in category 'utilities') -----
  localizeGlobalVariables
  	| candidates elected localized |
  
  	"find all globals used in only one method"
  	candidates := globalVariableUsage select: [:e | e size = 1].
+ 	"Don't localize globals; nor those that are only assigned to; they're for debugging..."
+ 	(candidates keys select: [:k| (vmClass mustBeGlobal: k)
+ 								or: [(self methodNamed: (globalVariableUsage at: k) anyOne)
+ 										ifNil: [false]
+ 										ifNotNil: [:m| (m readsVariable: k) not]]]) do:
- 	(candidates keys select: [:k| vmClass mustBeGlobal: k]) do:
  		[:k| candidates removeKey: k].
+ 	
  	elected := Set new.
  	localized := Dictionary new. "for an ordered report"
  	"move any suitable global to be local to the single method using it"
  	candidates keysAndValuesDo:
  		[:key :targets |
  		targets do:
  			[:name |
  			(methods at: name ifAbsent: []) ifNotNil:
  				[:procedure | | newDeclaration |
  				(procedure isRealMethod
  				 and: [self shouldGenerateMethod: procedure]) ifTrue:
  					[(localized at: name ifAbsentPut: [SortedCollection new]) add: key.
  					elected add: (procedure locals add: key).
  					newDeclaration := variableDeclarations at: key ifAbsent: ['sqInt ', key].
  					(self initializerForInstVar: key inStartClass: procedure definingClass) ifNotNil:
  						[:initializerNode|
  						newDeclaration := String streamContents:
  												[:s|
  												 s nextPutAll: newDeclaration; nextPutAll: ' = '.
  												 initializerNode emitCCodeOn: s level: 0 generator: self]].
  					procedure declarationAt: key put: newDeclaration.
  					variableDeclarations removeKey: key ifAbsent: []]]]].
  	logger ifNotNil:
  		[localized keys asSortedCollection do:
  			[:name|
  			(localized at: name) do:
  				[:var|
  				logger ensureCr; show: var, ' localised to ', name; cr]]].
  	elected do: [:ea| (variables includes: ea) ifTrue: [self checkDeleteVariable: ea]].
  	variables removeAllFoundIn: elected!

Item was changed:
  ----- Method: CogObjectRepresentationForSpur>>branchIf:isNotOop:target: (in category 'sista support') -----
+ branchIf: reg isNotOop:  oop target: targetFixUp
- branchIf: reg isNotOop:  oop target: targetFixup
  	<var: #targetFixUp type: #'AbstractInstruction *'>
  	<inline: true>
  	cogit CmpCq: oop R: reg.
+ 	cogit JumpNonZero: targetFixUp.
+ 	^0!
- 	cogit JumpNonZero: targetFixup.
- 	^ 0!

Item was changed:
  ----- Method: CogObjectRepresentationForSpur>>branchIf:isOop:target: (in category 'sista support') -----
+ branchIf: reg isOop:  oop target: targetFixUp
- branchIf: reg isOop:  oop target: targetFixup
  	<var: #targetFixUp type: #'AbstractInstruction *'>
  	<inline: true>
  	cogit CmpCq: oop R: reg.
+ 	cogit JumpZero: targetFixUp.
+ 	^0!
- 	cogit JumpZero: targetFixup.
- 	^ 0!

Item was changed:
  ----- Method: CogVMSimulator>>primitiveDirectoryEntry (in category 'file primitives') -----
  primitiveDirectoryEntry
  	| name pathName arrayNilOrSymbol result |
  	name := self stringOf: self stackTop.
  	pathName := self stringOf: (self stackValue: 1).
  	
  	"temporary work-around to make it work in Pharo..."
  	self cppIf: PharoVM ifTrue: [ pathName := Smalltalk imagePath ].
  	
  	self successful ifFalse:
  		[^self primitiveFail].
  
  	arrayNilOrSymbol := FileDirectory default primLookupEntryIn: pathName name: name.
  	arrayNilOrSymbol ifNil:
  		[self pop: 3 thenPush: objectMemory nilObject.
  		 ^self].
  	arrayNilOrSymbol isArray ifFalse:
  		["arrayNilOrSymbol ~~ #primFailed ifTrue:
  			[self halt]. "
+ 		self transcript show: name, ' NOT FOUND'.
- 		Transcript show: name, ' NOT FOUND'.
  		 ^self primitiveFail].
  
  	result := PharoVM 
  		ifTrue:
  			[self makeDirEntryName: (arrayNilOrSymbol at: 1) size: (arrayNilOrSymbol at: 1) size
  				createDate: (arrayNilOrSymbol at: 2) modDate: (arrayNilOrSymbol at: 3)
  				isDir: (arrayNilOrSymbol at: 4) fileSize: (arrayNilOrSymbol at: 5)
  				posixPermissions: (arrayNilOrSymbol at: 6) isSymlink: (arrayNilOrSymbol at: 7) ]
  		ifFalse:
  			[self makeDirEntryName: (arrayNilOrSymbol at: 1) size: (arrayNilOrSymbol at: 1) size
  				createDate: (arrayNilOrSymbol at: 2) modDate: (arrayNilOrSymbol at: 3)
  				isDir: (arrayNilOrSymbol at: 4) fileSize: (arrayNilOrSymbol at: 5) ].
  	self pop: 3 thenPush: result!

Item was changed:
  ----- Method: Cogit>>checkIfValidOopRefAndTarget:pc:cogMethod: (in category 'garbage collection') -----
  checkIfValidOopRefAndTarget: annotation pc: mcpc cogMethod: cogMethod
  	"Check for a valid object reference, if any, at a map entry.  Answer a code unique to each error for debugging."
  	<var: #mcpc type: #'char *'>
  	<var: #nsSendCache type: #'NSSendCache *'>
  	| literal entryPoint |
  	annotation = IsObjectReference ifTrue:
  		[literal := literalsManager fetchLiteralAtAnnotatedAddress: mcpc asUnsignedInteger using: backEnd.
  		 (self asserta: (objectRepresentation checkValidOopReference: literal)) ifFalse:
  			[^1].
  		((objectRepresentation couldBeObject: literal)
  		 and: [objectMemory isReallyYoungObject: literal]) ifTrue:
  			[(self asserta: (self cCoerceSimple: cogMethod to: #'CogMethod *') cmRefersToYoung) ifFalse:
  				[^2]]].
  
  	NewspeakVM ifTrue:
  		[annotation = IsNSSendCall ifTrue:
  			[| nsSendCache classTag enclosingObject nsTargetMethod |
  			nsSendCache := self nsSendCacheFromReturnAddress: mcpc asInteger.
  			(self asserta: (objectRepresentation checkValidOopReference: nsSendCache selector)) ifFalse:
  				[^9].
  			classTag := nsSendCache classTag.
  			(self asserta: (classTag = 0 or: [objectRepresentation validInlineCacheTag: classTag])) ifFalse:
  				[^10].
  			enclosingObject := nsSendCache enclosingObject.
  			(self asserta: (enclosingObject = 0 or: [objectRepresentation checkValidOopReference: enclosingObject])) ifFalse:
  				[^11].
  			entryPoint := nsSendCache target.
  			entryPoint ~= 0 ifTrue: [
  				nsTargetMethod := self cCoerceSimple: entryPoint - cmNoCheckEntryOffset to: #'CogMethod *'.
  				(self asserta: (nsTargetMethod cmType = CMMethod)) ifFalse:
  					[^12]]]].
  
  	(self isPureSendAnnotation: annotation) ifTrue:
  		[(self asserta: (self cCoerceSimple: cogMethod to: #'CogMethod *') cmType = CMMethod) ifFalse:
  			[^3].
+ 		 self entryCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into:
+ 			[:entryPt :cacheTag :tagCouldBeObject|
+ 			entryPoint := entryPt.
- 		 self offsetCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into:
- 			[:offset :cacheTag :tagCouldBeObject|
  			tagCouldBeObject
  				ifTrue:
  					[(objectRepresentation couldBeObject: cacheTag)
  						ifTrue:
  							[(self asserta: (objectRepresentation checkValidOopReference: cacheTag)) ifFalse:
  								[^4]]
  						ifFalse:
  							[(self asserta: (objectRepresentation validInlineCacheTag: cacheTag)) ifFalse:
  								[^5]].
  					((objectRepresentation couldBeObject: cacheTag)
  					 and: [objectMemory isReallyYoungObject: cacheTag]) ifTrue:
  						[(self asserta: (self cCoerceSimple: cogMethod to: #'CogMethod *') cmRefersToYoung) ifFalse:
  							[^6]]]
  				ifFalse:
+ 					[(self inlineCacheTagsAreIndexes
+ 					  and: [self self entryPointTagIsSelector: entryPoint])
+ 						ifTrue:
+ 							[cacheTag signedIntFromLong < 0
+ 								ifTrue:
+ 									[cacheTag signedIntFromLong negated > NumSpecialSelectors ifTrue:
+ 										[^7]]
+ 								ifFalse:
+ 									[cacheTag >= (objectMemory literalCountOf: enumeratingCogMethod methodObject) ifTrue:
+ 										[^8]]]
+ 						ifFalse:
+ 							[(self asserta: (objectRepresentation validInlineCacheTag: cacheTag)) ifFalse:
+ 								[^9]]]].
- 					[(self asserta: (objectRepresentation validInlineCacheTag: cacheTag)) ifFalse:
- 						[^7]]].
- 		entryPoint := backEnd callTargetFromReturnAddress: mcpc asInteger.
  		entryPoint > methodZoneBase ifTrue:
  			["It's a linked send; find which kind."
  			 self targetMethodAndSendTableFor: entryPoint annotation: annotation into:
  					[:targetMethod :sendTable|
  					 (self asserta: (targetMethod cmType = CMMethod
  								   or: [targetMethod cmType = CMClosedPIC
  								   or: [targetMethod cmType = CMOpenPIC]])) ifFalse:
+ 						[^10]]]].
- 						[^8]]]].
  	^0 "keep scanning"!

Item was changed:
  ----- Method: Cogit>>ensureFixupAt: (in category 'compile abstract instructions') -----
+ ensureFixupAt: targetPC
+ 	"Make sure there's a flagged fixup at the target pc in fixups.
- ensureFixupAt: targetIndex
- 	"Make sure there's a flagged fixup at the targetIndex (pc relative to first pc) in fixups.
  	 Initially a fixup's target is just a flag.  Later on it is replaced with a proper instruction."
  	<returnTypeC: #'BytecodeFixup *'>
  	| fixup |
  	<var: #fixup type: #'BytecodeFixup *'>
+ 	fixup := self fixupAt: targetPC - initialPC.
- 	fixup := self fixupAt: targetIndex.
  	fixup notAFixup ifTrue:
  		[fixup becomeFixup].
  	fixup recordBcpc: bytecodePC.
  	^fixup!

Item was added:
+ ----- Method: Cogit>>entryCacheTagAndCouldBeObjectAt:annotation:into: (in category 'in-line cacheing') -----
+ entryCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into: trinaryBlock
+ 	"Evaluate trinaryBlock with the entryPoint, inline cache tag and whether the cache
+ 	 tag could be an object, for the send at mcpc with annotation annotation."
+ 	<inline: true>
+ 	| cacheTag entryPoint tagCouldBeObj |
+ 	cacheTag := backEnd inlineCacheTagAt: mcpc asInteger.
+ 	entryPoint := backEnd callTargetFromReturnAddress: mcpc asInteger.
+ 	"in-line cache tags are the selectors of sends if sends are unlinked,
+ 	 the selectors of super sends (entry offset = cmNoCheckEntryOffset),
+ 	 the selectors of open PIC sends (entry offset = cmEntryOffset, target is an Open PIC)
+ 	 or in-line cache tags (classes, class indices, immediate bit patterns, etc).
+ 	 Note that selectors can be immediate so there is no guarantee that they
+ 	 are markable/remappable objects."
+ 	tagCouldBeObj := self inlineCacheTagsAreIndexes not
+ 						and: [objectRepresentation inlineCacheTagsMayBeObjects
+ 							or: [self entryPointTagIsSelector: entryPoint]].
+ 	trinaryBlock
+ 		value: entryPoint
+ 		value: cacheTag
+ 		value: tagCouldBeObj!

Item was added:
+ ----- Method: Cogit>>entryPointTagIsSelector: (in category 'in-line cacheing') -----
+ entryPointTagIsSelector: entryPoint
+ 	"Answer if the entryPoint's tag is expected to be a selector reference, as opposed to a class tag."
+ 	^entryPoint < methodZoneBase
+ 	 or: [(entryPoint bitAnd: entryPointMask) = uncheckedEntryAlignment
+ 	 or: [(entryPoint bitAnd: entryPointMask) = checkedEntryAlignment
+ 		and: [(self cCoerceSimple: entryPoint - cmEntryOffset to: #'CogMethod *') cmType = CMOpenPIC]]]!

Item was changed:
  ----- Method: Cogit>>markLiterals:pc:method: (in category 'garbage collection') -----
  markLiterals: annotation pc: mcpc method: cogMethod
  	"Mark and trace literals.
  	 Additionally in Newspeak, void push implicits that have unmarked classes."
  	<var: #mcpc type: #'char *'>
  	<var: #nsSendCache type: #'NSSendCache *'>
  	| literal |
  	annotation = IsObjectReference ifTrue:
  		[literal := literalsManager fetchLiteralAtAnnotatedAddress: mcpc asUnsignedInteger using: backEnd.
  		 (objectRepresentation
  				markAndTraceLiteral: literal
  				in: (self cCoerceSimple: cogMethod to: #'CogMethod *')
  				atpc: mcpc asUnsignedInteger) ifTrue:
  			[codeModified := true]].
  
  	NewspeakVM ifTrue:
  		[annotation = IsNSSendCall ifTrue:
  			[| nsSendCache sel eo |
  			nsSendCache := self nsSendCacheFromReturnAddress: mcpc asInteger.
  			sel := nsSendCache selector.
  				(objectMemory isForwarded: sel)
  					ifFalse: [objectMemory markAndTrace: sel]
  					ifTrue: [sel := objectMemory followForwarded: literal.
  							nsSendCache selector: sel.
  							self markAndTraceUpdatedLiteral: sel in: (self cCoerceSimple: cogMethod to: #'CogMethod *')].
  			eo := nsSendCache enclosingObject.
  			eo ~= 0 ifTrue:
  				[(objectMemory isForwarded: eo)
  					ifFalse: [objectMemory markAndTrace: eo]
  					ifTrue: [eo := objectMemory followForwarded: literal.
  							nsSendCache enclosingObject: eo.
  							self markAndTraceUpdatedLiteral: eo in: (self cCoerceSimple: cogMethod to: #'CogMethod *')]]]].
  
  	(self isPureSendAnnotation: annotation) ifTrue:
+ 		[self entryCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into:
- 		[self offsetCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into:
  			[:entryPoint :cacheTag :tagCouldBeObj |
  			 tagCouldBeObj ifTrue:
  				[(objectRepresentation
  						markAndTraceCacheTagLiteral: cacheTag
  						in: (self cCoerceSimple: cogMethod to: #'CogMethod *')
  						atpc: mcpc asUnsignedInteger) ifTrue:
  					["cacheTag is selector" codeModified := true]]]].
  
  	^0 "keep scanning"!

Item was changed:
  ----- Method: Cogit>>markLiteralsAndUnlinkIfUnmarkedSend:pc:method: (in category 'garbage collection') -----
  markLiteralsAndUnlinkIfUnmarkedSend: annotation pc: mcpc method: cogMethod
  	"Mark and trace literals.  Unlink sends that have unmarked cache tags or targets."
  	<var: #mcpc type: #'char *'>
  	<var: #nsSendCache type: #'NSSendCache *'>
  	| literal |
  	annotation = IsObjectReference ifTrue:
  		[literal := literalsManager fetchLiteralAtAnnotatedAddress: mcpc asUnsignedInteger using: backEnd.
  		 (objectRepresentation
  				markAndTraceLiteral: literal
  				in: (self cCoerceSimple: cogMethod to: #'CogMethod *')
  				atpc: mcpc asUnsignedInteger) ifTrue:
  			[codeModified := true]].
  
  	NewspeakVM ifTrue:
  		[annotation = IsNSSendCall ifTrue:
  			[| nsSendCache entryPoint targetMethod sel eo |
  			nsSendCache := self nsSendCacheFromReturnAddress: mcpc asInteger.
  			entryPoint := nsSendCache target.
  			entryPoint ~= 0 ifTrue: "Send is linked"
  				[targetMethod := self cCoerceSimple: entryPoint - cmNoCheckEntryOffset to: #'CogMethod *'.
  				 (self markAndTraceOrFreeCogMethod: targetMethod
  					firstVisit: targetMethod asUnsignedInteger > mcpc asUnsignedInteger) ifTrue:	
  						[self voidNSSendCache: nsSendCache]].
  			sel := nsSendCache selector.
  			(objectMemory isForwarded: sel)
  				ifFalse: [objectMemory markAndTrace: sel]
  				ifTrue: [sel := objectMemory followForwarded: literal.
  						nsSendCache selector: sel.
  						self markAndTraceUpdatedLiteral: sel in: (self cCoerceSimple: cogMethod to: #'CogMethod *')].
  			eo := nsSendCache enclosingObject.
  			eo ~= 0 ifTrue:
  				[(objectMemory isForwarded: eo)
  					ifFalse: [objectMemory markAndTrace: eo]
  					ifTrue: [eo := objectMemory followForwarded: literal.
  							nsSendCache enclosingObject: eo.
  							self markAndTraceUpdatedLiteral: eo in: (self cCoerceSimple: cogMethod to: #'CogMethod *')]]]].
  
  	(self isPureSendAnnotation: annotation) ifTrue:
+ 		[self entryCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into:
- 		[self offsetCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into:
  			[:entryPoint :cacheTag :tagCouldBeObj | | cacheTagMarked |
  			 cacheTagMarked := tagCouldBeObj and: [objectRepresentation cacheTagIsMarked: cacheTag].
  			 entryPoint > methodZoneBase
  				ifTrue: "It's a linked send."
  					[self targetMethodAndSendTableFor: entryPoint annotation: annotation into:
  						[:targetMethod :sendTable| 
  						 (cacheTagMarked not
  						  or: [self markAndTraceOrFreeCogMethod: targetMethod
  								firstVisit: targetMethod asUnsignedInteger > mcpc asUnsignedInteger]) ifTrue:
  							["Either the cacheTag is unmarked (e.g. new class) or the target
  							  has been freed (because it is unmarked), so unlink the send."
  							 self unlinkSendAt: mcpc targetMethod: targetMethod sendTable: sendTable.
  							 objectRepresentation
  								markAndTraceLiteral: targetMethod selector
  								in: targetMethod
  								at: (self addressOf: targetMethod selector put: [:val| targetMethod selector: val])]]]
  				ifFalse:  "cacheTag is selector"
  					[(objectRepresentation
  							markAndTraceCacheTagLiteral: cacheTag
  							in: (self cCoerceSimple: cogMethod to: #'CogMethod *')
  							atpc: mcpc asUnsignedInteger) ifTrue:
  						[codeModified := true]]]].
  
  	^0 "keep scanning"!

Item was changed:
  ----- Method: Cogit>>markYoungObjects:pc:method: (in category 'garbage collection') -----
  markYoungObjects: annotation pc: mcpc method: cogMethod
  	"Mark and trace young literals."
  	<var: #mcpc type: #'char *'>
  	<var: #nsSendCache type: #'NSSendCache *'>
  	| literal |
  	annotation = IsObjectReference ifTrue:
  		[literal := literalsManager fetchLiteralAtAnnotatedAddress: mcpc asUnsignedInteger using: backEnd.
  		 objectRepresentation markAndTraceLiteralIfYoung: literal].
  
  	NewspeakVM ifTrue: [annotation = IsNSSendCall ifTrue:
  		[| nsSendCache |
  		 nsSendCache := self nsSendCacheFromReturnAddress: mcpc asInteger.
  		 objectRepresentation markAndTraceLiteralIfYoung: nsSendCache selector.
  		 nsSendCache enclosingObject ~= 0 ifTrue:
  			[objectRepresentation markAndTraceLiteralIfYoung: nsSendCache enclosingObject]]].
  
  	(self isPureSendAnnotation: annotation) ifTrue:
+ 		[self entryCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into:
- 		[self offsetCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into:
  			[:entryPoint :cacheTag :tagCouldBeObj |
  			 tagCouldBeObj ifTrue:
  				[objectRepresentation markAndTraceLiteralIfYoung: cacheTag]]].
  
  	^0 "keep scanning"!

Item was removed:
- ----- Method: Cogit>>offsetCacheTagAndCouldBeObjectAt:annotation:into: (in category 'in-line cacheing') -----
- offsetCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into: trinaryBlock
- 	"Evaluate trinaryBlock with the entry, inline cache tag and whether the cache
- 	 tag could be an object, for the send at mcpc with annotation annotation."
- 	<inline: true>
- 	| cacheTag entryPoint tagCouldBeObj |
- 	cacheTag := backEnd inlineCacheTagAt: mcpc asInteger.
- 	entryPoint := backEnd callTargetFromReturnAddress: mcpc asInteger.
- 	"in-line cache tags are the selectors of sends if sends are unlinked,
- 	 the selectors of super sends (entry offset = cmNoCheckEntryOffset),
- 	 the selectors of open PIC sends (entry offset = cmEntryOffset, target is an Open PIC)
- 	 or in-line cache tags (classes, class indices, immediate bit patterns, etc).
- 	 Note that selectors can be immediate so there is no guarantee that they
- 	 are markable/remappable objects."
- 	tagCouldBeObj := self inlineCacheTagsAreIndexes not
- 						and: [objectRepresentation inlineCacheTagsMayBeObjects
- 							or: [entryPoint < methodZoneBase
- 							or: [(entryPoint bitAnd: entryPointMask) = uncheckedEntryAlignment
- 							or: [(entryPoint bitAnd: entryPointMask) = checkedEntryAlignment
- 								and: [(self cCoerceSimple: entryPoint - cmEntryOffset to: #'CogMethod *') cmType = CMOpenPIC]]]]].
- 	trinaryBlock
- 		value: entryPoint
- 		value: cacheTag
- 		value: tagCouldBeObj!

Item was changed:
  ----- Method: Cogit>>remapIfObjectRef:pc:hasYoung: (in category 'garbage collection') -----
  remapIfObjectRef: annotation pc: mcpc hasYoung: hasYoungPtr
  	<var: #mcpc type: #'char *'>
  	<var: #targetMethod type: #'CogMethod *'>
  	<var: #nsSendCache type: #'NSSendCache *'>
  	annotation = IsObjectReference ifTrue:
  		[| literal mappedLiteral |
  		 literal := literalsManager fetchLiteralAtAnnotatedAddress: mcpc asUnsignedInteger using: backEnd.
  		 (objectRepresentation couldBeObject: literal) ifTrue:
  			[mappedLiteral := objectRepresentation remapObject: literal.
  			 literal ~= mappedLiteral ifTrue:
  				[literalsManager storeLiteral: mappedLiteral atAnnotatedAddress: mcpc asUnsignedInteger using: backEnd.
  				 codeModified := true].
  			 (hasYoungPtr ~= 0
  			  and: [objectMemory isYoung: mappedLiteral]) ifTrue:
  				[(self cCoerceSimple: hasYoungPtr to: #'sqInt *') at: 0 put: true]]].
  
  	NewspeakVM ifTrue: [annotation = IsNSSendCall ifTrue:
  		[| nsSendCache oop mappedOop |
  		nsSendCache := self nsSendCacheFromReturnAddress: mcpc.
  		oop := nsSendCache selector.	
  		mappedOop := objectRepresentation remapObject: oop.
  		oop ~= mappedOop ifTrue:
  			[nsSendCache selector: mappedOop.
  			(hasYoungPtr ~= 0 and: [objectMemory isYoung: mappedOop]) ifTrue:
  				[(self cCoerceSimple: hasYoungPtr to: #'sqInt *') at: 0 put: true]].
  		oop := nsSendCache enclosingObject.	
  		oop ~= 0 ifTrue: [
  			mappedOop := objectRepresentation remapObject: oop.
  			oop ~= mappedOop ifTrue:
  				[nsSendCache enclosingObject: mappedOop.
  				(hasYoungPtr ~= 0 and: [objectMemory isYoung: mappedOop]) ifTrue:
  					[(self cCoerceSimple: hasYoungPtr to: #'sqInt *') at: 0 put: true]]].
  		^0 "keep scanning"]].
  
  	(self isPureSendAnnotation: annotation) ifTrue:
+ 		[self entryCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into:
- 		[self offsetCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into:
  			[:entryPoint :cacheTag :tagCouldBeObj | | mappedCacheTag |
  			 (tagCouldBeObj
  			  and: [objectRepresentation couldBeObject: cacheTag]) ifTrue:
  				[mappedCacheTag := objectRepresentation remapObject: cacheTag.
  				 cacheTag ~= mappedCacheTag ifTrue:
  					[backEnd rewriteInlineCacheTag: mappedCacheTag at: mcpc asUnsignedInteger.
  					 codeModified := true].
  				 (hasYoungPtr ~= 0
  				  and: [objectMemory isYoung: mappedCacheTag]) ifTrue:
  					[(self cCoerceSimple: hasYoungPtr to: #'sqInt *') at: 0 put: true]].
  			hasYoungPtr ~= 0 ifTrue:
  				["Since the unlinking routines may rewrite the cacheTag to the send's selector, and
  				  since they don't have the cogMethod to hand and can't add it to youngReferrers,
  				  the method must remain in youngReferrers if the targetMethod's selector is young."
  				 entryPoint > methodZoneBase ifTrue: "It's a linked send."
  					[self targetMethodAndSendTableFor: entryPoint annotation: annotation into:
  						[:targetMethod :ignored|
  						 (objectMemory isYoung: targetMethod selector) ifTrue:
  							[(self cCoerceSimple: hasYoungPtr to: #'sqInt *') at: 0 put: true]]]]]].
  	^0 "keep scanning"!

Item was changed:
  ----- Method: RegisterAllocatingCogit>>ensureFixupAt: (in category 'bytecode generator support') -----
+ ensureFixupAt: targetPC
+ 	"Make sure there's a flagged fixup at the target pc in fixups.
+ 	 Initially a fixup's target is just a flag.  Later on it is replaced with a proper instruction.
+ 	 Override to generate stack merging code if required."
- ensureFixupAt: targetIndex
  	| fixup |	
  	<var: #fixup type: #'BytecodeFixup *'>
+ 	fixup := self fixupAt:  targetPC - initialPC.
- 	fixup := self fixupAt: targetIndex.
  	fixup needsFixup 
  		ifTrue:
  			[fixup mergeSimStack
  				ifNil: [self setMergeSimStackOf: fixup]
  				ifNotNil: [self mergeCurrentSimStackWith: fixup]]
  		ifFalse: 
  			[self assert: fixup mergeSimStack isNil.
+ 			 self moveVolatileSimStackEntriesToRegisters.
+ 			 self setMergeSimStackOf: fixup].
+ 	^super ensureFixupAt: targetPC!
- 			self moveVolatileSimStackEntriesToRegisters.
- 			self setMergeSimStackOf: fixup].
- 	^super ensureFixupAt: targetIndex!

Item was changed:
  ----- Method: RegisterAllocatingCogit>>ensureNonMergeFixupAt: (in category 'compile abstract instructions') -----
+ ensureNonMergeFixupAt: targetPC
+ 	"Make sure there's a flagged fixup at the target pc in fixups.
+ 	 Initially a fixup's target is just a flag.  Later on it is replaced with a proper instruction.
+ 	 Override to remember the simStack state at the target, if required."
- ensureNonMergeFixupAt: targetIndex
- 	"Make sure there's a flagged fixup at the targetIndex (pc relative to first pc) in fixups.
- 	 Initially a fixup's target is just a flag.  Later on it is replaced with a proper instruction."
  	| fixup |
+ 	fixup := super ensureNonMergeFixupAt: targetPC.
- 	fixup := super ensureNonMergeFixupAt: targetIndex.
  	fixup mergeSimStack ifNil: [self setMergeSimStackOf: fixup].
  	^fixup!

Item was changed:
  ----- Method: RegisterAllocatingCogit>>genExtJumpIfNotInstanceOfBehaviorsOrPopBytecode (in category 'bytecode generators') -----
  genExtJumpIfNotInstanceOfBehaviorsOrPopBytecode
  	"SistaV1: *	254		11111110	kkkkkkkk	jjjjjjjj		branch If Not Instance Of Behavior/Array Of Behavior kkkkkkkk (+ Extend A * 256, where Extend A >= 0) distance jjjjjjjj (+ Extend B * 256, where Extend B >= 0)"
  								
  	| reg literal distance targetFixUp |
  	
  	reg := self allocateRegForStackEntryAt: 0.
  	self ssTop popToReg: reg.
  	
  	literal := self getLiteral: (extA * 256 + byte1).
  	extA := 0.
  	distance := extB * 256 + byte2.
  	extB := 0.
  	numExtB := 0.
  
  	"Because ensureFixupAt: will generate code to merge with the target simStack when required, it is
  	 necessary to tease apart the jump and the merge so that the merge code is only executed if the
  	 branch is taken.  i.e. if merge code is required we generate
  			jump not cond Lcontinue
  			... merge code ...
  			jump Ltarget
  		Lcontinue:
  	 instead of the incorrect
  			... merge code ...
  			jump cond Ltarget"
+ 	(self mergeRequiredForJumpTo: bytecodePC + 3 + distance) ifTrue:
- 	(self mergeRequiredForJumpTo: bytecodePC + 3 + distance - initialPC) ifTrue:
  		[self shouldBeImplemented].
  
+ 	targetFixUp := self cCoerceSimple: (self ensureFixupAt: bytecodePC + 3 + distance) to: #'AbstractInstruction *'.
- 	targetFixUp := self cCoerceSimple: (self ensureFixupAt: bytecodePC + 3 + distance - initialPC) to: #'AbstractInstruction *'.
  		
  	(objectMemory isArrayNonImm: literal)
  		ifTrue: [objectRepresentation branchIf: reg notInstanceOfBehaviors: literal target: targetFixUp]
  		ifFalse: [objectRepresentation branchIf: reg notInstanceOfBehavior: literal target: targetFixUp].
  						
  	self ssPop: 1.
  	
  	^0!

Item was changed:
  ----- Method: RegisterAllocatingCogit>>genForwardersInlinedIdenticalOrNotIf: (in category 'bytecode generators') -----
  genForwardersInlinedIdenticalOrNotIf: orNot
  	| nextPC branchDescriptor unforwardRcvr argReg targetBytecodePC
  	unforwardArg  rcvrReg postBranchPC label fixup |
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  	<var: #label type: #'AbstractInstruction *'>
  	
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  
  	"If an operand is an annotable constant, it may be forwarded, so we need to store it into a 
  	register so the forwarder check can jump back to the comparison after unforwarding the constant.
  	However, if one of the operand is an unnanotable constant, does not allocate a register for it 
  	(machine code will use operations on constants) and does not generate forwarder checks."
  	unforwardRcvr := (objectRepresentation isUnannotatableConstant: (self ssValue: 1)) not.
  	unforwardArg := (objectRepresentation isUnannotatableConstant: self ssTop) not.
  
  	self 
  		allocateEqualsEqualsRegistersArgNeedsReg: unforwardArg 
  		rcvrNeedsReg: unforwardRcvr 
  		into: [ :rcvr :arg | rcvrReg:= rcvr. argReg := arg ].
  
  	"If not followed by a branch, resolve to true or false."
  	(branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse]) ifFalse:
  		[^ self 
  			genIdenticalNoBranchArgIsConstant: unforwardArg not
  			rcvrIsConstant: unforwardRcvr not
  			argReg: argReg 
  			rcvrReg: rcvrReg 
  			orNotIf: orNot].
  	
  	label := self Label.
  	self genCmpArgIsConstant: unforwardArg not rcvrIsConstant: unforwardRcvr not argReg: argReg rcvrReg: rcvrReg.
  	self ssPop: 2.
  
+ 	"For now just deny we're in the situation we have yet to implement ;-)
+ 	 self printSimStack; printSimStack: (self fixupAt: postBranchPC - initialPC) mergeSimStack"
- 	"For now just deny we're in the situation we have yet to implement ;-)"
  	self deny: (self mergeRequiredForJumpTo: targetBytecodePC).
  	self deny: (self mergeRequiredForJumpTo: postBranchPC).
  
  	"Further since there is a following conditional jump bytecode, define
  	 non-merge fixups and leave the cond bytecode to set the mergeness."
  	(self fixupAt: nextPC - initialPC) notAFixup
  		ifTrue: "The next instruction is dead.  we can skip it."
  			[deadCode := true.
+ 		 	 self ensureFixupAt: targetBytecodePC.
+ 			 self ensureFixupAt: postBranchPC]
- 		 	 self ensureFixupAt: targetBytecodePC - initialPC.
- 			 self ensureFixupAt: postBranchPC - initialPC]
  		ifFalse:
  			[self deny: deadCode]. "push dummy value below"
  
  	self assert: (unforwardArg or: [unforwardRcvr]).
  	orNot == branchDescriptor isBranchTrue "orNot is true for ~~"
  		ifFalse: "branchDescriptor is branchFalse"
+ 			[ fixup := (self ensureNonMergeFixupAt: postBranchPC) asUnsignedInteger.
+ 			self JumpZero:  (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger ]
- 			[ fixup := (self ensureNonMergeFixupAt: postBranchPC - initialPC) asUnsignedInteger.
- 			self JumpZero:  (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger ]
  		ifTrue:
+ 			[ fixup := (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger.
+ 			self JumpZero: (self ensureNonMergeFixupAt: postBranchPC) asUnsignedInteger ].
- 			[ fixup := (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger.
- 			self JumpZero: (self ensureNonMergeFixupAt: postBranchPC - initialPC) asUnsignedInteger ].
  
  	deadCode ifFalse:
  		[self ssPushConstant: objectMemory trueObject]. "dummy value"
  	"The forwarders checks need to jump back to the comparison (label) if a forwarder is found, else 
  	jump forward either to the next forwarder check or to the postBranch or branch target (fixup)."
  	unforwardArg ifTrue: 
  		[ unforwardRcvr
  			ifTrue: [ objectRepresentation genEnsureOopInRegNotForwarded: argReg scratchReg: TempReg jumpBackTo: label ]
  			ifFalse: [ objectRepresentation 
  				genEnsureOopInRegNotForwarded: argReg 
  				scratchReg: TempReg 
  				ifForwarder: label
  				ifNotForwarder: fixup ] ].
  	unforwardRcvr ifTrue: 
  		[ objectRepresentation 
  			genEnsureOopInRegNotForwarded: rcvrReg 
  			scratchReg: TempReg 
  			ifForwarder: label
  			ifNotForwarder: fixup ].
  		
  	"Not reached, execution flow have jumped to fixup"
  	
  	^0!

Item was changed:
  ----- Method: RegisterAllocatingCogit>>genJumpIf:to: (in category 'bytecode generator support') -----
  genJumpIf: boolean to: targetBytecodePC
  	<inline: false>
  	| eventualTarget desc reg fixup ok mbb noMustBeBoolean |
  	<var: #fixup type: #'BytecodeFixup *'>
  	<var: #ok type: #'AbstractInstruction *'>
  	<var: #desc type: #'CogSimStackEntry *'>
  	<var: #mbb type: #'AbstractInstruction *'>
  	eventualTarget := self eventualTargetOf: targetBytecodePC.
  	desc := self ssTop.
  	self ssPop: 1.
  
  	noMustBeBoolean := self extASpecifiesNoMustBeBoolean.
  	extA := 0.
  
  	(desc type == SSConstant
  	 and: [desc constant = objectMemory trueObject or: [desc constant = objectMemory falseObject]]) ifTrue:
  		["Must annotate the bytecode for correct pc mapping."
  		 desc constant = boolean
  			ifTrue:
  				[deadCode := true. "Can't fall through."
+ 				 fixup := self ensureFixupAt: eventualTarget.
- 				 fixup := self ensureFixupAt: eventualTarget - initialPC.
  				 self annotateBytecode: (self Jump: fixup)]
  		 	ifFalse:
  				[self annotateBytecode: (self prevInstIsPCAnnotated
  												ifTrue: [self Nop]
  												ifFalse: [self Label])].
  		 ^0].
  
  	"try and use the top entry's register if any, but only if it can be destroyed."
  	reg := (desc type ~= SSRegister
  			or: [(self anyReferencesToRegister: desc register inAllButTopNItems: 0)
  			or: [(desc register = ReceiverResultReg and: [optStatus isReceiverResultRegLive])]])
  				ifTrue: [TempReg]
  				ifFalse: [desc register].
  	desc popToReg: reg.
  	"Cunning trick by LPD.  If true and false are contiguous subtract the smaller.
  	 Correct result is either 0 or the distance between them.  If result is not 0 or
  	 their distance send mustBeBoolean."
  	self assert: (objectMemory objectAfter: objectMemory falseObject) = objectMemory trueObject.
  
  	"Merge required; must not generate merge code along untaken branch, so flip the order."
  	(self mergeRequiredForJumpTo: eventualTarget)
  		ifTrue:
  			[self genSubConstant: (boolean = objectMemory trueObject
  										ifTrue: [objectMemory falseObject]
  										ifFalse: [objectMemory trueObject])
  				R: reg.
  			 ok := self JumpZero: 0.
  			 self CmpCq: (boolean = objectMemory trueObject
  							ifTrue: [objectMemory trueObject - objectMemory falseObject]
  							ifFalse: [objectMemory falseObject - objectMemory trueObject])
  				R: reg.
  			 noMustBeBoolean ifTrue: 
+ 				[self JumpZero: (self ensureFixupAt: eventualTarget). "generates merge code"
- 				[self JumpZero: (self ensureFixupAt: eventualTarget - initialPC). "generates merge code"
  				 ok jmpTarget: (self annotateBytecode: self lastOpcode).
  				 ^0].
  			 mbb := self JumpNonZero: 0.
+ 			 self Jump: (self ensureFixupAt: eventualTarget). "generates merge code"
- 			 self Jump: (self ensureFixupAt: eventualTarget - initialPC). "generates merge code"
  			 mbb jmpTarget: self Label]
  		ifFalse:
  			[self genSubConstant: boolean R: reg.
+ 			 self JumpZero: (self ensureFixupAt: eventualTarget).
- 			 self JumpZero: (self ensureFixupAt: eventualTarget - initialPC).
  			 noMustBeBoolean ifTrue: 
  				[self annotateBytecode: self lastOpcode.
  				 ^0].
  			 self CmpCq: (boolean = objectMemory falseObject
  							ifTrue: [objectMemory trueObject - objectMemory falseObject]
  							ifFalse: [objectMemory falseObject - objectMemory trueObject])
  				R: reg.
  			 ok := self JumpZero: 0].
  
  	reg ~= TempReg ifTrue:
  		[self MoveR: reg R: TempReg].
  	self copySimStackToScratch: simSpillBase.
  	self ssFlushTo: simStackPtr.
  	self genCallMustBeBooleanFor: boolean.
  	"NOTREACHED"
  	ok jmpTarget: (self annotateBytecode: self Label).
  	self restoreSimStackFromScratch.
  	^0!

Item was changed:
  ----- Method: RegisterAllocatingCogit>>genJumpTo: (in category 'bytecode generator support') -----
  genJumpTo: targetBytecodePC
  	"Overridden to avoid the flush because in this cogit the stack state is merged at merge points."
  	deadCode := true. "can't fall through"
+ 	self Jump: (self ensureFixupAt: (self eventualTargetOf: targetBytecodePC)).
- 	self Jump: (self ensureFixupAt: (self eventualTargetOf: targetBytecodePC) - initialPC).
  	^ 0!

Item was changed:
  ----- Method: RegisterAllocatingCogit>>genSpecialSelectorComparison (in category 'bytecode generators') -----
  genSpecialSelectorComparison
  	| nextPC postBranchPC targetBytecodePC primDescriptor branchDescriptor
  	  rcvrIsInt argIsInt argInt jumpNotSmallInts inlineCAB index rcvrReg argReg |
  	<var: #primDescriptor type: #'BytecodeDescriptor *'>
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  	<var: #jumpNotSmallInts type: #'AbstractInstruction *'>
  	primDescriptor := self generatorAt: byte0.
  	argIsInt := self ssTop type = SSConstant
  				 and: [objectMemory isIntegerObject: (argInt := self ssTop constant)].
  	rcvrIsInt := (self ssValue: 1) type = SSConstant
  				 and: [objectMemory isIntegerObject: (self ssValue: 1) constant].
  
  	(argIsInt and: [rcvrIsInt]) ifTrue:
  		[^self genStaticallyResolvedSpecialSelectorComparison].
  
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  
  	"Only interested in inlining if followed by a conditional branch."
  	inlineCAB := branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse].
  	"Further, only interested in inlining = and ~= if there's a SmallInteger constant involved.
  	 The relational operators successfully statically predict SmallIntegers; the equality operators do not."
  	(inlineCAB and: [primDescriptor opcode = JumpZero or: [primDescriptor opcode = JumpNonZero]]) ifTrue:
  		[inlineCAB := argIsInt or: [rcvrIsInt]].
  	inlineCAB ifFalse:
  		[^self genSpecialSelectorSend].
  
  	"In-line the comparison and the jump, but if the types are not SmallInteger then we will need
  	 to do a send and fall through to the following conditional branch.  Since we're allocating values
  	 in registers we would like to keep those registers live on the inlined path and reload registers
  	 along the non-inlined send path.  The merge logic at the branch destinations handles this."
  	argIsInt
  		ifTrue:
  			[rcvrReg := self allocateRegForStackEntryAt: 1.
  			 (self ssValue: 1) popToReg: rcvrReg.
  			 self MoveR: rcvrReg R: TempReg]
  		ifFalse:
  			[self allocateRegForStackTopTwoEntriesInto: [:rTop :rNext| argReg := rTop. rcvrReg := rNext].
  			 rcvrReg = Arg0Reg ifTrue:
  				[rcvrReg := argReg. argReg := Arg0Reg].
  			 self ssTop popToReg: argReg.
  			 (self ssValue: 1) popToReg: rcvrReg.
  			 self MoveR: argReg R: TempReg].
  	self ssPop: 2.
  	jumpNotSmallInts := (argIsInt or: [rcvrIsInt])
  							ifTrue: [objectRepresentation genJumpNotSmallIntegerInScratchReg: TempReg]
  							ifFalse: [objectRepresentation genJumpNotSmallIntegersIn: rcvrReg andScratch: TempReg scratch: ClassReg].
  	argIsInt
  		ifTrue: [self CmpCq: argInt R: rcvrReg]
  		ifFalse: [self CmpR: argReg R: rcvrReg].
  
  	"For now just deny we're in the situation we have yet to implement ;-)"
  	self deny: (self mergeRequiredForJumpTo: targetBytecodePC).
  	self deny: (self mergeRequiredForJumpTo: postBranchPC).
  
  	"Cmp is weird/backwards so invert the comparison.  Further since there is a following conditional
  	 jump bytecode define non-merge fixups and leave the cond bytecode to set the mergeness."
  	self genConditionalBranch: (branchDescriptor isBranchTrue
  				ifTrue: [primDescriptor opcode]
  				ifFalse: [self inverseBranchFor: primDescriptor opcode])
+ 		operand: (self ensureFixupAt: targetBytecodePC) asUnsignedInteger.
+ 	self Jump: (self ensureFixupAt: postBranchPC).
- 		operand: (self ensureFixupAt: targetBytecodePC - initialPC) asUnsignedInteger.
- 	self Jump: (self ensureFixupAt: postBranchPC - initialPC).
  	jumpNotSmallInts jmpTarget: self Label.
  	self ssFlushTo: simStackPtr.
  	self deny: rcvrReg = Arg0Reg.
  	argIsInt
  		ifTrue: [self MoveCq: argInt R: Arg0Reg]
  		ifFalse: [argReg ~= Arg0Reg ifTrue: [self MoveR: argReg R: Arg0Reg]].
  	rcvrReg ~= ReceiverResultReg ifTrue: [self MoveR: rcvrReg R: ReceiverResultReg].
  	index := byte0 - self firstSpecialSelectorBytecodeOffset.
  	^self genMarshalledSend: index negated - 1 numArgs: 1 sendTable: ordinarySendTrampolines!

Item was changed:
  ----- Method: RegisterAllocatingCogit>>genVanillaInlinedIdenticalOrNotIf: (in category 'bytecode generators') -----
  genVanillaInlinedIdenticalOrNotIf: orNot
  	| nextPC postBranchPC targetBytecodePC branchDescriptor
  	  rcvrReg argReg argIsConstant rcvrIsConstant  |
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  	
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  	
  	argIsConstant := self ssTop type = SSConstant.
  	"They can't be both constants to use correct machine opcodes.
  	 However annotatable constants can't be resolved statically, hence we need to be careful."
  	rcvrIsConstant := argIsConstant not and: [(self ssValue: 1) type = SSConstant].
  	
  	self 
  		allocateEqualsEqualsRegistersArgNeedsReg: argIsConstant not 
  		rcvrNeedsReg: rcvrIsConstant not 
  		into: [ :rcvr :arg | rcvrReg:= rcvr. argReg := arg ].
  	
  	"If not followed by a branch, resolve to true or false."
  	(branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse]) ifFalse:
  		[^ self 
  			genIdenticalNoBranchArgIsConstant: argIsConstant 
  			rcvrIsConstant: rcvrIsConstant 
  			argReg: argReg 
  			rcvrReg: rcvrReg 
  			orNotIf: orNot].
  	
  	self genCmpArgIsConstant: argIsConstant rcvrIsConstant: rcvrIsConstant argReg: argReg rcvrReg: rcvrReg.
  	self ssPop: 2.
  
  	"For now just deny we're in the situation we have yet to implement ;-)"
  	self deny: (self mergeRequiredForJumpTo: targetBytecodePC).
  	self deny: (self mergeRequiredForJumpTo: postBranchPC).
  
  	"Further since there is a following conditional jump bytecode, define
  	 non-merge fixups and leave the cond bytecode to set the mergeness."
  	(self fixupAt: nextPC - initialPC) notAFixup
  		ifTrue: "The next instruction is dead.  we can skip it."
  			[deadCode := true.
+ 		 	 self ensureFixupAt: targetBytecodePC.
+ 			 self ensureFixupAt: postBranchPC]
- 		 	 self ensureFixupAt: targetBytecodePC - initialPC.
- 			 self ensureFixupAt: postBranchPC - initialPC]
  		ifFalse:
  			[self deny: deadCode]. "push dummy value below"
  		
  	self genConditionalBranch: (orNot == branchDescriptor isBranchTrue ifTrue: [JumpNonZero] ifFalse: [JumpZero])
+ 		operand: (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger.
- 		operand: (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger.
  
  	"If the branch is dead, then we can just fall through postBranchPC (only a nop in-between), else 
  	we need to jump over the code of the branch"
  	deadCode ifFalse:
+ 		[self Jump: (self ensureNonMergeFixupAt: postBranchPC).
- 		[self Jump: (self ensureNonMergeFixupAt: postBranchPC - initialPC).
  		 self ssPushConstant: objectMemory trueObject]. "dummy value"
  	^0!

Item was changed:
  ----- Method: RegisterAllocatingCogit>>mergeRequiredForJumpTo: (in category 'bytecode generator support') -----
+ mergeRequiredForJumpTo: targetPC
- mergeRequiredForJumpTo: target
  	"While this is a multi-pass compiler, no intermediate control-flow graph is built from bytecode and
  	 there is a monotonically increasing one-to-one relationship between bytecode pcs and machine
  	 code pcs that map to one another.  Therefore, when jumping forward, any required code to merge
  	 the state of the current simStack with that at the target must be generated before the jump
  	 (because at the target the simStack state will be whatever falls through). If only one forward jump
  	 to the target exists then that jump can simply install its simStack as the required simStack at the
  	 target and the merge code will be generated just before the target as control falls through.  But if
  	 there are two or more forward jumps to the target, a situation that occurs given that the
  	 StackToRegisterMappingCogit follows jump chains, then jumps other than the first must generate
  	 merge code before jumping.  This poses a problem for conditional branches.  The merge code must
  	 only be generated along the path that takes the jump.  Therefore this must *not* be generated:
  
  			... merge code ...
  			jump cond Ltarget
  
  	 which incorrectly executes the merge code along both the taken and untaken paths.  Instead
  	 this must be generated so that the merge code is only executed if the branch is taken.
  
  			jump not cond Lcontinue
  			... merge code ...
  			jump Ltarget
  		Lcontinue:
  
  	 Note that no merge code is required for code such as self at: (expr ifTrue: [1] ifFalse: [2])
  		17 <70> self
  		18 <71> pushConstant: true
  		19 <99> jumpFalse: 22
  		20 <76> pushConstant: 1
  		21 <90> jumpTo: 23
  		22 <77> pushConstant: 2
  		23 <C0> send: at:
  	 provided that 1 and 2 are assigned to the same target register."
  
  	self flag: 'be lazy for now; this needs more work to ignore compatible sim stacks'.
+ 	^(self fixupAt: targetPC - initialPC) hasMergeSimStack!
- 	^(self fixupAt: target - initialPC) hasMergeSimStack!

Item was changed:
  ----- Method: SimpleStackBasedCogit>>genJumpIf:to: (in category 'bytecode generator support') -----
  genJumpIf: boolean to: targetBytecodePC
  	<inline: false>
  	"Cunning trick by LPD.  If true and false are contiguous subtract the smaller.
  	 Correct result is either 0 or the distance between them.  If result is not 0 or
  	 their distance send mustBeBoolean."
  	| ok |
  	<var: #ok type: #'AbstractInstruction *'>
  	extA := 0.
  	self assert: (objectMemory objectAfter: objectMemory falseObject) = objectMemory trueObject.
  	self PopR: TempReg.
  	self genSubConstant: boolean R: TempReg.
+ 	self JumpZero: (self ensureFixupAt: targetBytecodePC).
- 	self JumpZero: (self ensureFixupAt: targetBytecodePC - initialPC).
  	self CmpCq: (boolean == objectMemory falseObject
  					ifTrue: [objectMemory trueObject - objectMemory falseObject]
  					ifFalse: [objectMemory falseObject - objectMemory trueObject])
  		R: TempReg.
  	ok := self JumpZero: 0.
  	self genCallMustBeBooleanFor: boolean.
  	ok jmpTarget: (self annotateBytecode: self Label).
  	^0!

Item was changed:
  ----- Method: SimpleStackBasedCogit>>genJumpTo: (in category 'bytecode generator support') -----
  genJumpTo: targetBytecodePC
+ 	self Jump: (self ensureFixupAt: targetBytecodePC).
- 	self Jump: (self ensureFixupAt: targetBytecodePC - initialPC).
  	^0!

Item was changed:
  ----- Method: SistaCogit>>genBinaryInlineComparison:opFalse:destReg: (in category 'inline primitive generators') -----
  genBinaryInlineComparison: opTrue opFalse: opFalse destReg: destReg
  	"Inlined comparison. opTrue = jump for true and opFalse = jump for false"
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  	| nextPC branchDescriptor targetBytecodePC postBranchPC |	
  		
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  
  	(branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse])
  		ifTrue: "This is the path where the inlined comparison is followed immediately by a branch"
  			[ (self fixupAt: nextPC - initialPC) notAFixup
  				ifTrue: "The next instruction is dead.  we can skip it."
  					[deadCode := true.
+ 				 	 self ensureFixupAt: targetBytecodePC.
+ 					 self ensureFixupAt: postBranchPC ]
- 				 	 self ensureFixupAt: targetBytecodePC - initialPC.
- 					 self ensureFixupAt: postBranchPC - initialPC ]
  				ifFalse:
  					[self ssPushConstant: objectMemory trueObject]. "dummy value"
  			self genConditionalBranch: (branchDescriptor isBranchTrue ifTrue: [opTrue] ifFalse: [opFalse])
+ 				operand: (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger.
- 				operand: (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger.
  			"We can only elide the jump if the pc after nextPC is the same as postBranchPC.
  			 Branch following means it may not be."
  			self nextDescriptorExtensionsAndNextPCInto:
  				[:iguana1 :iguana2 :iguana3 :followingPC| nextPC := followingPC].
  			(deadCode and: [nextPC = postBranchPC]) ifFalse:
+ 				[ self Jump: (self ensureNonMergeFixupAt: postBranchPC) ] ]
- 				[ self Jump: (self ensureNonMergeFixupAt: postBranchPC - initialPC) ] ]
  		ifFalse: "This is the path where the inlined comparison is *not* followed immediately by a branch"
  			[| condJump jump |
  			condJump := self genConditionalBranch: opTrue operand: 0.
  			self genMoveFalseR: destReg.
  	 		jump := self Jump: 0.
  			condJump jmpTarget: (self genMoveTrueR: destReg).
  			jump jmpTarget: self Label].
  	^ 0!

Item was changed:
  ----- Method: SistaCogit>>genCounterTripOnlyJumpIf:to: (in category 'bytecode generator support') -----
  genCounterTripOnlyJumpIf: boolean to: targetBytecodePC 
  	"Specific version if the branch is only reached while falling through if the counter trips after an inlined #== branch. We do not regenerate the counter logic in this case to avoid 24 bytes instructions."
  	
  	<var: #ok type: #'AbstractInstruction *'>
  	<var: #mustBeBooleanTrampoline type: #'AbstractInstruction *'>
  
  	| ok mustBeBooleanTrampoline |
  
  	extA := 0.
  
  	self ssFlushTo: simStackPtr - 1.
  	
  	self ssTop popToReg: TempReg.
  	
  	self ssPop: 1.
  
  	counterIndex := counterIndex + 1. "counters are increased / decreased in the inlined branch"
  
  	"We need SendNumArgsReg because of the mustBeBooleanTrampoline"
  	self ssAllocateRequiredReg: SendNumArgsReg.
  	self MoveCq: 1 R: SendNumArgsReg.
  	
  	"The first time this is reached, it necessarily calls the counter trip trampoline because SendNumArgsReg is non-zero"
  	mustBeBooleanTrampoline := self genCallMustBeBooleanFor: boolean.
  	self annotateBytecode: self Label.
  
  	"Cunning trick by LPD.  If true and false are contiguous subtract the smaller.
  	 Correct result is either 0 or the distance between them.  If result is not 0 or
  	 their distance send mustBeBoolean."
  	self assert: (objectMemory objectAfter: objectMemory falseObject) = objectMemory trueObject.
  	self genSubConstant: boolean R: TempReg.
+ 	self JumpZero: (self ensureFixupAt: targetBytecodePC).
- 	self JumpZero: (self ensureFixupAt: targetBytecodePC - initialPC).
  
  	self CmpCq: (boolean = objectMemory falseObject
  					ifTrue: [objectMemory trueObject - objectMemory falseObject]
  					ifFalse: [objectMemory falseObject - objectMemory trueObject])
  		R: TempReg.
  		
  	ok := self JumpZero: 0.
  	self MoveCq: 0 R: SendNumArgsReg. "if counterReg is 0 this is a mustBeBoolean, not a counter trip."		
  
  	self Jump: mustBeBooleanTrampoline.
  	
  	ok jmpTarget: self Label.
  	^0!

Item was changed:
  ----- Method: SistaCogit>>genExtJumpIfNotInstanceOfBehaviorsBytecode (in category 'bytecode generators') -----
  genExtJumpIfNotInstanceOfBehaviorsBytecode
  	"SistaV1: *	254		11111110	kkkkkkkk	jjjjjjjj		branch If Not Instance Of Behavior/Array Of Behavior kkkkkkkk (+ Extend A * 256, where Extend A >= 0) distance jjjjjjjj (+ Extend B * 256, where Extend B >= 0)"
  	| reg literal distance targetFixUp inverse |
  
  	"We lose track of which register holds the stack top 
  	 when jitting the branch target so we need to flush everything. 
  	 We could use a fixed register here...."
  	reg := self allocateRegForStackEntryAt: 0.
  	self ssTop popToReg: reg.
  	self ssPop: 1.
  	self ssFlushTo: simStackPtr. "flushed but the value is still in reg"
  
  	literal := self getLiteral: (extA * 256 + byte1).
  	(inverse := extB < 0) ifTrue:
  		[extB := extB + 128].
  	distance := extB * 256 + byte2.
  	extA := extB := numExtB := 0.
  
+ 	targetFixUp := self cCoerceSimple: (self ensureFixupAt: bytecodePC + 3 + distance) to: #'AbstractInstruction *'.
- 	targetFixUp := self cCoerceSimple: (self ensureFixupAt: bytecodePC + 3 + distance - initialPC) to: #'AbstractInstruction *'.
  	inverse
  		ifFalse: 
  			[(objectMemory isArrayNonImm: literal)
  				ifTrue: [objectRepresentation branchIf: reg notInstanceOfBehaviors: literal target: targetFixUp]
  				ifFalse: [objectRepresentation branchIf: reg notInstanceOfBehavior: literal target: targetFixUp] ]
  		ifTrue:
  			[(objectMemory isArrayNonImm: literal)
  				ifTrue: [objectRepresentation branchIf: reg instanceOfBehaviors: literal target: targetFixUp]
  				ifFalse: [objectRepresentation branchIf: reg instanceOfBehavior: literal target: targetFixUp]].
  
  	^0!

Item was changed:
  ----- Method: SistaCogit>>genForwardersInlinedIdenticalOrNotIf: (in category 'bytecode generators') -----
  genForwardersInlinedIdenticalOrNotIf: orNot
  	"Override to count inlined branches if followed by a conditional branch.
  	 We borrow the following conditional branch's counter and when about to
  	 inline the comparison we decrement the counter (without writing it back)
  	 and if it trips simply abort the inlining, falling back to the normal send which
  	 will then continue to the conditional branch which will trip and enter the abort."
  	| nextPC postBranchPC targetBytecodePC branchDescriptor counterReg fixup jumpEqual jumpNotEqual
  	  counterAddress countTripped unforwardArg unforwardRcvr argReg rcvrReg regMask |
  	<var: #fixup type: #'BytecodeFixup *'>
  	<var: #countTripped type: #'AbstractInstruction *'>
  	<var: #label type: #'AbstractInstruction *'>
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  	<var: #jumpEqual type: #'AbstractInstruction *'>
  	<var: #jumpNotEqual type: #'AbstractInstruction *'>
  
  	((coInterpreter isOptimizedMethod: methodObj) or: [needsFrame not]) ifTrue:
  		[^super genForwardersInlinedIdenticalOrNotIf: orNot].
  
  	regMask := 0.
  	
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  	
  	unforwardRcvr := (objectRepresentation isUnannotatableConstant: (self ssValue: 1)) not.
  	unforwardArg := (objectRepresentation isUnannotatableConstant: self ssTop) not.
  	
  	"If an operand is an annotable constant, it may be forwarded, so we need to store it into a 
  	register so the forwarder check can jump back to the comparison after unforwarding the constant.
  	However, if one of the operands is an unannotatable constant, we do not allocate a register for it 
  	(machine code will use operations on constants)."
  	rcvrReg:= argReg := NoReg.
  	self 
  		allocateEqualsEqualsRegistersArgNeedsReg: unforwardArg 
  		rcvrNeedsReg: unforwardRcvr 
  		into: [ :rcvr :arg | rcvrReg:= rcvr. argReg := arg ].
  		
  	argReg ~= NoReg ifTrue: [ regMask := self registerMaskFor: argReg ].
  	rcvrReg ~= NoReg ifTrue: [ regMask := regMask bitOr: (self registerMaskFor: rcvrReg) ].
  	
  	"Only interested in inlining if followed by a conditional branch."
  	(branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse]) ifFalse:
  		[^ self 
  			genIdenticalNoBranchArgIsConstant: unforwardArg not
  			rcvrIsConstant: unforwardRcvr not
  			argReg: argReg 
  			rcvrReg: rcvrReg 
  			orNotIf: orNot].
  	
  	"If branching the stack must be flushed for the merge"
  	self ssFlushTo: simStackPtr - 2.
  	
  	unforwardArg ifTrue: [ objectRepresentation genEnsureOopInRegNotForwarded: argReg scratchReg: TempReg ].
  	unforwardRcvr ifTrue: [ objectRepresentation genEnsureOopInRegNotForwarded: rcvrReg scratchReg: TempReg ].
  	
  	counterReg := self allocateRegNotConflictingWith: regMask.
  	self 
  		genExecutionCountLogicInto: [ :cAddress :countTripBranch | 
  			counterAddress := cAddress. 
  			countTripped := countTripBranch ] 
  		counterReg: counterReg.
  	
  	self assert: (unforwardArg or: [ unforwardRcvr ]).
  	self genCmpArgIsConstant: unforwardArg not rcvrIsConstant: unforwardRcvr not argReg: argReg rcvrReg: rcvrReg.
  	self ssPop: 2.
  	
  	orNot == branchDescriptor isBranchTrue "orNot is true for ~~"
  		ifFalse:
+ 			[ fixup := (self ensureNonMergeFixupAt: postBranchPC) asUnsignedInteger.
+ 			self JumpZero:  (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger ]
- 			[ fixup := (self ensureNonMergeFixupAt: postBranchPC - initialPC) asUnsignedInteger.
- 			self JumpZero:  (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger ]
  		ifTrue:
+ 			[ fixup := (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger.
+ 			self JumpZero: (self ensureNonMergeFixupAt: postBranchPC) asUnsignedInteger ].
- 			[ fixup := (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger.
- 			self JumpZero: (self ensureNonMergeFixupAt: postBranchPC - initialPC) asUnsignedInteger ].
  	
  	self genFallsThroughCountLogicCounterReg: counterReg counterAddress: counterAddress.
  	self Jump: fixup.
  	
  	countTripped jmpTarget: self Label.
  	
  	"inlined version of #== ignoring the branchDescriptor if the counter trips to have normal state for the optimizer"
  	self ssPop: -2. 
  	self genCmpArgIsConstant: unforwardArg not rcvrIsConstant: unforwardRcvr not argReg: argReg rcvrReg: rcvrReg.
  	self ssPop: 2. 
  	
  	"This code necessarily directly falls through the jumpIf: code which pops the top of the stack into TempReg. 
  	We therefore directly assign the result to TempReg to save one move instruction"
  	jumpEqual := orNot ifFalse: [self JumpZero: 0] ifTrue: [self JumpNonZero: 0].
  	self genMoveFalseR: TempReg.
  	jumpNotEqual := self Jump: 0.
  	jumpEqual jmpTarget: (self genMoveTrueR: TempReg).
  	jumpNotEqual jmpTarget: self Label.
  	self ssPushRegister: TempReg.
  	
  	(self fixupAt: nextPC - initialPC) notAFixup ifTrue: [ branchReachedOnlyForCounterTrip := true ].
  	
  	^ 0!

Item was changed:
  ----- Method: SistaCogit>>genJumpIf:to: (in category 'bytecode generator support') -----
  genJumpIf: boolean to: targetBytecodePC
  	"The heart of performance counting in Sista.  Conditional branches are 6 times less
  	 frequent than sends and can provide basic block frequencies (send counters can't).
  	 Each conditional has a 32-bit counter split into an upper 16 bits counting executions
  	 and a lower half counting untaken executions of the branch.  Executing the branch
  	 decrements the upper half, tripping if the count goes negative.  Not taking the branch
  	 decrements the lower half.  N.B. We *do not* eliminate dead branches (true ifTrue:/true ifFalse:)
  	 so that scanning for send and branch data is simplified and that branch data is correct."
  	<inline: false>
  	| ok counterAddress countTripped retry nextPC nextDescriptor desc eventualTarget |
  	<var: #ok type: #'AbstractInstruction *'>
  	<var: #desc type: #'CogSimStackEntry *'>
  	<var: #retry type: #'AbstractInstruction *'>
  	<var: #countTripped type: #'AbstractInstruction *'>
  	<var: #nextDescriptor type: #'BytecodeDescriptor *'>
  
  	"In optimized code we don't generate counters to improve performance"
  	(coInterpreter isOptimizedMethod: methodObj) ifTrue: [ ^ super genJumpIf: boolean to: targetBytecodePC ].
  	
  	"If the branch is reached only for the counter trip trampoline 
  	(typically, var1 == var2 ifTrue: falls through to the branch only for the trampoline)
  	we generate a specific path to drastically reduce the number of machine instructions"
  	branchReachedOnlyForCounterTrip ifTrue: 
  		[ branchReachedOnlyForCounterTrip := false.
  		^ self genCounterTripOnlyJumpIf: boolean to: targetBytecodePC ].
  	
  	"We detect and: / or:, if found, we don't generate the counters to avoid pathological counter slow down"
  	boolean = objectMemory falseObject ifTrue:
  		[ nextPC := bytecodePC + (self generatorAt: byte0) numBytes.
  		  nextDescriptor := self generatorAt: (objectMemory fetchByte: nextPC ofObject: methodObj) + bytecodeSetOffset.
  		  nextDescriptor generator ==  #genPushConstantTrueBytecode ifTrue: [ ^ super genJumpIf: boolean to: targetBytecodePC ].
  		  nextDescriptor := self generatorAt: (objectMemory fetchByte: targetBytecodePC ofObject: methodObj) + bytecodeSetOffset.
  		  nextDescriptor generator == #genPushConstantFalseBytecode ifTrue: [ ^ super genJumpIf: boolean to: targetBytecodePC ].  ].
  
  	extA := 0. "We ignore the noMustBeBoolean flag. It should not be present in methods with counters, and if it is we don't care."
  
  	"We don't generate counters on branches on true/false, the basicblock usage can be inferred"
  	desc := self ssTop.
  	(desc type == SSConstant
  	 and: [desc constant = objectMemory trueObject or: [desc constant = objectMemory falseObject]]) ifTrue:
  		[ ^ super genJumpIf: boolean to: targetBytecodePC ].
  
  	eventualTarget := self eventualTargetOf: targetBytecodePC.
  
  	self ssFlushTo: simStackPtr - 1.
  	desc popToReg: TempReg.
  	self ssPop: 1.
  
  	"We need SendNumArgsReg because of the mustBeBooleanTrampoline"
  	self ssAllocateRequiredReg: SendNumArgsReg.
  
  	retry := self Label.
  	self 
  		genExecutionCountLogicInto: [ :cAddress :countTripBranch | 
  			counterAddress := cAddress. 
  			countTripped := countTripBranch ] 
  		counterReg: SendNumArgsReg.
  	counterIndex := counterIndex + 1.
  
  	"Cunning trick by LPD.  If true and false are contiguous subtract the smaller.
  	 Correct result is either 0 or the distance between them.  If result is not 0 or
  	 their distance send mustBeBoolean."
  	self assert: (objectMemory objectAfter: objectMemory falseObject) = objectMemory trueObject.
  	self genSubConstant: boolean R: TempReg.
+ 	self JumpZero: (self ensureFixupAt: eventualTarget).
- 	self JumpZero: (self ensureFixupAt: eventualTarget - initialPC).
  
  	self genFallsThroughCountLogicCounterReg: SendNumArgsReg counterAddress: counterAddress.
  
  	self CmpCq: (boolean = objectMemory falseObject
  					ifTrue: [objectMemory trueObject - objectMemory falseObject]
  					ifFalse: [objectMemory falseObject - objectMemory trueObject])
  		R: TempReg.
  	ok := self JumpZero: 0.
  	self MoveCq: 0 R: SendNumArgsReg. "if counterReg is 0 this is a mustBeBoolean, not a counter trip."
  	
  	countTripped jmpTarget: (self genCallMustBeBooleanFor: boolean).
  						
  	"If we're in an image which hasn't got the Sista code loaded then the ceCounterTripped:
  	 trampoline will return directly to machine code, returning the boolean.  So the code should
  	 jump back to the retry point. The trampoline makes sure that TempReg has been reloaded."
  	
  	"Clément: For some reason if I write self annotateBytecode: (self Jump: retry) the annotation is not at the correct place."
  	"Eliot: Annotations apply to the address following an instruction, and the annotation must be for the return address
  	 of the call (since this is the address the run-time sees), so it must be on a label before the jump, not after the jump."
  	self annotateBytecode: self Label.
  	self Jump: retry.
  	
  	ok jmpTarget: self Label.
  	^0!

Item was changed:
  ----- Method: SistaCogit>>genSpecialSelectorComparison (in category 'bytecode generators') -----
  genSpecialSelectorComparison
  	"Override to count inlined branches if followed by a conditional branch.
  	 We borrow the following conditional branch's counter and when about to
  	 inline the comparison we decrement the counter (without writing it back)
  	 and if it trips simply abort the inlining, falling back to the normal send which
  	 will then continue to the conditional branch which will trip and enter the abort."
  	| nextPC postBranchPC targetBytecodePC primDescriptor branchDescriptor
  	  rcvrIsInt argIsInt argInt jumpNotSmallInts inlineCAB
  	  counterAddress countTripped counterReg index |
  	<var: #countTripped type: #'AbstractInstruction *'>
  	<var: #primDescriptor type: #'BytecodeDescriptor *'>
  	<var: #jumpNotSmallInts type: #'AbstractInstruction *'>
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  
  	(coInterpreter isOptimizedMethod: methodObj) ifTrue: [ ^ self genSpecialSelectorComparisonWithoutCounters ].
  
  	self ssFlushTo: simStackPtr - 2.
  	primDescriptor := self generatorAt: byte0.
  	argIsInt := self ssTop type = SSConstant
  				 and: [objectMemory isIntegerObject: (argInt := self ssTop constant)].
  	rcvrIsInt := (self ssValue: 1) type = SSConstant
  				 and: [objectMemory isIntegerObject: (self ssValue: 1) constant].
  
  	"short-cut the jump if operands are SmallInteger constants."
  	(argIsInt and: [rcvrIsInt]) ifTrue:
  		[^ self genStaticallyResolvedSpecialSelectorComparison].
  
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  	
  	"Only interested in inlining if followed by a conditional branch."
  	inlineCAB := branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse].
  	"Further, only interested in inlining = and ~= if there's a SmallInteger constant involved.
  	 The relational operators successfully statically predict SmallIntegers; the equality operators do not."
  	(inlineCAB and: [primDescriptor opcode = JumpZero or: [primDescriptor opcode = JumpNonZero]]) ifTrue:
  		[inlineCAB := argIsInt or: [rcvrIsInt]].
  	inlineCAB ifFalse:
  		[^self genSpecialSelectorSend].
  
  	argIsInt
  		ifTrue:
  			[(self ssValue: 1) popToReg: ReceiverResultReg.
  			 self ssPop: 2.
  			 self MoveR: ReceiverResultReg R: TempReg]
  		ifFalse:
  			[self marshallSendArguments: 1.
  			 self MoveR: Arg0Reg R: TempReg].
  	jumpNotSmallInts := (argIsInt or: [rcvrIsInt])
  							ifTrue: [objectRepresentation genJumpNotSmallIntegerInScratchReg: TempReg]
  							ifFalse: [objectRepresentation genJumpNotSmallIntegersIn: ReceiverResultReg andScratch: TempReg scratch: ClassReg].
  
  	counterReg := self allocateRegNotConflictingWith: (self registerMaskFor: ReceiverResultReg and: Arg0Reg).
  	self 
  		genExecutionCountLogicInto: [ :cAddress :countTripBranch | 
  			counterAddress := cAddress. 
  			countTripped := countTripBranch ] 
  		counterReg: counterReg.
  
  	argIsInt
  		ifTrue: [self CmpCq: argInt R: ReceiverResultReg]
  		ifFalse: [self CmpR: Arg0Reg R: ReceiverResultReg].
  	"Cmp is weird/backwards so invert the comparison.  Further since there is a following conditional
  	 jump bytecode define non-merge fixups and leave the cond bytecode to set the mergeness."
  	self genConditionalBranch: (branchDescriptor isBranchTrue
  				ifTrue: [primDescriptor opcode]
  				ifFalse: [self inverseBranchFor: primDescriptor opcode])
+ 		operand: (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger.
- 		operand: (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger.
  		
  	self genFallsThroughCountLogicCounterReg: counterReg counterAddress: counterAddress.
  	
+ 	self Jump: (self ensureNonMergeFixupAt: postBranchPC).
- 	self Jump: (self ensureNonMergeFixupAt: postBranchPC - initialPC).
  	countTripped jmpTarget: (jumpNotSmallInts jmpTarget: self Label).
  	
  	argIsInt ifTrue:
  		[self MoveCq: argInt R: Arg0Reg].
  	index := byte0 - self firstSpecialSelectorBytecodeOffset.
  	^self genMarshalledSend: index negated - 1 numArgs: 1 sendTable: ordinarySendTrampolines!

Item was changed:
  ----- Method: SistaCogitClone>>genBinaryInlineComparison:opFalse:destReg: (in category 'inline primitive generators') -----
  genBinaryInlineComparison: opTrue opFalse: opFalse destReg: destReg
  	"Inlined comparison. opTrue = jump for true and opFalse = jump for false"
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  	| nextPC branchDescriptor targetBytecodePC postBranchPC |	
  		
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  	
  	(branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse])
  		ifTrue: "This is the path where the inlined comparison is followed immediately by a branch"
  			[ (self fixupAt: nextPC - initialPC) notAFixup
  				ifTrue: "The next instruction is dead.  we can skip it."
  					[deadCode := true.
+ 				 	 self ensureFixupAt: targetBytecodePC.
+ 					 self ensureFixupAt: postBranchPC ]
- 				 	 self ensureFixupAt: targetBytecodePC - initialPC.
- 					 self ensureFixupAt: postBranchPC - initialPC ]
  				ifFalse:
  					[self ssPushConstant: objectMemory trueObject]. "dummy value"
  			self genConditionalBranch: (branchDescriptor isBranchTrue ifTrue: [opTrue] ifFalse: [opFalse])
+ 				operand: (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger. 
+ 			deadCode ifFalse: [ self Jump: (self ensureNonMergeFixupAt: postBranchPC) ] ]
- 				operand: (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger. 
- 			deadCode ifFalse: [ self Jump: (self ensureNonMergeFixupAt: postBranchPC - initialPC) ] ]
  		ifFalse: "This is the path where the inlined comparison is *not* followed immediately by a branch"
  			[| condJump jump |
  			condJump := self genConditionalBranch: opTrue operand: 0.
  			self genMoveFalseR: destReg.
  	 		jump := self Jump: 0.
  			condJump jmpTarget: (self genMoveTrueR: destReg).
  			jump jmpTarget: self Label].
  	^ 0!

Item was changed:
  ----- Method: SistaCogitClone>>genExtJumpIfNotInstanceOfBehaviorsBytecode (in category 'bytecode generators') -----
  genExtJumpIfNotInstanceOfBehaviorsBytecode
  	"SistaV1: *	254		11111110	kkkkkkkk	jjjjjjjj		branch If Not Instance Of Behavior/Array Of Behavior kkkkkkkk (+ Extend A * 256, where Extend A >= 0) distance jjjjjjjj (+ Extend B * 256, where Extend B >= 0)"
  	| reg literal distance targetFixUp inverse |
  
  	"We lose track of which register holds the stack top 
  	 when jitting the branch target, so we need to flush everything. 
  	 We could use a fixed register here...."
  	reg := self allocateRegForStackEntryAt: 0.
  	self ssTop popToReg: reg.
  	self ssFlushTo: simStackPtr. "flushed but the value is still in reg"
  	self ssPop: 1.
  
  	literal := self getLiteral: (extA * 256 + byte1).
  	(inverse := extB < 0) ifTrue:
  		[extB := extB + 128].
  	distance := extB * 256 + byte2.
  	extA := extB := numExtB := 0.
  
+ 	targetFixUp := self cCoerceSimple: (self ensureFixupAt: bytecodePC + 3 + distance) to: #'AbstractInstruction *'.
- 	targetFixUp := self cCoerceSimple: (self ensureFixupAt: bytecodePC + 3 + distance - initialPC) to: #'AbstractInstruction *'.
  	inverse
  		ifFalse: 
  			[(objectMemory isArrayNonImm: literal)
  				ifTrue: [objectRepresentation branchIf: reg notInstanceOfBehaviors: literal target: targetFixUp]
  				ifFalse: [objectRepresentation branchIf: reg notInstanceOfBehavior: literal target: targetFixUp] ]
  		ifTrue:
  			[(objectMemory isArrayNonImm: literal)
  				ifTrue: [objectRepresentation branchIf: reg instanceOfBehaviors: literal target: targetFixUp]
  				ifFalse: [objectRepresentation branchIf: reg instanceOfBehavior: literal target: targetFixUp]].
  
  	^0!

Item was changed:
  ----- Method: SistaRegisterAllocatingCogit>>genCounterTripOnlyJumpIf:to: (in category 'bytecode generator support') -----
  genCounterTripOnlyJumpIf: boolean to: targetBytecodePC 
  	"Specific version used when the branch is only reached by falling through, i.e. when the counter trips after an inlined #== branch. We do not regenerate the counter logic in this case, to avoid 24 bytes of instructions."
  	
  	<var: #ok type: #'AbstractInstruction *'>
  	<var: #mustBeBooleanTrampoline type: #'AbstractInstruction *'>
  
  	| ok mustBeBooleanTrampoline |
  
  	extA := 0.
  	
  	self ssTop popToReg: TempReg.
  	self ssPop: 1.
  
  	counterIndex := counterIndex + 1. "counters are increased / decreased in the inlined branch"
  
  	"We need SendNumArgsReg because of the mustBeBooleanTrampoline"
  	self ssAllocateRequiredReg: SendNumArgsReg.
  	self MoveCq: 1 R: SendNumArgsReg.
  	
  	"The first time this is reached, it necessarily calls the counter-trip trampoline because SendNumArgsReg is non-zero"
  	mustBeBooleanTrampoline := self genCallMustBeBooleanFor: boolean.
  
  	self annotateBytecode: self Label.
  
  	"Cunning trick by LPD.  If true and false are contiguous subtract the smaller.
  	 Correct result is either 0 or the distance between them.  If result is not 0 or
  	 their distance send mustBeBoolean."
  	self assert: (objectMemory objectAfter: objectMemory falseObject) = objectMemory trueObject.
  	self genSubConstant: boolean R: TempReg.
+ 	self JumpZero: (self ensureFixupAt: targetBytecodePC).
- 	self JumpZero: (self ensureFixupAt: targetBytecodePC - initialPC).
  
  	self CmpCq: (boolean = objectMemory falseObject
  					ifTrue: [objectMemory trueObject - objectMemory falseObject]
  					ifFalse: [objectMemory falseObject - objectMemory trueObject])
  		R: TempReg.
  		
  	ok := self JumpZero: 0.
  	self MoveCq: 0 R: SendNumArgsReg. "if counterReg is 0 this is a mustBeBoolean, not a counter trip."		
  
  	self Jump: mustBeBooleanTrampoline.
  	
  	ok jmpTarget: self Label.
  	^0!

Item was changed:
  ----- Method: SistaRegisterAllocatingCogit>>genExtJumpIfNotInstanceOfBehaviorsBytecode (in category 'bytecode generators') -----
  genExtJumpIfNotInstanceOfBehaviorsBytecode
  	"SistaV1: *	254		11111110	kkkkkkkk	jjjjjjjj		branch If Not Instance Of Behavior/Array Of Behavior kkkkkkkk (+ Extend A * 256, where Extend A >= 0) distance jjjjjjjj (+ Extend B * 256, where Extend B >= 0)"
  	| reg literal distance targetFixUp inverse |
  
  	"We lose track of which register holds the stack top 
  	 when jitting the branch target, so we need to flush everything. 
  	 We could use a fixed register here...."
  	reg := self allocateRegForStackEntryAt: 0.
  	self ssTop popToReg: reg.
  	self ssPop: 1.
  
  	literal := self getLiteral: (extA * 256 + byte1).
  	(inverse := extB < 0) ifTrue:
  		[extB := extB + 128].
  	distance := extB * 256 + byte2.
  	extA := extB := numExtB := 0.
  
  	"For now just deny we're in the situation we have yet to implement ;-)"
  	self deny: (self mergeRequiredForJumpTo: bytecodePC + 3 + distance).
  
+ 	targetFixUp := self cCoerceSimple: (self ensureFixupAt: bytecodePC + 3 + distance) to: #'AbstractInstruction *'.
- 	targetFixUp := self cCoerceSimple: (self ensureFixupAt: bytecodePC + 3 + distance - initialPC) to: #'AbstractInstruction *'.
  	inverse
  		ifFalse: 
  			[(objectMemory isArrayNonImm: literal)
  				ifTrue: [objectRepresentation branchIf: reg notInstanceOfBehaviors: literal target: targetFixUp]
  				ifFalse: [objectRepresentation branchIf: reg notInstanceOfBehavior: literal target: targetFixUp] ]
  		ifTrue:
  			[(objectMemory isArrayNonImm: literal)
  				ifTrue: [objectRepresentation branchIf: reg instanceOfBehaviors: literal target: targetFixUp]
  				ifFalse: [objectRepresentation branchIf: reg instanceOfBehavior: literal target: targetFixUp]].
  
  	^0!

Item was changed:
  ----- Method: SistaRegisterAllocatingCogit>>genForwardersInlinedIdenticalOrNotIf: (in category 'bytecode generators') -----
  genForwardersInlinedIdenticalOrNotIf: orNot
  	"Override to count inlined branches if followed by a conditional branch.
  	 We borrow the following conditional branch's counter and when about to
  	 inline the comparison we decrement the counter (without writing it back)
  	 and if it trips simply abort the inlining, falling back to the normal send which
  	 will then continue to the conditional branch which will trip and enter the abort."
  	| nextPC postBranchPC targetBytecodePC branchDescriptor counterReg fixup jumpEqual jumpNotEqual
  	  counterAddress countTripped unforwardArg unforwardRcvr argReg rcvrReg regMask |
  	<var: #fixup type: #'BytecodeFixup *'>
  	<var: #countTripped type: #'AbstractInstruction *'>
  	<var: #label type: #'AbstractInstruction *'>
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  	<var: #jumpEqual type: #'AbstractInstruction *'>
  	<var: #jumpNotEqual type: #'AbstractInstruction *'>
  
  	((coInterpreter isOptimizedMethod: methodObj) or: [needsFrame not]) ifTrue:
  		[^super genForwardersInlinedIdenticalOrNotIf: orNot].
  
  	regMask := 0.
  	
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  	
  	unforwardRcvr := (objectRepresentation isUnannotatableConstant: (self ssValue: 1)) not.
  	unforwardArg := (objectRepresentation isUnannotatableConstant: self ssTop) not.
  	
  	"If an operand is an annotatable constant, it may be forwarded, so we need to store it into a 
  	register so the forwarder check can jump back to the comparison after unforwarding the constant.
  	However, if one of the operands is an unannotatable constant, we do not allocate a register for it 
  	(machine code will use operations on constants)."
  	rcvrReg:= argReg := NoReg.
  	self 
  		allocateEqualsEqualsRegistersArgNeedsReg: unforwardArg 
  		rcvrNeedsReg: unforwardRcvr 
  		into: [ :rcvr :arg | rcvrReg:= rcvr. argReg := arg ].
  		
  	argReg ~= NoReg ifTrue: [ regMask := self registerMaskFor: argReg ].
  	rcvrReg ~= NoReg ifTrue: [ regMask := regMask bitOr: (self registerMaskFor: rcvrReg) ].
  	
  	"Only interested in inlining if followed by a conditional branch."
  	(branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse]) ifFalse:
  		[^ self 
  			genIdenticalNoBranchArgIsConstant: unforwardArg not
  			rcvrIsConstant: unforwardRcvr not
  			argReg: argReg 
  			rcvrReg: rcvrReg 
  			orNotIf: orNot].
  	
  	unforwardArg ifTrue: [ objectRepresentation genEnsureOopInRegNotForwarded: argReg scratchReg: TempReg ].
  	unforwardRcvr ifTrue: [ objectRepresentation genEnsureOopInRegNotForwarded: rcvrReg scratchReg: TempReg ].
  	
  	counterReg := self allocateRegNotConflictingWith: regMask.
  	self 
  		genExecutionCountLogicInto: [ :cAddress :countTripBranch | 
  			counterAddress := cAddress. 
  			countTripped := countTripBranch ] 
  		counterReg: counterReg.
  	
  	self assert: (unforwardArg or: [ unforwardRcvr ]).
  	self genCmpArgIsConstant: unforwardArg not rcvrIsConstant: unforwardRcvr not argReg: argReg rcvrReg: rcvrReg.
  	self ssPop: 2.
  	
  	orNot == branchDescriptor isBranchTrue "orNot is true for ~~"
  		ifFalse:
+ 			[ fixup := (self ensureNonMergeFixupAt: postBranchPC) asUnsignedInteger.
+ 			self JumpZero:  (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger ]
- 			[ fixup := (self ensureNonMergeFixupAt: postBranchPC - initialPC) asUnsignedInteger.
- 			self JumpZero:  (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger ]
  		ifTrue:
+ 			[ fixup := (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger.
+ 			self JumpZero: (self ensureNonMergeFixupAt: postBranchPC) asUnsignedInteger ].
- 			[ fixup := (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger.
- 			self JumpZero: (self ensureNonMergeFixupAt: postBranchPC - initialPC) asUnsignedInteger ].
  	
  	self genFallsThroughCountLogicCounterReg: counterReg counterAddress: counterAddress.
  	self Jump: fixup.
  	
  	countTripped jmpTarget: self Label.
  	
  	"inlined version of #== ignoring the branchDescriptor if the counter trips to have normal state for the optimizer"
  	self ssPop: -2. 
  	self genCmpArgIsConstant: unforwardArg not rcvrIsConstant: unforwardRcvr not argReg: argReg rcvrReg: rcvrReg.
  	self ssPop: 2. 
  	
  	"This code necessarily directly falls through the jumpIf: code which pops the top of the stack into TempReg. 
  	We therefore directly assign the result to TempReg to save one move instruction"
  	jumpEqual := orNot ifFalse: [self JumpZero: 0] ifTrue: [self JumpNonZero: 0].
  	self genMoveFalseR: TempReg.
  	jumpNotEqual := self Jump: 0.
  	jumpEqual jmpTarget: (self genMoveTrueR: TempReg).
  	jumpNotEqual jmpTarget: self Label.
  	self ssPushRegister: TempReg.
  	
  	(self fixupAt: nextPC - initialPC) notAFixup ifTrue: [ branchReachedOnlyForCounterTrip := true ].
  	
  	^ 0!

Item was changed:
  ----- Method: SistaRegisterAllocatingCogit>>genJumpIf:to: (in category 'bytecode generator support') -----
  genJumpIf: boolean to: targetBytecodePC
  	"The heart of performance counting in Sista.  Conditional branches are 6 times less
  	 frequent than sends and can provide basic block frequencies (send counters can't).
  	 Each conditional has a 32-bit counter split into an upper 16 bits counting executions
  	 and a lower half counting untaken executions of the branch.  Executing the branch
  	 decrements the upper half, tripping if the count goes negative.  Not taking the branch
  	 decrements the lower half.  N.B. We *do not* eliminate dead branches (true ifTrue:/true ifFalse:)
  	 so that scanning for send and branch data is simplified and that branch data is correct."
  	<inline: false>
  	| ok counterAddress countTripped retry nextPC nextDescriptor desc eventualTarget reg |
  	<var: #ok type: #'AbstractInstruction *'>
  	<var: #desc type: #'CogSimStackEntry *'>
  	<var: #retry type: #'AbstractInstruction *'>
  	<var: #countTripped type: #'AbstractInstruction *'>
  	<var: #nextDescriptor type: #'BytecodeDescriptor *'>
  
  	"In optimized code we don't generate counters to improve performance"
  	(coInterpreter isOptimizedMethod: methodObj) ifTrue:
  		[^super genJumpIf: boolean to: targetBytecodePC].
  	
  	"If the branch is reached only for the counter trip trampoline 
  	(typically, var1 == var2 ifTrue: falls through to the branch only for the trampoline)
  	we generate a specific path to drastically reduce the number of machine instructions"
  	branchReachedOnlyForCounterTrip ifTrue: 
  		[branchReachedOnlyForCounterTrip := false.
  		 ^self genCounterTripOnlyJumpIf: boolean to: targetBytecodePC].
  
  	"We detect and: / or:, if found, we don't generate the counters to avoid pathological counter slow down"
  	boolean = objectMemory falseObject ifTrue:
  		[ nextPC := bytecodePC + (self generatorAt: byte0) numBytes.
  		  nextDescriptor := self generatorAt: (objectMemory fetchByte: nextPC ofObject: methodObj) + bytecodeSetOffset.
  		  nextDescriptor generator ==  #genPushConstantTrueBytecode ifTrue: [ ^ super genJumpIf: boolean to: targetBytecodePC ].
  		  nextDescriptor := self generatorAt: (objectMemory fetchByte: targetBytecodePC ofObject: methodObj) + bytecodeSetOffset.
  		  nextDescriptor generator == #genPushConstantFalseBytecode ifTrue: [ ^ super genJumpIf: boolean to: targetBytecodePC ]. ].
  
  	extA := 0. "We ignore the noMustBeBoolean flag. It should not be present in methods with counters, and if it is we don't care."
  
  	"We don't generate counters on branches on true/false, the basicblock usage can be inferred"
  	desc := self ssTop.
  	(desc type == SSConstant
  	 and: [desc constant = objectMemory trueObject or: [desc constant = objectMemory falseObject]]) ifTrue:
  		[ ^ super genJumpIf: boolean to: targetBytecodePC ].
  
  	eventualTarget := self eventualTargetOf: targetBytecodePC.
  
  	self flag: 'Because of the restriction on x64 that absolute loads must target %rax, it would perhaps be a better choice to use TempReg (%rax) for the counter reg and SendNumArgsReg for the boolean.'.
  	"try and use the top entry's register if any, but only if it can be destroyed."
  	reg := (desc type ~= SSRegister
  			or: [(self anyReferencesToRegister: desc register inAllButTopNItems: 0)
  			or: [(desc register = ReceiverResultReg and: [optStatus isReceiverResultRegLive])]])
  				ifTrue: [TempReg]
  				ifFalse: [desc register].
  	desc popToReg: reg.
  	self ssPop: 1.
  
  	"We need SendNumArgsReg because of the mustBeBooleanTrampoline"
  	self ssAllocateRequiredReg: SendNumArgsReg.
  
  	retry := self Label.
  	self 
  		genExecutionCountLogicInto: [ :cAddress :countTripBranch | 
  			counterAddress := cAddress. 
  			countTripped := countTripBranch ] 
  		counterReg: SendNumArgsReg.
  	counterIndex := counterIndex + 1.
  
  	"Cunning trick by LPD.  If true and false are contiguous subtract the smaller.
  	 Correct result is either 0 or the distance between them.  If result is not 0 or
  	 their distance send mustBeBoolean."
  	self assert: (objectMemory objectAfter: objectMemory falseObject) = objectMemory trueObject.
  	self genSubConstant: boolean R: reg.
+ 	self JumpZero: (self ensureFixupAt: eventualTarget).
- 	self JumpZero: (self ensureFixupAt: eventualTarget - initialPC).
  
  	self genFallsThroughCountLogicCounterReg: SendNumArgsReg counterAddress: counterAddress.
  
  	self CmpCq: (boolean = objectMemory falseObject
  					ifTrue: [objectMemory trueObject - objectMemory falseObject]
  					ifFalse: [objectMemory falseObject - objectMemory trueObject])
  		R: reg.
  	ok := self JumpZero: 0.
  	self MoveCq: 0 R: SendNumArgsReg. "if counterReg is 0 this is a mustBeBoolean, not a counter trip."
  	reg ~= TempReg ifTrue:
  		[self MoveR: reg R: TempReg].
  	countTripped jmpTarget: self Label.
  	self copySimStackToScratch: simSpillBase.
  	self ssFlushTo: simStackPtr.
  	self genCallMustBeBooleanFor: boolean.
  						
  	"If we're in an image which hasn't got the Sista code loaded then the ceCounterTripped: trampoline
  	 will return directly to machine code, returning the boolean.  So the code should jump back to the
  	 retry point. The trampoline preserves register state when taking the ceCounterTripped: path."
  	"Clément: For some reason if I write self annotateBytecode: (self Jump: retry) the annotation is not at the correct place."
  	"Eliot: Annotations apply to the address following an instruction, and the annotation must be for the return address
  	 of the call (since this is the address the run-time sees), so it must be on a label before the jump, not after the jump."
  	self annotateBytecode: self Label.
  	simSpillBase ~= scratchSpillBase ifTrue:
  		[self assert: simSpillBase > scratchSpillBase.
  		 self AddCq: simSpillBase - scratchSpillBase * objectMemory wordSize R: SPReg].
  	self Jump: retry.
  
  	ok jmpTarget: self Label.
  	self restoreSimStackFromScratch.
  	^0!

Item was changed:
  ----- Method: SistaRegisterAllocatingCogit>>genSpecialSelectorComparison (in category 'bytecode generators') -----
  genSpecialSelectorComparison
  	"Override to count inlined branches if followed by a conditional branch.
  	 We borrow the following conditional branch's counter and when about to
  	 inline the comparison we decrement the counter (without writing it back)
  	 and if it trips simply abort the inlining, falling back to the normal send which
  	 will then continue to the conditional branch which will trip and enter the abort."
  	| nextPC postBranchPC targetBytecodePC primDescriptor branchDescriptor
  	  rcvrIsInt argIsInt argInt jumpNotSmallInts inlineCAB
  	  counterAddress countTripped counterReg index rcvrReg argReg |
  	<var: #countTripped type: #'AbstractInstruction *'>
  	<var: #primDescriptor type: #'BytecodeDescriptor *'>
  	<var: #jumpNotSmallInts type: #'AbstractInstruction *'>
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  
  	(coInterpreter isOptimizedMethod: methodObj) ifTrue:
  		[^self genSpecialSelectorComparisonWithoutCounters].
  
  	primDescriptor := self generatorAt: byte0.
  	argIsInt := self ssTop type = SSConstant
  				 and: [objectMemory isIntegerObject: (argInt := self ssTop constant)].
  	rcvrIsInt := (self ssValue: 1) type = SSConstant
  				 and: [objectMemory isIntegerObject: (self ssValue: 1) constant].
  
  	"short-cut the jump if operands are SmallInteger constants."
  	(argIsInt and: [rcvrIsInt]) ifTrue:
  		[^ self genStaticallyResolvedSpecialSelectorComparison].
  
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  	
  	"Only interested in inlining if followed by a conditional branch."
  	inlineCAB := branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse].
  	"Further, only interested in inlining = and ~= if there's a SmallInteger constant involved.
  	 The relational operators successfully statically predict SmallIntegers; the equality operators do not."
  	(inlineCAB and: [primDescriptor opcode = JumpZero or: [primDescriptor opcode = JumpNonZero]]) ifTrue:
  		[inlineCAB := argIsInt or: [rcvrIsInt]].
  	inlineCAB ifFalse:
  		[^self genSpecialSelectorSend].
  
  	"In-line the comparison and the jump, but if the types are not SmallInteger then we will need
  	 to do a send and fall through to the following conditional branch.  Since we're allocating values
  	 in registers we would like to keep those registers live on the inlined path and reload registers
  	 along the non-inlined send path.  The merge logic at the branch destinations handles this."
  	argIsInt
  		ifTrue:
  			[rcvrReg := self allocateRegForStackEntryAt: 1.
  			 (self ssValue: 1) popToReg: rcvrReg.
  			 self MoveR: rcvrReg R: TempReg.
  			 counterReg := self allocateRegNotConflictingWith: (self registerMaskFor: rcvrReg)]
  		ifFalse:
  			[self allocateRegForStackTopTwoEntriesInto: [:rTop :rNext| argReg := rTop. rcvrReg := rNext].
  			 rcvrReg = Arg0Reg ifTrue:
  				[rcvrReg := argReg. argReg := Arg0Reg].
  			 self ssTop popToReg: argReg.
  			 (self ssValue: 1) popToReg: rcvrReg.
  			 self MoveR: argReg R: TempReg.
  			 counterReg := self allocateRegNotConflictingWith: (self registerMaskFor: rcvrReg and: argReg)].
  	jumpNotSmallInts := (argIsInt or: [rcvrIsInt])
  							ifTrue: [objectRepresentation genJumpNotSmallIntegerInScratchReg: TempReg]
  							ifFalse: [objectRepresentation genJumpNotSmallIntegersIn: rcvrReg andScratch: TempReg scratch: ClassReg].
  
  	self
  		genExecutionCountLogicInto: [ :cAddress :countTripBranch | 
  			counterAddress := cAddress. 
  			countTripped := countTripBranch ] 
  		counterReg: counterReg.
  
  	argIsInt
  		ifTrue: [self CmpCq: argInt R: rcvrReg]
  		ifFalse: [self CmpR: argReg R: rcvrReg].
  	"Cmp is weird/backwards so invert the comparison.  Further since there is a following conditional
  	 jump bytecode define non-merge fixups and leave the cond bytecode to set the mergeness."
  	self genConditionalBranch: (branchDescriptor isBranchTrue
  				ifTrue: [primDescriptor opcode]
  				ifFalse: [self inverseBranchFor: primDescriptor opcode])
+ 		operand: (self ensureFixupAt: targetBytecodePC) asUnsignedInteger.
- 		operand: (self ensureFixupAt: targetBytecodePC - initialPC) asUnsignedInteger.
  		
  	self genFallsThroughCountLogicCounterReg: counterReg counterAddress: counterAddress.
  	
+ 	self Jump: (self ensureFixupAt: postBranchPC).
- 	self Jump: (self ensureFixupAt: postBranchPC - initialPC).
  	countTripped jmpTarget: (jumpNotSmallInts jmpTarget: self Label).
  	
  	self ssFlushTo: simStackPtr.
  	self deny: rcvrReg = Arg0Reg.
  	argIsInt
  		ifTrue: [self MoveCq: argInt R: Arg0Reg]
  		ifFalse: [argReg ~= Arg0Reg ifTrue: [self MoveR: argReg R: Arg0Reg]].
  	rcvrReg ~= ReceiverResultReg ifTrue: [self MoveR: rcvrReg R: ReceiverResultReg].
  	index := byte0 - self firstSpecialSelectorBytecodeOffset.
  	^self genMarshalledSend: index negated - 1 numArgs: 1 sendTable: ordinarySendTrampolines!

Item was added:
+ ----- Method: SpurGenerationScavenger class>>simulatorClass (in category 'simulation') -----
+ simulatorClass
+ 	^SpurGenerationScavengerSimulator!

Item was changed:
  CogClass subclass: #SpurMemoryManager
(excessive size, no diff calculated)

Item was removed:
- ----- Method: SpurMemoryManager>>allOldSpaceEntitiesForCompactingFrom:do: (in category 'object enumeration') -----
- allOldSpaceEntitiesForCompactingFrom: initialObject do: aBlock
- 	<inline: true>
- 	| prevObj prevPrevObj objOop nextObj |
- 	self assert: (self isOldObject: initialObject).
- 	prevPrevObj := prevObj := nil.
- 	objOop := initialObject.
- 	[self assert: objOop \\ self allocationUnit = 0.
- 	 self oop: objOop isLessThan: endOfMemory] whileTrue:
- 		[self assert: (self long64At: objOop) ~= 0.
- 		 nextObj := self objectAfter: objOop limit: endOfMemory.
- 		 aBlock value: objOop value: nextObj.
- 		 prevPrevObj := prevObj.
- 		 prevObj := objOop.
- 		 objOop := nextObj].
- 	self touch: prevPrevObj.
- 	self touch: prevObj!

Item was added:
+ ----- Method: SpurMemoryManager>>allOldSpaceEntitiesForCompactingFrom:to:do: (in category 'object enumeration') -----
+ allOldSpaceEntitiesForCompactingFrom: initialObject to: finalObject do: aBlock
+ 	<inline: true>
+ 	| limit prevObj prevPrevObj objOop nextObj |
+ 	self assert: (self isOldObject: initialObject).
+ 	self assert: (self oop: finalObject isLessThanOrEqualTo: endOfMemory).
+ 	prevPrevObj := prevObj := nil.
+ 	objOop := initialObject.
+ 	limit := (self oop: finalObject isLessThan: endOfMemory) ifTrue: [self addressAfter: finalObject] ifFalse: [endOfMemory].
+ 	[self assert: objOop \\ self allocationUnit = 0.
+ 	 self oop: objOop isLessThan: limit] whileTrue:
+ 		[self assert: (self long64At: objOop) ~= 0.
+ 		 nextObj := self objectAfter: objOop limit: endOfMemory.
+ 		 aBlock value: objOop value: nextObj.
+ 		 prevPrevObj := prevObj.
+ 		 prevObj := objOop.
+ 		 objOop := nextObj].
+ 	self touch: prevPrevObj.
+ 	self touch: prevObj!

Item was changed:
  ----- Method: SpurMemoryManager>>allocateMemoryOfSize:newSpaceSize:stackSize:codeSize: (in category 'spur bootstrap') -----
  allocateMemoryOfSize: memoryBytes newSpaceSize: newSpaceBytes stackSize: stackBytes codeSize: codeBytes
  	"Initialize the receiver for bootstrapping an image.
  	 Set up a large oldSpace and an empty newSpace and set-up freeStart and scavengeThreshold
  	 to allocate in oldSpace.  Later on (in initializePostBootstrap) freeStart and scavengeThreshold
  	 will be set to sane values."
  	<doNotGenerate>
  	self assert: (memoryBytes \\ self allocationUnit = 0
  				and: [newSpaceBytes \\ self allocationUnit = 0
  				and: [codeBytes \\ self allocationUnit = 0]]).
  	self allocateMemoryOfSize: memoryBytes + newSpaceBytes + codeBytes + stackBytes.
  	newSpaceStart := codeBytes + stackBytes.
  	endOfMemory := freeOldSpaceStart := memoryBytes + newSpaceBytes + codeBytes + stackBytes.
  	"leave newSpace empty for the bootstrap"
  	freeStart := newSpaceBytes + newSpaceStart.
  	oldSpaceStart := newSpaceLimit := newSpaceBytes + newSpaceStart.
  	scavengeThreshold := memory size * memory bytesPerElement. "i.e. /don't/ scavenge."
+ 	scavenger := SpurGenerationScavenger simulatorClass new.
- 	scavenger := SpurGenerationScavengerSimulator new.
  	scavenger manager: self.
  	scavenger newSpaceStart: newSpaceStart
  				newSpaceBytes: newSpaceBytes
  				survivorBytes: newSpaceBytes // self scavengerDenominator.
+ 	compactor := self class compactorClass simulatorClass new manager: self; yourself!
- 	compactor := self class compactorClass new manager: self; yourself.!

Item was changed:
  ----- Method: SpurMemoryManager>>classAtIndex: (in category 'class table') -----
  classAtIndex: classIndex
  	<api>
+ 	<inline: true>
  	| classTablePage |
+ 	self assert: (classIndex >= 0 and: [classIndex <= self tagMask or: [classIndex >= self arrayClassIndexPun and: [classIndex <= self classIndexMask]]]).
- 	self assert: (classIndex <= self tagMask or: [classIndex >= self arrayClassIndexPun]).
  	classTablePage := self fetchPointer: classIndex >> self classTableMajorIndexShift
  							ofObject: hiddenRootsObj.
  	classTablePage = nilObj ifTrue:
  		[^nil].
  	^self
  		fetchPointer: (classIndex bitAnd: self classTableMinorIndexMask)
  		ofObject: classTablePage!

Item was changed:
  ----- Method: SpurMemoryManager>>fullGC (in category 'gc - global') -----
  fullGC
  	"Perform a full lazy compacting GC.  Answer the size of the largest free chunk."
  	<returnTypeC: #usqLong>
  	<inline: #never> "for profiling"
  	needGCFlag := false.
+ 	gcStartUsecs := coInterpreter ioUTCMicrosecondsNow.
- 	gcStartUsecs := self ioUTCMicrosecondsNow.
  	statMarkCount := 0.
  	coInterpreter preGCAction: GCModeFull.
  	self globalGarbageCollect.
  	coInterpreter postGCAction: GCModeFull.
  	statFullGCs := statFullGCs + 1.
+ 	statGCEndUsecs := coInterpreter ioUTCMicrosecondsNow.
- 	statGCEndUsecs := self ioUTCMicrosecondsNow.
  	statFullGCUsecs := statFullGCUsecs + (statGCEndUsecs - gcStartUsecs).
+ 	statCompactionUsecs := statCompactionUsecs + (statGCEndUsecs - compactionStartUsecs).
  	^(freeLists at: 0) ~= 0
  		ifTrue: [self bytesInObject: self findLargestFreeChunk]
  		ifFalse: [0]!

Item was changed:
  ----- Method: SpurMemoryManager>>globalGarbageCollect (in category 'gc - global') -----
  globalGarbageCollect
  	<inline: true> "inline into fullGC"
  	self assert: self validObjStacks.
  	self assert: (self isEmptyObjStack: markStack).
  	self assert: (self isEmptyObjStack: weaklingStack).
  
  	"Mark objects /before/ scavenging, to empty the rememberedTable of unmarked roots."
  	self markObjects: true.
  
  	scavenger forgetUnmarkedRememberedObjects.
  	self doScavenge: MarkOnTenure.
  
  	"Mid-way the leak check must be more lenient.  Unmarked classes will have been
  	 expunged from the table, but unmarked instances will not yet have been reclaimed."
  	self runLeakCheckerFor: GCModeFull
  		excludeUnmarkedObjs: true
  		classIndicesShouldBeValid: true.
  
+ 	compactionStartUsecs := coInterpreter ioUTCMicrosecondsNow.
  	segmentManager prepareForGlobalSweep. "for notePinned:"
  	compactor compact.
  	self attemptToShrink.
  	self setHeapSizeAtPreviousGC.
  
  	self assert: self validObjStacks.
  	self assert: (self isEmptyObjStack: markStack).
  	self assert: (self isEmptyObjStack: weaklingStack).
  	self assert: self allObjectsUnmarked.
  	self runLeakCheckerFor: GCModeFull!

Item was changed:
  ----- Method: SpurMemoryManager>>initialize (in category 'initialization') -----
  initialize
  	"We can put all initializations that set something to 0 or to false here.
  	 In C all global variables are initialized to 0, and 0 is false."
  	remapBuffer := Array new: RemapBufferSize.
  	remapBufferCount := extraRootCount := 0. "see below"
  	freeListsMask := totalFreeOldSpace := lowSpaceThreshold := 0.
  	checkForLeaks := 0.
  	needGCFlag := signalLowSpace := marking := false.
  	becomeEffectsFlags := gcPhaseInProgress := 0.
  	statScavenges := statIncrGCs := statFullGCs := 0.
+ 	statScavengeGCUsecs := statIncrGCUsecs := statFullGCUsecs := statCompactionUsecs := statGCEndUsecs := 0.
- 	statScavengeGCUsecs := statIncrGCUsecs := statFullGCUsecs := statGCEndUsecs := 0.
  	statSGCDeltaUsecs := statIGCDeltaUsecs := statFGCDeltaUsecs := 0.
  	statGrowMemory := statShrinkMemory := statRootTableCount := 0.
  	statRootTableOverflows := statMarkCount := statCompactPassCount := statCoalesces := 0.
  
  	"We can initialize things that are allocated but are lazily initialized."
  	unscannedEphemerons := SpurContiguousObjStack new.
  
  	"we can initialize things that are virtual in C."
+ 	scavenger := SpurGenerationScavenger simulatorClass new manager: self; yourself.
+ 	segmentManager := SpurSegmentManager simulatorClass new manager: self; yourself.
+ 	compactor := self class compactorClass simulatorClass new manager: self; yourself.
- 	scavenger := SpurGenerationScavengerSimulator new manager: self; yourself.
- 	segmentManager := SpurSegmentManager new manager: self; yourself.
- 	compactor := self class compactorClass new manager: self; yourself.
  
  	"We can also initialize here anything that is only for simulation."
  	heapMap := CogCheck32BitHeapMap new.
  
  	"N.B. We *don't* initialize extraRoots because we don't simulate it."
  	maxOldSpaceSize := self class initializationOptions
  							ifNotNil: [:initOpts| initOpts at: #maxOldSpaceSize ifAbsent: [0]]
  							ifNil: [0]!

Item was changed:
  ----- Method: SpurMemoryManager>>printFreeChunk:printAsTreeNode: (in category 'debug printing') -----
  printFreeChunk: freeChunk printAsTreeNode: printAsTreeNode
  	| numBytes |
  	numBytes := self bytesInObject: freeChunk.
  	coInterpreter
+ 		print: 'freeChunk '; printHexPtrnp: freeChunk.
+ 	printAsTreeNode ifTrue:
+ 		[coInterpreter print: ' - '; printHexPtrnp:(self addressAfter: freeChunk)].
+ 	coInterpreter
- 		print: 'freeChunk '; printHexPtrnp: freeChunk;
  		print: ' bytes '; printNum: numBytes;
  		print: ' next '; printHexPtrnp: (self fetchPointer: self freeChunkNextIndex
  											ofFreeChunk: freeChunk).
  	(numBytes >= (self numFreeLists * self allocationUnit)
  	 and: [printAsTreeNode]) ifTrue:
  		[coInterpreter
  			print: ' ^ '; printHexPtrnp: (self fetchPointer: self freeChunkParentIndex
  											ofFreeChunk: freeChunk);
  			print: ' < '; printHexPtrnp: (self fetchPointer: self freeChunkSmallerIndex
  											ofFreeChunk: freeChunk);
  			print: ' > '; printHexPtrnp: (self fetchPointer: self freeChunkLargerIndex
  											ofFreeChunk: freeChunk)].
  	coInterpreter cr!

Item was changed:
  ----- Method: SpurMemoryManager>>printOopsFrom:to: (in category 'debug printing') -----
  printOopsFrom: startAddress to: endAddress
  	<api>
+ 	| oop limit firstNonEntity inEmptySpace lastNonEntity |
- 	| oop limit |
  	oop := self objectBefore: startAddress.
  	limit := endAddress asUnsignedIntegerPtr min: endOfMemory.
  	oop := oop
  				ifNil: [startAddress]
  				ifNotNil: [(self objectAfter: oop) = startAddress
  							ifTrue: [startAddress]
  							ifFalse: [oop]].
+ 	inEmptySpace := false.
  	[self oop: oop isLessThan: limit] whileTrue:
  		[self printEntity: oop.
+ 		 [oop := self objectAfter: oop.
+ 		  (self long64At: oop) = 0] whileTrue:
+ 			[inEmptySpace ifFalse:
+ 				[inEmptySpace := true.
+ 				 firstNonEntity := oop].
+ 			 lastNonEntity := oop].
+ 		 inEmptySpace ifTrue:
+ 			[inEmptySpace := false.
+ 			 coInterpreter
+ 				print: 'skipped empty space from '; printHexPtrnp: firstNonEntity;
+ 				print:' to '; printHexPtrnp: lastNonEntity; cr.
+ 			 oop := self objectStartingAt: oop]]!
- 		 oop := self objectAfter: oop]!

Item was changed:
  ----- Method: SpurMemoryManager>>resetFreeListHeads (in category 'free space') -----
  resetFreeListHeads
+ 	freeListsMask := 0.
  	0 to: self numFreeLists - 1 do:
  		[:i| freeLists at: i put: 0]!

Item was added:
+ ----- Method: SpurMemoryManager>>statCompactionUsecs (in category 'accessing') -----
+ statCompactionUsecs
+ 	^statCompactionUsecs!

Item was added:
+ ----- Method: SpurMemoryManager>>unlinkFreeChunk: (in category 'free space') -----
+ unlinkFreeChunk: freeChunk
+ 	"Unlink a free object from the free lists. Do not alter totalFreeOldSpace. Used for coalescing."
+ 	| chunkBytes index node next prev child childBytes |
+ 	index := (chunkBytes := self bytesInObject: freeChunk) / self allocationUnit.
+ 	(index < self numFreeLists and: [1 << index <= freeListsMask]) ifTrue:
+ 		[self assert: ((freeListsMask anyMask: 1 << index) and: [(freeLists at: index) ~= 0]).
+ 		 node := freeLists at: index.
+ 		 prev := 0.
+ 		 [node ~= 0] whileTrue:
+ 			[self assert: node = (self startOfObject: node).
+ 			 self assert: (self isValidFreeObject: node).
+ 			 next := self fetchPointer: self freeChunkNextIndex ofFreeChunk: node.
+ 			 node = freeChunk ifTrue:
+ 				[prev = 0
+ 					ifTrue: [freeLists at: index put: next]
+ 					ifFalse: [self storePointer: self freeChunkNextIndex ofFreeChunk: prev withValue: next].
+ 				 ^self].
+ 			 prev := node.
+ 			 node := next].
+ 		 self error: 'freeChunk not found in free lists'].
+ 
+ 	"Large chunk.  Search the large chunk tree."
+ 	child := freeLists at: 0.
+ 	node := 0.
+ 	[child ~= 0] whileTrue:
+ 		[self assert: (self isValidFreeObject: child).
+ 		 childBytes := self bytesInObject: child.
+ 		 childBytes = chunkBytes ifTrue: "size match; try to remove from list at node."
+ 			[node := child.
+ 			 [prev := node.
+ 			  node := self fetchPointer: self freeChunkNextIndex ofFreeChunk: node.
+ 			  node ~= 0] whileTrue:
+ 				[node = freeChunk ifTrue:
+ 					[self assert: (self isValidFreeObject: node).
+ 					 self storePointer: self freeChunkNextIndex
+ 						ofFreeChunk: prev
+ 						withValue: (self fetchPointer: self freeChunkNextIndex ofFreeChunk: node).
+ 					 ^self]].
+ 			 child = freeChunk ifTrue:
+ 				[next := self fetchPointer: self freeChunkNextIndex ofFreeChunk: child.
+ 				 next = 0
+ 					ifTrue: "no list; remove the interior node"
+ 						[self unlinkSolitaryFreeTreeNode: child]
+ 					ifFalse: "list; replace node with it"
+ 						[self inFreeTreeReplace: child with: next].
+ 				 ^self]].
+ 		 child ~= 0 ifTrue:
+ 			[childBytes < chunkBytes
+ 				ifTrue: "node too small; walk down the larger size of the tree"
+ 					[child := self fetchPointer: self freeChunkLargerIndex ofFreeChunk: child]
+ 				ifFalse:
+ 					[node := child.
+ 					 child := self fetchPointer: self freeChunkSmallerIndex ofFreeChunk: node]]].
+ 
+ 	self error: 'freeChunk not found in free tree'
+ 	!

Item was changed:
  CogClass subclass: #SpurPlanningCompactor
+ 	instanceVariableNames: 'manager scavenger coInterpreter biasForGC firstFreeObject mobileStart firstMobileObject lastMobileObject savedFirstFieldsSpace savedFirstFieldsSpaceNotInOldSpace firstFieldOfRememberedSet anomaly objectAfterLastMobileObject'
- 	instanceVariableNames: 'manager scavenger coInterpreter biasForGC firstFreeObject mobileStart firstMobileObject lastMobileObject savedFirstFieldsSpace savedFirstFieldsSpaceNotInOldSpace firstFieldOfRememberedSet interestingObj anomaly objectAfterLastMobileObject relocationMap'
  	classVariableNames: ''
  	poolDictionaries: 'SpurMemoryManagementConstants VMBasicConstants VMBytecodeConstants VMSpurObjectRepresentationConstants'
  	category: 'VMMaker-SpurMemoryManager'!
  
  !SpurPlanningCompactor commentStamp: 'eem 12/23/2016 17:50' prior: 0!
  SpurPlanningCompactor implements the classic planning compaction algorithm for Spur.  It makes at least three passes through the heap.  The first pass plans where live movable objects will go, copying their forwarding field to the next slot in savedFirstFieldsSpace, and setting their forwarding pointer to point to their eventual location.  The second pass updates all pointers in live pointer objects to point to objects' final destinations.  The third pass moves objects to their final positions, unmarking objects as it does so.  If the forwarding fields of live objects in the to-be-moved portion of the entire heap won't fit in savedFirstFieldsSpace, then additional passes are made until the entire heap has been compacted.
  
  Instance Variables
  	biasForGC						<Boolean>
  	coInterpreter:					<StackInterpreter>
  	firstFieldOfRememberedSet		<Oop>
  	firstFreeObject					<Oop>
  	firstMobileObject				<Oop>
  	lastMobileObject				<Oop>
  	manager:						<SpurMemoryManager>
  	savedFirstFieldsSpace				<SpurContiguousObjStack>
  	savedFirstFieldsSpaceWasAllocated	<Boolean>
  	scavenger:						<SpurGenerationScavenger>
  
  biasForGC
  	- true if compacting for GC, in which case do only one pass, or false if compacting for snapshot, in which case do as many passes as necessary to compact the entire heap.
  
  firstFieldOfRememberedSet
  	- the saved first field of the rememberedSet.  The rememberedSet must be relocated specially because it is not a pointer object.  And hence the first field needs to be extracted for proper relocation.
  
  firstFreeObject
  	- the first free object in a compaction pass.
  
  firstMobileObject
  	- the first mobile object in a compaction.  Unpinned objects from the firstMobileObject through to the lastMobileObject are implicitly forwarded.
  
  lastMobileObject
  	- the last mobile object in a compaction.  Unpinned objects from the firstMobileObject through to the lastMobileObject are implicitly forwarded.
  
  savedFirstFieldsSpace
  	- the space holding the saved first fields, each overwritten by a forwarding pointer, for the objects from firstMobileObject through to lastMobileObject.
  
  savedFirstFieldsSpaceWasAllocated
  	- if true, the memory for savedFirstFieldsSpace was obtained via a call of sqAllocateMemorySegmentOfSize:Above:AllocatedSizeInto:!

Item was changed:
  ----- Method: SpurPlanningCompactor class>>declareCVarsIn: (in category 'translation') -----
  declareCVarsIn: aCCodeGenerator
  	self declareCAsOop: (self instVarNames select: [:iv| iv endsWith: 'Object']) in: aCCodeGenerator.
  	aCCodeGenerator
+ 		var: 'savedFirstFieldsSpace' type: #SpurContiguousObjStack!
- 		var: 'savedFirstFieldsSpace' type: #SpurContiguousObjStack;
- 		removeVariable: 'interestingObj';
- 		removeVariable: 'relocationMap'!

Item was added:
+ ----- Method: SpurPlanningCompactor class>>simulatorClass (in category 'simulation') -----
+ simulatorClass
+ 	^SpurPlanningCompactorSimulator!

Item was changed:
  ----- Method: SpurPlanningCompactor>>check: (in category 'private') -----
  check: obj
+ 	"No-op in the real class."
+ 	<inline: true>!
- 	<inline: true>
- 	self cCode: '' inSmalltalk: [obj = interestingObj ifTrue: [self halt]].
- 	"this debugged the misuse of the largest free chunk:"
- 	"(manager checkForLeaks bitAnd: GCModeFull+GCModeFreeSpace) = GCModeFull ifTrue:
- 		[self deny: ((manager isEnumerableObject: obj) and: [(manager heapMapAtWord: obj) = 0])]"!

Item was added:
+ ----- Method: SpurPlanningCompactor>>coalesceFrom: (in category 'private') -----
+ coalesceFrom: maybeStartOfFree
+ 	"manager printOopsFrom: maybeStartOfFree to: manager endOfMemory"
+ 	<var: 'maybeStartOfFree' type: #usqInt>
+ 	| obj next |
+ 	<var: 'obj' type: #usqInt>
+ 	<var: 'next' type: #usqInt>
+ 	maybeStartOfFree >= manager endOfMemory ifTrue:
+ 		[^self].
+ 	obj := manager objectStartingAt: maybeStartOfFree.
+ 	[next := manager oldSpaceObjectAfter: obj.
+ 	 next < manager endOfMemory] whileTrue:
+ 		[((manager isFreeObject: obj) and: [manager isFreeObject: next])
+ 			ifTrue:
+ 				[manager unlinkFreeChunk: obj.
+ 				 manager unlinkFreeChunk: next.
+ 				 obj := manager freeChunkWithBytes: (manager bytesInObject: obj) + (manager bytesInObject: next) at: (manager startOfObject: obj)]
+ 			ifFalse:
+ 				[obj := next]]!

Item was changed:
  ----- Method: SpurPlanningCompactor>>compact (in category 'compaction - api') -----
  compact
  	"Sweep all of old space, sliding unpinned marked objects down over free and unmarked objects.
  	 Let the segmentManager mark which segments contain pinned objects via notePinned:."
+ 	| finalPass firstPass |
- 	| onePass firstPass |
  	<inline: #never> "for profiling"
  	self initializeScanCheckingForFullyCompactedHeap ifTrue:
  		[^self unmarkObjectsInFullyCompactedHeap].
  	self initializeCompaction.
  	firstPass := true.
+ 	[finalPass := self planCompactSavingForwarders.
+ 	 self assert: (self validRelocationPlanInPass: finalPass) = 0.
- 	[onePass := self planCompactSavingForwarders.
- 	 self assert: (self validRelocationPlanInPass: onePass) = 0.
- 	 objectAfterLastMobileObject := manager oldSpaceObjectAfter: lastMobileObject.
  	 self updatePointers.
  	 self copyAndUnmark: firstPass.
+ 	 "Would like to check here, but can't if multi-pass."
+ 	 false ifTrue: [manager checkFreeSpace: GCModeFull].
+ 	 "Currently we do only a single pass if a normal GC, assuming that a pass will
+ 	  always compact plenty of space. But we should perhaps check this assumption
+ 	  by looking at the large free tree and seeing that the ratio of the largest free
+ 	  chunk to the total amount of free space is high."
+ 	 finalPass or: [biasForGC]] whileFalse:
- 	 manager checkFreeSpace: GCModeFull.
- 	 onePass or: [biasForGC]] whileFalse:
  		[firstPass := false.
+ 		 self reinitializeScanFrom: firstFreeObject;
- 		 self reinitializeScan;
  			updateSavedFirstFieldsSpaceIfNecessary].
+ 	manager checkFreeSpace: GCModeFull.
  	self endCompaction!

Item was changed:
  ----- Method: SpurPlanningCompactor>>copyAndUnmark: (in category 'compaction') -----
  copyAndUnmark: firstPass
  	"Sweep the heap, unmarking all objects and moving mobile objects to their correct positions,
  	 restoring their savedFirstFields."
  	<inline: #never>
+ 	| finalPass |
- 	| onePass |
  	self logPhase: 'copying and unmarking...'.
  	firstPass ifTrue:
  		[self unmarkInitialImmobileObjects].
+ 	finalPass := self copyAndUnmarkMobileObjects.
+ 	(self thereAreObjectsToMove
+ 	 and: [finalPass not
+ 	 and: [biasForGC]]) ifTrue: "only ever one pass if biasForGC is true."
- 	"If savedFirstFieldsSpace is empty there is nothing to move, and no second pass."
- 	savedFirstFieldsSpace top < savedFirstFieldsSpace start ifTrue:
- 		[self assert: (self oop: firstMobileObject isGreaterThanOrEqualTo: manager endOfMemory).
- 		 ^self].
- 	onePass := self copyAndUnmarkMobileObjects.
- 	(onePass not and: [biasForGC]) ifTrue: "only ever one pass if biasForGC is true."
  		[self unmarkObjectsAfterLastMobileObject]!

Item was changed:
  ----- Method: SpurPlanningCompactor>>copyAndUnmarkMobileObjects (in category 'compaction') -----
  copyAndUnmarkMobileObjects
  	"Sweep the mobile portion of the heap, moving objects to their eventual locations, and clearing their marked bits.
  	 Remember to update the savedFirstFields of pointer objects, as these have been forwarded.
  	 Answer if the end of the heap was reached (savedFirstFieldsSpace has not overflowed).
  
  	 The enumerations in planCompactSavingForwarders, updatePointersInMobileObjects and copyAndUnmarkMobileObjects
  	 match.  We could implement them as a single enumeration method taking several block arguments, but arguably that
  	 would make understanding an already tricky algorithm more difficult.  Instead we tolerate the duplication and encourage
  	 the reader to diff the three methods to see where they diverge (e.g. via Cmd-shift-C)."
  	<inline: #never>
+ 	| toFinger top previousPin startOfPreviousPin |
- 	| toFinger top previousPin |
  	<var: 'o' type: #usqInt>
  	<var: 'top' type: #usqInt>
  	<var: 'toFinger' type: #usqInt>
  	<var: 'previousPin' type: #usqInt>
+ 	<var: 'startOfPreviousPin' type: #usqInt>
  	self deny: (manager isMarked: firstFreeObject).
  	toFinger := manager startOfObject: firstFreeObject.
  	top := savedFirstFieldsSpace start.
+ 	startOfPreviousPin := 0.
+ 	manager allOldSpaceEntitiesForCompactingFrom: firstFreeObject to: (lastMobileObject ifNil: manager nilObject) do:
- 	manager allOldSpaceEntitiesForCompactingFrom: firstFreeObject do:
  		[:o :n|
  		 self check: o.
  		 self assert: (previousPin
  						ifNil: [toFinger <= (manager startOfObject: o)]
+ 						ifNotNil: [(manager isMarked: previousPin) and: [toFinger <= startOfPreviousPin]]).
- 						ifNotNil: [(manager isMarked: previousPin) and: [toFinger <= (manager startOfObject: previousPin)]]).
  		 self assert: (savedFirstFieldsSpaceNotInOldSpace or: [toFinger < top]).
  		 (manager isMarked: o) ifTrue:
+ 			[(manager isPinned: o)
- 			[o > lastMobileObject ifTrue:
- 				[self assert: ((manager isPinned: o) not or: [previousPin isNil]).
- 				 self freeFrom: toFinger upTo: manager endOfMemory previousPin: ((manager isPinned: o) ifTrue: [o] ifFalse: [previousPin]).
- 				 ^true].
- 			 (manager isPinned: o)
  				ifTrue:
  					[previousPin ifNil:
+ 						[previousPin := o. startOfPreviousPin := manager startOfObject: o]]
- 						[previousPin := o]]
  				ifFalse:
  					[| availableSpace bytes |
  					 bytes := manager bytesInObject: o.
+ 					 [toFinger <= startOfPreviousPin
+ 					  and: [bytes ~= (availableSpace := startOfPreviousPin - toFinger)
+ 					  and: [bytes + (2 * manager allocationUnit) > availableSpace]]] whileTrue:
- 					 [previousPin notNil
- 					  and: [availableSpace := (manager startOfObject: previousPin) - toFinger.
- 							bytes ~= availableSpace
- 							and: [bytes + (2 * manager allocationUnit) > availableSpace]]] whileTrue:
  						["The object does not fit in the space between toFinger and previousPin.
  						   Move toFinger up to point at the first unmarked or mobile object after
  						   previousPin, or, if previousPin is contiguous with o, to the start of this
  						   object.  Update previousPin to be the next pinned object above toFinger
  						   and below this object, or nil if no such pinned object exists.
  						   Any unfillable gaps between adjacent pinned objects will be freed."
  						 availableSpace > 0 ifTrue:
  							[manager addFreeChunkWithBytes: availableSpace at: toFinger].
  					 	 [self assert: ((manager isMarked: previousPin) and: [manager isPinned: previousPin]).
  						  self unmarkPinned: previousPin.
  						  toFinger := manager addressAfter: previousPin.
  						  previousPin := manager objectStartingAt: toFinger.
  						  (manager isMarked: previousPin)
  						   and: [(manager isPinned: previousPin)
  						   and: [previousPin < o]]]
  							whileTrue.
  						 "Now previousPin is either equal to o or mobile.
  						  Move it to the next pinned object below o"
  						 [previousPin >= o
  						  or: [(manager isMarked: previousPin)
  						  and: [manager isPinned: previousPin]]] whileFalse:
  							[previousPin := manager oldSpaceObjectAfter: previousPin].
+ 						 previousPin >= o
+ 							ifTrue: [previousPin := nil. startOfPreviousPin := 0]
+ 							ifFalse: [startOfPreviousPin := manager startOfObject: previousPin]].
- 						 previousPin >= o ifTrue:
- 							[previousPin := nil]].
  					 self copyAndUnmarkObject: o to: toFinger bytes: bytes firstField: (manager longAt: top).
  					 toFinger := toFinger + bytes.
  					 (top := top + manager bytesPerOop) >= savedFirstFieldsSpace limit ifTrue:
+ 						[self assert: savedFirstFieldsSpace top = (top - manager bytesPerOop).
+ 						 self assert: n = objectAfterLastMobileObject.
+ 						 previousPin ifNil: [previousPin := n. startOfPreviousPin := manager startOfObject: n].
+ 						 "Create a free object for firstFreeObject to be set to on the next pass, but
+ 						   do not link it into the free tree as it will be written over in that next pass."
+ 						 toFinger < startOfPreviousPin
+ 							ifTrue:
+ 								[firstFreeObject := manager initFreeChunkWithBytes: startOfPreviousPin - toFinger at: toFinger]
+ 							ifFalse:
+ 								[firstFreeObject := previousPin].
+ 						^false]]]].
+ 	self freeFrom: toFinger upTo: manager endOfMemory nextObject: (previousPin ifNil: [objectAfterLastMobileObject ifNil: [manager objectAfter: firstFreeObject]]).
+ 	self coalesceFrom: toFinger.
- 						[| done |
- 						 self assert: savedFirstFieldsSpace top = (top - manager bytesPerOop).
- 						 done := self noMobileObjectsAfter: n.
- 						 done
- 							ifTrue: [self freeAllUnpinnedObjectsFromObject: (previousPin ifNil: [n]) toFinger: toFinger]
- 							ifFalse: [self freeFrom: toFinger upTo: (manager startOfObject: n) previousPin: previousPin].
- 						^done]]]].
- 	self freeFrom: toFinger upTo: manager endOfMemory previousPin: previousPin.
  	^true!

Item was changed:
  ----- Method: SpurPlanningCompactor>>forwardMobileObject:to:savedFirstFieldPtr: (in category 'private') -----
  forwardMobileObject: o to: toFinger savedFirstFieldPtr: savedFirstFieldPtr
  	"Forward a mobile object to some new location, saving its first field through savedFirstFieldPtr.
  	 Don't use forward:to:; we don't want to alter the object in any way other than by setting the forwarding pointer."
  	<inline: true>
  	lastMobileObject := o.
  	manager
  		longAt: savedFirstFieldPtr
  			put: (manager fetchPointer: 0 ofObject: o);
  		storePointerUnchecked: 0
  			ofObject: o
  				withValue: ((manager hasOverflowHeader: o)
  								ifTrue: [toFinger + manager baseHeaderSize]
  								ifFalse: [toFinger]).
+ 	self recordMovementOf: o to: toFinger savedFirstFieldPtr: savedFirstFieldPtr!
- 	self cCode: '' inSmalltalk: [relocationMap ifNotNil: [:rm| rm at: o put: savedFirstFieldPtr]]!

Item was added:
+ ----- Method: SpurPlanningCompactor>>freeFrom:upTo:nextObject: (in category 'private') -----
+ freeFrom: initialToFinger upTo: limit nextObject: nextObject
+ 	"Free from toFinger up to limit, dealing with possible intervening pinned objects."
+ 	<inline: false>
+ 	<var: 'limit' type: #usqInt>
+ 	<var: 'initialToFinger' type: #usqInt>
+ 	| toFinger obj objStart |
+ 	<var: 'objStart' type: #usqInt>
+ 	<var: 'toFinger' type: #usqInt>
+ 	self cCode: [] inSmalltalk:
+ 		[coInterpreter cr; cr; print: 'freeing at '; printHexnp: initialToFinger; print: ' up to '; printHexnp: limit; cr].
+ 	toFinger := initialToFinger.
+ 	objStart := manager startOfObject: nextObject.
+ 	toFinger < objStart ifTrue:
+ 		[manager addFreeChunkWithBytes: objStart - toFinger at: toFinger].
+ 	toFinger := objStart.
+ 	[objStart < limit] whileTrue:
+ 		[obj := manager objectStartingAt: objStart.
+ 		 ((manager isMarked: obj) and: [manager isPinned: obj])
+ 			ifTrue:
+ 				[self unmarkPinned: obj.
+ 				 toFinger < objStart ifTrue:
+ 					[manager addFreeChunkWithBytes: objStart - toFinger at: toFinger].
+ 				 toFinger := objStart := manager addressAfter: obj]
+ 			ifFalse:
+ 				[objStart := manager addressAfter: obj]].
+ 	limit > toFinger ifTrue:
+ 		[manager addFreeChunkWithBytes: limit - toFinger at: toFinger]!

Item was removed:
- ----- Method: SpurPlanningCompactor>>freeFrom:upTo:previousPin: (in category 'private') -----
- freeFrom: toFinger upTo: limit previousPin: previousPinOrNil
- 	"Free from toFinger up to limit, dealing with a possible intervening run of pinned objects starting at previousPinOrNil."
- 	<inline: false>
- 	<var: 'limit' type: #usqInt>
- 	<var: 'toFinger' type: #usqInt>
- 	<var: 'previousPinOrNil' type: #usqInt>
- 	| effectiveToFinger pin nextUnpinned start seg |
- 	<var: 'nextUnpinned' type: #usqInt>
- 	<var: #seg type: #'SpurSegmentInfo *'>
- 	self cCode: [] inSmalltalk:
- 		[coInterpreter cr; cr; print: 'freeing at '; printHexnp: toFinger; print: ' up to '; printHexnp: limit; print: ' pin '; printHexnp: previousPinOrNil; cr].
- 	effectiveToFinger := toFinger.
- 	pin := previousPinOrNil.
- 	"If the range toFinger to limit spans segments but there is no pin (as when freeing to the end of memory)
- 	 segment boundaries must still be observed.  So in this case use the nearest bridge above toFinger as the pin."
- 	pin ifNil:
- 		[seg := manager segmentManager segmentContainingObj: toFinger.
- 		 seg segLimit < limit ifTrue:
- 			[pin := manager segmentManager bridgeFor: seg]].
- 	[pin notNil] whileTrue:
- 		[(start := manager startOfObject: pin) > toFinger ifTrue:
- 			[manager addFreeChunkWithBytes: start - effectiveToFinger at: effectiveToFinger].
- 		 nextUnpinned := self unmarkPinnedObjectsAndFindFirstUnpinnedOrFreeEntityFollowing: pin.
- 		 nextUnpinned >= limit ifTrue:
- 			[^self].
- 		 effectiveToFinger := manager startOfObject: nextUnpinned.
- 		 pin := self findNextMarkedPinnedAfter: nextUnpinned].
- 	manager addFreeChunkWithBytes: limit - effectiveToFinger at: effectiveToFinger!

Item was changed:
  ----- Method: SpurPlanningCompactor>>initializeScanCheckingForFullyCompactedHeap (in category 'compaction') -----
  initializeScanCheckingForFullyCompactedHeap
+ 	"Scan for firstFreeObject and firstMobileObject from the start of memory.
+ 	 Answer if the heap is already fully compacted."
+ 	firstMobileObject := lastMobileObject := objectAfterLastMobileObject := nil.
+ 	self reinitializeScanFrom: manager hiddenRootsObject.
+ 	firstFreeObject ifNil:
+ 		[self error: 'uncompactable heap; no unmarked objects found'].
- 	"Scan for firstFreeObject and firstMobileObject from the start of memory (actually
- 	 from lastMobileObject so that reInitializeScan can work on subsequent passes).
- 	 Answer if the heap is already fully compacted.  Set "
- 	firstFreeObject := lastMobileObject := manager hiddenRootsObject.
- 	self reinitializeScan.
  	^firstMobileObject >= manager endOfMemory!

Item was removed:
- ----- Method: SpurPlanningCompactor>>interestingObj: (in category 'instance initialization') -----
- interestingObj: obj
- 	interestingObj := obj!

Item was removed:
- ----- Method: SpurPlanningCompactor>>noMobileObjectsAfter: (in category 'private') -----
- noMobileObjectsAfter: mobileObj
- 	self assert: ((manager isMarked: mobileObj) and: [(manager isPinned: mobileObj) not]).
- 	manager allOldSpaceEntitiesFrom: mobileObj do:
- 		[:o|
- 		 ((manager isMarked: o) and: [(manager isPinned: o) not]) ifTrue:
- 			[^false]].
- 	^true!

Item was changed:
  ----- Method: SpurPlanningCompactor>>planCompactSavingForwarders (in category 'compaction') -----
  planCompactSavingForwarders
  	"Sweep the heap from firstFreeObject forwarding marked objects to where they
  	 can be moved to, saving their forwarding pointer in savedFirstFieldsSpace.
  	 Continue until either the end of the heap is reached or savedFirstFieldsSpace is full.
  	 Answer if the end of the heap was reached (savedFirstFieldsSpace has not overflowed).
  
  	 The enumerations in planCompactSavingForwarders, updatePointersInMobileObjects and copyAndUnmarkMobileObjects
  	 match.  We could implement them as a single enumeration method taking several block arguments, but arguably that
  	 would make understanding an already tricky algorithm more difficult.  Instead we tolerate the duplication and encourage
  	 the reader to diff the three methods to see where they diverge (e.g. via Cmd-shift-C)."
  	<inline: #never>
+ 	| toFinger top previousPin startOfPreviousPin |
- 	| toFinger top previousPin |
  	<var: 'o' type: #usqInt>
  	<var: 'top' type: #usqInt>
  	<var: 'toFinger' type: #usqInt>
  	<var: 'previousPin' type: #usqInt>
+ 	<var: 'startOfPreviousPin' type: #usqInt>
  	savedFirstFieldsSpace top < savedFirstFieldsSpace start ifTrue:
  		[self logPhase: 'planning...'].
  	self deny: (manager isMarked: firstFreeObject).
  	toFinger := manager startOfObject: firstFreeObject.
  	top := savedFirstFieldsSpace start.
+ 	startOfPreviousPin := 0.
  	manager allOldSpaceEntitiesFrom: firstFreeObject do:
  		[:o|
  		 self check: o.
  		 self assert: (previousPin
  						ifNil: [toFinger <= (manager startOfObject: o)]
+ 						ifNotNil: [(manager isMarked: previousPin) and: [toFinger <= startOfPreviousPin]]).
- 						ifNotNil: [(manager isMarked: previousPin) and: [toFinger <= (manager startOfObject: previousPin)]]).
  		 self assert: (savedFirstFieldsSpaceNotInOldSpace or: [toFinger < top]).
  		 (manager isMarked: o) ifTrue:
  			[(manager isPinned: o)
  				ifTrue:
  					[previousPin ifNil:
+ 						[previousPin := o. startOfPreviousPin := manager startOfObject: o]]
- 						[previousPin := o]]
  				ifFalse:
  					[| availableSpace bytes |
  					 bytes := manager bytesInObject: o.
+ 					 [toFinger <= startOfPreviousPin
+ 					  and: [bytes ~= (availableSpace := startOfPreviousPin - toFinger)
+ 					  and: [bytes + (2 * manager allocationUnit) > availableSpace]]] whileTrue:
- 					 [previousPin notNil
- 					  and: [availableSpace := (manager startOfObject: previousPin) - toFinger.
- 							bytes ~= availableSpace
- 							and: [bytes + (2 * manager allocationUnit) > availableSpace]]] whileTrue:
  						["The object does not fit in the space between toFinger and previousPin.
  						   Move toFinger up to point at the first unmarked or mobile object after
  						   previousPin, or, if previousPin is contiguous with o, to the start of this
  						   object.  Update previousPin to be the next pinned object above toFinger
  						   and below this object, or nil if no such pinned object exists.
  						   Any unfillable gaps between adjacent pinned objects will be freed."
  					 	 [toFinger := manager addressAfter: previousPin.
  						  previousPin := manager objectStartingAt: toFinger.
  						  (manager isMarked: previousPin)
  						   and: [(manager isPinned: previousPin)
  						   and: [previousPin < o]]]
  							whileTrue.
  						 "Now previousPin is either equal to o or mobile.
  						  Move it to the next pinned object below o"
  						 [previousPin >= o
  						  or: [(manager isMarked: previousPin)
  						  and: [manager isPinned: previousPin]]] whileFalse:
  							[previousPin := manager oldSpaceObjectAfter: previousPin].
+ 						 previousPin >= o
+ 							ifTrue: [previousPin := nil. startOfPreviousPin := 0]
+ 							ifFalse: [startOfPreviousPin := manager startOfObject: previousPin]].
- 						 previousPin >= o ifTrue:
- 							[previousPin := nil]].
  					 self forwardMobileObject: o to: toFinger savedFirstFieldPtr: top.
  					 toFinger := toFinger + bytes.
  					 (top := top + manager bytesPerOop) >= savedFirstFieldsSpace limit ifTrue:
  						[savedFirstFieldsSpace top: top - manager bytesPerOop.
+ 						 objectAfterLastMobileObject := manager oldSpaceObjectAfter: lastMobileObject.
+ 						 ^false]]]].
+ 	"If the heap is already fully compacted there will be no lastMobileObject..."
+ 	lastMobileObject ifNotNil:
+ 		[savedFirstFieldsSpace top: top - manager bytesPerOop.
+ 		 objectAfterLastMobileObject := manager oldSpaceObjectAfter: lastMobileObject].
- 						 ^self noMobileObjectsAfter: o]]]].
- 	savedFirstFieldsSpace top: top - manager bytesPerOop.
  	^true!

Item was added:
+ ----- Method: SpurPlanningCompactor>>recordMovementOf:to:savedFirstFieldPtr: (in category 'private') -----
+ recordMovementOf: o to: toFinger savedFirstFieldPtr: savedFirstFieldPtr
+ 	"No-op in the real class."
+ 	<inline: true>!

Item was removed:
- ----- Method: SpurPlanningCompactor>>recordMovements (in category 'compaction') -----
- recordMovements
- 	relocationMap := Dictionary new!

Item was removed:
- ----- Method: SpurPlanningCompactor>>reinitializeScan (in category 'compaction') -----
- reinitializeScan
- 	"Search for firstFreeObject and firstMobileObject from lastMobileObject (which is
- 	 set to the hiddenRootsObject on the first pass)."
- 	firstMobileObject := manager endOfMemory.
- 	firstFreeObject := self scanForFirstFreeAndFirstMobileObjectFrom: lastMobileObject.
- 	firstFreeObject ifNil:
- 		[self error: 'uncompactable heap; no unmarked objects found'].
- 	mobileStart := manager startOfObject: firstFreeObject!

Item was added:
+ ----- Method: SpurPlanningCompactor>>reinitializeScanFrom: (in category 'compaction') -----
+ reinitializeScanFrom: initialObject
+ 	"Search for firstFreeObject and firstMobileObject from initialObject, which is the
+ 	 hiddenRootsObject on the first pass, and the objectAfterLastMobileObject on
+ 	 subsequent passes."
+ 	firstMobileObject := manager endOfMemory.
+ 	firstFreeObject := self scanForFirstFreeAndFirstMobileObjectFrom: initialObject.
+ 	firstFreeObject ifNotNil:
+ 		[mobileStart := manager startOfObject: firstFreeObject].
+ 	objectAfterLastMobileObject ifNotNil:
+ 		[manager allOldSpaceEntitiesFrom: firstFreeObject to: objectAfterLastMobileObject do:
+ 			[:o|
+ 			 ((manager isPinned: o)
+ 			  or: [(manager isMarked: o) not
+ 			  or: [objectAfterLastMobileObject = o]]) ifFalse:
+ 				[manager setIsMarkedOf: o to: false]].
+ 		 firstMobileObject := objectAfterLastMobileObject]!

Item was added:
+ ----- Method: SpurPlanningCompactor>>thereAreObjectsToMove (in category 'private') -----
+ thereAreObjectsToMove
+ 	<inline: true>
+ 	^lastMobileObject notNil!

Item was added:
+ ----- Method: SpurPlanningCompactor>>unmarkAndScanForFirstUnpinnedObjectFrom: (in category 'compaction') -----
+ unmarkAndScanForFirstUnpinnedObjectFrom: initialObject
+ 	"Scan from initialObject, unmarking any marked pinned objects, and answering the first mobile or freeable object found, or nil if none."
+ 	manager allOldSpaceEntitiesFrom: initialObject do:
+ 		[:o|
+ 		((manager isMarked: o) and: [manager isPinned: o]) ifFalse:
+ 			[^o].
+ 		self unmarkPinned: o].
+ 	^nil!

Item was changed:
  ----- Method: SpurPlanningCompactor>>unmarkObjectsAfterLastMobileObject (in category 'compaction') -----
  unmarkObjectsAfterLastMobileObject
+ 	"Sweep the final immobile heap, freeing and coalescing unmarked and free objects,
+ 	 and unmarking all marked objects up to the end of memory."
+ 	| startOfFree freeBytes |
+ 	freeBytes := 0.
+ 	manager allOldSpaceEntitiesFrom: objectAfterLastMobileObject do:
- 	"Sweep the final immobile heap, unmarking all objects up to the end of memory."
- 	manager allOldSpaceObjectsFrom: objectAfterLastMobileObject do:
  		[:o|
  		self check: o.
+ 		(manager isMarked: o)
+ 			ifFalse:
+ 				[startOfFree ifNil: [startOfFree := manager startOfObject: o].
+ 				 freeBytes := freeBytes + (manager bytesInObject: o)]
+ 			ifTrue:
+ 				[startOfFree ifNotNil:
+ 					[manager addFreeChunkWithBytes: freeBytes at: startOfFree.
+ 					 startOfFree := nil.
+ 					 freeBytes := 0].
+ 				 (manager isPinned: o)
+ 					ifTrue: [self unmarkPinned: o]
+ 					ifFalse: [manager setIsMarkedOf: o to: false]]]!
- 		manager setIsMarkedOf: o to: false]!

Item was changed:
  ----- Method: SpurPlanningCompactor>>updatePointers (in category 'compaction') -----
  updatePointers
  	"Sweep the heap, updating all objects to their eventual locations.
  	 Remember to update the savedFirstFields of pointer objects, as these have been forwarded."
  	<inline: #never>
  	| onePass |
  	self logPhase: 'updating pointers...'.
+ 	self thereAreObjectsToMove ifFalse:
- 	"If savedFirstFieldsSpace is empty there is nothing to do."
- 	savedFirstFieldsSpace top < savedFirstFieldsSpace start ifTrue:
  		[^self].
  	self assert: (manager startOfObject: firstFreeObject) = mobileStart.
  	coInterpreter mapInterpreterOops.
  	manager mapExtraRoots.
  	self updatePointersInManagerHeapEntities.
  	self updatePointersInSurvivingObjects.
  	self updatePointersInInitialImmobileObjects.
  	onePass := self updatePointersInMobileObjects.
  	onePass ifFalse:
  		[self updatePointersInObjectsOverflowingSavedFirstFieldsSpace]!

Item was changed:
  ----- Method: SpurPlanningCompactor>>updatePointersInInitialImmobileObjects (in category 'compaction') -----
  updatePointersInInitialImmobileObjects
  	"Sweep the initial immobile heap, updating all references to mobile objects to their eventual locations."
  	manager allOldSpaceObjectsFrom: manager firstObject do:
  		[:o|
  		self check: o.
  		(self oop: o isGreaterThanOrEqualTo: firstFreeObject) ifTrue:
  			[^self].
+ 		"would like to assert this, but it isn't true if more than one pass: self assert: (manager isMarked: o)."
- 		self assert: (manager isMarked: o).
  		self updatePointersIn: o]!

Item was changed:
  ----- Method: SpurPlanningCompactor>>updatePointersInMobileObjects (in category 'compaction') -----
  updatePointersInMobileObjects
  	"Sweep the mobile portion of the heap, updating all references to objects to their eventual locations.
  	 Remember to update the savedFirstFields of pointer objects, as these have been forwarded.
  	 Answer if the end of the heap was reached (savedFirstFieldsSpace has not overflowed).
  
  	 The enumerations in planCompactSavingForwarders, updatePointersInMobileObjects and copyAndUnmarkMobileObjects
  	 match.  We could implement them as a single enumeration method taking several block arguments, but arguably that
  	 would make understanding an already tricky algorithm more difficult.  Instead we tolerate the duplication and encourage
  	 the reader to diff the three methods to see where they diverge (e.g. via Cmd-shift-C)."
+ 	| toFinger top previousPin startOfPreviousPin |
- 	| toFinger top previousPin |
  	<var: 'o' type: #usqInt>
  	<var: 'top' type: #usqInt>
  	<var: 'toFinger' type: #usqInt>
  	<var: 'previousPin' type: #usqInt>
+ 	<var: 'startOfPreviousPin' type: #usqInt>
  	self deny: (manager isMarked: firstFreeObject).
  	toFinger := manager startOfObject: firstFreeObject.
  	top := savedFirstFieldsSpace start.
+ 	startOfPreviousPin := 0.
  	manager allOldSpaceEntitiesFrom: firstFreeObject do:
  		[:o|
  		 self check: o.
  		 self assert: (previousPin
  						ifNil: [toFinger <= (manager startOfObject: o)]
+ 						ifNotNil: [(manager isMarked: previousPin) and: [toFinger <= startOfPreviousPin]]).
- 						ifNotNil: [(manager isMarked: previousPin) and: [toFinger <= (manager startOfObject: previousPin)]]).
  		 (manager isMarked: o) ifTrue:
  			[(manager isPinned: o)
  				ifTrue:
  					[previousPin ifNil:
+ 						[previousPin := o. startOfPreviousPin := manager startOfObject: o].
- 						[previousPin := o].
  					 self updatePointersIn: o]
  				ifFalse:
  					[| availableSpace bytes |
  					 bytes := manager bytesInObject: o.
+ 					 [toFinger <= startOfPreviousPin
+ 					  and: [bytes ~= (availableSpace := startOfPreviousPin - toFinger)
+ 					  and: [bytes + (2 * manager allocationUnit) > availableSpace]]] whileTrue:
- 					 [previousPin notNil
- 					  and: [availableSpace := (manager startOfObject: previousPin) - toFinger.
- 							bytes ~= availableSpace
- 							and: [bytes + (2 * manager allocationUnit) > availableSpace]]] whileTrue:
  						["The object does not fit in the space between toFinger and previousPin.
  						   Move toFinger up to point at the first unmarked or mobile object after
  						   previousPin, or, if previousPin is contiguous with o, to the start of this
  						   object.  Update previousPin to be the next pinned object above toFinger
  						   and below this object, or nil if no such pinned object exists.
  						   Any unfillable gaps between adjacent pinned objects will be freed."
  					 	 [toFinger := manager addressAfter: previousPin.
  						  previousPin := manager objectStartingAt: toFinger.
  						  (manager isMarked: previousPin)
  						   and: [(manager isPinned: previousPin)
  						   and: [previousPin < o]]]
  							whileTrue.
  						 "Now previousPin is either equal to o or mobile.
  						  Move it to the next pinned object below o"
  						 [previousPin >= o
  						  or: [(manager isMarked: previousPin)
  						  and: [manager isPinned: previousPin]]] whileFalse:
  							[previousPin := manager oldSpaceObjectAfter: previousPin].
+ 						 previousPin >= o
+ 							ifTrue: [previousPin := nil. startOfPreviousPin := 0]
+ 							ifFalse: [startOfPreviousPin := manager startOfObject: previousPin]].
- 						 previousPin >= o ifTrue:
- 							[previousPin := nil]].
  					 self updatePointersIn: o savedFirstFieldPointer: top.
  					 toFinger := toFinger + bytes.
  					 (top := top + manager bytesPerOop) >= savedFirstFieldsSpace limit ifTrue:
  						[self assert: savedFirstFieldsSpace top = (top - manager bytesPerOop).
+ 						 ^false]]]].
- 						 ^self noMobileObjectsAfter: o]]]].
  	self assert: savedFirstFieldsSpace top = (top - manager bytesPerOop).
  	^true!

Item was changed:
  ----- Method: SpurPlanningCompactor>>updatePointersInObjectsOverflowingSavedFirstFieldsSpace (in category 'compaction') -----
  updatePointersInObjectsOverflowingSavedFirstFieldsSpace
  	"Sweep the final immobile heap, if any (those objects with no room in savedFirstFieldsSpace
  	 in the current pass) updating all references to mobile objects to their eventual locations."
  	manager allOldSpaceObjectsFrom: objectAfterLastMobileObject do:
  		[:o|
  		self check: o.
+ 		(manager isMarked: o) ifTrue:
+ 			[self updatePointersIn: o]]!
- 		self assert: (manager isMarked: o).
- 		self updatePointersIn: o]!

Item was changed:
  ----- Method: SpurPlanningCompactor>>updateSavedFirstFieldsSpaceIfNecessary (in category 'space management') -----
  updateSavedFirstFieldsSpaceIfNecessary
  	"If savedFirstFieldsSpace is a free chunk then it may need to be repositioned if there is more than one pass."
  	<inline: true>
  	self savedFirstFieldsSpaceInFreeChunk ifTrue:
  		[self useFreeChunkForSavedFirstFieldsSpace: manager findLargestFreeChunk].
  
+ 	savedFirstFieldsSpace top: savedFirstFieldsSpace start - manager bytesPerOop!
- 	savedFirstFieldsSpace top: savedFirstFieldsSpace start - manager bytesPerOop.
- 	manager resetFreeListHeads!

Item was changed:
  ----- Method: SpurPlanningCompactor>>validRelocationPlanInPass: (in category 'private') -----
  validRelocationPlanInPass: onePass
  	"Answer 0 if all the mobile objects from firstMobileObject to lastMobileObject
  	 have sane forwarding addresses, and that savedFirstFieldsSpace is of
  	 matching capacity.  Otherwise answer an error code identifying the anomaly."
  	| nMobiles toFinger |
  	<var: 'toFinger' type: #usqInt>
  	<var: 'destination' type: #usqInt>
  	nMobiles := 0.
  	toFinger := mobileStart.
  	anomaly := nil.
  	manager allOldSpaceEntitiesFrom: firstMobileObject do:
  		[:o| | destination |
  		 self check: o.
  		 (manager isMarked: o) ifTrue:
  			[(manager isPinned: o) ifFalse:
  				[nMobiles := nMobiles + 1.
  				 destination := manager fetchPointer: 0 ofObject: o.
  				 destination >= toFinger ifFalse:
  					[anomaly := o. ^1].
  				 toFinger := toFinger + (manager bytesInObject: o).
  				 (self oop: o isGreaterThan: lastMobileObject) ifTrue:
  					[anomaly := o. ^2].
  				 o = lastMobileObject ifTrue:
+ 					[^savedFirstFieldsSpace top + manager bytesPerOop - savedFirstFieldsSpace start / manager bytesPerOop
- 					[^savedFirstFieldsSpace top - savedFirstFieldsSpace start / manager bytesPerOop + 1
  					   = nMobiles
  						ifTrue: [0]
  						ifFalse: [3]]]]].
+ 	"N.B. written this way so that if there are no mobiles the expression evaluates to 0 in Smalltalk /and/ in C unsigned arithmetic."
+ 	^savedFirstFieldsSpace top + manager bytesPerOop - savedFirstFieldsSpace start / manager bytesPerOop
- 	^savedFirstFieldsSpace top - savedFirstFieldsSpace start / manager bytesPerOop + 1
  	  = nMobiles
  		ifTrue: [0]
  		ifFalse: [4]!

Item was added:
+ SpurPlanningCompactor subclass: #SpurPlanningCompactorSimulator
+ 	instanceVariableNames: 'interestingObj relocationMap sffsMode'
+ 	classVariableNames: ''
+ 	poolDictionaries: ''
+ 	category: 'VMMaker-SpurMemoryManagerSimulation'!

Item was added:
+ ----- Method: SpurPlanningCompactorSimulator>>check: (in category 'private') -----
+ check: obj
+ 	<inline: true>
+ 	sffsMode ifNotNil: [self deny: ((manager isGrey: obj) or: [(manager isImmutable: obj)  or: [manager isRemembered: obj]])].
+ 	obj = interestingObj ifTrue: [self halt].
+ 	"this debugged the misuse of the largest free chunk:"
+ 	"(manager checkForLeaks bitAnd: GCModeFull+GCModeFreeSpace) = GCModeFull ifTrue:
+ 		[self deny: ((manager isEnumerableObject: obj) and: [(manager heapMapAtWord: obj) = 0])]"!

Item was added:
+ ----- Method: SpurPlanningCompactorSimulator>>forceMultiPass (in category 'accessing') -----
+ forceMultiPass
+ 	sffsMode := #multiPass!

Item was added:
+ ----- Method: SpurPlanningCompactorSimulator>>interestingObj: (in category 'accessing') -----
+ interestingObj: obj
+ 	interestingObj := obj!

Item was added:
+ ----- Method: SpurPlanningCompactorSimulator>>recordMovementOf:to:savedFirstFieldPtr: (in category 'private') -----
+ recordMovementOf: o to: toFinger savedFirstFieldPtr: savedFirstFieldPtr
+ 	relocationMap ifNotNil: [:rm| rm at: o put: savedFirstFieldPtr]!

Item was added:
+ ----- Method: SpurPlanningCompactorSimulator>>recordMovements (in category 'accessing') -----
+ recordMovements
+ 	relocationMap := Dictionary new!

Item was added:
+ ----- Method: SpurPlanningCompactorSimulator>>selectSavedFirstFieldsSpace (in category 'space management') -----
+ selectSavedFirstFieldsSpace
+ 	"Override to make savedFirstFieldsSpace small enough for multi-pass compaction, if desired (for testing)."
+ 	super selectSavedFirstFieldsSpace.
+ 	"testRandomAssortment: et al create 1000 objects, about 5% pinned and about 33% reclaimable.  So 512 objects should force multi-pass."
+ 	sffsMode == #multiPass ifTrue:
+ 		[savedFirstFieldsSpace limit: savedFirstFieldsSpace start + (512 * manager bytesPerOop).
+ 		 biasForGC := false]!

Item was changed:
+ LongTestCase subclass: #SpurPlanningCompactorTests
- TestCase subclass: #SpurPlanningCompactorTests
  	instanceVariableNames: ''
  	classVariableNames: ''
+ 	poolDictionaries: 'VMBasicConstants VMSqueakClassIndices'
- 	poolDictionaries: 'VMSqueakClassIndices'
  	category: 'VMMaker-Tests'!

Item was added:
+ ----- Method: SpurPlanningCompactorTests>>checkForLeaksIn: (in category 'private') -----
+ checkForLeaksIn: om
+ 	om setCheckForLeaks: GCModeFreeSpace + GCModeFull;
+ 		runLeakCheckerFor: GCModeFull;
+ 		checkFreeSpace: GCModeFull!

Item was added:
+ ----- Method: SpurPlanningCompactorTests>>expectedFailures (in category 'failures') -----
+ expectedFailures
+ 	^(FileDirectory default fileExists: self class imageNameForTests)
+ 		ifTrue: [#()]
+ 		ifFalse: [self testSelectors]!

Item was added:
+ ----- Method: SpurPlanningCompactorTests>>testCompactedHeap (in category 'tests') -----
+ testCompactedHeap
+ 	"First test for valid compactibility of an already compacted heap via fullGC"
+ 	| freeSpace om |
+ 	om := self initializedVM objectMemory.
+ 	freeSpace := om bytesLeftInOldSpace.
+ 	om fullGC.
+ 	self assert: freeSpace equals: om bytesLeftInOldSpace.
+ 	self checkForLeaksIn: om.
+ 	om fullGC.
+ 	self assert: freeSpace equals: om bytesLeftInOldSpace.
+ 	self checkForLeaksIn: om!

Item was added:
+ ----- Method: SpurPlanningCompactorTests>>testInitializedVM (in category 'tests') -----
+ testInitializedVM
+ 	self checkForLeaksIn: self initializedVM objectMemory!

Item was added:
+ ----- Method: SpurPlanningCompactorTests>>testMultiPassRandomAssortments (in category 'tests') -----
+ testMultiPassRandomAssortments
+ 	"Test that the compactor can handle multi-pass compaction of some
+ 	 number of random assortments of live, pinned, dead, and free chunks."
+ 	<timeout: 60>
+ 	| random |
+ 	random := Random new.
+ 	10 timesRepeat:
+ 		[| theVM |
+ 		 theVM := self initializedVM.
+ 		 theVM objectMemory compactor forceMultiPass.
+ 		 self testRandomAssortment: (random next: 3000) readStream
+ 			with: theVM]!

Item was added:
+ ----- Method: SpurPlanningCompactorTests>>testMultiPassRandomAssortmentsWithNewSegment (in category 'tests') -----
+ testMultiPassRandomAssortmentsWithNewSegment
+ 	"Test that the compactor can handle multi-pass compaction of some number of
+ 	 random assortments of live, pinned, dead, and free chunks allocated in a new segment."
+ 	<timeout: 60>
+ 	| random |
+ 	random := Random new.
+ 	10 timesRepeat:
+ 		[| theVM |
+ 		 theVM := self initializedVM.
+ 		 theVM objectMemory compactor forceMultiPass.
+ 		 self testRandomAssortmentWithNewSegment: (random next: 5000) readStream
+ 			with: theVM]!

Item was removed:
- ----- Method: SpurPlanningCompactorTests>>testRandomAssortment: (in category 'private') -----
- testRandomAssortment: random
- 	"Test that the compactor can handle a random assortment of live, pinned, dead, and free chunks."
- 	| om lastObj obj expectedFreeSpace liveFill pinFill liveCount pinCount totalLive totalPinned pinned |
- 	random reset. "random is a read stream on 3000 random numbers; for repeatability"
- 	om := self initializedVM objectMemory.
- 	om allOldSpaceObjectsDo: [:o| om setIsMarkedOf: o to: true. lastObj := o].
- 	pinFill := 16r99999900.
- 	liveFill := 16r55AA0000.
- 	liveCount := pinCount := expectedFreeSpace := 0.
- 	pinned := Set new.
- 	1000 timesRepeat:
- 		[| nSlots next newObj |
- 		 nSlots := (random next * 300) rounded. "Make sure we stray into overflow size field territory."
- 		 newObj := om allocateSlotsInOldSpace: nSlots format: om firstLongFormat classIndex: ClassBitmapCompactIndex.
- 		 (next := random next) > 0.95
- 			ifTrue: "pinned"
- 				[om
- 					fillObj: newObj numSlots: nSlots with: pinFill + (pinCount := pinCount + 1);
- 					setIsPinnedOf: newObj to: true]
- 			ifFalse: "mobile"
- 				[om
- 					fillObj: newObj numSlots: nSlots with: liveFill + (liveCount := liveCount + 1)].
- 		 (next := random next) >= 0.333
- 			ifTrue:
- 				[om setIsMarkedOf: newObj to: true.
- 				 (om isPinned: newObj) ifTrue:
- 					[pinned add: newObj]]
- 			ifFalse: "dead or free"
- 				[expectedFreeSpace := expectedFreeSpace + (om bytesInObject: newObj).
- 				 (om isPinned: newObj) "Must check /before/ setObjectFree: which clears all bits"
- 					ifTrue: [pinCount := pinCount - 1]
- 					ifFalse: [liveCount := liveCount - 1].
- 				 next >= 0.2
- 					ifTrue: [om setIsMarkedOf: newObj to: false]
- 					ifFalse: [om setObjectFree: newObj]]].
- 	totalPinned := pinCount.
- 	totalLive := liveCount.
- 	self assert: totalPinned < (totalPinned + totalLive / 10). "should average 5%"
- 
- 	"useful pre-compaction printing:"
- 	false ifTrue:
- 		[liveCount := pinCount := 0.
- 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
- 			[:o|
- 			om coInterpreter print:
- 				((om isMarked: o)
- 					ifTrue: [(((om isPinned: o)
- 									ifTrue: [pinCount := pinCount + 1]
- 									ifFalse: [liveCount := liveCount + 1])
- 								printPaddedWith: Character space to: 3 base: 10), ' '] 
- 					ifFalse: ['     ']).
- 			 om printEntity: o]].
- 
- 	expectedFreeSpace := expectedFreeSpace + om bytesLeftInOldSpace.
- 	om compactor compact.
- 	self assert: expectedFreeSpace equals: om bytesLeftInOldSpace.
- 	self assert: om allObjectsUnmarked.
- 
- 	"useful post-compaction printing:"
- 	false ifTrue:
- 		[liveCount := pinCount := 0.
- 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
- 			[:o|
- 			om coInterpreter print:
- 				((om isFreeObject: o)
- 					ifFalse: [(((om isPinned: o)
- 									ifTrue: [pinCount := pinCount + 1]
- 									ifFalse: [liveCount := liveCount + 1])
- 								printPaddedWith: Character space to: 3 base: 10), ' '] 
- 					ifTrue: ['     ']).
- 			 om printEntity: o]].
- 
- 	"First check and/or count populations..."
- 	liveCount := pinCount := 0.
- 	om allOldSpaceObjectsFrom: (om objectAfter: lastObj) do:
- 		[:o|
- 		(om isPinned: o)
- 			ifTrue:
- 				[pinCount := pinCount + 1.
- 				 self assert: (pinned includes: o)]
- 			ifFalse: [liveCount := liveCount + 1]].
- 	self assert: totalPinned equals: pinCount.
- 	self assert: totalLive equals: liveCount.
- 
- 	"Now check fills, which also tests update of first field on move..."
- 	liveCount := pinCount := 0.
- 	obj := lastObj.
- 	1 to: totalLive + totalPinned do:
- 		[:n| | expectedFill actualFill |
- 		 [obj := om objectAfter: obj. (om isEnumerableObject: obj) or: [obj >= om endOfMemory]] whileFalse.
- 		 expectedFill := (om isPinned: obj)
- 							ifTrue: [pinFill + (pinCount := pinCount + 1)]
- 							ifFalse: [liveFill + (liveCount := liveCount + 1)].
- 		 1 to: (om numSlotsOf: obj) do:
- 			[:i| self assert: expectedFill equals: (actualFill := om fetchPointer: i - 1 ofObject: obj)]].
- 	"They should be the last objects..."
- 	self assert: (om isFreeObject: (om objectAfter: obj)).
- 	self assert: om endOfMemory equals: (om addressAfter: (om objectAfter: obj))!

Item was added:
+ ----- Method: SpurPlanningCompactorTests>>testRandomAssortment:with: (in category 'private') -----
+ testRandomAssortment: random with: theVM
+ 	"Test that the compactor can handle a random assortment of live, pinned, dead, and free chunks."
+ 	| om lastObj obj expectedFreeSpace liveFill pinFill liveCount pinCount totalLive totalPinned pinned |
+ 	random reset. "random is a read stream on 3000 random numbers; for repeatability"
+ 	om := theVM objectMemory.
+ 	om allOldSpaceObjectsDo: [:o| om setIsMarkedOf: o to: true. lastObj := o].
+ 	pinFill := 16r99999900.
+ 	liveFill := 16r55AA0000.
+ 	liveCount := pinCount := expectedFreeSpace := 0.
+ 	pinned := Set new.
+ 	1000 timesRepeat:
+ 		[| nSlots next newObj |
+ 		 nSlots := (random next * 300) rounded. "Make sure we stray into overflow size field territory."
+ 		 newObj := om allocateSlotsInOldSpace: nSlots format: om firstLongFormat classIndex: ClassByteArrayCompactIndex.
+ 		 (next := random next) > 0.95
+ 			ifTrue: "pinned"
+ 				[om
+ 					fillObj: newObj numSlots: nSlots with: pinFill + (pinCount := pinCount + 1);
+ 					setIsPinnedOf: newObj to: true]
+ 			ifFalse: "mobile"
+ 				[om
+ 					fillObj: newObj numSlots: nSlots with: liveFill + (liveCount := liveCount + 1)].
+ 		 (next := random next) >= 0.333
+ 			ifTrue:
+ 				[om setIsMarkedOf: newObj to: true.
+ 				 (om isPinned: newObj) ifTrue:
+ 					[pinned add: newObj]]
+ 			ifFalse: "dead or free"
+ 				[expectedFreeSpace := expectedFreeSpace + (om bytesInObject: newObj).
+ 				 (om isPinned: newObj) "Must check /before/ setObjectFree: which clears all bits"
+ 					ifTrue: [pinCount := pinCount - 1]
+ 					ifFalse: [liveCount := liveCount - 1].
+ 				 next >= 0.2
+ 					ifTrue: [om setIsMarkedOf: newObj to: false]
+ 					ifFalse: [om setObjectFree: newObj]]].
+ 	totalPinned := pinCount.
+ 	totalLive := liveCount.
+ 	self assert: totalPinned < (totalPinned + totalLive / 10). "should average 5%"
+ 
+ 	"useful pre-compaction printing:"
+ 	false ifTrue:
+ 		[liveCount := pinCount := 0.
+ 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
+ 			[:o|
+ 			om coInterpreter print:
+ 				((om isMarked: o)
+ 					ifTrue: [(((om isPinned: o)
+ 									ifTrue: [pinCount := pinCount + 1]
+ 									ifFalse: [liveCount := liveCount + 1])
+ 								printPaddedWith: Character space to: 3 base: 10), ' '] 
+ 					ifFalse: ['     ']).
+ 			 om printEntity: o].
+ 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
+ 			[:o|
+ 			((om isMarked: o) and: [om isPinned: o]) ifTrue:
+ 				[om printEntity: o]]].
+ 
+ 	expectedFreeSpace := expectedFreeSpace + om bytesLeftInOldSpace.
+ 	om compactor compact.
+ 	self assert: expectedFreeSpace equals: om bytesLeftInOldSpace.
+ 	self assert: om allObjectsUnmarked.
+ 
+ 	"useful post-compaction printing:"
+ 	false ifTrue:
+ 		[liveCount := pinCount := 0.
+ 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
+ 			[:o|
+ 			om coInterpreter print:
+ 				((om isFreeObject: o)
+ 					ifFalse: [(((om isPinned: o)
+ 									ifTrue: [pinCount := pinCount + 1]
+ 									ifFalse: [liveCount := liveCount + 1])
+ 								printPaddedWith: Character space to: 3 base: 10), ' '] 
+ 					ifTrue: ['     ']).
+ 			 om printEntity: o].
+ 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
+ 			[:o|
+ 			(om isPinned: o) ifTrue:
+ 				[om printEntity: o]]].
+ 
+ 	"First check and/or count populations..."
+ 	liveCount := pinCount := 0.
+ 	om allOldSpaceObjectsFrom: (om objectAfter: lastObj) do:
+ 		[:o|
+ 		(om isPinned: o)
+ 			ifTrue:
+ 				[pinCount := pinCount + 1.
+ 				 self assert: (pinned includes: o)]
+ 			ifFalse: [liveCount := liveCount + 1]].
+ 	self assert: totalPinned equals: pinCount.
+ 	self assert: totalLive equals: liveCount.
+ 
+ 	"Now check fills, which also tests update of first field on move..."
+ 	liveCount := pinCount := 0.
+ 	obj := lastObj.
+ 	1 to: totalLive + totalPinned do:
+ 		[:n| | expectedFill actualFill |
+ 		 [obj := om objectAfter: obj. (om isEnumerableObject: obj) or: [obj >= om endOfMemory]] whileFalse.
+ 		 expectedFill := (om isPinned: obj)
+ 							ifTrue: [pinFill + (pinCount := pinCount + 1)]
+ 							ifFalse: [liveFill + (liveCount := liveCount + 1)].
+ 		 1 to: (om numSlotsOf: obj) do:
+ 			[:i| self assert: expectedFill equals: (actualFill := om fetchPointer: i - 1 ofObject: obj)]].
+ 	"They should be the last objects..."
+ 	self assert: (om isFreeObject: (om objectAfter: obj)).
+ 	self assert: om endOfMemory equals: (om addressAfter: (om objectAfter: obj)).
+ 	self checkForLeaksIn: om!

Item was removed:
- ----- Method: SpurPlanningCompactorTests>>testRandomAssortmentWithNewSegment: (in category 'private') -----
- testRandomAssortmentWithNewSegment: random
- 	"Test that the compactor can handle a random assortment of live, pinned, dead, and free chunks,
- 	 with some allocation in a new segment.  No live pinned objects are created in the new segment
- 	 to obtain the situation that the last segment is entirely empty after compaction.  This tests shrinkage."
- 	| om pig lastObj obj expectedFreeSpace liveFill pinFill liveCount pinCount totalLive totalPinned pinned |
- 	random reset. "random is a read stream on 3000 random numbers; for repeatability"
- 	om := self initializedVM objectMemory.
- 	om allOldSpaceObjectsDo: [:o| om setIsMarkedOf: o to: true. lastObj := o].
- 
- 	pinFill := 16r99999900.
- 	liveFill := 16r55AA0000.
- 	liveCount := pinCount := expectedFreeSpace := 0.
- 	pinned := Set new.
- 
- 	1000 timesRepeat:
- 		[| nSlots next newObj |
- 		 nSlots := (random next * 300) rounded. "Make sure we stray into overflow size field territory."
- 		 newObj := om allocateSlotsInOldSpace: nSlots format: om firstLongFormat classIndex: ClassBitmapCompactIndex.
- 		 (next := random next) > 0.95
- 			ifTrue: "pinned"
- 				[om
- 					fillObj: newObj numSlots: nSlots with: pinFill + (pinCount := pinCount + 1);
- 					setIsPinnedOf: newObj to: true]
- 			ifFalse: "mobile"
- 				[om
- 					fillObj: newObj numSlots: nSlots with: liveFill + (liveCount := liveCount + 1)].
- 		 (next := random next) >= 0.333
- 			ifTrue:
- 				[om setIsMarkedOf: newObj to: true.
- 				 (om isPinned: newObj) ifTrue:
- 					[pinned add: newObj]]
- 			ifFalse: "dead or free"
- 				[expectedFreeSpace := expectedFreeSpace + (om bytesInObject: newObj).
- 				 (om isPinned: newObj) "Must check /before/ setObjectFree: which clears all bits"
- 					ifTrue: [pinCount := pinCount - 1]
- 					ifFalse: [liveCount := liveCount - 1].
- 				 next >= 0.2
- 					ifTrue: [om setIsMarkedOf: newObj to: false]
- 					ifFalse: [om setObjectFree: newObj]]].
- 
- 	 pig := om allocateSlotsInOldSpace: (om numSlotsOfAny: om findLargestFreeChunk) format: om firstLongFormat classIndex: ClassBitmapCompactIndex.
- 	 self deny: pig isNil.
- 	 self assert: 0 equals: om bytesLeftInOldSpace.
- 	 om growOldSpaceByAtLeast: om growHeadroom // 2.
- 	 self assert: om growHeadroom equals: om bytesLeftInOldSpace + om bridgeSize.
- 	 expectedFreeSpace := expectedFreeSpace + (om bytesInObject: pig).
- 
- 	1000 timesRepeat:
- 		[| nSlots next newObj |
- 		 nSlots := (random next * 300) rounded. "Make sure we stray into overflow size field territory."
- 		 newObj := om allocateSlotsInOldSpace: nSlots format: om firstLongFormat classIndex: ClassBitmapCompactIndex.
- 		 "No pinned objects in second segment."
- 		 om fillObj: newObj numSlots: nSlots with: liveFill + (liveCount := liveCount + 1).
- 		 (next := random next) >= 0.333
- 			ifTrue:
- 				[om setIsMarkedOf: newObj to: true.
- 				 (om isPinned: newObj) ifTrue:
- 					[pinned add: newObj]]
- 			ifFalse: "dead or free"
- 				[expectedFreeSpace := expectedFreeSpace + (om bytesInObject: newObj).
- 				 liveCount := liveCount - 1.
- 				 next >= 0.2
- 					ifTrue: [om setIsMarkedOf: newObj to: false]
- 					ifFalse: [om setObjectFree: newObj]]].
- 
- 	totalPinned := pinCount.
- 	totalLive := liveCount.
- 	self assert: totalPinned < (totalPinned + totalLive / 20). "should average 2.5%"
- 
- 	"useful pre-compaction printing:"
- 	false ifTrue:
- 		[liveCount := pinCount := 0.
- 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
- 			[:o|
- 			om coInterpreter print:
- 				((om isMarked: o)
- 					ifTrue: [(((om isPinned: o)
- 									ifTrue: [pinCount := pinCount + 1]
- 									ifFalse: [liveCount := liveCount + 1])
- 								printPaddedWith: Character space to: 3 base: 10), ' '] 
- 					ifFalse: ['     ']).
- 			 om printEntity: o]].
- 
- 	expectedFreeSpace := expectedFreeSpace + om bytesLeftInOldSpace.
- 	om compactor compact.
- 	self assert: expectedFreeSpace equals: om bytesLeftInOldSpace.
- 	self assert: om allObjectsUnmarked.
- 
- 	"useful post-compaction printing:"
- 	false ifTrue:
- 		[liveCount := pinCount := 0.
- 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
- 			[:o|
- 			om coInterpreter print:
- 				((om isFreeObject: o)
- 					ifFalse: [(((om isPinned: o)
- 									ifTrue: [pinCount := pinCount + 1]
- 									ifFalse: [liveCount := liveCount + 1])
- 								printPaddedWith: Character space to: 3 base: 10), ' '] 
- 					ifTrue: ['     ']).
- 			 om printEntity: o]].
- 
- 	"First check and/or count populations..."
- 	liveCount := pinCount := 0.
- 	om allOldSpaceObjectsFrom: (om objectAfter: lastObj) do:
- 		[:o|
- 		(om isPinned: o)
- 			ifTrue:
- 				[pinCount := pinCount + 1.
- 				 self assert: (pinned includes: o)]
- 			ifFalse: [liveCount := liveCount + 1]].
- 	self assert: totalPinned equals: pinCount.
- 	self assert: totalLive equals: liveCount.
- 
- 	"Now check fills, which also tests update of first field on move..."
- 	liveCount := pinCount := 0.
- 	obj := lastObj.
- 	1 to: totalLive + totalPinned do:
- 		[:n| | expectedFill actualFill |
- 		 [obj := om objectAfter: obj. (om isEnumerableObject: obj) or: [obj >= om endOfMemory]] whileFalse.
- 		 expectedFill := (om isPinned: obj)
- 							ifTrue: [pinFill + (pinCount := pinCount + 1)]
- 							ifFalse: [liveFill + (liveCount := liveCount + 1)].
- 		 1 to: (om numSlotsOf: obj) do:
- 			[:i| self assert: expectedFill equals: (actualFill := om fetchPointer: i - 1 ofObject: obj)]].
- 	"the Last segment should be empty"
- 	self assert: (om segmentManager isEmptySegment: (om segmentManager segments at: 1)).
- 	"They should be the last objects, followed by a free object to the end fo the first segment, a bridge, then an empty segment with a single free object in it."
- 	self assert: (om isFreeObject: (om objectAfter: obj)).
- 	self assert: (om isSegmentBridge: (om objectAfter: (om objectAfter: obj))).
- 	self assert: (om isFreeObject: (om objectAfter: (om objectAfter: (om objectAfter: obj)))).
- 	self assert: om endOfMemory equals: (om addressAfter: (om objectAfter: (om objectAfter: (om objectAfter: obj)))).
- 
- 	"And the memory should shrink if the shrinkThreshold is low enough"
- 	om shrinkThreshold: om growHeadroom.
- 	om attemptToShrink.
- 	self assert: om segmentManager numSegments = 1!

Item was added:
+ ----- Method: SpurPlanningCompactorTests>>testRandomAssortmentWithNewSegment:with: (in category 'private') -----
+ testRandomAssortmentWithNewSegment: random with: theVM
+ 	"Test that the compactor can handle a random assortment of live, pinned, dead, and free chunks,
+ 	 with some allocation in a new segment.  No live pinned objects are created in the new segment
+ 	 to obtain the situation that the last segment is entirely empty after compaction.  This tests shrinkage."
+ 	| om pig lastObj obj expectedFreeSpace liveFill pinFill liveCount pinCount totalLive totalPinned pinned |
+ 	random reset. "random is a read stream on 6000 random numbers; reset for repeatability"
+ 	om := theVM objectMemory.
+ 	om allOldSpaceObjectsDo: [:o| om setIsMarkedOf: o to: true. lastObj := o].
+ 
+ 	pinFill := 16r99999900.
+ 	liveFill := 16r55AA0000.
+ 	liveCount := pinCount := expectedFreeSpace := 0.
+ 	pinned := Set new.
+ 
+ 	1000 timesRepeat:
+ 		[| nSlots next newObj |
+ 		 nSlots := (random next * 300) rounded. "Make sure we stray into overflow size field territory."
+ 		 newObj := om allocateSlotsInOldSpace: nSlots format: om firstLongFormat classIndex: ClassByteArrayCompactIndex.
+ 		 (next := random next) > 0.95
+ 			ifTrue: "pinned"
+ 				[om
+ 					fillObj: newObj numSlots: nSlots with: pinFill + (pinCount := pinCount + 1);
+ 					setIsPinnedOf: newObj to: true]
+ 			ifFalse: "mobile"
+ 				[om
+ 					fillObj: newObj numSlots: nSlots with: liveFill + (liveCount := liveCount + 1)].
+ 		 (next := random next) >= 0.333
+ 			ifTrue:
+ 				[om setIsMarkedOf: newObj to: true.
+ 				 (om isPinned: newObj) ifTrue:
+ 					[pinned add: newObj]]
+ 			ifFalse: "dead or free"
+ 				[expectedFreeSpace := expectedFreeSpace + (om bytesInObject: newObj).
+ 				 (om isPinned: newObj) "Must check /before/ setObjectFree: which clears all bits"
+ 					ifTrue: [pinCount := pinCount - 1]
+ 					ifFalse: [liveCount := liveCount - 1].
+ 				 next >= 0.2
+ 					ifTrue: [om setIsMarkedOf: newObj to: false]
+ 					ifFalse: [om setObjectFree: newObj]]].
+ 
+ 	 pig := om allocateSlotsInOldSpace: (om numSlotsOfAny: om findLargestFreeChunk) format: om firstLongFormat classIndex: ClassByteArrayCompactIndex.
+ 	 self deny: pig isNil.
+ 	 self assert: 0 equals: om bytesLeftInOldSpace.
+ 	 om growOldSpaceByAtLeast: om growHeadroom // 2.
+ 	 self assert: om growHeadroom equals: om bytesLeftInOldSpace + om bridgeSize.
+ 	 expectedFreeSpace := expectedFreeSpace + (om bytesInObject: pig).
+ 
+ 	1000 timesRepeat:
+ 		[| nSlots next newObj |
+ 		 nSlots := (random next * 300) rounded. "Make sure we stray into overflow size field territory."
+ 		 newObj := om allocateSlotsInOldSpace: nSlots format: om firstLongFormat classIndex: ClassByteArrayCompactIndex.
+ 		 "No pinned objects in second segment."
+ 		 om fillObj: newObj numSlots: nSlots with: liveFill + (liveCount := liveCount + 1).
+ 		 (next := random next) >= 0.333
+ 			ifTrue:
+ 				[om setIsMarkedOf: newObj to: true.
+ 				 (om isPinned: newObj) ifTrue:
+ 					[pinned add: newObj]]
+ 			ifFalse: "dead or free"
+ 				[expectedFreeSpace := expectedFreeSpace + (om bytesInObject: newObj).
+ 				 liveCount := liveCount - 1.
+ 				 next >= 0.2
+ 					ifTrue: [om setIsMarkedOf: newObj to: false]
+ 					ifFalse: [om setObjectFree: newObj]]].
+ 
+ 	totalPinned := pinCount.
+ 	totalLive := liveCount.
+ 	self assert: totalPinned < (totalPinned + totalLive / 20). "should average 2.5%"
+ 
+ 	"useful pre-compaction printing:"
+ 	false ifTrue:
+ 		[liveCount := pinCount := 0.
+ 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
+ 			[:o|
+ 			om coInterpreter print:
+ 				((om isMarked: o)
+ 					ifTrue: [(((om isPinned: o)
+ 									ifTrue: [pinCount := pinCount + 1]
+ 									ifFalse: [liveCount := liveCount + 1])
+ 								printPaddedWith: Character space to: 3 base: 10), ' '] 
+ 					ifFalse: ['     ']).
+ 			 om printEntity: o].
+ 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
+ 			[:o|
+ 			((om isMarked: o) and: [om isPinned: o]) ifTrue:
+ 				[om printEntity: o]]].
+ 
+ 	expectedFreeSpace := expectedFreeSpace + om bytesLeftInOldSpace.
+ 	om compactor compact.
+ 	self assert: expectedFreeSpace equals: om bytesLeftInOldSpace.
+ 	self assert: om allObjectsUnmarked.
+ 
+ 	"useful post-compaction printing:"
+ 	false ifTrue:
+ 		[liveCount := pinCount := 0.
+ 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
+ 			[:o|
+ 			om coInterpreter print:
+ 				((om isFreeObject: o)
+ 					ifFalse: [(((om isPinned: o)
+ 									ifTrue: [pinCount := pinCount + 1]
+ 									ifFalse: [liveCount := liveCount + 1])
+ 								printPaddedWith: Character space to: 3 base: 10), ' '] 
+ 					ifTrue: ['     ']).
+ 			 om printEntity: o].
+ 		 om allOldSpaceEntitiesFrom: (om objectAfter: lastObj) to: (om objectBefore: om endOfMemory) do:
+ 			[:o|
+ 			(om isPinned: o) ifTrue:
+ 				[om printEntity: o]]].
+ 
+ 	"First check and/or count populations..."
+ 	liveCount := pinCount := 0.
+ 	om allOldSpaceObjectsFrom: (om objectAfter: lastObj) do:
+ 		[:o|
+ 		(om isPinned: o)
+ 			ifTrue:
+ 				[pinCount := pinCount + 1.
+ 				 self assert: (pinned includes: o)]
+ 			ifFalse: [liveCount := liveCount + 1]].
+ 	self assert: totalPinned equals: pinCount.
+ 	self assert: totalLive equals: liveCount.
+ 
+ 	"Now check fills, which also tests update of first field on move..."
+ 	liveCount := pinCount := 0.
+ 	obj := lastObj.
+ 	1 to: totalLive + totalPinned do:
+ 		[:n| | expectedFill actualFill |
+ 		 [obj := om objectAfter: obj. (om isEnumerableObject: obj) or: [obj >= om endOfMemory]] whileFalse.
+ 		 expectedFill := (om isPinned: obj)
+ 							ifTrue: [pinFill + (pinCount := pinCount + 1)]
+ 							ifFalse: [liveFill + (liveCount := liveCount + 1)].
+ 		 1 to: (om numSlotsOf: obj) do:
+ 			[:i| self assert: expectedFill equals: (actualFill := om fetchPointer: i - 1 ofObject: obj)]].
+ 	"The last segment should be empty."
+ 	self assert: (om segmentManager isEmptySegment: (om segmentManager segments at: 1)).
+ 	"They should be the last objects, followed by a free object to the end of the first segment, a bridge, then an empty segment with a single free object in it."
+ 	self assert: (om isFreeObject: (om objectAfter: obj)).
+ 	self assert: (om isSegmentBridge: (om objectAfter: (om objectAfter: obj))).
+ 	self assert: (om isFreeObject: (om objectAfter: (om objectAfter: (om objectAfter: obj)))).
+ 	self assert: om endOfMemory equals: (om addressAfter: (om objectAfter: (om objectAfter: (om objectAfter: obj)))).
+ 
+ 	"And the memory should shrink if the shrinkThreshold is low enough"
+ 	om shrinkThreshold: om growHeadroom.
+ 	om attemptToShrink.
+ 	self assert: om segmentManager numSegments = 1.
+ 	self checkForLeaksIn: om!

Item was changed:
  ----- Method: SpurPlanningCompactorTests>>testRandomAssortments (in category 'tests') -----
  testRandomAssortments
  	"Test that the compactor can handle some number of random assortments of live, pinned, dead, and free chunks.
  	 Each assortment runs against a freshly initialized VM."
  	<timeout: 60>
  	| random |
  	random := Random new.
+ 	10 timesRepeat:
+ 		[self testRandomAssortment: (random next: 3000) readStream
+ 			with: self initializedVM]!
- 	10 timesRepeat: [self testRandomAssortment: (random next: 3000) readStream]!

Item was changed:
  ----- Method: SpurPlanningCompactorTests>>testRandomAssortmentsWithNewSegment (in category 'tests') -----
  testRandomAssortmentsWithNewSegment
  	"Test that the compactor can handle some number of random assortments of live, pinned, dead, and free chunks
  	 allocated in a new segment.  Each assortment runs against a freshly initialized VM."
  	<timeout: 60>
  	| random |
  	random := Random new.
+ 	10 timesRepeat:
+ 		[self testRandomAssortmentWithNewSegment: (random next: 6000) readStream
+ 			with: self initializedVM]!
- 	10 timesRepeat: [self testRandomAssortmentWithNewSegment: (random next: 6000) readStream]!

Item was added:
+ ----- Method: SpurPlanningCompactorTestsImageResource>>reset (in category 'accessing') -----
+ reset
+ 	"Discard the cached VM so the next use rebuilds it from scratch.
+ 	 Evaluate: self current reset"
+ 	emptyVM := nil!

Item was changed:
  ----- Method: StackInterpreterPrimitives>>primitiveVMParameter (in category 'system control primitives') -----
(excessive size, no diff calculated)

Item was changed:
  ----- Method: StackInterpreterSimulator>>primitiveDirectoryEntry (in category 'file primitives') -----
  primitiveDirectoryEntry
  	| name pathName arrayNilOrSymbol result |
  	name := self stringOf: self stackTop.
  	pathName := self stringOf: (self stackValue: 1).
  	
  	"temporary work-around to make it work in Pharo; NOTE(review): this overrides the requested pathName with the image path -- confirm intent..."
  	self cppIf: PharoVM ifTrue: [ pathName := Smalltalk imagePath ].
  	
  	self successful ifFalse:
  		[^self primitiveFail].
  
  	arrayNilOrSymbol := FileDirectory default primLookupEntryIn: pathName name: name.
  	arrayNilOrSymbol ifNil:
  		[self pop: 3 thenPush: objectMemory nilObject.
  		 ^self].
  	arrayNilOrSymbol isArray ifFalse:
  		["arrayNilOrSymbol ~~ #primFailed ifTrue:
  			[self halt]. "
+ 		self transcript show: name , ' NOT FOUND'.
- 		Transcript show: name , ' NOT FOUND'.
  		 ^self primitiveFail].
  
  	result := PharoVM 
  		ifTrue:
  			[self makeDirEntryName: (arrayNilOrSymbol at: 1) size: (arrayNilOrSymbol at: 1) size
  				createDate: (arrayNilOrSymbol at: 2) modDate: (arrayNilOrSymbol at: 3)
  				isDir: (arrayNilOrSymbol at: 4) fileSize: (arrayNilOrSymbol at: 5)
  				posixPermissions: (arrayNilOrSymbol at: 6) isSymlink: (arrayNilOrSymbol at: 7) ]
  		ifFalse:
  			[self makeDirEntryName: (arrayNilOrSymbol at: 1) size: (arrayNilOrSymbol at: 1) size
  				createDate: (arrayNilOrSymbol at: 2) modDate: (arrayNilOrSymbol at: 3)
  				isDir: (arrayNilOrSymbol at: 4) fileSize: (arrayNilOrSymbol at: 5) ].
  	self pop: 3 thenPush: result!

Item was changed:
  ----- Method: StackToRegisterMappingCogit>>ensureFixupAt: (in category 'compile abstract instructions') -----
+ ensureFixupAt: targetPC
+ 	"Make sure there's a flagged fixup at the target pc (an absolute bytecode pc; fixups are indexed relative to initialPC) in fixups.
- ensureFixupAt: targetIndex
- 	"Make sure there's a flagged fixup at the targetIndex (pc relative to first pc) in fixups.
  	 Initially a fixup's target is just a flag.  Later on it is replaced with a proper instruction."
  	<returnTypeC: #'BytecodeFixup *'>
  	| fixup |
  	<var: #fixup type: #'BytecodeFixup *'>
+ 	fixup := self fixupAt:  targetPC - initialPC.
- 	fixup := self fixupAt: targetIndex.
  	self traceFixup: fixup.
  	self cCode: '' inSmalltalk:
+ 		[self assert: simStackPtr = (self debugStackPointerFor: targetPC).
- 		[self assert: simStackPtr = (self debugStackPointerFor: targetIndex + initialPC).
  		 (fixup isMergeFixupOrIsFixedUp
  		  and: [fixup isBackwardBranchFixup not]) ifTrue: "ignore backward branch targets"
  			[self assert: fixup simStackPtr = simStackPtr]].
  	fixup isNonMergeFixupOrNotAFixup
  		ifTrue: "convert a non-merge into a merge"
  			[fixup becomeMergeFixup.
  			 fixup simStackPtr: simStackPtr.
  			 LowcodeVM ifTrue: [
  				 fixup simNativeStackPtr: simNativeStackPtr.
  				 fixup simNativeStackSize: simNativeStackSize]]
  		ifFalse:
  			[fixup isBackwardBranchFixup
  				ifTrue: "this is the target of a backward branch and
  						 so doesn't have a simStackPtr assigned yet."
  						[fixup simStackPtr: simStackPtr.
  			 			 LowcodeVM ifTrue:
  				 			[fixup simNativeStackPtr: simNativeStackPtr.
  				 			 fixup simNativeStackSize: simNativeStackSize]]
  				ifFalse:
  					[self assert: fixup simStackPtr = simStackPtr.
  					 LowcodeVM ifTrue:
  				 		[self assert: fixup simNativeStackPtr = simNativeStackPtr.
  		 			 	 self assert: fixup simNativeStackSize = simNativeStackSize]]].
  	fixup recordBcpc: bytecodePC.
  	^fixup!

Item was changed:
  ----- Method: StackToRegisterMappingCogit>>ensureNonMergeFixupAt: (in category 'compile abstract instructions') -----
+ ensureNonMergeFixupAt: targetPC
+ 	"Make sure there's a flagged fixup at the target pc (an absolute bytecode pc; fixups are indexed relative to initialPC) in fixups.
- ensureNonMergeFixupAt: targetIndex
- 	"Make sure there's a flagged fixup at the targetIndex (pc relative to first pc) in fixups.
  	 Initially a fixup's target is just a flag.  Later on it is replaced with a proper instruction."
  	<returnTypeC: #'BytecodeFixup *'>
  	| fixup |
  	<var: #fixup type: #'BytecodeFixup *'>
+ 	fixup := self fixupAt:  targetPC - initialPC.
- 	fixup := self fixupAt: targetIndex.
  	fixup notAFixup ifTrue:
  		[fixup becomeNonMergeFixup].
  	self cCode: '' inSmalltalk:
  		[fixup isMergeFixupOrIsFixedUp ifTrue:
  			[self assert:
  					(fixup isBackwardBranchFixup
+ 					 or: [fixup simStackPtr = (self debugStackPointerFor: targetPC)])]].
- 					 or: [fixup simStackPtr = (self debugStackPointerFor: targetIndex + initialPC)])]].
  	fixup recordBcpc: bytecodePC.
  	^fixup!

Item was changed:
  ----- Method: StackToRegisterMappingCogit>>genForwardersInlinedIdenticalOrNotIf: (in category 'bytecode generators') -----
  genForwardersInlinedIdenticalOrNotIf: orNot
  	| nextPC branchDescriptor unforwardRcvr argReg targetBytecodePC
  	unforwardArg  rcvrReg postBranchPC label fixup |
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  	<var: #label type: #'AbstractInstruction *'>
  	
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  
  	"If an operand is an annotatable constant, it may be forwarded, so we need to store it into a 
  	register so the forwarder check can jump back to the comparison after unforwarding the constant.
  	However, if one of the operands is an unannotatable constant, we do not allocate a register for it 
  	(machine code will use operations on constants) and do not generate forwarder checks."
  	unforwardRcvr := (objectRepresentation isUnannotatableConstant: (self ssValue: 1)) not.
  	unforwardArg := (objectRepresentation isUnannotatableConstant: self ssTop) not.
  
  	self 
  		allocateEqualsEqualsRegistersArgNeedsReg: unforwardArg 
  		rcvrNeedsReg: unforwardRcvr 
  		into: [ :rcvr :arg | rcvrReg:= rcvr. argReg := arg ].
  
  	"If not followed by a branch, resolve to true or false."
  	(branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse]) ifFalse:
  		[^ self 
  			genIdenticalNoBranchArgIsConstant: unforwardArg not
  			rcvrIsConstant: unforwardRcvr not
  			argReg: argReg 
  			rcvrReg: rcvrReg 
  			orNotIf: orNot].
  	
  	"If branching the stack must be flushed for the merge"
  	self ssFlushTo: simStackPtr - 2.
  	
  	label := self Label.
  	self genCmpArgIsConstant: unforwardArg not rcvrIsConstant: unforwardRcvr not argReg: argReg rcvrReg: rcvrReg.
  	self ssPop: 2.
  
  	"Further since there is a following conditional jump bytecode, define
  	 non-merge fixups and leave the cond bytecode to set the mergeness."
  	(self fixupAt: nextPC - initialPC) notAFixup
  		ifTrue: "The next instruction is dead.  we can skip it."
  			[deadCode := true.
+ 		 	 self ensureFixupAt: targetBytecodePC.
+ 			 self ensureFixupAt: postBranchPC]
- 		 	 self ensureFixupAt: targetBytecodePC - initialPC.
- 			 self ensureFixupAt: postBranchPC - initialPC]
  		ifFalse:
  			[self deny: deadCode]. "push dummy value below"
  
  	self assert: (unforwardArg or: [unforwardRcvr]).
  	orNot == branchDescriptor isBranchTrue "orNot is true for ~~"
  		ifFalse:
+ 			[ fixup := (self ensureNonMergeFixupAt: postBranchPC) asUnsignedInteger.
+ 			self JumpZero:  (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger ]
- 			[ fixup := (self ensureNonMergeFixupAt: postBranchPC - initialPC) asUnsignedInteger.
- 			self JumpZero:  (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger ]
  		ifTrue:
+ 			[ fixup := (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger.
+ 			self JumpZero: (self ensureNonMergeFixupAt: postBranchPC) asUnsignedInteger ].
- 			[ fixup := (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger.
- 			self JumpZero: (self ensureNonMergeFixupAt: postBranchPC - initialPC) asUnsignedInteger ].
  
  	deadCode ifFalse:
  		[self ssPushConstant: objectMemory trueObject]. "dummy value"
  		
  	"The forwarders checks need to jump back to the comparison (label) if a forwarder is found, else 
  	jump forward either to the next forwarder check or to the postBranch or branch target (fixup)."
  	unforwardArg ifTrue: 
  		[ unforwardRcvr
  			ifTrue: [ objectRepresentation genEnsureOopInRegNotForwarded: argReg scratchReg: TempReg jumpBackTo: label ]
  			ifFalse: [ objectRepresentation 
  				genEnsureOopInRegNotForwarded: argReg 
  				scratchReg: TempReg 
  				ifForwarder: label
  				ifNotForwarder: fixup ] ].
  	unforwardRcvr ifTrue: 
  		[ objectRepresentation 
  			genEnsureOopInRegNotForwarded: rcvrReg 
  			scratchReg: TempReg 
  			ifForwarder: label
  			ifNotForwarder: fixup ].
  		
  	"Not reached; execution flow has jumped to fixup"
  	
  	^0!

Item was changed:
  ----- Method: StackToRegisterMappingCogit>>genJumpIf:to: (in category 'bytecode generator support') -----
  genJumpIf: boolean to: targetBytecodePC
  	<inline: false>
  	| desc fixup ok eventualTarget |
  	<var: #desc type: #'CogSimStackEntry *'>
  	<var: #fixup type: #'BytecodeFixup *'>
  	<var: #ok type: #'AbstractInstruction *'>
  	eventualTarget := self eventualTargetOf: targetBytecodePC.
  	self ssFlushTo: simStackPtr - 1.
  	desc := self ssTop.
  	self ssPop: 1.
  	(desc type == SSConstant
  	 and: [desc constant = objectMemory trueObject or: [desc constant = objectMemory falseObject]]) ifTrue:
  		["Must arrange there's a fixup at the target whether it is jumped to or
  		  not so that the simStackPtr can be kept correct."
+ 		 fixup := self ensureFixupAt: eventualTarget.
- 		 fixup := self ensureFixupAt: eventualTarget - initialPC.
  		 "Must annotate the bytecode for correct pc mapping."
  		 self annotateBytecode: (desc constant = boolean
  									ifTrue: [self Jump: fixup]
  									ifFalse: [self prevInstIsPCAnnotated
  												ifTrue: [self Nop]
  												ifFalse: [self Label]]).
  		 extA := 0.
  		 ^0].
  	desc popToReg: TempReg.
  	"Cunning trick by LPD.  If true and false are contiguous, subtract the smaller.
  	 Correct result is either 0 or the distance between them.  If result is not 0 or
  	 their distance send mustBeBoolean."
  	self assert: (objectMemory objectAfter: objectMemory falseObject) = objectMemory trueObject.
  	self genSubConstant: boolean R: TempReg.
+ 	self JumpZero: (self ensureFixupAt: eventualTarget).
- 	self JumpZero: (self ensureFixupAt: eventualTarget - initialPC).
  	
  	self extASpecifiesNoMustBeBoolean ifTrue: 
  		[ extA := 0. 
  		self annotateBytecode: self lastOpcode.
  		^ 0].
  	extA := 0.
  	
  	self CmpCq: (boolean = objectMemory falseObject
  					ifTrue: [objectMemory trueObject - objectMemory falseObject]
  					ifFalse: [objectMemory falseObject - objectMemory trueObject])
  		R: TempReg.
  	ok := self JumpZero: 0.
  	self genCallMustBeBooleanFor: boolean.
  	ok jmpTarget: (self annotateBytecode: self Label).
  	^0!

Item was changed:
  ----- Method: StackToRegisterMappingCogit>>genJumpTo: (in category 'bytecode generator support') -----
  genJumpTo: targetBytecodePC
  	self ssFlushTo: simStackPtr.
  	deadCode := true. "unconditional jump; execution can't fall through"
+ 	self Jump: (self ensureFixupAt: (self eventualTargetOf: targetBytecodePC)).
- 	self Jump: (self ensureFixupAt: (self eventualTargetOf: targetBytecodePC) - initialPC).
  	^0!

Item was changed:
  ----- Method: StackToRegisterMappingCogit>>genSpecialSelectorComparison (in category 'bytecode generators') -----
  genSpecialSelectorComparison
  	"Generate code for a special-selector comparison bytecode, inlining the
  	 comparison when it is followed by a conditional branch (and, for the
  	 equality operators, only when a SmallInteger constant is involved)."
  	| nextPC postBranchPC targetBytecodePC primDescriptor branchDescriptor
  	  rcvrIsInt argIsInt argInt jumpNotSmallInts inlineCAB index |
  	<var: #primDescriptor type: #'BytecodeDescriptor *'>
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  	<var: #jumpNotSmallInts type: #'AbstractInstruction *'>
  	self ssFlushTo: simStackPtr - 2.
  	primDescriptor := self generatorAt: byte0.
  	argIsInt := self ssTop type = SSConstant
  				 and: [objectMemory isIntegerObject: (argInt := self ssTop constant)].
  	rcvrIsInt := (self ssValue: 1) type = SSConstant
  				 and: [objectMemory isIntegerObject: (self ssValue: 1) constant].
  
  	(argIsInt and: [rcvrIsInt]) ifTrue:
  		[^ self genStaticallyResolvedSpecialSelectorComparison].
  
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  
  	"Only interested in inlining if followed by a conditional branch."
  	inlineCAB := branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse].
  	"Further, only interested in inlining = and ~= if there's a SmallInteger constant involved.
  	 The relational operators successfully statically predict SmallIntegers; the equality operators do not."
  	(inlineCAB and: [primDescriptor opcode = JumpZero or: [primDescriptor opcode = JumpNonZero]]) ifTrue:
  		[inlineCAB := argIsInt or: [rcvrIsInt]].
  	inlineCAB ifFalse:
  		[^self genSpecialSelectorSend].
  
  	argIsInt
  		ifTrue:
  			[(self ssValue: 1) popToReg: ReceiverResultReg.
  			 self ssPop: 2.
  			 self MoveR: ReceiverResultReg R: TempReg]
  		ifFalse:
  			[self marshallSendArguments: 1.
  			 self MoveR: Arg0Reg R: TempReg].
  	jumpNotSmallInts := (argIsInt or: [rcvrIsInt])
  							ifTrue: [objectRepresentation genJumpNotSmallIntegerInScratchReg: TempReg]
  							ifFalse: [objectRepresentation genJumpNotSmallIntegersIn: ReceiverResultReg andScratch: TempReg scratch: ClassReg].
  	argIsInt
  		ifTrue: [self CmpCq: argInt R: ReceiverResultReg]
  		ifFalse: [self CmpR: Arg0Reg R: ReceiverResultReg].
  	"Cmp is weird/backwards so invert the comparison.  Further since there is a following conditional
  	 jump bytecode define non-merge fixups and leave the cond bytecode to set the mergeness."
  	self genConditionalBranch: (branchDescriptor isBranchTrue
  				ifTrue: [primDescriptor opcode]
  				ifFalse: [self inverseBranchFor: primDescriptor opcode])
+ 		operand: (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger.
+ 	self Jump: (self ensureNonMergeFixupAt: postBranchPC).
- 		operand: (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger.
- 	self Jump: (self ensureNonMergeFixupAt: postBranchPC - initialPC).
  	jumpNotSmallInts jmpTarget: self Label.
  	argIsInt ifTrue:
  		[self MoveCq: argInt R: Arg0Reg].
  	index := byte0 - self firstSpecialSelectorBytecodeOffset.
  	^self genMarshalledSend: index negated - 1 numArgs: 1 sendTable: ordinarySendTrampolines!

Item was changed:
  ----- Method: StackToRegisterMappingCogit>>genVanillaInlinedIdenticalOrNotIf: (in category 'bytecode generators') -----
  genVanillaInlinedIdenticalOrNotIf: orNot
  	| nextPC postBranchPC targetBytecodePC branchDescriptor
  	  rcvrReg argReg argIsConstant rcvrIsConstant  |
  	<var: #branchDescriptor type: #'BytecodeDescriptor *'>
  	
  	self extractMaybeBranchDescriptorInto: [ :descr :next :postBranch :target | 
  		branchDescriptor := descr. nextPC := next. postBranchPC := postBranch. targetBytecodePC := target ].
  	
  	argIsConstant := self ssTop type = SSConstant.
  	"They can't be both constants to use correct machine opcodes.
  	 However, annotatable constants can't be resolved statically, hence we need to be careful."
  	rcvrIsConstant := argIsConstant not and: [(self ssValue: 1) type = SSConstant].
  	
  	self 
  		allocateEqualsEqualsRegistersArgNeedsReg: argIsConstant not 
  		rcvrNeedsReg: rcvrIsConstant not 
  		into: [ :rcvr :arg | rcvrReg:= rcvr. argReg := arg ].
  	
  	"If not followed by a branch, resolve to true or false."
  	(branchDescriptor isBranchTrue or: [branchDescriptor isBranchFalse]) ifFalse:
  		[^ self 
  			genIdenticalNoBranchArgIsConstant: argIsConstant 
  			rcvrIsConstant: rcvrIsConstant 
  			argReg: argReg 
  			rcvrReg: rcvrReg 
  			orNotIf: orNot].
  	
  	"If branching the stack must be flushed for the merge"
  	self ssFlushTo: simStackPtr - 2.
  	
  	self genCmpArgIsConstant: argIsConstant rcvrIsConstant: rcvrIsConstant argReg: argReg rcvrReg: rcvrReg.
  	self ssPop: 2.
  
  	"Further since there is a following conditional jump bytecode, define
  	 non-merge fixups and leave the cond bytecode to set the mergeness."
  	(self fixupAt: nextPC - initialPC) notAFixup
  		ifTrue: "The next instruction is dead.  we can skip it."
  			[deadCode := true.
+ 		 	 self ensureFixupAt: targetBytecodePC.
+ 			 self ensureFixupAt: postBranchPC]
- 		 	 self ensureFixupAt: targetBytecodePC - initialPC.
- 			 self ensureFixupAt: postBranchPC - initialPC]
  		ifFalse:
  			[self deny: deadCode]. "push dummy value below"
  		
  	self genConditionalBranch: (orNot == branchDescriptor isBranchTrue ifTrue: [JumpNonZero] ifFalse: [JumpZero])
+ 		operand: (self ensureNonMergeFixupAt: targetBytecodePC) asUnsignedInteger.
- 		operand: (self ensureNonMergeFixupAt: targetBytecodePC - initialPC) asUnsignedInteger.
  
  	"If the branch is dead, then we can just fall through postBranchPC (only a nop in-between), else 
  	we need to jump over the code of the branch"
  	deadCode ifFalse:
+ 		[self Jump: (self ensureNonMergeFixupAt: postBranchPC).
- 		[self Jump: (self ensureNonMergeFixupAt: postBranchPC - initialPC).
  		 self ssPushConstant: objectMemory trueObject]. "dummy value"
  	^0!

Item was added:
+ ----- Method: TInlineNode>>nodesDo:parent: (in category 'enumerating') -----
+ nodesDo: aBlock parent: parent
+ 	"Apply aBlock to all nodes in the receiver with each node's parent.
+ 	 The inlined method's parse tree is traversed first, with the receiver as parent;
+ 	 then aBlock is evaluated with the receiver itself and the supplied parent.
+ 	 N.B. This is assumed to be bottom-up, leaves first."
+ 	method parseTree nodesDo: aBlock parent: self.
+ 	aBlock value: self value: parent!

Item was added:
+ ----- Method: TMethod>>readsVariable: (in category 'accessing') -----
+ readsVariable: variableName
+ 	"Answer if the receiver reads the variable (i.e. ignore assignments to the variable).
+ 	 Only occurrences as the target of an assignment are ignored; an occurrence
+ 	 on the right-hand side of an assignment still counts as a read."
+ 	parseTree nodesWithParentsDo:
+ 		[:node :parent|
+ 		 (node isVariable
+ 		  and: [node name = variableName]) ifTrue:
+ 			[(parent notNil
+ 			  and: [parent isAssignment
+ 			  and: [node == parent variable]]) ifFalse:
+ 				[^true]]].
+ 	^false!

Item was added:
+ ----- Method: VMClass class>>simulatorClass (in category 'simulation') -----
+ simulatorClass
+ 	"For running from Smalltalk - answer a class that can be used to simulate the receiver.
+ 	 By default, answer the receiver itself."
+ 
+ 	^self!




More information about the Vm-dev mailing list