[Vm-dev] VM Maker: VMMaker.oscog-eem.1688.mcz

commits at source.squeak.org
Sun Feb 21 03:09:35 UTC 2016


Eliot Miranda uploaded a new version of VMMaker to project VM Maker:
http://source.squeak.org/VMMaker/VMMaker.oscog-eem.1688.mcz

==================== Summary ====================

Name: VMMaker.oscog-eem.1688
Author: eem
Time: 20 February 2016, 7:07:45.253804 pm
UUID: 1216c2a8-fd06-474f-9f5b-33d14918ca23
Ancestors: VMMaker.oscog-eem.1687

X64 Cogit:
make sure that literal32BeforeFollowingAddress: answers unsigned.  Otherwise 16rBABE1F1(5H) causes an assert failure in expectedClosedPICPrototype at startup.

Introduce an abstraction for unlinked inline cache tags holding selector indices (when will I ever learn??).

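The abstraction is the Cogit>>inlineCacheTagsAreIndexes predicate added below. As an illustrative before/after sketch (drawn from the mapFor:performUntil:arg: change; not itself part of the diff):

	"Before: the word-size test was repeated at each call site."
	objectMemory wordSize = 8 ifTrue:
		[enumeratingCogMethod := cogMethod].

	"After: a single inlined predicate names the intent."
	self inlineCacheTagsAreIndexes ifTrue:
		[enumeratingCogMethod := cogMethod].
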
=============== Diff against VMMaker.oscog-eem.1687 ===============

Item was changed:
  ----- Method: CogInLineLiteralsX64Compiler>>literal32BeforeFollowingAddress: (in category 'inline cacheing') -----
  literal32BeforeFollowingAddress: followingAddress
  	"Answer the 32-bit literal embedded in the instruction immediately preceding followingAddress."
+ 	^self
+ 		cCoerceSimple: (self unalignedLong32At: followingAddress - 4)
+ 		to: #'unsigned int'!
- 	^self unalignedLong32At: followingAddress - 4!

Item was changed:
  ----- Method: CogObjectRepresentationForSpur>>remapObject: (in category 'garbage collection') -----
  remapObject: objOop
+ 	self assert: (objectMemory addressCouldBeObj: objOop).
  	^(objectMemory shouldRemapObj: objOop)
  		ifTrue: [objectMemory remapObj: objOop]
  		ifFalse: [objOop]!

Item was changed:
  ----- Method: CogObjectRepresentationForSqueakV3>>remapObject: (in category 'garbage collection') -----
+ remapObject: objOop
+ 	self assert: (objectMemory addressCouldBeObj: objOop).
+ 	^objectMemory remap: objOop!
- remapObject: oop
- 	^objectMemory remap: oop!

Item was changed:
  ----- Method: Cogit>>checkIfValidOopRef:pc:cogMethod: (in category 'garbage collection') -----
  checkIfValidOopRef: annotation pc: mcpc cogMethod: cogMethod
  	"Check for a valid object reference, if any, at a map entry.  Answer a code unique to each error for debugging."
  	<var: #mcpc type: #'char *'>
  	<var: #nsSendCache type: #'NSSendCache *'>
  	annotation = IsObjectReference ifTrue:
  		[| literal |
  		 literal := literalsManager fetchLiteralAtAnnotatedAddress: mcpc asUnsignedInteger using: backEnd.
  		 (objectRepresentation checkValidOopReference: literal) ifFalse:
  			[coInterpreter print: 'object ref leak in CM '; printHex: cogMethod asInteger; print: ' @ '; printHex: mcpc asInteger; cr.
  			^1]].
  
  	self cppIf: NewspeakVM ifTrue:
  		[annotation = IsNSSendCall ifTrue:
  			[| nsSendCache enclosingObject |
  			nsSendCache := self nsSendCacheFromReturnAddress: mcpc asInteger.
  			[(objectRepresentation checkValidOopReference: nsSendCache selector) ifFalse:
  				[coInterpreter print: 'selector leak in CM '; printHex: cogMethod asInteger; print: ' @ '; printHex: mcpc asInteger; cr.
  				^1]].
  			(enclosingObject := nsSendCache enclosingObject) ~= 0 ifTrue:
  				[[(objectRepresentation checkValidOopReference: enclosingObject) ifFalse:
  					[coInterpreter print: 'enclosing object leak in CM '; printHex: cogMethod asInteger; print: ' @ '; printHex: mcpc asInteger; cr.
  					^1]]]]].
  
  	(self isPureSendAnnotation: annotation) ifTrue:
  		[| entryPoint selectorOrCacheTag offset |
  		 entryPoint := backEnd callTargetFromReturnAddress: mcpc asInteger.
  		 entryPoint <= methodZoneBase
  			ifTrue:
  				[offset := entryPoint]
  			ifFalse:
  				[self
  					offsetAndSendTableFor: entryPoint
  					annotation: annotation
  					into: [:off :table| offset := off]].
  		 selectorOrCacheTag := backEnd inlineCacheTagAt: mcpc asInteger.
  		 (entryPoint > methodZoneBase
  		  and: [offset ~= cmNoCheckEntryOffset
  		  and: [(self cCoerceSimple: entryPoint - offset to: #'CogMethod *') cmType ~= CMOpenPIC]])
  			ifTrue: "linked non-super send, cacheTag is a cacheTag"
  				[(objectRepresentation validInlineCacheTag: selectorOrCacheTag) ifFalse:
  					[coInterpreter print: 'cache tag leak in CM '; printHex: cogMethod asInteger; print: ' @ '; printHex: mcpc asInteger; cr.
  					^1]]
  			ifFalse: "unlinked send or super send; cacheTag is a selector unless 64-bit, in which case it is an index."
+ 				[(self inlineCacheTagsAreIndexes
- 				[(objectMemory wordSize = 8
  				  or: [objectRepresentation checkValidOopReference: selectorOrCacheTag]) ifFalse:
  					[coInterpreter print: 'selector leak in CM '; printHex: cogMethod asInteger; print: ' @ '; printHex: mcpc asInteger; cr.
  					^1]]].
  	^0 "keep scanning"!

Item was changed:
  ----- Method: Cogit>>checkValidObjectReferencesInClosedPIC: (in category 'garbage collection') -----
  checkValidObjectReferencesInClosedPIC: cPIC
  	<var: #cPIC type: #'CogMethod *'>
  	| ok pc |
  	ok := true.
  	pc := cPIC asInteger + firstCPICCaseOffset.
  	
  	"first we check the obj ref at the beginning of the CPIC"
  	(self checkMaybeObjRefInClosedPIC: (backEnd literalBeforeFollowingAddress: pc - backEnd jumpLongByteSize)) ifFalse:
  		[self print: 'object leak in CPIC '; printHex: cPIC asInteger;
  			print: ' @ '; printHex: pc - backEnd jumpLongByteSize; cr.
  		 ok := false].
  	
  	"Next we step over each case that is in use. We find the end address of the cPICNumCases'th case and can then just step forward by the case size thereafter"
  	pc := self addressOfEndOfCase: cPIC cPICNumCases inCPIC: cPIC.
  	
  	"For each case we check any object reference at the end address - sizeof(conditional instruction) and then increment the end address by case size"
  	2 to: cPIC cPICNumCases do:
  		[:i|
+ 		(self inlineCacheTagsAreIndexes not
+ 		 and: [objectRepresentation inlineCacheTagsMayBeObjects]) ifTrue:
- 		objectRepresentation inlineCacheTagsMayBeObjects ifTrue:
  			[(self checkMaybeObjRefInClosedPIC: (backEnd literal32BeforeFollowingAddress: pc - backEnd jumpLongConditionalByteSize)) ifFalse:
  				[self print: 'object leak in CPIC '; printHex: cPIC asInteger;
  					print: ' @ '; printHex: pc - backEnd jumpLongConditionalByteSize - backEnd loadLiteralByteSize; cr.
  				 ok := false]].
  		(self checkMaybeObjRefInClosedPIC: (backEnd literalBeforeFollowingAddress: pc - backEnd jumpLongConditionalByteSize - backEnd cmpC32RTempByteSize)) ifFalse:
  			[self print: 'object leak in CPIC '; printHex: cPIC asInteger;
  				print: ' @ '; printHex: pc - backEnd jumpLongConditionalByteSize; cr.
  			 ok := false].
  		pc := pc + cPICCaseSize].
  	^ok!

Item was changed:
  ----- Method: Cogit>>closedPICRefersToUnmarkedObject: (in category 'garbage collection') -----
  closedPICRefersToUnmarkedObject: cPIC
  	"Answer if the ClosedPIC refers to any unmarked objects or freed/freeable target methods,
  	 applying markAndTraceOrFreeCogMethod:firstVisit: to those targets to determine if freed/freeable."
  	<var: #cPIC type: #'CogMethod *'>
  	| pc object |
  	((objectMemory isImmediate: cPIC selector)
  	or: [objectMemory isMarked: cPIC selector]) ifFalse:
  		[^true].
  
  	"First jump is unconditional; subsequent ones are conditional."
  	"Check the potential method oop for the first case only.
  	 Inline cache tags for the 1st case are at the send site."
  	pc := self addressOfEndOfCase: 1 inCPIC: cPIC.
  	(objectRepresentation couldBeObject: (object := backEnd literalBeforeFollowingAddress: pc - backEnd jumpLongByteSize)) ifTrue:
  		[(objectMemory isMarked: object) ifFalse:
  			[^true]].
  
  	"Check the first target"
  	(self markAndTraceOrFreePICTarget: (backEnd jumpLongTargetBeforeFollowingAddress: pc) in: cPIC) ifTrue:
  		[^true].
  
  	2 to: cPIC cPICNumCases do:
  		[:i| 
  		pc := self addressOfEndOfCase: i inCPIC: cPIC.
+ 		(self inlineCacheTagsAreIndexes not
+ 		 and: [objectRepresentation inlineCacheTagsMayBeObjects
+ 		 and: [objectRepresentation couldBeObject: (object := literalsManager backEnd literal32BeforeFollowingAddress: pc - backEnd jumpLongConditionalByteSize)]]) ifTrue:
- 		(objectRepresentation inlineCacheTagsMayBeObjects
- 		and: [objectRepresentation couldBeObject: (object := literalsManager backEnd literal32BeforeFollowingAddress: pc - backEnd jumpLongConditionalByteSize)]) ifTrue:
  			[(objectMemory isMarked: object) ifFalse:
  				[^true]].
  		"Check the potential method oop for subsequent cases."
  		(objectRepresentation couldBeObject: (object := backEnd literalBeforeFollowingAddress: pc - backEnd jumpLongConditionalByteSize - backEnd cmpC32RTempByteSize)) ifTrue:
  			[(objectMemory isMarked: object) ifFalse:
  				[^true]].
  		"Check subsequent targets"
  		(self markAndTraceOrFreePICTarget: (backEnd jumpLongConditionalTargetBeforeFollowingAddress: pc) in: cPIC) ifTrue:
  			[^true]].
  
  	^false!

Item was changed:
  ----- Method: Cogit>>genLoadInlineCacheWithSelector: (in category 'in-line cacheing') -----
  genLoadInlineCacheWithSelector: selectorIndex
  	"The in-line cache for a send is implemented as a constant load into ClassReg.
  	 We always use a 32-bit load, even in 64-bits.
  
  	 In the initial (unlinked) state the in-line cache is notionally loaded with the selector.
  	 But since in 64-bits an arbitrary selector oop won't fit in a 32-bit constant load, we
  	 instead load the cache with the selector's index, either into the literal frame of the
  	 current method, or into the special selector array.  Negative values are 1-relative
  	 indices into the special selector array.
  
  	 When a send is linked, the load of the selector, or selector index, is overwritten with a
  	 load of the receiver's class, or class tag.  Hence, the 64-bit VM is currently constrained
  	 to use class indices as cache tags.  If out-of-line literals are used, distinct caches /must
  	 not/ share cache locations, for if they do, send cacheing will be confused by the sharing.
  	 Hence we use the MoveUniqueC32:R: instruction that will not share literal locations."
  
  	| cacheValue |
  	self assert: (selectorIndex < 0
  					ifTrue: [selectorIndex negated between: 1 and: self numSpecialSelectors]
  					ifFalse: [selectorIndex between: 0 and: (objectMemory literalCountOf: methodObj) - 1]).
  
+ 	self inlineCacheTagsAreIndexes
- 	objectMemory wordSize = 8
  		ifTrue:
  			[cacheValue := selectorIndex]
  		ifFalse:
  			[| selector |
  			 selector := selectorIndex < 0
  							ifTrue: [(coInterpreter specialSelector: -1 - selectorIndex)]
  							ifFalse: [self getLiteral: selectorIndex].
  			 self assert: (objectMemory addressCouldBeOop: selector).
  			 (objectMemory isYoung: selector) ifTrue:
  				[hasYoungReferent := true].
  			 cacheValue := selector].
  
  	self MoveUniqueC32: cacheValue R: ClassReg!

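To make the index encoding above concrete, a minimal worked decode (hypothetical index values, mirroring the ifTrue:/ifFalse: in genLoadInlineCacheWithSelector:):

	| selectorIndex selector |
	"negative: a 1-relative index into the special selector array"
	selectorIndex := -1.
	selector := coInterpreter specialSelector: -1 - selectorIndex.	"i.e. specialSelector: 0, the first special selector"
	"non-negative: a zero-relative index into the method's literal frame"
	selectorIndex := 2.
	selector := self getLiteral: selectorIndex	"i.e. the third literal of methodObj"
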
Item was added:
+ ----- Method: Cogit>>inlineCacheTagsAreIndexes (in category 'in-line cacheing') -----
+ inlineCacheTagsAreIndexes
+ 	"The Cogit always generates 32-bit inline caches.  This implies that in 64-bits there is no room
+ 	 in an unlinked inline cache for a selector oop.  Instead it contains a signed 32-bit index, positive
+ 	 for selectors in a method's literal frame and negative for selectors in the specialSelectorsOop.
+ 	 And it implies that linked inline cache entries contain class indices, not class oops."
+ 	<inline: true>
+ 	^objectMemory wordSize = 8!

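The predicate also guards the garbage collector's treatment of the 32-bit cache word; a condensed sketch of the pattern used in the CPIC-scanning changes elsewhere in this commit (paraphrased, not verbatim):

	(self inlineCacheTagsAreIndexes not
	 and: [objectRepresentation inlineCacheTagsMayBeObjects]) ifTrue:
		["only in 32-bits may the cache word be an object reference to remap"
		 self remapMaybeObjRefInClosedPICAt: pc - backEnd jumpLongConditionalByteSize]
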
Item was changed:
  ----- Method: Cogit>>inlineCacheValueForSelector:in:at: (in category 'in-line cacheing') -----
  inlineCacheValueForSelector: selector in: aCogMethod at: mcpc
  	"Answer the value to put in an inline-cache that is being loaded with the selector.
  	 Usually this is simply the selector, but in 64-bits the cache is only 32-bits wide
  	 and so the cache is loaded with the index of the selector."
  	<inline: true>
+ 	^self inlineCacheTagsAreIndexes
+ 		ifTrue: [self indexForSelector: selector in: aCogMethod at: mcpc]
+ 		ifFalse: [selector]!
- 	^objectMemory wordSize < 8
- 		ifTrue: [selector]
- 		ifFalse: [self indexForSelector: selector in: aCogMethod at: mcpc]!

Item was changed:
  ----- Method: Cogit>>mapFor:performUntil:arg: (in category 'method map') -----
  mapFor: cogMethod performUntil: functionSymbol arg: arg
  	"Unlinking/GC/Disassembly support"
  	<var: #cogMethod type: #'CogMethod *'>
  	<var: #functionSymbol declareC: 'sqInt (*functionSymbol)(sqInt annotation, char *mcpc, sqInt arg)'>
  	<inline: true>
  	| mcpc map mapByte annotation result |
  	mcpc := cogMethod asInteger + cmNoCheckEntryOffset.
  	map := self mapStartFor: cogMethod.
+ 	self inlineCacheTagsAreIndexes ifTrue:
- 	objectMemory wordSize = 8 ifTrue:
  		[enumeratingCogMethod := cogMethod].
  	[(mapByte := objectMemory byteAt: map) ~= MapEnd] whileTrue:
  		[mapByte >= FirstAnnotation
  			ifTrue:
  				[mcpc := mcpc + ((mapByte bitAnd: DisplacementMask) * backEnd codeGranularity).
  				 "If this is an IsSendCall annotation, peek ahead for an IsAnnotationExtension, and consume it."
  				 ((annotation := mapByte >> AnnotationShift) = IsSendCall
  				  and: [(mapByte := objectMemory byteAt: map - 1) >> AnnotationShift = IsAnnotationExtension]) ifTrue:
  					[annotation := annotation + (mapByte bitAnd: DisplacementMask).
  					 map := map - 1].
  				 result := self perform: functionSymbol
  							   with: annotation
  							   with: (self cCoerceSimple: mcpc to: #'char *')
  							   with: arg.
  				 result ~= 0 ifTrue:
  					[^result]]
  			ifFalse:
  				[mapByte < (IsAnnotationExtension << AnnotationShift) ifTrue:
  					[mcpc := mcpc + ((mapByte - DisplacementX2N << AnnotationShift) * backEnd codeGranularity)]].
  		 map := map - 1].
  	^0!

Item was changed:
  ----- Method: Cogit>>mapObjectReferencesInClosedPIC: (in category 'garbage collection') -----
  mapObjectReferencesInClosedPIC: cPIC
  	"Remap all object references in the closed PIC.  Answer if any references are young.
  	Set codeModified if any modifications are made."
  	<var: #cPIC type: #'CogMethod *'>
  	| pc refersToYoung |
  	pc := self addressOfEndOfCase:1 inCPIC:cPIC.
  
  	"first we check the potential method oop load at the beginning of the CPIC"
  	refersToYoung := self remapMaybeObjRefInClosedPICAt: pc - backEnd jumpLongByteSize.
  
  	"We find the end address of the cPICNumCases'th case and can then just step forward by the case size thereafter"
  	pc := self addressOfEndOfCase: cPIC cPICNumCases inCPIC: cPIC.
  	
  	"Next we check the potential class ref in the compare instruction, and the potential method oop load for each case."
  	2 to: cPIC cPICNumCases do:
  		[:i|
+ 		(self inlineCacheTagsAreIndexes not
+ 		 and: [objectRepresentation inlineCacheTagsMayBeObjects]) ifTrue:
- 		objectRepresentation inlineCacheTagsMayBeObjects ifTrue:
  			[(self remapMaybeObjRefInClosedPICAt: pc - backEnd jumpLongConditionalByteSize) ifTrue:
  				[refersToYoung := true]].
  		(self remapMaybeObjRefInClosedPICAt: pc - backEnd jumpLongConditionalByteSize - backEnd cmpC32RTempByteSize) ifTrue:
  			[refersToYoung := true].
  		pc := pc + cPICCaseSize].
  	^refersToYoung!

Item was changed:
  ----- Method: Cogit>>offsetCacheTagAndCouldBeObjectAt:annotation:into: (in category 'in-line cacheing') -----
  offsetCacheTagAndCouldBeObjectAt: mcpc annotation: annotation into: trinaryBlock
  	"Evaluate trinaryBlock with the entry, inline cache tag and whether the cache
  	 tag could be an object, for the send at mcpc with annotation annotation."
  	<inline: true>
  	| cacheTag entryPoint tagCouldBeObj |
  	cacheTag := backEnd inlineCacheTagAt: mcpc asInteger.
  	entryPoint := backEnd callTargetFromReturnAddress: mcpc asInteger.
  	"in-line cache tags are the selectors of sends if sends are unlinked,
  	 the selectors of super sends (entry offset = cmNoCheckEntryOffset),
  	 the selectors of open PIC sends (entry offset = cmEntryOffset, target is an Open PIC)
  	 or in-line cache tags (classes, class indices, immediate bit patterns, etc).
  	 Note that selectors can be immediate so there is no guarantee that they
  	 are markable/remappable objects."
+ 	tagCouldBeObj := self inlineCacheTagsAreIndexes not
+ 						and: [objectRepresentation inlineCacheTagsMayBeObjects
+ 							or: [entryPoint < methodZoneBase
+ 							or: [(entryPoint bitAnd: entryPointMask) = uncheckedEntryAlignment
+ 							or: [(entryPoint bitAnd: entryPointMask) = checkedEntryAlignment
+ 								and: [(self cCoerceSimple: entryPoint - cmEntryOffset to: #'CogMethod *') cmType = CMOpenPIC]]]]].
- 	tagCouldBeObj := objectRepresentation inlineCacheTagsMayBeObjects
- 						or: [entryPoint < methodZoneBase
- 						or: [(entryPoint bitAnd: entryPointMask) = uncheckedEntryAlignment
- 						or: [(entryPoint bitAnd: entryPointMask) = checkedEntryAlignment
- 							and: [(self cCoerceSimple: entryPoint - cmEntryOffset to: #'CogMethod *') cmType = CMOpenPIC]]]].
  	trinaryBlock
  		value: entryPoint
  		value: cacheTag
  		value: tagCouldBeObj!

Item was changed:
  ----- Method: Cogit>>selectorForSendAt:annotation:in: (in category 'simulation only') -----
  selectorForSendAt: mcpc annotation: annotation in: aCompiledMethod
  	<doNotGenerate>
  	| entryPoint offset targetMethod selector |
  	entryPoint := backEnd callTargetFromReturnAddress: mcpc asInteger.
  	selector := entryPoint > methodZoneBase
  					ifTrue: "It's a linked send."
  						[self
  							offsetAndSendTableFor: entryPoint
  							annotation: annotation
  							into: [:off :table| offset := off].
  						targetMethod := self cCoerceSimple: entryPoint - offset to: #'CogMethod *'.
  						targetMethod selector]
  					ifFalse:
+ 						[self inlineCacheTagsAreIndexes
- 						[objectMemory wordSize = 8
  							ifTrue: [self selectorFromSelectorIndex: (backEnd inlineCacheTagAt: mcpc) signedIntFromLong
  										in: aCompiledMethod]
  							ifFalse: [backEnd inlineCacheTagAt: mcpc]].
  	^(annotation ~= IsNSSendCall and: [coInterpreter isCurrentImageFacade])
  		ifTrue: [coInterpreter objectForOop: selector]
  		ifFalse: [selector]!

Item was changed:
  ----- Method: SistaStackToRegisterMappingCogit>>mapFor:bcpc:withAnnotationPerformUntil:arg: (in category 'method map') -----
  mapFor: cogMethod bcpc: startbcpc withAnnotationPerformUntil: functionSymbol arg: arg
  	"A version of mapFor:bcpc:performUntil:arg: that passes the annotation instead of the isBackwardBranch
  	 flag. Evaluate functionSymbol for each mcpc, bcpc pair in the map until the function returns non-zero,
  	 answering that result, or 0 if it fails to.  This works only for frameful methods"
  	<var: #cogMethod type: #'CogBlockMethod *'>
  	<var: #functionSymbol declareC: 'sqInt (*functionSymbol)(BytecodeDescriptor *desc, sqInt annotation, char *mcpc, sqInt bcpc, void *arg)'>
  	<var: #arg type: #'void *'>
  	<inline: true>
  	| isInBlock mcpc bcpc endbcpc map mapByte homeMethod aMethodObj result
  	  latestContinuation byte descriptor bsOffset nExts annotation |
  	<var: #descriptor type: #'BytecodeDescriptor *'>
  	<var: #homeMethod type: #'CogMethod *'>
  	self assert: cogMethod stackCheckOffset > 0.
  	"In both CMMethod and CMBlock cases find the start of the map and
  	 skip forward to the bytecode pc map entry for the stack check."
  	cogMethod cmType = CMMethod
  		ifTrue:
  			[isInBlock := false.
  			 homeMethod := self cCoerceSimple: cogMethod to: #'CogMethod *'.
  			 self assert: startbcpc = (coInterpreter startPCOfMethodHeader: homeMethod methodHeader).
  			 map := self mapStartFor: homeMethod.
  			 annotation := (objectMemory byteAt: map) >> AnnotationShift.
  			 self assert: (annotation = IsAbsPCReference
  						 or: [annotation = IsObjectReference
  						 or: [annotation = IsRelativeCall
  						 or: [annotation = IsDisplacementX2N]]]).
  			 latestContinuation := startbcpc.
  			 aMethodObj := homeMethod methodObject.
  			 endbcpc := (objectMemory numBytesOf: aMethodObj) - 1.
  			 bsOffset := self bytecodeSetOffsetForHeader: homeMethod methodHeader]
  		ifFalse:
  			[isInBlock := true.
  			 homeMethod := cogMethod cmHomeMethod.
  			 map := self findMapLocationForMcpc: cogMethod asUnsignedInteger + (self sizeof: CogBlockMethod)
  						inMethod: homeMethod.
  			 self assert: map ~= 0.
  			 annotation := (objectMemory byteAt: map) >> AnnotationShift.
  			 self assert: (annotation >> AnnotationShift = HasBytecodePC "fiducial"
  						 or: [annotation >> AnnotationShift = IsDisplacementX2N]).
  			 [(annotation := (objectMemory byteAt: map) >> AnnotationShift) ~= HasBytecodePC] whileTrue:
  				[map := map - 1].
  			 map := map - 1. "skip fiducial; i.e. the map entry for the pc immediately following the method header."
  			 aMethodObj := homeMethod methodObject.
  			 bcpc := startbcpc - (self blockCreationBytecodeSizeForHeader: homeMethod methodHeader).
  			 bsOffset := self bytecodeSetOffsetForHeader: homeMethod methodHeader.
  			 byte := (objectMemory fetchByte: bcpc ofObject: aMethodObj) + bsOffset.
  			 descriptor := self generatorAt: byte.
  			 endbcpc := self nextBytecodePCFor: descriptor at: bcpc exts: -1 in: aMethodObj].
  	bcpc := startbcpc.
  	mcpc := cogMethod asUnsignedInteger + cogMethod stackCheckOffset.
  	nExts := 0.
+ 	self inlineCacheTagsAreIndexes ifTrue:
- 	objectMemory wordSize = 8 ifTrue:
  		[enumeratingCogMethod := homeMethod].
  	"The stack check maps to the start of the first bytecode,
  	 the first bytecode being effectively after frame build."
  	result := self perform: functionSymbol
  					with: nil
  					with: annotation
  					with: (self cCoerceSimple: mcpc to: #'char *')
  					with: startbcpc
  					with: arg.
  	result ~= 0 ifTrue:
  		[^result].
  	"Now skip up through the bytecode pc map entry for the stack check." 
  	[(objectMemory byteAt: map) >> AnnotationShift ~= HasBytecodePC] whileTrue:
  		[map := map - 1].
  	map := map - 1.
  	[(mapByte := objectMemory byteAt: map) ~= MapEnd] whileTrue: "defensive; we exit on bcpc"
  		[mapByte >= FirstAnnotation
  			ifTrue:
  				[| nextBcpc |
  				annotation := mapByte >> AnnotationShift.
  				mcpc := mcpc + ((mapByte bitAnd: DisplacementMask) * backEnd codeGranularity).
  				(self isPCMappedAnnotation: annotation) ifTrue:
  					[(annotation = IsSendCall
  					  and: [(mapByte := objectMemory byteAt: map - 1) >> AnnotationShift = IsAnnotationExtension]) ifTrue:
  						[annotation := annotation + (mapByte bitAnd: DisplacementMask).
  						 map := map - 1].
  					[byte := (objectMemory fetchByte: bcpc ofObject: aMethodObj) + bsOffset.
  					  descriptor := self generatorAt: byte.
  					  isInBlock
  						ifTrue: [bcpc >= endbcpc ifTrue: [^0]]
  						ifFalse:
  							[(descriptor isReturn and: [bcpc >= latestContinuation]) ifTrue: [^0].
  							 (descriptor isBranch or: [descriptor isBlockCreation]) ifTrue:
  								[| targetPC |
  								 targetPC := self latestContinuationPCFor: descriptor at: bcpc exts: nExts in: aMethodObj.
  								 latestContinuation := latestContinuation max: targetPC]].
  					  nextBcpc := self nextBytecodePCFor: descriptor at: bcpc exts: nExts in: aMethodObj.
  					  descriptor isMapped
  					  or: [isInBlock and: [descriptor isMappedInBlock]]] whileFalse:
  						[bcpc := nextBcpc.
  						 nExts := descriptor isExtension ifTrue: [nExts + 1] ifFalse: [0]].
  					result := self perform: functionSymbol
  									with: descriptor
  									with: annotation
  									with: (self cCoerceSimple: mcpc to: #'char *')
  									with: bcpc
  									with: arg.
  					 result ~= 0 ifTrue:
  						[^result].
  					 bcpc := nextBcpc.
  					 nExts := descriptor isExtension ifTrue: [nExts + 1] ifFalse: [0]]]
  			ifFalse:
  				[self assert: (mapByte >> AnnotationShift = IsDisplacementX2N
  							or: [mapByte >> AnnotationShift = IsAnnotationExtension]).
  				 mapByte < (IsAnnotationExtension << AnnotationShift) ifTrue:
  					[mcpc := mcpc + ((mapByte - DisplacementX2N << AnnotationShift) * backEnd codeGranularity)]].
  		 map := map - 1].
  	^0!


