1 /* 2 * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 */ 23 24 25 package org.graalvm.compiler.replacements; 26 27 import static jdk.vm.ci.code.MemoryBarriers.JMM_POST_VOLATILE_READ; 28 import static jdk.vm.ci.code.MemoryBarriers.JMM_POST_VOLATILE_WRITE; 29 import static jdk.vm.ci.code.MemoryBarriers.JMM_PRE_VOLATILE_READ; 30 import static jdk.vm.ci.code.MemoryBarriers.JMM_PRE_VOLATILE_WRITE; 31 import static jdk.vm.ci.meta.DeoptimizationAction.InvalidateReprofile; 32 import static jdk.vm.ci.meta.DeoptimizationReason.BoundsCheckException; 33 import static jdk.vm.ci.meta.DeoptimizationReason.NullCheckException; 34 import static org.graalvm.compiler.core.common.SpeculativeExecutionAttacksMitigations.Options.UseIndexMasking; 35 import static org.graalvm.compiler.nodes.NamedLocationIdentity.ARRAY_LENGTH_LOCATION; 36 import static org.graalvm.compiler.nodes.calc.BinaryArithmeticNode.branchlessMax; 37 import static org.graalvm.compiler.nodes.calc.BinaryArithmeticNode.branchlessMin; 38 import static org.graalvm.compiler.nodes.java.ArrayLengthNode.readArrayLength; 39 import static org.graalvm.compiler.nodes.util.GraphUtil.skipPiWhileNonNull; 40 41 import java.nio.ByteOrder; 42 import java.util.ArrayList; 43 import java.util.BitSet; 44 import java.util.List; 45 46 import org.graalvm.compiler.api.directives.GraalDirectives; 47 import org.graalvm.compiler.api.replacements.Snippet; 48 import org.graalvm.compiler.api.replacements.SnippetReflectionProvider; 49 import org.graalvm.compiler.core.common.LIRKind; 50 import org.graalvm.compiler.core.common.spi.ForeignCallsProvider; 51 import org.graalvm.compiler.core.common.type.IntegerStamp; 52 import org.graalvm.compiler.core.common.type.ObjectStamp; 53 import org.graalvm.compiler.core.common.type.Stamp; 54 import org.graalvm.compiler.core.common.type.StampFactory; 55 import org.graalvm.compiler.core.common.type.TypeReference; 56 import org.graalvm.compiler.debug.DebugCloseable; 57 import org.graalvm.compiler.debug.DebugHandlersFactory; 58 import 
org.graalvm.compiler.debug.GraalError; 59 import org.graalvm.compiler.graph.Node; 60 import org.graalvm.compiler.nodeinfo.InputType; 61 import org.graalvm.compiler.nodes.CompressionNode.CompressionOp; 62 import org.graalvm.compiler.nodes.ConstantNode; 63 import org.graalvm.compiler.nodes.FieldLocationIdentity; 64 import org.graalvm.compiler.nodes.FixedNode; 65 import org.graalvm.compiler.nodes.LogicNode; 66 import org.graalvm.compiler.nodes.NamedLocationIdentity; 67 import org.graalvm.compiler.nodes.NodeView; 68 import org.graalvm.compiler.nodes.PiNode; 69 import org.graalvm.compiler.nodes.StructuredGraph; 70 import org.graalvm.compiler.nodes.ValueNode; 71 import org.graalvm.compiler.nodes.calc.AddNode; 72 import org.graalvm.compiler.nodes.calc.ConditionalNode; 73 import org.graalvm.compiler.nodes.calc.IntegerBelowNode; 74 import org.graalvm.compiler.nodes.calc.IntegerConvertNode; 75 import org.graalvm.compiler.nodes.calc.IntegerEqualsNode; 76 import org.graalvm.compiler.nodes.calc.IsNullNode; 77 import org.graalvm.compiler.nodes.calc.LeftShiftNode; 78 import org.graalvm.compiler.nodes.calc.NarrowNode; 79 import org.graalvm.compiler.nodes.calc.RightShiftNode; 80 import org.graalvm.compiler.nodes.calc.SignExtendNode; 81 import org.graalvm.compiler.nodes.calc.SubNode; 82 import org.graalvm.compiler.nodes.calc.UnpackEndianHalfNode; 83 import org.graalvm.compiler.nodes.calc.ZeroExtendNode; 84 import org.graalvm.compiler.nodes.debug.VerifyHeapNode; 85 import org.graalvm.compiler.nodes.extended.BoxNode; 86 import org.graalvm.compiler.nodes.extended.FixedValueAnchorNode; 87 import org.graalvm.compiler.nodes.extended.ForeignCallNode; 88 import org.graalvm.compiler.nodes.extended.GuardedUnsafeLoadNode; 89 import org.graalvm.compiler.nodes.extended.GuardingNode; 90 import org.graalvm.compiler.nodes.extended.JavaReadNode; 91 import org.graalvm.compiler.nodes.extended.JavaWriteNode; 92 import org.graalvm.compiler.nodes.extended.LoadArrayComponentHubNode; 93 import 
org.graalvm.compiler.nodes.extended.LoadHubNode; 94 import org.graalvm.compiler.nodes.extended.MembarNode; 95 import org.graalvm.compiler.nodes.extended.RawLoadNode; 96 import org.graalvm.compiler.nodes.extended.RawStoreNode; 97 import org.graalvm.compiler.nodes.extended.UnboxNode; 98 import org.graalvm.compiler.nodes.extended.UnsafeMemoryLoadNode; 99 import org.graalvm.compiler.nodes.extended.UnsafeMemoryStoreNode; 100 import org.graalvm.compiler.nodes.java.AbstractNewObjectNode; 101 import org.graalvm.compiler.nodes.java.AccessIndexedNode; 102 import org.graalvm.compiler.nodes.java.ArrayLengthNode; 103 import org.graalvm.compiler.nodes.java.AtomicReadAndWriteNode; 104 import org.graalvm.compiler.nodes.java.FinalFieldBarrierNode; 105 import org.graalvm.compiler.nodes.java.InstanceOfDynamicNode; 106 import org.graalvm.compiler.nodes.java.InstanceOfNode; 107 import org.graalvm.compiler.nodes.java.LoadFieldNode; 108 import org.graalvm.compiler.nodes.java.LoadIndexedNode; 109 import org.graalvm.compiler.nodes.java.LogicCompareAndSwapNode; 110 import org.graalvm.compiler.nodes.java.LoweredAtomicReadAndWriteNode; 111 import org.graalvm.compiler.nodes.java.MonitorEnterNode; 112 import org.graalvm.compiler.nodes.java.MonitorIdNode; 113 import org.graalvm.compiler.nodes.java.NewArrayNode; 114 import org.graalvm.compiler.nodes.java.NewInstanceNode; 115 import org.graalvm.compiler.nodes.java.RawMonitorEnterNode; 116 import org.graalvm.compiler.nodes.java.StoreFieldNode; 117 import org.graalvm.compiler.nodes.java.StoreIndexedNode; 118 import org.graalvm.compiler.nodes.java.UnsafeCompareAndExchangeNode; 119 import org.graalvm.compiler.nodes.java.UnsafeCompareAndSwapNode; 120 import org.graalvm.compiler.nodes.java.ValueCompareAndSwapNode; 121 import org.graalvm.compiler.nodes.memory.HeapAccess.BarrierType; 122 import org.graalvm.compiler.nodes.memory.ReadNode; 123 import org.graalvm.compiler.nodes.memory.WriteNode; 124 import 
org.graalvm.compiler.nodes.memory.address.AddressNode; 125 import org.graalvm.compiler.nodes.memory.address.IndexAddressNode; 126 import org.graalvm.compiler.nodes.memory.address.OffsetAddressNode; 127 import org.graalvm.compiler.nodes.spi.Lowerable; 128 import org.graalvm.compiler.nodes.spi.LoweringProvider; 129 import org.graalvm.compiler.nodes.spi.LoweringTool; 130 import org.graalvm.compiler.nodes.type.StampTool; 131 import org.graalvm.compiler.nodes.util.GraphUtil; 132 import org.graalvm.compiler.nodes.virtual.AllocatedObjectNode; 133 import org.graalvm.compiler.nodes.virtual.CommitAllocationNode; 134 import org.graalvm.compiler.nodes.virtual.VirtualArrayNode; 135 import org.graalvm.compiler.nodes.virtual.VirtualInstanceNode; 136 import org.graalvm.compiler.nodes.virtual.VirtualObjectNode; 137 import org.graalvm.compiler.options.OptionValues; 138 import org.graalvm.compiler.phases.util.Providers; 139 import org.graalvm.compiler.replacements.SnippetLowerableMemoryNode.SnippetLowering; 140 import org.graalvm.compiler.replacements.nodes.BinaryMathIntrinsicNode; 141 import org.graalvm.compiler.replacements.nodes.UnaryMathIntrinsicNode; 142 import jdk.internal.vm.compiler.word.LocationIdentity; 143 144 import jdk.vm.ci.code.CodeUtil; 145 import jdk.vm.ci.code.MemoryBarriers; 146 import jdk.vm.ci.code.TargetDescription; 147 import jdk.vm.ci.meta.DeoptimizationAction; 148 import jdk.vm.ci.meta.DeoptimizationReason; 149 import jdk.vm.ci.meta.JavaKind; 150 import jdk.vm.ci.meta.MetaAccessProvider; 151 import jdk.vm.ci.meta.ResolvedJavaField; 152 import jdk.vm.ci.meta.ResolvedJavaMethod; 153 import jdk.vm.ci.meta.ResolvedJavaType; 154 import jdk.vm.ci.meta.SpeculationLog; 155 156 /** 157 * VM-independent lowerings for standard Java nodes. VM-specific methods are abstract and must be 158 * implemented by VM-specific subclasses. 
159 */ 160 public abstract class DefaultJavaLoweringProvider implements LoweringProvider { 161 162 protected final MetaAccessProvider metaAccess; 163 protected final ForeignCallsProvider foreignCalls; 164 protected final TargetDescription target; 165 private final boolean useCompressedOops; 166 private final ResolvedJavaType objectArrayType; 167 168 private BoxingSnippets.Templates boxingSnippets; 169 private ConstantStringIndexOfSnippets.Templates indexOfSnippets; 170 171 public DefaultJavaLoweringProvider(MetaAccessProvider metaAccess, ForeignCallsProvider foreignCalls, TargetDescription target, boolean useCompressedOops) { 172 this.metaAccess = metaAccess; 173 this.foreignCalls = foreignCalls; 174 this.target = target; 175 this.useCompressedOops = useCompressedOops; 176 this.objectArrayType = metaAccess.lookupJavaType(Object[].class); 177 } 178 179 public void initialize(OptionValues options, Iterable<DebugHandlersFactory> factories, SnippetCounter.Group.Factory factory, Providers providers, SnippetReflectionProvider snippetReflection) { 180 boxingSnippets = new BoxingSnippets.Templates(options, factories, factory, providers, snippetReflection, target); 181 indexOfSnippets = new ConstantStringIndexOfSnippets.Templates(options, factories, providers, snippetReflection, target); 182 providers.getReplacements().registerSnippetTemplateCache(new SnippetCounterNode.SnippetCounterSnippets.Templates(options, factories, providers, snippetReflection, target)); 183 } 184 185 public final TargetDescription getTarget() { 186 return target; 187 } 188 189 public MetaAccessProvider getMetaAccess() { 190 return metaAccess; 191 } 192 193 @Override 194 @SuppressWarnings("try") 195 public void lower(Node n, LoweringTool tool) { 196 assert n instanceof Lowerable; 197 StructuredGraph graph = (StructuredGraph) n.graph(); 198 try (DebugCloseable context = n.withNodeSourcePosition()) { 199 if (n instanceof LoadFieldNode) { 200 lowerLoadFieldNode((LoadFieldNode) n, tool); 201 } else if (n 
instanceof StoreFieldNode) { 202 lowerStoreFieldNode((StoreFieldNode) n, tool); 203 } else if (n instanceof LoadIndexedNode) { 204 lowerLoadIndexedNode((LoadIndexedNode) n, tool); 205 } else if (n instanceof StoreIndexedNode) { 206 lowerStoreIndexedNode((StoreIndexedNode) n, tool); 207 } else if (n instanceof IndexAddressNode) { 208 lowerIndexAddressNode((IndexAddressNode) n); 209 } else if (n instanceof ArrayLengthNode) { 210 lowerArrayLengthNode((ArrayLengthNode) n, tool); 211 } else if (n instanceof LoadHubNode) { 212 lowerLoadHubNode((LoadHubNode) n, tool); 213 } else if (n instanceof LoadArrayComponentHubNode) { 214 lowerLoadArrayComponentHubNode((LoadArrayComponentHubNode) n); 215 } else if (n instanceof MonitorEnterNode) { 216 lowerMonitorEnterNode((MonitorEnterNode) n, tool, graph); 217 } else if (n instanceof UnsafeCompareAndSwapNode) { 218 lowerCompareAndSwapNode((UnsafeCompareAndSwapNode) n); 219 } else if (n instanceof UnsafeCompareAndExchangeNode) { 220 lowerCompareAndExchangeNode((UnsafeCompareAndExchangeNode) n); 221 } else if (n instanceof AtomicReadAndWriteNode) { 222 lowerAtomicReadAndWriteNode((AtomicReadAndWriteNode) n); 223 } else if (n instanceof RawLoadNode) { 224 lowerUnsafeLoadNode((RawLoadNode) n, tool); 225 } else if (n instanceof UnsafeMemoryLoadNode) { 226 lowerUnsafeMemoryLoadNode((UnsafeMemoryLoadNode) n); 227 } else if (n instanceof RawStoreNode) { 228 lowerUnsafeStoreNode((RawStoreNode) n); 229 } else if (n instanceof UnsafeMemoryStoreNode) { 230 lowerUnsafeMemoryStoreNode((UnsafeMemoryStoreNode) n); 231 } else if (n instanceof JavaReadNode) { 232 lowerJavaReadNode((JavaReadNode) n); 233 } else if (n instanceof JavaWriteNode) { 234 lowerJavaWriteNode((JavaWriteNode) n); 235 } else if (n instanceof CommitAllocationNode) { 236 lowerCommitAllocationNode((CommitAllocationNode) n, tool); 237 } else if (n instanceof BoxNode) { 238 boxingSnippets.lower((BoxNode) n, tool); 239 } else if (n instanceof UnboxNode) { 240 
boxingSnippets.lower((UnboxNode) n, tool); 241 } else if (n instanceof VerifyHeapNode) { 242 lowerVerifyHeap((VerifyHeapNode) n); 243 } else if (n instanceof UnaryMathIntrinsicNode) { 244 lowerUnaryMath((UnaryMathIntrinsicNode) n, tool); 245 } else if (n instanceof BinaryMathIntrinsicNode) { 246 lowerBinaryMath((BinaryMathIntrinsicNode) n, tool); 247 } else if (n instanceof StringIndexOfNode) { 248 lowerIndexOf((StringIndexOfNode) n); 249 } else if (n instanceof StringLatin1IndexOfNode) { 250 lowerLatin1IndexOf((StringLatin1IndexOfNode) n); 251 } else if (n instanceof StringUTF16IndexOfNode) { 252 lowerUTF16IndexOf((StringUTF16IndexOfNode) n); 253 } else if (n instanceof UnpackEndianHalfNode) { 254 lowerSecondHalf((UnpackEndianHalfNode) n); 255 } else { 256 throw GraalError.shouldNotReachHere("Node implementing Lowerable not handled: " + n); 257 } 258 } 259 } 260 261 private void lowerSecondHalf(UnpackEndianHalfNode n) { 262 ByteOrder byteOrder = target.arch.getByteOrder(); 263 n.lower(byteOrder); 264 } 265 266 private void lowerIndexOf(StringIndexOfNode n) { 267 if (n.getArgument(3).isConstant()) { 268 SnippetLowering lowering = new SnippetLowering() { 269 @Override 270 public void lower(SnippetLowerableMemoryNode node, LoweringTool tool) { 271 if (tool.getLoweringStage() != LoweringTool.StandardLoweringStage.LOW_TIER) { 272 return; 273 } 274 indexOfSnippets.lower(node, tool); 275 } 276 }; 277 SnippetLowerableMemoryNode snippetLower = new SnippetLowerableMemoryNode(lowering, NamedLocationIdentity.getArrayLocation(JavaKind.Char), n.stamp(NodeView.DEFAULT), n.toArgumentArray()); 278 n.graph().add(snippetLower); 279 n.graph().replaceFixedWithFixed(n, snippetLower); 280 } 281 } 282 283 private void lowerLatin1IndexOf(StringLatin1IndexOfNode n) { 284 if (n.getArgument(2).isConstant()) { 285 SnippetLowering lowering = new SnippetLowering() { 286 @Override 287 public void lower(SnippetLowerableMemoryNode node, LoweringTool tool) { 288 if (tool.getLoweringStage() != 
LoweringTool.StandardLoweringStage.LOW_TIER) { 289 return; 290 } 291 indexOfSnippets.lowerLatin1(node, tool); 292 } 293 }; 294 SnippetLowerableMemoryNode snippetLower = new SnippetLowerableMemoryNode(lowering, NamedLocationIdentity.getArrayLocation(JavaKind.Byte), n.stamp(NodeView.DEFAULT), n.toArgumentArray()); 295 n.graph().add(snippetLower); 296 n.graph().replaceFixedWithFixed(n, snippetLower); 297 } 298 } 299 300 private void lowerUTF16IndexOf(StringUTF16IndexOfNode n) { 301 if (n.getArgument(2).isConstant()) { 302 SnippetLowering lowering = new SnippetLowering() { 303 @Override 304 public void lower(SnippetLowerableMemoryNode node, LoweringTool tool) { 305 if (tool.getLoweringStage() != LoweringTool.StandardLoweringStage.LOW_TIER) { 306 return; 307 } 308 indexOfSnippets.lowerUTF16(node, tool); 309 } 310 }; 311 SnippetLowerableMemoryNode snippetLower = new SnippetLowerableMemoryNode(lowering, NamedLocationIdentity.getArrayLocation(JavaKind.Byte), n.stamp(NodeView.DEFAULT), n.toArgumentArray()); 312 n.graph().add(snippetLower); 313 n.graph().replaceFixedWithFixed(n, snippetLower); 314 } 315 } 316 317 private void lowerBinaryMath(BinaryMathIntrinsicNode math, LoweringTool tool) { 318 if (tool.getLoweringStage() == LoweringTool.StandardLoweringStage.HIGH_TIER) { 319 return; 320 } 321 ResolvedJavaMethod method = math.graph().method(); 322 if (method != null) { 323 if (method.getAnnotation(Snippet.class) != null) { 324 // In the context of SnippetStub, i.e., Graal-generated stubs, use the LIR 325 // lowering to emit the stub assembly code instead of the Node lowering. 326 return; 327 } 328 if (method.getName().equalsIgnoreCase(math.getOperation().name()) && tool.getMetaAccess().lookupJavaType(Math.class).equals(method.getDeclaringClass())) { 329 // A root compilation of the intrinsic method should emit the full assembly 330 // implementation. 
331 return; 332 } 333 } 334 StructuredGraph graph = math.graph(); 335 ForeignCallNode call = graph.add(new ForeignCallNode(foreignCalls, math.getOperation().foreignCallDescriptor, math.getX(), math.getY())); 336 graph.addAfterFixed(tool.lastFixedNode(), call); 337 math.replaceAtUsages(call); 338 } 339 340 private void lowerUnaryMath(UnaryMathIntrinsicNode math, LoweringTool tool) { 341 if (tool.getLoweringStage() == LoweringTool.StandardLoweringStage.HIGH_TIER) { 342 return; 343 } 344 ResolvedJavaMethod method = math.graph().method(); 345 if (method != null) { 346 if (method.getName().equalsIgnoreCase(math.getOperation().name()) && tool.getMetaAccess().lookupJavaType(Math.class).equals(method.getDeclaringClass())) { 347 // A root compilation of the intrinsic method should emit the full assembly 348 // implementation. 349 return; 350 } 351 } 352 StructuredGraph graph = math.graph(); 353 ForeignCallNode call = math.graph().add(new ForeignCallNode(foreignCalls, math.getOperation().foreignCallDescriptor, math.getValue())); 354 graph.addAfterFixed(tool.lastFixedNode(), call); 355 math.replaceAtUsages(call); 356 } 357 358 protected void lowerVerifyHeap(VerifyHeapNode n) { 359 GraphUtil.removeFixedWithUnusedInputs(n); 360 } 361 362 protected AddressNode createOffsetAddress(StructuredGraph graph, ValueNode object, long offset) { 363 ValueNode o = ConstantNode.forIntegerKind(target.wordJavaKind, offset, graph); 364 return graph.unique(new OffsetAddressNode(object, o)); 365 } 366 367 protected AddressNode createFieldAddress(StructuredGraph graph, ValueNode object, ResolvedJavaField field) { 368 int offset = fieldOffset(field); 369 if (offset >= 0) { 370 return createOffsetAddress(graph, object, offset); 371 } else { 372 return null; 373 } 374 } 375 376 protected abstract JavaKind getStorageKind(ResolvedJavaField field); 377 378 protected void lowerLoadFieldNode(LoadFieldNode loadField, LoweringTool tool) { 379 assert loadField.getStackKind() != JavaKind.Illegal; 380 
StructuredGraph graph = loadField.graph(); 381 ResolvedJavaField field = loadField.field(); 382 ValueNode object = loadField.isStatic() ? staticFieldBase(graph, field) : loadField.object(); 383 object = createNullCheckedValue(object, loadField, tool); 384 Stamp loadStamp = loadStamp(loadField.stamp(NodeView.DEFAULT), getStorageKind(field)); 385 386 AddressNode address = createFieldAddress(graph, object, field); 387 assert address != null : "Field that is loaded must not be eliminated: " + field.getDeclaringClass().toJavaName(true) + "." + field.getName(); 388 389 ReadNode memoryRead = graph.add(new ReadNode(address, fieldLocationIdentity(field), loadStamp, fieldLoadBarrierType(field))); 390 ValueNode readValue = implicitLoadConvert(graph, getStorageKind(field), memoryRead); 391 loadField.replaceAtUsages(readValue); 392 graph.replaceFixed(loadField, memoryRead); 393 394 if (loadField.isVolatile()) { 395 MembarNode preMembar = graph.add(new MembarNode(JMM_PRE_VOLATILE_READ)); 396 graph.addBeforeFixed(memoryRead, preMembar); 397 MembarNode postMembar = graph.add(new MembarNode(JMM_POST_VOLATILE_READ)); 398 graph.addAfterFixed(memoryRead, postMembar); 399 } 400 } 401 402 protected void lowerStoreFieldNode(StoreFieldNode storeField, LoweringTool tool) { 403 StructuredGraph graph = storeField.graph(); 404 ResolvedJavaField field = storeField.field(); 405 ValueNode object = storeField.isStatic() ? 
staticFieldBase(graph, field) : storeField.object(); 406 object = createNullCheckedValue(object, storeField, tool); 407 ValueNode value = implicitStoreConvert(graph, getStorageKind(storeField.field()), storeField.value()); 408 AddressNode address = createFieldAddress(graph, object, field); 409 assert address != null; 410 411 WriteNode memoryWrite = graph.add(new WriteNode(address, fieldLocationIdentity(field), value, fieldStoreBarrierType(storeField.field()))); 412 memoryWrite.setStateAfter(storeField.stateAfter()); 413 graph.replaceFixedWithFixed(storeField, memoryWrite); 414 415 if (storeField.isVolatile()) { 416 MembarNode preMembar = graph.add(new MembarNode(JMM_PRE_VOLATILE_WRITE)); 417 graph.addBeforeFixed(memoryWrite, preMembar); 418 MembarNode postMembar = graph.add(new MembarNode(JMM_POST_VOLATILE_WRITE)); 419 graph.addAfterFixed(memoryWrite, postMembar); 420 } 421 } 422 423 public static final IntegerStamp POSITIVE_ARRAY_INDEX_STAMP = StampFactory.forInteger(32, 0, Integer.MAX_VALUE - 1); 424 425 /** 426 * Create a PiNode on the index proving that the index is positive. On some platforms this is 427 * important to allow the index to be used as an int in the address mode. 428 */ 429 public AddressNode createArrayIndexAddress(StructuredGraph graph, ValueNode array, JavaKind elementKind, ValueNode index, GuardingNode boundsCheck) { 430 ValueNode positiveIndex = graph.maybeAddOrUnique(PiNode.create(index, POSITIVE_ARRAY_INDEX_STAMP, boundsCheck != null ? 
boundsCheck.asNode() : null)); 431 return createArrayAddress(graph, array, elementKind, positiveIndex); 432 } 433 434 public AddressNode createArrayAddress(StructuredGraph graph, ValueNode array, JavaKind elementKind, ValueNode index) { 435 return createArrayAddress(graph, array, elementKind, elementKind, index); 436 } 437 438 public AddressNode createArrayAddress(StructuredGraph graph, ValueNode array, JavaKind arrayKind, JavaKind elementKind, ValueNode index) { 439 ValueNode wordIndex; 440 if (target.wordSize > 4) { 441 wordIndex = graph.unique(new SignExtendNode(index, target.wordSize * 8)); 442 } else { 443 assert target.wordSize == 4 : "unsupported word size"; 444 wordIndex = index; 445 } 446 447 int shift = CodeUtil.log2(metaAccess.getArrayIndexScale(elementKind)); 448 ValueNode scaledIndex = graph.unique(new LeftShiftNode(wordIndex, ConstantNode.forInt(shift, graph))); 449 450 int base = metaAccess.getArrayBaseOffset(arrayKind); 451 ValueNode offset = graph.unique(new AddNode(scaledIndex, ConstantNode.forIntegerKind(target.wordJavaKind, base, graph))); 452 453 return graph.unique(new OffsetAddressNode(array, offset)); 454 } 455 456 protected void lowerIndexAddressNode(IndexAddressNode indexAddress) { 457 AddressNode lowered = createArrayAddress(indexAddress.graph(), indexAddress.getArray(), indexAddress.getArrayKind(), indexAddress.getElementKind(), indexAddress.getIndex()); 458 indexAddress.replaceAndDelete(lowered); 459 } 460 461 protected void lowerLoadIndexedNode(LoadIndexedNode loadIndexed, LoweringTool tool) { 462 StructuredGraph graph = loadIndexed.graph(); 463 ValueNode array = loadIndexed.array(); 464 array = createNullCheckedValue(array, loadIndexed, tool); 465 JavaKind elementKind = loadIndexed.elementKind(); 466 Stamp loadStamp = loadStamp(loadIndexed.stamp(NodeView.DEFAULT), elementKind); 467 468 GuardingNode boundsCheck = getBoundsCheck(loadIndexed, array, tool); 469 ValueNode index = loadIndexed.index(); 470 if 
(UseIndexMasking.getValue(graph.getOptions())) { 471 index = proxyIndex(loadIndexed, index, array, tool); 472 } 473 AddressNode address = createArrayIndexAddress(graph, array, elementKind, index, boundsCheck); 474 475 ReadNode memoryRead = graph.add(new ReadNode(address, NamedLocationIdentity.getArrayLocation(elementKind), loadStamp, BarrierType.NONE)); 476 memoryRead.setGuard(boundsCheck); 477 ValueNode readValue = implicitLoadConvert(graph, elementKind, memoryRead); 478 479 loadIndexed.replaceAtUsages(readValue); 480 graph.replaceFixed(loadIndexed, memoryRead); 481 } 482 483 protected void lowerStoreIndexedNode(StoreIndexedNode storeIndexed, LoweringTool tool) { 484 StructuredGraph graph = storeIndexed.graph(); 485 486 ValueNode value = storeIndexed.value(); 487 ValueNode array = storeIndexed.array(); 488 489 array = this.createNullCheckedValue(array, storeIndexed, tool); 490 491 GuardingNode boundsCheck = getBoundsCheck(storeIndexed, array, tool); 492 493 JavaKind elementKind = storeIndexed.elementKind(); 494 495 LogicNode condition = null; 496 if (storeIndexed.getStoreCheck() == null && elementKind == JavaKind.Object && !StampTool.isPointerAlwaysNull(value)) { 497 /* Array store check. */ 498 TypeReference arrayType = StampTool.typeReferenceOrNull(array); 499 if (arrayType != null && arrayType.isExact()) { 500 ResolvedJavaType elementType = arrayType.getType().getComponentType(); 501 if (!elementType.isJavaLangObject()) { 502 TypeReference typeReference = TypeReference.createTrusted(storeIndexed.graph().getAssumptions(), elementType); 503 LogicNode typeTest = graph.addOrUniqueWithInputs(InstanceOfNode.create(typeReference, value)); 504 condition = LogicNode.or(graph.unique(IsNullNode.create(value)), typeTest, GraalDirectives.UNLIKELY_PROBABILITY); 505 } 506 } else { 507 /* 508 * The guard on the read hub should be the null check of the array that was 509 * introduced earlier. 
510 */ 511 ValueNode arrayClass = createReadHub(graph, array, tool); 512 ValueNode componentHub = createReadArrayComponentHub(graph, arrayClass, storeIndexed); 513 LogicNode typeTest = graph.unique(InstanceOfDynamicNode.create(graph.getAssumptions(), tool.getConstantReflection(), componentHub, value, false)); 514 condition = LogicNode.or(graph.unique(IsNullNode.create(value)), typeTest, GraalDirectives.UNLIKELY_PROBABILITY); 515 } 516 } 517 518 AddressNode address = createArrayIndexAddress(graph, array, elementKind, storeIndexed.index(), boundsCheck); 519 WriteNode memoryWrite = graph.add(new WriteNode(address, NamedLocationIdentity.getArrayLocation(elementKind), implicitStoreConvert(graph, elementKind, value), 520 arrayStoreBarrierType(storeIndexed.elementKind()))); 521 memoryWrite.setGuard(boundsCheck); 522 if (condition != null) { 523 tool.createGuard(storeIndexed, condition, DeoptimizationReason.ArrayStoreException, DeoptimizationAction.InvalidateReprofile); 524 } 525 memoryWrite.setStateAfter(storeIndexed.stateAfter()); 526 graph.replaceFixedWithFixed(storeIndexed, memoryWrite); 527 } 528 529 protected void lowerArrayLengthNode(ArrayLengthNode arrayLengthNode, LoweringTool tool) { 530 arrayLengthNode.replaceAtUsages(createReadArrayLength(arrayLengthNode.array(), arrayLengthNode, tool)); 531 StructuredGraph graph = arrayLengthNode.graph(); 532 graph.removeFixed(arrayLengthNode); 533 } 534 535 /** 536 * Creates a read node that read the array length and is guarded by a null-check. 537 * 538 * The created node is placed before {@code before} in the CFG. 
539 */ 540 protected ReadNode createReadArrayLength(ValueNode array, FixedNode before, LoweringTool tool) { 541 StructuredGraph graph = array.graph(); 542 ValueNode canonicalArray = this.createNullCheckedValue(skipPiWhileNonNull(array), before, tool); 543 AddressNode address = createOffsetAddress(graph, canonicalArray, arrayLengthOffset()); 544 ReadNode readArrayLength = graph.add(new ReadNode(address, ARRAY_LENGTH_LOCATION, StampFactory.positiveInt(), BarrierType.NONE)); 545 graph.addBeforeFixed(before, readArrayLength); 546 return readArrayLength; 547 } 548 549 protected void lowerLoadHubNode(LoadHubNode loadHub, LoweringTool tool) { 550 StructuredGraph graph = loadHub.graph(); 551 if (tool.getLoweringStage() != LoweringTool.StandardLoweringStage.LOW_TIER) { 552 return; 553 } 554 if (graph.getGuardsStage().allowsFloatingGuards()) { 555 return; 556 } 557 ValueNode hub = createReadHub(graph, loadHub.getValue(), tool); 558 loadHub.replaceAtUsagesAndDelete(hub); 559 } 560 561 protected void lowerLoadArrayComponentHubNode(LoadArrayComponentHubNode loadHub) { 562 StructuredGraph graph = loadHub.graph(); 563 ValueNode hub = createReadArrayComponentHub(graph, loadHub.getValue(), loadHub); 564 graph.replaceFixed(loadHub, hub); 565 } 566 567 protected void lowerMonitorEnterNode(MonitorEnterNode monitorEnter, LoweringTool tool, StructuredGraph graph) { 568 ValueNode object = createNullCheckedValue(monitorEnter.object(), monitorEnter, tool); 569 ValueNode hub = graph.addOrUnique(LoadHubNode.create(object, tool.getStampProvider(), tool.getMetaAccess(), tool.getConstantReflection())); 570 RawMonitorEnterNode rawMonitorEnter = graph.add(new RawMonitorEnterNode(object, hub, monitorEnter.getMonitorId())); 571 rawMonitorEnter.setStateBefore(monitorEnter.stateBefore()); 572 rawMonitorEnter.setStateAfter(monitorEnter.stateAfter()); 573 graph.replaceFixedWithFixed(monitorEnter, rawMonitorEnter); 574 } 575 576 protected void lowerCompareAndSwapNode(UnsafeCompareAndSwapNode cas) { 577 
        // (continuation of lowerCompareAndSwapNode — the signature is above this chunk)
        StructuredGraph graph = cas.graph();
        JavaKind valueKind = cas.getValueKind();

        // Convert expected/new values to their in-memory representation (narrowing and/or
        // oop compression) before building the raw CAS node.
        ValueNode expectedValue = implicitStoreConvert(graph, valueKind, cas.expected());
        ValueNode newValue = implicitStoreConvert(graph, valueKind, cas.newValue());

        AddressNode address = graph.unique(new OffsetAddressNode(cas.object(), cas.offset()));
        BarrierType barrierType = guessStoreBarrierType(cas.object(), expectedValue);
        LogicCompareAndSwapNode atomicNode = graph.add(new LogicCompareAndSwapNode(address, cas.getLocationIdentity(), expectedValue, newValue, barrierType));
        atomicNode.setStateAfter(cas.stateAfter());
        graph.replaceFixedWithFixed(cas, atomicNode);
    }

    /**
     * Lowers an {@link UnsafeCompareAndExchangeNode} to a {@link ValueCompareAndSwapNode} plus the
     * implicit load conversion needed to coerce the returned (witnessed) memory value back to the
     * caller-visible kind.
     */
    protected void lowerCompareAndExchangeNode(UnsafeCompareAndExchangeNode cas) {
        StructuredGraph graph = cas.graph();
        JavaKind valueKind = cas.getValueKind();

        // Store-convert the operands so they match the raw memory representation.
        ValueNode expectedValue = implicitStoreConvert(graph, valueKind, cas.expected());
        ValueNode newValue = implicitStoreConvert(graph, valueKind, cas.newValue());

        AddressNode address = graph.unique(new OffsetAddressNode(cas.object(), cas.offset()));
        BarrierType barrierType = guessStoreBarrierType(cas.object(), expectedValue);
        ValueCompareAndSwapNode atomicNode = graph.add(new ValueCompareAndSwapNode(address, expectedValue, newValue, cas.getLocationIdentity(), barrierType));
        // Usages see the load-converted witnessed value, not the raw memory value.
        ValueNode coercedNode = implicitLoadConvert(graph, valueKind, atomicNode, true);
        atomicNode.setStateAfter(cas.stateAfter());
        cas.replaceAtUsages(coercedNode);
        graph.replaceFixedWithFixed(cas, atomicNode);
    }

    /**
     * Lowers an atomic swap ({@link AtomicReadAndWriteNode}) to a
     * {@link LoweredAtomicReadAndWriteNode}; the previous memory value becomes the node's result
     * after an implicit load conversion.
     */
    protected void lowerAtomicReadAndWriteNode(AtomicReadAndWriteNode n) {
        StructuredGraph graph = n.graph();
        JavaKind valueKind = n.getValueKind();

        ValueNode newValue = implicitStoreConvert(graph, valueKind, n.newValue());

        AddressNode address = graph.unique(new OffsetAddressNode(n.object(), n.offset()));
        // NOTE(review): the barrier is guessed from the *unconverted* new value here (elsewhere the
        // converted value is used) — presumably equivalent since only the stack kind matters.
        BarrierType barrierType = guessStoreBarrierType(n.object(), n.newValue());
        LIRKind lirAccessKind = LIRKind.fromJavaKind(target.arch, valueKind);
        LoweredAtomicReadAndWriteNode memoryRead = graph.add(new LoweredAtomicReadAndWriteNode(address, n.getLocationIdentity(), newValue, lirAccessKind, barrierType));
        memoryRead.setStateAfter(n.stateAfter());

        ValueNode readValue = implicitLoadConvert(graph, valueKind, memoryRead);
        // The frame state must reference the replacement node, not the node being lowered.
        n.stateAfter().replaceFirstInput(n, memoryRead);
        n.replaceAtUsages(readValue);
        graph.replaceFixedWithFixed(n, memoryRead);
    }

    /**
     * Lowers a {@link RawLoadNode} to a {@link ReadNode}, keeping the read fixed (or attached to
     * its guard) so it cannot float above the test that establishes its safety.
     *
     * @param tool utility for performing the lowering
     */
    protected void lowerUnsafeLoadNode(RawLoadNode load, LoweringTool tool) {
        StructuredGraph graph = load.graph();
        if (load instanceof GuardedUnsafeLoadNode) {
            GuardedUnsafeLoadNode guardedLoad = (GuardedUnsafeLoadNode) load;
            GuardingNode guard = guardedLoad.getGuard();
            if (guard == null) {
                // can float freely if the guard folded away
                ReadNode memoryRead = createUnsafeRead(graph, load, null);
                memoryRead.setForceFixed(false);
                graph.replaceFixedWithFixed(load, memoryRead);
            } else {
                // must be guarded, but flows below the guard
                ReadNode memoryRead = createUnsafeRead(graph, load, guard);
                graph.replaceFixedWithFixed(load, memoryRead);
            }
        } else {
            // never had a guarding condition so it must be fixed, creation of the read will force
            // it to be fixed
            ReadNode memoryRead = createUnsafeRead(graph, load, null);
            graph.replaceFixedWithFixed(load, memoryRead);
        }
    }

    /**
     * Builds the address for an unsafe access. When the base object is a constant default for its
     * kind (e.g. a null base), the access is treated as a raw-memory address with only the offset.
     */
    protected AddressNode createUnsafeAddress(StructuredGraph graph, ValueNode object, ValueNode offset) {
        if (object.isConstant() && object.asConstant().isDefaultForKind()) {
            return graph.addOrUniqueWithInputs(OffsetAddressNode.create(offset));
        } else {
            return graph.unique(new OffsetAddressNode(object, offset));
        }
    }

    /**
     * Creates the {@link ReadNode} replacement for a {@link RawLoadNode} and rewires the load's
     * usages to the (possibly boolean-coerced and uncompressed) read value.
     *
     * @param guard guard the read depends on, or {@code null} to force the read to stay fixed
     * @return the read node; the caller is responsible for splicing it into the control flow
     */
    protected ReadNode createUnsafeRead(StructuredGraph graph, RawLoadNode load, GuardingNode guard) {
        // Object-kind unsafe reads go through the compressed-oop path when enabled.
        boolean compressible = load.accessKind() == JavaKind.Object;
        JavaKind readKind = load.accessKind();
        Stamp loadStamp = loadStamp(load.stamp(NodeView.DEFAULT), readKind, compressible);
        AddressNode address = createUnsafeAddress(graph, load.object(), load.offset());
        ReadNode memoryRead = graph.add(new ReadNode(address, load.getLocationIdentity(), loadStamp, BarrierType.NONE));
        if (guard == null) {
            // An unsafe read must not float otherwise it may float above
            // a test guaranteeing the read is safe.
            memoryRead.setForceFixed(true);
        } else {
            memoryRead.setGuard(guard);
        }
        ValueNode readValue = performBooleanCoercionIfNecessary(implicitLoadConvert(graph, readKind, memoryRead, compressible), readKind);
        load.replaceAtUsages(readValue);
        return memoryRead;
    }

    /**
     * Lowers a raw-address {@link UnsafeMemoryLoadNode} (no base object, never an oop) to a forced
     * fixed {@link ReadNode}.
     */
    protected void lowerUnsafeMemoryLoadNode(UnsafeMemoryLoadNode load) {
        StructuredGraph graph = load.graph();
        JavaKind readKind = load.getKind();
        assert readKind != JavaKind.Object;
        Stamp loadStamp = loadStamp(load.stamp(NodeView.DEFAULT), readKind, false);
        AddressNode address = graph.addOrUniqueWithInputs(OffsetAddressNode.create(load.getAddress()));
        ReadNode memoryRead = graph.add(new ReadNode(address, load.getLocationIdentity(), loadStamp, BarrierType.NONE));
        // An unsafe read must not float otherwise it may float above
        // a test guaranteeing the read is safe.
        memoryRead.setForceFixed(true);
        ValueNode readValue = performBooleanCoercionIfNecessary(implicitLoadConvert(graph, readKind, memoryRead, false), readKind);
        load.replaceAtUsages(readValue);
        graph.replaceFixedWithFixed(load, memoryRead);
    }

    /**
     * Canonicalizes a boolean read: any non-zero byte in memory maps to {@code true}
     * ({@code readValue == 0 ? false : true}). Non-boolean kinds pass through unchanged.
     */
    private static ValueNode performBooleanCoercionIfNecessary(ValueNode readValue, JavaKind readKind) {
        if (readKind == JavaKind.Boolean) {
            StructuredGraph graph = readValue.graph();
            IntegerEqualsNode eq = graph.addOrUnique(new IntegerEqualsNode(readValue, ConstantNode.forInt(0, graph)));
            return graph.addOrUnique(new ConditionalNode(eq, ConstantNode.forBoolean(false, graph), ConstantNode.forBoolean(true, graph)));
        }
        return readValue;
    }

    /**
     * Lowers a {@link RawStoreNode} to a {@link WriteNode} with the store-converted value and a
     * barrier guessed from the base object/value types.
     */
    protected void lowerUnsafeStoreNode(RawStoreNode store) {
        StructuredGraph graph = store.graph();
        boolean compressible = store.value().getStackKind() == JavaKind.Object;
        JavaKind valueKind = store.accessKind();
        ValueNode value = implicitStoreConvert(graph, valueKind, store.value(), compressible);
        AddressNode address = createUnsafeAddress(graph, store.object(), store.offset());
        WriteNode write = graph.add(new WriteNode(address, store.getLocationIdentity(), value, unsafeStoreBarrierType(store)));
        write.setStateAfter(store.stateAfter());
        graph.replaceFixedWithFixed(store, write);
    }

    /**
     * Lowers a raw-address {@link UnsafeMemoryStoreNode} (no base object, never an oop) to a
     * {@link WriteNode} with no GC barrier.
     */
    protected void lowerUnsafeMemoryStoreNode(UnsafeMemoryStoreNode store) {
        StructuredGraph graph = store.graph();
        assert store.getValue().getStackKind() != JavaKind.Object;
        JavaKind valueKind = store.getKind();
        ValueNode value = implicitStoreConvert(graph, valueKind, store.getValue(), false);
        AddressNode address = graph.addOrUniqueWithInputs(OffsetAddressNode.create(store.getAddress()));
        WriteNode write = graph.add(new WriteNode(address, store.getLocationIdentity(), value, BarrierType.NONE));
        write.setStateAfter(store.stateAfter());
        graph.replaceFixedWithFixed(store, write);
    }
    /**
     * Lowers a {@link JavaReadNode} to a {@link ReadNode}, preserving its barrier type and either
     * attaching its guard or forcing it to stay fixed.
     */
    protected void lowerJavaReadNode(JavaReadNode read) {
        StructuredGraph graph = read.graph();
        JavaKind valueKind = read.getReadKind();
        Stamp loadStamp = loadStamp(read.stamp(NodeView.DEFAULT), valueKind, read.isCompressible());

        ReadNode memoryRead = graph.add(new ReadNode(read.getAddress(), read.getLocationIdentity(), loadStamp, read.getBarrierType()));
        GuardingNode guard = read.getGuard();
        ValueNode readValue = implicitLoadConvert(graph, valueKind, memoryRead, read.isCompressible());
        if (guard == null) {
            // An unsafe read must not float otherwise it may float above
            // a test guaranteeing the read is safe.
            memoryRead.setForceFixed(true);
        } else {
            memoryRead.setGuard(guard);
        }
        read.replaceAtUsages(readValue);
        graph.replaceFixed(read, memoryRead);
    }

    /**
     * Lowers a {@link JavaWriteNode} to a {@link WriteNode}, carrying over location identity,
     * barrier type, frame state and guard.
     */
    protected void lowerJavaWriteNode(JavaWriteNode write) {
        StructuredGraph graph = write.graph();
        ValueNode value = implicitStoreConvert(graph, write.getWriteKind(), write.value(), write.isCompressible());
        WriteNode memoryWrite = graph.add(new WriteNode(write.getAddress(), write.getLocationIdentity(), value, write.getBarrierType()));
        memoryWrite.setStateAfter(write.stateAfter());
        graph.replaceFixedWithFixed(write, memoryWrite);
        memoryWrite.setGuard(write.getGuard());
    }

    /**
     * Materializes the virtual objects of a {@link CommitAllocationNode} (produced by escape
     * analysis) once guards have been fixed. Two passes are made over the flattened values list:
     * the first creates the allocations and initializes entries whose values are already
     * available; the second fills in entries that referenced a not-yet-materialized virtual
     * object (recorded in {@code omittedValues}). Finally the allocations are anchored, locks are
     * entered and the commit node is removed.
     */
    @SuppressWarnings("try")
    protected void lowerCommitAllocationNode(CommitAllocationNode commit, LoweringTool tool) {
        StructuredGraph graph = commit.graph();
        if (graph.getGuardsStage() == StructuredGraph.GuardsStage.FIXED_DEOPTS) {
            // Allocation nodes created here must themselves be lowered once wiring is complete.
            List<AbstractNewObjectNode> recursiveLowerings = new ArrayList<>();

            ValueNode[] allocations = new ValueNode[commit.getVirtualObjects().size()];
            // Positions in commit.getValues() that must be written in the second pass because
            // they referenced a virtual object that had not been materialized yet.
            BitSet omittedValues = new BitSet();
            int valuePos = 0;
            for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
                VirtualObjectNode virtual = commit.getVirtualObjects().get(objIndex);
                try (DebugCloseable nsp = graph.withNodeSourcePosition(virtual)) {
                    int entryCount = virtual.entryCount();
                    AbstractNewObjectNode newObject;
                    if (virtual instanceof VirtualInstanceNode) {
                        newObject = graph.add(createNewInstanceFromVirtual(virtual));
                    } else {
                        newObject = graph.add(createNewArrayFromVirtual(virtual, ConstantNode.forInt(entryCount, graph)));
                    }
                    // The final STORE_STORE barrier will be emitted by finishAllocatedObjects
                    newObject.clearEmitMemoryBarrier();

                    recursiveLowerings.add(newObject);
                    graph.addBeforeFixed(commit, newObject);
                    allocations[objIndex] = newObject;
                    for (int i = 0; i < entryCount; i++) {
                        ValueNode value = commit.getValues().get(valuePos);
                        if (value instanceof VirtualObjectNode) {
                            // May be null if that virtual object is materialized later (handled
                            // by the omittedValues pass below).
                            value = allocations[commit.getVirtualObjects().indexOf(value)];
                        }
                        if (value == null) {
                            omittedValues.set(valuePos);
                        } else if (!(value.isConstant() && value.asConstant().isDefaultForKind())) {
                            // Constant.illegal is always the defaultForKind, so it is skipped
                            JavaKind valueKind = value.getStackKind();
                            JavaKind entryKind = virtual.entryKind(i);

                            // Truffle requires some leniency in terms of what can be put where:
                            assert valueKind.getStackKind() == entryKind.getStackKind() ||
                                            (valueKind == JavaKind.Long || valueKind == JavaKind.Double || (valueKind == JavaKind.Int && virtual instanceof VirtualArrayNode));
                            AddressNode address = null;
                            BarrierType barrierType = null;
                            if (virtual instanceof VirtualInstanceNode) {
                                ResolvedJavaField field = ((VirtualInstanceNode) virtual).field(i);
                                long offset = fieldOffset(field);
                                // A negative offset means the field has no location; skip the
                                // write in that case.
                                if (offset >= 0) {
                                    address = createOffsetAddress(graph, newObject, offset);
                                    barrierType = fieldInitializationBarrier(entryKind);
                                }
                            } else {
                                address = createOffsetAddress(graph, newObject, metaAccess.getArrayBaseOffset(entryKind) + i * metaAccess.getArrayIndexScale(entryKind));
                                barrierType = arrayInitializationBarrier(entryKind);
                            }
                            if (address != null) {
                                WriteNode write = new WriteNode(address, LocationIdentity.init(), implicitStoreConvert(graph, entryKind, value), barrierType);
                                graph.addAfterFixed(newObject, graph.add(write));
                            }
                        }
                        valuePos++;
                    }
                }
            }
            valuePos = 0;

            // Second pass: write the entries that were omitted because their value was a
            // virtual object not yet materialized during the first pass.
            for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
                VirtualObjectNode virtual = commit.getVirtualObjects().get(objIndex);
                try (DebugCloseable nsp = graph.withNodeSourcePosition(virtual)) {
                    int entryCount = virtual.entryCount();
                    ValueNode newObject = allocations[objIndex];
                    for (int i = 0; i < entryCount; i++) {
                        if (omittedValues.get(valuePos)) {
                            ValueNode value = commit.getValues().get(valuePos);
                            assert value instanceof VirtualObjectNode;
                            ValueNode allocValue = allocations[commit.getVirtualObjects().indexOf(value)];
                            if (!(allocValue.isConstant() && allocValue.asConstant().isDefaultForKind())) {
                                assert virtual.entryKind(i) == JavaKind.Object && allocValue.getStackKind() == JavaKind.Object;
                                AddressNode address;
                                BarrierType barrierType;
                                if (virtual instanceof VirtualInstanceNode) {
                                    VirtualInstanceNode virtualInstance = (VirtualInstanceNode) virtual;
                                    address = createFieldAddress(graph, newObject, virtualInstance.field(i));
                                    barrierType = fieldStoreBarrierType(virtualInstance.field(i));
                                } else {
                                    address = createArrayAddress(graph, newObject, virtual.entryKind(i), ConstantNode.forInt(i, graph));
                                    barrierType = arrayStoreBarrierType(virtual.entryKind(i));
                                }
                                if (address != null) {
                                    WriteNode write = new WriteNode(address, LocationIdentity.init(), implicitStoreConvert(graph, JavaKind.Object, allocValue), barrierType);
                                    graph.addBeforeFixed(commit, graph.add(write));
                                }
                            }
                        }
                        valuePos++;
                    }
                }
            }

            finishAllocatedObjects(tool, commit, allocations);
            graph.removeFixed(commit);

            // Lower the allocation nodes only after all initializing writes are in place.
            for (AbstractNewObjectNode recursiveLowering : recursiveLowerings) {
                recursiveLowering.lower(tool);
            }
        }
    }

    /** Creates the concrete allocation node for a virtual instance (fillContents = true). */
    public NewInstanceNode createNewInstanceFromVirtual(VirtualObjectNode virtual) {
        return new NewInstanceNode(virtual.type(), true);
    }

    /** Creates the concrete allocation node for a virtual array (fillContents = true). */
    protected NewArrayNode createNewArrayFromVirtual(VirtualObjectNode virtual, ValueNode length) {
        return new NewArrayNode(((VirtualArrayNode) virtual).componentType(), length, true);
    }

    /**
     * Completes materialization: anchors each allocation, re-enters any locks held on the
     * virtual objects (in lock-depth order), rewires {@link AllocatedObjectNode} usages, and
     * emits the trailing allocation memory barrier.
     */
    public void finishAllocatedObjects(LoweringTool tool, CommitAllocationNode commit, ValueNode[] allocations) {
        StructuredGraph graph = commit.graph();
        for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
            FixedValueAnchorNode anchor = graph.add(new FixedValueAnchorNode(allocations[objIndex]));
            allocations[objIndex] = anchor;
            graph.addBeforeFixed(commit, anchor);
        }
        /*
         * Note that the FrameState that is assigned to these MonitorEnterNodes isn't the correct
         * state. It will be the state from before the allocation occurred instead of a valid state
         * after the locking is performed. In practice this should be fine since these are newly
         * allocated objects. The bytecodes themselves permit allocating an object, doing a
         * monitorenter and then dropping all references to the object which would produce the same
         * state, though that would normally produce an IllegalMonitorStateException. In HotSpot
         * some form of fast path locking should always occur so the FrameState should never
         * actually be used.
         */
        ArrayList<MonitorEnterNode> enters = null;
        for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
            List<MonitorIdNode> locks = commit.getLocks(objIndex);
            if (locks.size() > 1) {
                // Ensure that the lock operations are performed in lock depth order
                ArrayList<MonitorIdNode> newList = new ArrayList<>(locks);
                newList.sort((a, b) -> Integer.compare(a.getLockDepth(), b.getLockDepth()));
                locks = newList;
            }
            int lastDepth = -1;
            for (MonitorIdNode monitorId : locks) {
                assert lastDepth < monitorId.getLockDepth();
                lastDepth = monitorId.getLockDepth();
                MonitorEnterNode enter = graph.add(new MonitorEnterNode(allocations[objIndex], monitorId));
                graph.addBeforeFixed(commit, enter);
                if (enters == null) {
                    enters = new ArrayList<>();
                }
                enters.add(enter);
            }
        }
        for (Node usage : commit.usages().snapshot()) {
            if (usage instanceof AllocatedObjectNode) {
                AllocatedObjectNode addObject = (AllocatedObjectNode) usage;
                int index = commit.getVirtualObjects().indexOf(addObject.getVirtualObject());
                addObject.replaceAtUsagesAndDelete(allocations[index]);
            } else {
                // Any remaining usage must be a memory edge; it is redirected to the last
                // monitor enter (enters is non-null in that case).
                assert enters != null;
                commit.replaceAtUsages(InputType.Memory, enters.get(enters.size() - 1));
            }
        }
        if (enters != null) {
            for (MonitorEnterNode enter : enters) {
                enter.lower(tool);
            }
        }
        assert commit.hasNoUsages();
        insertAllocationBarrier(commit, graph);
    }

    /**
     * Insert the required {@link MemoryBarriers#STORE_STORE} barrier for an allocation and also
     * include the {@link MemoryBarriers#LOAD_STORE} required for final fields if any final fields
     * are being written, as if {@link FinalFieldBarrierNode} were emitted.
     */
    private static void insertAllocationBarrier(CommitAllocationNode commit, StructuredGraph graph) {
        int barrier = MemoryBarriers.STORE_STORE;
        outer: for (VirtualObjectNode vobj : commit.getVirtualObjects()) {
            for (ResolvedJavaField field : vobj.type().getInstanceFields(true)) {
                if (field.isFinal()) {
                    barrier = barrier | MemoryBarriers.LOAD_STORE;
                    break outer;
                }
            }
        }
        graph.addAfterFixed(commit, graph.add(new MembarNode(barrier, LocationIdentity.init())));
    }

    /**
     * Barrier required when loading {@code field}; no barrier by default (GC-specific subclasses
     * may override).
     *
     * @param field the field whose barrier type should be returned
     */
    protected BarrierType fieldLoadBarrierType(ResolvedJavaField field) {
        return BarrierType.NONE;
    }

    /** Barrier required when storing into {@code field}: a field barrier for oop stores. */
    protected BarrierType fieldStoreBarrierType(ResolvedJavaField field) {
        if (getStorageKind(field) == JavaKind.Object) {
            return BarrierType.FIELD;
        }
        return BarrierType.NONE;
    }

    /** Barrier required when storing an array element: an array barrier for oop stores. */
    protected BarrierType arrayStoreBarrierType(JavaKind elementKind) {
        if (elementKind == JavaKind.Object) {
            return BarrierType.ARRAY;
        }
        return BarrierType.NONE;
    }

    /** Barrier for initializing writes to a just-allocated object's fields. */
    public BarrierType fieldInitializationBarrier(JavaKind entryKind) {
        return entryKind == JavaKind.Object ? BarrierType.FIELD : BarrierType.NONE;
    }

    /** Barrier for initializing writes to a just-allocated array's elements. */
    public BarrierType arrayInitializationBarrier(JavaKind entryKind) {
        return entryKind == JavaKind.Object ? BarrierType.ARRAY : BarrierType.NONE;
    }

    private BarrierType unsafeStoreBarrierType(RawStoreNode store) {
        if (!store.needsBarrier()) {
            return BarrierType.NONE;
        }
        return guessStoreBarrierType(store.object(), store.value());
    }

    /**
     * Guesses the GC barrier for a store from the static types of the base object and the stored
     * value; non-oop stores need no barrier.
     */
    private BarrierType guessStoreBarrierType(ValueNode object, ValueNode value) {
        if (value.getStackKind() == JavaKind.Object && object.getStackKind() == JavaKind.Object) {
            ResolvedJavaType type = StampTool.typeOrNull(object);
            // Array types must use a precise barrier, so if the type is unknown or is a supertype
            // of Object[] then treat it as an array.
            if (type != null && type.isArray()) {
                return BarrierType.ARRAY;
            } else if (type == null || type.isAssignableFrom(objectArrayType)) {
                return BarrierType.UNKNOWN;
            } else {
                return BarrierType.FIELD;
            }
        }
        return BarrierType.NONE;
    }

    /** Returns the byte offset of {@code field} within its holder (VM-specific). */
    public abstract int fieldOffset(ResolvedJavaField field);

    /** Location identity used for accesses to {@code field}. */
    public FieldLocationIdentity fieldLocationIdentity(ResolvedJavaField field) {
        return new FieldLocationIdentity(field);
    }

    /** Returns the base node for accessing the given static field (VM-specific). */
    public abstract ValueNode staticFieldBase(StructuredGraph graph, ResolvedJavaField field);

    /** Returns the byte offset of the length word of an array (VM-specific). */
    public abstract int arrayLengthOffset();

    /** Convenience overload of {@link #loadStamp(Stamp, JavaKind, boolean)} with compression on. */
    public Stamp loadStamp(Stamp stamp, JavaKind kind) {
        return loadStamp(stamp, kind, true);
    }

    // Compressed oops apply only to compressible Object-kind accesses when the VM enables them.
    private boolean useCompressedOops(JavaKind kind, boolean compressible) {
        return kind == JavaKind.Object && compressible && useCompressedOops;
    }

    protected abstract Stamp loadCompressedStamp(ObjectStamp stamp);

    /**
     * Stamp of the raw memory value for a load of the given kind: sub-int kinds are narrowed to
     * their in-memory width (8 or 16 bits); compressible oops get the compressed stamp.
     *
     * @param compressible whether the stamp should be compressible
     */
    protected Stamp loadStamp(Stamp stamp, JavaKind kind, boolean compressible) {
        if (useCompressedOops(kind, compressible)) {
            return loadCompressedStamp((ObjectStamp) stamp);
        }

        switch (kind) {
            case Boolean:
            case Byte:
                return IntegerStamp.OPS.getNarrow().foldStamp(32, 8, stamp);
            case Char:
            case Short:
                return IntegerStamp.OPS.getNarrow().foldStamp(32, 16, stamp);
        }
        return stamp;
    }

    public final ValueNode implicitLoadConvert(StructuredGraph graph, JavaKind kind, ValueNode value) {
        return implicitLoadConvert(graph, kind, value, true);
    }

    public ValueNode implicitLoadConvert(JavaKind kind, ValueNode value) {
        return implicitLoadConvert(kind, value, true);
    }

    // Same as the kind/value overload but ensures the resulting node is added to the graph.
    protected final ValueNode implicitLoadConvert(StructuredGraph graph, JavaKind kind, ValueNode value, boolean compressible) {
        ValueNode ret = implicitLoadConvert(kind, value, compressible);
        if (!ret.isAlive()) {
            ret = graph.addOrUnique(ret);
        }
        return ret;
    }

    protected abstract ValueNode newCompressionNode(CompressionOp op, ValueNode value);

    /**
     * Converts a raw loaded value to its stack representation: sign-extend byte/short,
     * zero-extend boolean/char, uncompress compressible oops; other kinds pass through.
     *
     * @param compressible whether the convert should be compressible
     */
    protected ValueNode implicitLoadConvert(JavaKind kind, ValueNode value, boolean compressible) {
        if (useCompressedOops(kind, compressible)) {
            return newCompressionNode(CompressionOp.Uncompress, value);
        }

        switch (kind) {
            case Byte:
            case Short:
                return new SignExtendNode(value, 32);
            case Boolean:
            case Char:
                return new ZeroExtendNode(value, 32);
        }
        return value;
    }

    public final ValueNode implicitStoreConvert(StructuredGraph graph, JavaKind kind, ValueNode value) {
        return implicitStoreConvert(graph, kind, value, true);
    }

    public ValueNode implicitStoreConvert(JavaKind kind, ValueNode value) {
        return implicitStoreConvert(kind, value, true);
    }

    // Same as the kind/value overload but ensures the resulting node is added to the graph.
    protected final ValueNode implicitStoreConvert(StructuredGraph graph, JavaKind kind, ValueNode value, boolean compressible) {
        ValueNode ret = implicitStoreConvert(kind, value, compressible);
        if (!ret.isAlive()) {
            ret = graph.addOrUnique(ret);
        }
        return ret;
    }

    /**
     * Converts a stack value to its in-memory representation: narrow sub-int kinds to 8/16 bits,
     * compress compressible oops; other kinds pass through.
     *
     * @param compressible whether the convert should be compressible
     */
    protected ValueNode implicitStoreConvert(JavaKind kind, ValueNode value, boolean compressible) {
        if (useCompressedOops(kind, compressible)) {
            return newCompressionNode(CompressionOp.Compress, value);
        }

        switch (kind) {
            case Boolean:
            case Byte:
                return new NarrowNode(value, 8);
            case Char:
            case Short:
                return new NarrowNode(value, 16);
        }
        return value;
    }

    protected abstract ValueNode createReadHub(StructuredGraph graph, ValueNode object, LoweringTool tool);

    protected abstract ValueNode createReadArrayComponentHub(StructuredGraph graph, ValueNode arrayHub, FixedNode anchor);

    /**
     * Clamps an array index into [0, length - 1] branchlessly — presumably the index-masking
     * mitigation for speculative execution attacks (see UseIndexMasking); verify against callers.
     */
    protected ValueNode proxyIndex(AccessIndexedNode n, ValueNode index, ValueNode array, LoweringTool tool) {
        StructuredGraph graph = index.graph();
        ValueNode arrayLength = readOrCreateArrayLength(n, array, tool, graph);
        ValueNode lengthMinusOne = SubNode.create(arrayLength, ConstantNode.forInt(1), NodeView.DEFAULT);
        return branchlessMax(branchlessMin(index, lengthMinusOne, NodeView.DEFAULT), ConstantNode.forInt(0), NodeView.DEFAULT);
    }

    /**
     * Returns the bounds-check guard for an indexed access, reusing an existing one, or
     * {@code null} when the unsigned comparison {@code index |<| length} is statically true.
     */
    protected GuardingNode getBoundsCheck(AccessIndexedNode n, ValueNode array, LoweringTool tool) {
        if (n.getBoundsCheck() != null) {
            return n.getBoundsCheck();
        }

        StructuredGraph graph = n.graph();
        ValueNode arrayLength = readOrCreateArrayLength(n, array, tool, graph);

        LogicNode boundsCheck = IntegerBelowNode.create(n.index(), arrayLength, NodeView.DEFAULT);
        if (boundsCheck.isTautology()) {
            return null;
        }
        return tool.createGuard(n, graph.addOrUniqueWithInputs(boundsCheck), BoundsCheckException, InvalidateReprofile);
    }

    // Obtains the array length: fold it via constant reflection when possible, otherwise emit a
    // read; either way the returned node is guaranteed to be in the graph.
    private ValueNode readOrCreateArrayLength(AccessIndexedNode n, ValueNode array, LoweringTool tool, StructuredGraph graph) {
        ValueNode arrayLength = readArrayLength(array, tool.getConstantReflection());
        if (arrayLength == null) {
            arrayLength = createReadArrayLength(array, n, tool);
        } else {
            arrayLength = arrayLength.isAlive() ? arrayLength : graph.addOrUniqueWithInputs(arrayLength);
        }
        return arrayLength;
    }

    /**
     * Creates a null-check guard for {@code object} anchored at {@code before}, or {@code null}
     * when the stamp already proves non-null.
     */
    protected GuardingNode createNullCheck(ValueNode object, FixedNode before, LoweringTool tool) {
        if (StampTool.isPointerNonNull(object)) {
            return null;
        }
        return tool.createGuard(before, before.graph().unique(IsNullNode.create(object)), NullCheckException, InvalidateReprofile, SpeculationLog.NO_SPECULATION, true, null);
    }

    /**
     * Returns {@code object} with a non-null stamp: either the object itself (already proven
     * non-null) or a {@link PiNode} pinned to a freshly created null-check guard.
     */
    protected ValueNode createNullCheckedValue(ValueNode object, FixedNode before, LoweringTool tool) {
        GuardingNode nullCheck = createNullCheck(object, before, tool);
        if (nullCheck == null) {
            return object;
        }
        return before.graph().maybeAddOrUnique(PiNode.create(object, (object.stamp(NodeView.DEFAULT)).join(StampFactory.objectNonNull()), (ValueNode) nullCheck));
    }

    /**
     * Recovers the element index from an array-element address:
     * {@code (offset - arrayBaseOffset) >> log2(indexScale)}, converted to int.
     */
    @Override
    public ValueNode reconstructArrayIndex(JavaKind elementKind, AddressNode address) {
        StructuredGraph graph = address.graph();
        ValueNode offset = ((OffsetAddressNode) address).getOffset();

        int base = metaAccess.getArrayBaseOffset(elementKind);
        ValueNode scaledIndex = graph.unique(new SubNode(offset, ConstantNode.forIntegerStamp(offset.stamp(NodeView.DEFAULT), base, graph)));

        int shift = CodeUtil.log2(metaAccess.getArrayIndexScale(elementKind));
        ValueNode ret = graph.unique(new RightShiftNode(scaledIndex, ConstantNode.forInt(shift, graph)));
        return IntegerConvertNode.convert(ret, StampFactory.forKind(JavaKind.Int), graph, NodeView.DEFAULT);
    }
}