1 /* 2 * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 */ 23 24 25 package org.graalvm.compiler.replacements; 26 27 import static jdk.vm.ci.code.MemoryBarriers.JMM_POST_VOLATILE_READ; 28 import static jdk.vm.ci.code.MemoryBarriers.JMM_POST_VOLATILE_WRITE; 29 import static jdk.vm.ci.code.MemoryBarriers.JMM_PRE_VOLATILE_READ; 30 import static jdk.vm.ci.code.MemoryBarriers.JMM_PRE_VOLATILE_WRITE; 31 import static jdk.vm.ci.meta.DeoptimizationAction.InvalidateReprofile; 32 import static jdk.vm.ci.meta.DeoptimizationReason.BoundsCheckException; 33 import static jdk.vm.ci.meta.DeoptimizationReason.NullCheckException; 34 import static org.graalvm.compiler.core.common.SpeculativeExecutionAttacksMitigations.Options.UseIndexMasking; 35 import static org.graalvm.compiler.nodes.NamedLocationIdentity.ARRAY_LENGTH_LOCATION; 36 import static org.graalvm.compiler.nodes.calc.BinaryArithmeticNode.branchlessMax; 37 import static org.graalvm.compiler.nodes.calc.BinaryArithmeticNode.branchlessMin; 38 import static org.graalvm.compiler.nodes.java.ArrayLengthNode.readArrayLength; 39 import static org.graalvm.compiler.nodes.util.GraphUtil.skipPiWhileNonNull; 40 41 import java.nio.ByteOrder; 42 import java.util.ArrayList; 43 import java.util.BitSet; 44 import java.util.List; 45 46 import org.graalvm.compiler.api.directives.GraalDirectives; 47 import org.graalvm.compiler.api.replacements.Snippet; 48 import org.graalvm.compiler.api.replacements.SnippetReflectionProvider; 49 import org.graalvm.compiler.core.common.LIRKind; 50 import org.graalvm.compiler.core.common.spi.ForeignCallDescriptor; 51 import org.graalvm.compiler.core.common.spi.ForeignCallsProvider; 52 import org.graalvm.compiler.core.common.type.IntegerStamp; 53 import org.graalvm.compiler.core.common.type.ObjectStamp; 54 import org.graalvm.compiler.core.common.type.Stamp; 55 import org.graalvm.compiler.core.common.type.StampFactory; 56 import org.graalvm.compiler.core.common.type.TypeReference; 57 import org.graalvm.compiler.debug.DebugCloseable; 58 import 
org.graalvm.compiler.debug.DebugHandlersFactory; 59 import org.graalvm.compiler.debug.GraalError; 60 import org.graalvm.compiler.graph.Node; 61 import org.graalvm.compiler.nodeinfo.InputType; 62 import org.graalvm.compiler.nodes.CompressionNode.CompressionOp; 63 import org.graalvm.compiler.nodes.ConstantNode; 64 import org.graalvm.compiler.nodes.FieldLocationIdentity; 65 import org.graalvm.compiler.nodes.FixedNode; 66 import org.graalvm.compiler.nodes.LogicNode; 67 import org.graalvm.compiler.nodes.NamedLocationIdentity; 68 import org.graalvm.compiler.nodes.NodeView; 69 import org.graalvm.compiler.nodes.PiNode; 70 import org.graalvm.compiler.nodes.StructuredGraph; 71 import org.graalvm.compiler.nodes.ValueNode; 72 import org.graalvm.compiler.nodes.calc.AddNode; 73 import org.graalvm.compiler.nodes.calc.ConditionalNode; 74 import org.graalvm.compiler.nodes.calc.IntegerBelowNode; 75 import org.graalvm.compiler.nodes.calc.IntegerConvertNode; 76 import org.graalvm.compiler.nodes.calc.IntegerEqualsNode; 77 import org.graalvm.compiler.nodes.calc.IsNullNode; 78 import org.graalvm.compiler.nodes.calc.LeftShiftNode; 79 import org.graalvm.compiler.nodes.calc.NarrowNode; 80 import org.graalvm.compiler.nodes.calc.RightShiftNode; 81 import org.graalvm.compiler.nodes.calc.SignExtendNode; 82 import org.graalvm.compiler.nodes.calc.SubNode; 83 import org.graalvm.compiler.nodes.calc.UnpackEndianHalfNode; 84 import org.graalvm.compiler.nodes.calc.ZeroExtendNode; 85 import org.graalvm.compiler.nodes.debug.VerifyHeapNode; 86 import org.graalvm.compiler.nodes.extended.BoxNode; 87 import org.graalvm.compiler.nodes.extended.FixedValueAnchorNode; 88 import org.graalvm.compiler.nodes.extended.ForeignCallNode; 89 import org.graalvm.compiler.nodes.extended.GuardedUnsafeLoadNode; 90 import org.graalvm.compiler.nodes.extended.GuardingNode; 91 import org.graalvm.compiler.nodes.extended.JavaReadNode; 92 import org.graalvm.compiler.nodes.extended.JavaWriteNode; 93 import 
org.graalvm.compiler.nodes.extended.LoadArrayComponentHubNode; 94 import org.graalvm.compiler.nodes.extended.LoadHubNode; 95 import org.graalvm.compiler.nodes.extended.MembarNode; 96 import org.graalvm.compiler.nodes.extended.RawLoadNode; 97 import org.graalvm.compiler.nodes.extended.RawStoreNode; 98 import org.graalvm.compiler.nodes.extended.UnboxNode; 99 import org.graalvm.compiler.nodes.extended.UnsafeMemoryLoadNode; 100 import org.graalvm.compiler.nodes.extended.UnsafeMemoryStoreNode; 101 import org.graalvm.compiler.nodes.java.AbstractNewObjectNode; 102 import org.graalvm.compiler.nodes.java.AccessIndexedNode; 103 import org.graalvm.compiler.nodes.java.ArrayLengthNode; 104 import org.graalvm.compiler.nodes.java.AtomicReadAndWriteNode; 105 import org.graalvm.compiler.nodes.java.FinalFieldBarrierNode; 106 import org.graalvm.compiler.nodes.java.InstanceOfDynamicNode; 107 import org.graalvm.compiler.nodes.java.InstanceOfNode; 108 import org.graalvm.compiler.nodes.java.LoadFieldNode; 109 import org.graalvm.compiler.nodes.java.LoadIndexedNode; 110 import org.graalvm.compiler.nodes.java.LogicCompareAndSwapNode; 111 import org.graalvm.compiler.nodes.java.LoweredAtomicReadAndWriteNode; 112 import org.graalvm.compiler.nodes.java.MonitorEnterNode; 113 import org.graalvm.compiler.nodes.java.MonitorIdNode; 114 import org.graalvm.compiler.nodes.java.NewArrayNode; 115 import org.graalvm.compiler.nodes.java.NewInstanceNode; 116 import org.graalvm.compiler.nodes.java.RawMonitorEnterNode; 117 import org.graalvm.compiler.nodes.java.StoreFieldNode; 118 import org.graalvm.compiler.nodes.java.StoreIndexedNode; 119 import org.graalvm.compiler.nodes.java.UnsafeCompareAndExchangeNode; 120 import org.graalvm.compiler.nodes.java.UnsafeCompareAndSwapNode; 121 import org.graalvm.compiler.nodes.java.ValueCompareAndSwapNode; 122 import org.graalvm.compiler.nodes.memory.HeapAccess.BarrierType; 123 import org.graalvm.compiler.nodes.memory.ReadNode; 124 import 
org.graalvm.compiler.nodes.memory.WriteNode; 125 import org.graalvm.compiler.nodes.memory.address.AddressNode; 126 import org.graalvm.compiler.nodes.memory.address.OffsetAddressNode; 127 import org.graalvm.compiler.nodes.spi.Lowerable; 128 import org.graalvm.compiler.nodes.spi.LoweringProvider; 129 import org.graalvm.compiler.nodes.spi.LoweringTool; 130 import org.graalvm.compiler.nodes.type.StampTool; 131 import org.graalvm.compiler.nodes.util.GraphUtil; 132 import org.graalvm.compiler.nodes.virtual.AllocatedObjectNode; 133 import org.graalvm.compiler.nodes.virtual.CommitAllocationNode; 134 import org.graalvm.compiler.nodes.virtual.VirtualArrayNode; 135 import org.graalvm.compiler.nodes.virtual.VirtualInstanceNode; 136 import org.graalvm.compiler.nodes.virtual.VirtualObjectNode; 137 import org.graalvm.compiler.options.OptionValues; 138 import org.graalvm.compiler.phases.util.Providers; 139 import org.graalvm.compiler.replacements.SnippetLowerableMemoryNode.SnippetLowering; 140 import org.graalvm.compiler.replacements.nodes.BinaryMathIntrinsicNode; 141 import org.graalvm.compiler.replacements.nodes.BinaryMathIntrinsicNode.BinaryOperation; 142 import org.graalvm.compiler.replacements.nodes.UnaryMathIntrinsicNode; 143 import org.graalvm.compiler.replacements.nodes.UnaryMathIntrinsicNode.UnaryOperation; 144 import jdk.internal.vm.compiler.word.LocationIdentity; 145 146 import jdk.vm.ci.code.CodeUtil; 147 import jdk.vm.ci.code.MemoryBarriers; 148 import jdk.vm.ci.code.TargetDescription; 149 import jdk.vm.ci.meta.DeoptimizationAction; 150 import jdk.vm.ci.meta.DeoptimizationReason; 151 import jdk.vm.ci.meta.JavaKind; 152 import jdk.vm.ci.meta.MetaAccessProvider; 153 import jdk.vm.ci.meta.ResolvedJavaField; 154 import jdk.vm.ci.meta.ResolvedJavaMethod; 155 import jdk.vm.ci.meta.ResolvedJavaType; 156 import jdk.vm.ci.meta.SpeculationLog; 157 158 /** 159 * VM-independent lowerings for standard Java nodes. 
VM-specific methods are abstract and must be 160 * implemented by VM-specific subclasses. 161 */ 162 public abstract class DefaultJavaLoweringProvider implements LoweringProvider { 163 164 protected final MetaAccessProvider metaAccess; 165 protected final ForeignCallsProvider foreignCalls; 166 protected final TargetDescription target; 167 private final boolean useCompressedOops; 168 private final ResolvedJavaType objectArrayType; 169 170 private BoxingSnippets.Templates boxingSnippets; 171 private ConstantStringIndexOfSnippets.Templates indexOfSnippets; 172 173 public DefaultJavaLoweringProvider(MetaAccessProvider metaAccess, ForeignCallsProvider foreignCalls, TargetDescription target, boolean useCompressedOops) { 174 this.metaAccess = metaAccess; 175 this.foreignCalls = foreignCalls; 176 this.target = target; 177 this.useCompressedOops = useCompressedOops; 178 this.objectArrayType = metaAccess.lookupJavaType(Object[].class); 179 } 180 181 public void initialize(OptionValues options, Iterable<DebugHandlersFactory> factories, SnippetCounter.Group.Factory factory, Providers providers, SnippetReflectionProvider snippetReflection) { 182 boxingSnippets = new BoxingSnippets.Templates(options, factories, factory, providers, snippetReflection, target); 183 indexOfSnippets = new ConstantStringIndexOfSnippets.Templates(options, factories, providers, snippetReflection, target); 184 providers.getReplacements().registerSnippetTemplateCache(new SnippetCounterNode.SnippetCounterSnippets.Templates(options, factories, providers, snippetReflection, target)); 185 } 186 187 public final TargetDescription getTarget() { 188 return target; 189 } 190 191 public MetaAccessProvider getMetaAccess() { 192 return metaAccess; 193 } 194 195 @Override 196 @SuppressWarnings("try") 197 public void lower(Node n, LoweringTool tool) { 198 assert n instanceof Lowerable; 199 StructuredGraph graph = (StructuredGraph) n.graph(); 200 try (DebugCloseable context = n.withNodeSourcePosition()) { 201 if (n 
instanceof LoadFieldNode) { 202 lowerLoadFieldNode((LoadFieldNode) n, tool); 203 } else if (n instanceof StoreFieldNode) { 204 lowerStoreFieldNode((StoreFieldNode) n, tool); 205 } else if (n instanceof LoadIndexedNode) { 206 lowerLoadIndexedNode((LoadIndexedNode) n, tool); 207 } else if (n instanceof StoreIndexedNode) { 208 lowerStoreIndexedNode((StoreIndexedNode) n, tool); 209 } else if (n instanceof ArrayLengthNode) { 210 lowerArrayLengthNode((ArrayLengthNode) n, tool); 211 } else if (n instanceof LoadHubNode) { 212 lowerLoadHubNode((LoadHubNode) n, tool); 213 } else if (n instanceof LoadArrayComponentHubNode) { 214 lowerLoadArrayComponentHubNode((LoadArrayComponentHubNode) n); 215 } else if (n instanceof MonitorEnterNode) { 216 lowerMonitorEnterNode((MonitorEnterNode) n, tool, graph); 217 } else if (n instanceof UnsafeCompareAndSwapNode) { 218 lowerCompareAndSwapNode((UnsafeCompareAndSwapNode) n); 219 } else if (n instanceof UnsafeCompareAndExchangeNode) { 220 lowerCompareAndExchangeNode((UnsafeCompareAndExchangeNode) n); 221 } else if (n instanceof AtomicReadAndWriteNode) { 222 lowerAtomicReadAndWriteNode((AtomicReadAndWriteNode) n); 223 } else if (n instanceof RawLoadNode) { 224 lowerUnsafeLoadNode((RawLoadNode) n, tool); 225 } else if (n instanceof UnsafeMemoryLoadNode) { 226 lowerUnsafeMemoryLoadNode((UnsafeMemoryLoadNode) n); 227 } else if (n instanceof RawStoreNode) { 228 lowerUnsafeStoreNode((RawStoreNode) n); 229 } else if (n instanceof UnsafeMemoryStoreNode) { 230 lowerUnsafeMemoryStoreNode((UnsafeMemoryStoreNode) n); 231 } else if (n instanceof JavaReadNode) { 232 lowerJavaReadNode((JavaReadNode) n); 233 } else if (n instanceof JavaWriteNode) { 234 lowerJavaWriteNode((JavaWriteNode) n); 235 } else if (n instanceof CommitAllocationNode) { 236 lowerCommitAllocationNode((CommitAllocationNode) n, tool); 237 } else if (n instanceof BoxNode) { 238 boxingSnippets.lower((BoxNode) n, tool); 239 } else if (n instanceof UnboxNode) { 240 
boxingSnippets.lower((UnboxNode) n, tool); 241 } else if (n instanceof VerifyHeapNode) { 242 lowerVerifyHeap((VerifyHeapNode) n); 243 } else if (n instanceof UnaryMathIntrinsicNode) { 244 lowerUnaryMath((UnaryMathIntrinsicNode) n, tool); 245 } else if (n instanceof BinaryMathIntrinsicNode) { 246 lowerBinaryMath((BinaryMathIntrinsicNode) n, tool); 247 } else if (n instanceof StringIndexOfNode) { 248 lowerIndexOf((StringIndexOfNode) n); 249 } else if (n instanceof StringLatin1IndexOfNode) { 250 lowerLatin1IndexOf((StringLatin1IndexOfNode) n); 251 } else if (n instanceof StringUTF16IndexOfNode) { 252 lowerUTF16IndexOf((StringUTF16IndexOfNode) n); 253 } else if (n instanceof UnpackEndianHalfNode) { 254 lowerSecondHalf((UnpackEndianHalfNode) n); 255 } else { 256 throw GraalError.shouldNotReachHere("Node implementing Lowerable not handled: " + n); 257 } 258 } 259 } 260 261 private void lowerSecondHalf(UnpackEndianHalfNode n) { 262 ByteOrder byteOrder = target.arch.getByteOrder(); 263 n.lower(byteOrder); 264 } 265 266 private void lowerIndexOf(StringIndexOfNode n) { 267 if (n.getArgument(3).isConstant()) { 268 SnippetLowering lowering = new SnippetLowering() { 269 @Override 270 public void lower(SnippetLowerableMemoryNode node, LoweringTool tool) { 271 if (tool.getLoweringStage() != LoweringTool.StandardLoweringStage.LOW_TIER) { 272 return; 273 } 274 indexOfSnippets.lower(node, tool); 275 } 276 }; 277 SnippetLowerableMemoryNode snippetLower = new SnippetLowerableMemoryNode(lowering, NamedLocationIdentity.getArrayLocation(JavaKind.Char), n.stamp(NodeView.DEFAULT), n.toArgumentArray()); 278 n.graph().add(snippetLower); 279 n.graph().replaceFixedWithFixed(n, snippetLower); 280 } 281 } 282 283 private void lowerLatin1IndexOf(StringLatin1IndexOfNode n) { 284 if (n.getArgument(2).isConstant()) { 285 SnippetLowering lowering = new SnippetLowering() { 286 @Override 287 public void lower(SnippetLowerableMemoryNode node, LoweringTool tool) { 288 if (tool.getLoweringStage() != 
LoweringTool.StandardLoweringStage.LOW_TIER) { 289 return; 290 } 291 indexOfSnippets.lowerLatin1(node, tool); 292 } 293 }; 294 SnippetLowerableMemoryNode snippetLower = new SnippetLowerableMemoryNode(lowering, NamedLocationIdentity.getArrayLocation(JavaKind.Byte), n.stamp(NodeView.DEFAULT), n.toArgumentArray()); 295 n.graph().add(snippetLower); 296 n.graph().replaceFixedWithFixed(n, snippetLower); 297 } 298 } 299 300 private void lowerUTF16IndexOf(StringUTF16IndexOfNode n) { 301 if (n.getArgument(2).isConstant()) { 302 SnippetLowering lowering = new SnippetLowering() { 303 @Override 304 public void lower(SnippetLowerableMemoryNode node, LoweringTool tool) { 305 if (tool.getLoweringStage() != LoweringTool.StandardLoweringStage.LOW_TIER) { 306 return; 307 } 308 indexOfSnippets.lowerUTF16(node, tool); 309 } 310 }; 311 SnippetLowerableMemoryNode snippetLower = new SnippetLowerableMemoryNode(lowering, NamedLocationIdentity.getArrayLocation(JavaKind.Byte), n.stamp(NodeView.DEFAULT), n.toArgumentArray()); 312 n.graph().add(snippetLower); 313 n.graph().replaceFixedWithFixed(n, snippetLower); 314 } 315 } 316 317 private void lowerBinaryMath(BinaryMathIntrinsicNode math, LoweringTool tool) { 318 if (tool.getLoweringStage() == LoweringTool.StandardLoweringStage.HIGH_TIER) { 319 return; 320 } 321 ResolvedJavaMethod method = math.graph().method(); 322 if (method != null) { 323 if (method.getAnnotation(Snippet.class) != null) { 324 /* 325 * In the context of the snippet use the LIR lowering instead of the Node lowering. 326 */ 327 return; 328 } 329 if (method.getName().equalsIgnoreCase(math.getOperation().name()) && tool.getMetaAccess().lookupJavaType(Math.class).equals(method.getDeclaringClass())) { 330 /* 331 * A root compilation of the intrinsic method should emit the full assembly 332 * implementation. 
333 */ 334 return; 335 } 336 337 } 338 ForeignCallDescriptor foreignCall = toForeignCall(math.getOperation()); 339 if (foreignCall != null) { 340 StructuredGraph graph = math.graph(); 341 ForeignCallNode call = graph.add(new ForeignCallNode(foreignCalls, toForeignCall(math.getOperation()), math.getX(), math.getY())); 342 graph.addAfterFixed(tool.lastFixedNode(), call); 343 math.replaceAtUsages(call); 344 } 345 } 346 347 private void lowerUnaryMath(UnaryMathIntrinsicNode math, LoweringTool tool) { 348 if (tool.getLoweringStage() == LoweringTool.StandardLoweringStage.HIGH_TIER) { 349 return; 350 } 351 ResolvedJavaMethod method = math.graph().method(); 352 if (method != null) { 353 if (method.getAnnotation(Snippet.class) != null) { 354 /* 355 * In the context of the snippet use the LIR lowering instead of the Node lowering. 356 */ 357 return; 358 } 359 if (method.getName().equalsIgnoreCase(math.getOperation().name()) && tool.getMetaAccess().lookupJavaType(Math.class).equals(method.getDeclaringClass())) { 360 /* 361 * A root compilation of the intrinsic method should emit the full assembly 362 * implementation. 
363 */ 364 return; 365 } 366 367 } 368 ForeignCallDescriptor foreignCall = toForeignCall(math.getOperation()); 369 if (foreignCall != null) { 370 StructuredGraph graph = math.graph(); 371 ForeignCallNode call = math.graph().add(new ForeignCallNode(foreignCalls, foreignCall, math.getValue())); 372 graph.addAfterFixed(tool.lastFixedNode(), call); 373 math.replaceAtUsages(call); 374 } 375 } 376 377 protected ForeignCallDescriptor toForeignCall(UnaryOperation operation) { 378 return operation.foreignCallDescriptor; 379 } 380 381 protected ForeignCallDescriptor toForeignCall(BinaryOperation operation) { 382 return operation.foreignCallDescriptor; 383 } 384 385 protected void lowerVerifyHeap(VerifyHeapNode n) { 386 GraphUtil.removeFixedWithUnusedInputs(n); 387 } 388 389 protected AddressNode createOffsetAddress(StructuredGraph graph, ValueNode object, long offset) { 390 ValueNode o = ConstantNode.forIntegerKind(target.wordJavaKind, offset, graph); 391 return graph.unique(new OffsetAddressNode(object, o)); 392 } 393 394 protected AddressNode createFieldAddress(StructuredGraph graph, ValueNode object, ResolvedJavaField field) { 395 int offset = fieldOffset(field); 396 if (offset >= 0) { 397 return createOffsetAddress(graph, object, offset); 398 } else { 399 return null; 400 } 401 } 402 403 protected abstract JavaKind getStorageKind(ResolvedJavaField field); 404 405 protected void lowerLoadFieldNode(LoadFieldNode loadField, LoweringTool tool) { 406 assert loadField.getStackKind() != JavaKind.Illegal; 407 StructuredGraph graph = loadField.graph(); 408 ResolvedJavaField field = loadField.field(); 409 ValueNode object = loadField.isStatic() ? 
staticFieldBase(graph, field) : loadField.object(); 410 object = createNullCheckedValue(object, loadField, tool); 411 Stamp loadStamp = loadStamp(loadField.stamp(NodeView.DEFAULT), getStorageKind(field)); 412 413 AddressNode address = createFieldAddress(graph, object, field); 414 assert address != null : "Field that is loaded must not be eliminated: " + field.getDeclaringClass().toJavaName(true) + "." + field.getName(); 415 416 ReadNode memoryRead = graph.add(new ReadNode(address, fieldLocationIdentity(field), loadStamp, fieldLoadBarrierType(field))); 417 ValueNode readValue = implicitLoadConvert(graph, getStorageKind(field), memoryRead); 418 loadField.replaceAtUsages(readValue); 419 graph.replaceFixed(loadField, memoryRead); 420 421 if (loadField.isVolatile()) { 422 MembarNode preMembar = graph.add(new MembarNode(JMM_PRE_VOLATILE_READ)); 423 graph.addBeforeFixed(memoryRead, preMembar); 424 MembarNode postMembar = graph.add(new MembarNode(JMM_POST_VOLATILE_READ)); 425 graph.addAfterFixed(memoryRead, postMembar); 426 } 427 } 428 429 protected void lowerStoreFieldNode(StoreFieldNode storeField, LoweringTool tool) { 430 StructuredGraph graph = storeField.graph(); 431 ResolvedJavaField field = storeField.field(); 432 ValueNode object = storeField.isStatic() ? 
staticFieldBase(graph, field) : storeField.object(); 433 object = createNullCheckedValue(object, storeField, tool); 434 ValueNode value = implicitStoreConvert(graph, getStorageKind(storeField.field()), storeField.value()); 435 AddressNode address = createFieldAddress(graph, object, field); 436 assert address != null; 437 438 WriteNode memoryWrite = graph.add(new WriteNode(address, fieldLocationIdentity(field), value, fieldStoreBarrierType(storeField.field()))); 439 memoryWrite.setStateAfter(storeField.stateAfter()); 440 graph.replaceFixedWithFixed(storeField, memoryWrite); 441 442 if (storeField.isVolatile()) { 443 MembarNode preMembar = graph.add(new MembarNode(JMM_PRE_VOLATILE_WRITE)); 444 graph.addBeforeFixed(memoryWrite, preMembar); 445 MembarNode postMembar = graph.add(new MembarNode(JMM_POST_VOLATILE_WRITE)); 446 graph.addAfterFixed(memoryWrite, postMembar); 447 } 448 } 449 450 public static final IntegerStamp POSITIVE_ARRAY_INDEX_STAMP = StampFactory.forInteger(32, 0, Integer.MAX_VALUE - 1); 451 452 /** 453 * Create a PiNode on the index proving that the index is positive. On some platforms this is 454 * important to allow the index to be used as an int in the address mode. 455 */ 456 public AddressNode createArrayIndexAddress(StructuredGraph graph, ValueNode array, JavaKind elementKind, ValueNode index, GuardingNode boundsCheck) { 457 ValueNode positiveIndex = graph.maybeAddOrUnique(PiNode.create(index, POSITIVE_ARRAY_INDEX_STAMP, boundsCheck != null ? 
boundsCheck.asNode() : null)); 458 return createArrayAddress(graph, array, elementKind, positiveIndex); 459 } 460 461 public AddressNode createArrayAddress(StructuredGraph graph, ValueNode array, JavaKind elementKind, ValueNode index) { 462 ValueNode wordIndex; 463 if (target.wordSize > 4) { 464 wordIndex = graph.unique(new SignExtendNode(index, target.wordSize * 8)); 465 } else { 466 assert target.wordSize == 4 : "unsupported word size"; 467 wordIndex = index; 468 } 469 470 int shift = CodeUtil.log2(metaAccess.getArrayIndexScale(elementKind)); 471 ValueNode scaledIndex = graph.unique(new LeftShiftNode(wordIndex, ConstantNode.forInt(shift, graph))); 472 473 int base = metaAccess.getArrayBaseOffset(elementKind); 474 ValueNode offset = graph.unique(new AddNode(scaledIndex, ConstantNode.forIntegerKind(target.wordJavaKind, base, graph))); 475 476 return graph.unique(new OffsetAddressNode(array, offset)); 477 } 478 479 protected void lowerLoadIndexedNode(LoadIndexedNode loadIndexed, LoweringTool tool) { 480 StructuredGraph graph = loadIndexed.graph(); 481 ValueNode array = loadIndexed.array(); 482 array = createNullCheckedValue(array, loadIndexed, tool); 483 JavaKind elementKind = loadIndexed.elementKind(); 484 Stamp loadStamp = loadStamp(loadIndexed.stamp(NodeView.DEFAULT), elementKind); 485 486 GuardingNode boundsCheck = getBoundsCheck(loadIndexed, array, tool); 487 ValueNode index = loadIndexed.index(); 488 if (UseIndexMasking.getValue(graph.getOptions())) { 489 index = proxyIndex(loadIndexed, index, array, tool); 490 } 491 AddressNode address = createArrayIndexAddress(graph, array, elementKind, index, boundsCheck); 492 493 ReadNode memoryRead = graph.add(new ReadNode(address, NamedLocationIdentity.getArrayLocation(elementKind), loadStamp, BarrierType.NONE)); 494 memoryRead.setGuard(boundsCheck); 495 ValueNode readValue = implicitLoadConvert(graph, elementKind, memoryRead); 496 497 loadIndexed.replaceAtUsages(readValue); 498 graph.replaceFixed(loadIndexed, 
memoryRead); 499 } 500 501 protected void lowerStoreIndexedNode(StoreIndexedNode storeIndexed, LoweringTool tool) { 502 StructuredGraph graph = storeIndexed.graph(); 503 504 ValueNode value = storeIndexed.value(); 505 ValueNode array = storeIndexed.array(); 506 507 array = this.createNullCheckedValue(array, storeIndexed, tool); 508 509 GuardingNode boundsCheck = getBoundsCheck(storeIndexed, array, tool); 510 511 JavaKind elementKind = storeIndexed.elementKind(); 512 513 LogicNode condition = null; 514 if (storeIndexed.getStoreCheck() == null && elementKind == JavaKind.Object && !StampTool.isPointerAlwaysNull(value)) { 515 /* Array store check. */ 516 TypeReference arrayType = StampTool.typeReferenceOrNull(array); 517 if (arrayType != null && arrayType.isExact()) { 518 ResolvedJavaType elementType = arrayType.getType().getComponentType(); 519 if (!elementType.isJavaLangObject()) { 520 TypeReference typeReference = TypeReference.createTrusted(storeIndexed.graph().getAssumptions(), elementType); 521 LogicNode typeTest = graph.addOrUniqueWithInputs(InstanceOfNode.create(typeReference, value)); 522 condition = LogicNode.or(graph.unique(IsNullNode.create(value)), typeTest, GraalDirectives.UNLIKELY_PROBABILITY); 523 } 524 } else { 525 /* 526 * The guard on the read hub should be the null check of the array that was 527 * introduced earlier. 
528 */ 529 ValueNode arrayClass = createReadHub(graph, array, tool); 530 ValueNode componentHub = createReadArrayComponentHub(graph, arrayClass, storeIndexed); 531 LogicNode typeTest = graph.unique(InstanceOfDynamicNode.create(graph.getAssumptions(), tool.getConstantReflection(), componentHub, value, false)); 532 condition = LogicNode.or(graph.unique(IsNullNode.create(value)), typeTest, GraalDirectives.UNLIKELY_PROBABILITY); 533 } 534 } 535 536 AddressNode address = createArrayIndexAddress(graph, array, elementKind, storeIndexed.index(), boundsCheck); 537 WriteNode memoryWrite = graph.add(new WriteNode(address, NamedLocationIdentity.getArrayLocation(elementKind), implicitStoreConvert(graph, elementKind, value), 538 arrayStoreBarrierType(storeIndexed.elementKind()))); 539 memoryWrite.setGuard(boundsCheck); 540 if (condition != null) { 541 tool.createGuard(storeIndexed, condition, DeoptimizationReason.ArrayStoreException, DeoptimizationAction.InvalidateReprofile); 542 } 543 memoryWrite.setStateAfter(storeIndexed.stateAfter()); 544 graph.replaceFixedWithFixed(storeIndexed, memoryWrite); 545 } 546 547 protected void lowerArrayLengthNode(ArrayLengthNode arrayLengthNode, LoweringTool tool) { 548 arrayLengthNode.replaceAtUsages(createReadArrayLength(arrayLengthNode.array(), arrayLengthNode, tool)); 549 StructuredGraph graph = arrayLengthNode.graph(); 550 graph.removeFixed(arrayLengthNode); 551 } 552 553 /** 554 * Creates a read node that read the array length and is guarded by a null-check. 555 * 556 * The created node is placed before {@code before} in the CFG. 
557 */ 558 protected ReadNode createReadArrayLength(ValueNode array, FixedNode before, LoweringTool tool) { 559 StructuredGraph graph = array.graph(); 560 ValueNode canonicalArray = this.createNullCheckedValue(skipPiWhileNonNull(array), before, tool); 561 AddressNode address = createOffsetAddress(graph, canonicalArray, arrayLengthOffset()); 562 ReadNode readArrayLength = graph.add(new ReadNode(address, ARRAY_LENGTH_LOCATION, StampFactory.positiveInt(), BarrierType.NONE)); 563 graph.addBeforeFixed(before, readArrayLength); 564 return readArrayLength; 565 } 566 567 protected void lowerLoadHubNode(LoadHubNode loadHub, LoweringTool tool) { 568 StructuredGraph graph = loadHub.graph(); 569 if (tool.getLoweringStage() != LoweringTool.StandardLoweringStage.LOW_TIER) { 570 return; 571 } 572 if (graph.getGuardsStage().allowsFloatingGuards()) { 573 return; 574 } 575 ValueNode hub = createReadHub(graph, loadHub.getValue(), tool); 576 loadHub.replaceAtUsagesAndDelete(hub); 577 } 578 579 protected void lowerLoadArrayComponentHubNode(LoadArrayComponentHubNode loadHub) { 580 StructuredGraph graph = loadHub.graph(); 581 ValueNode hub = createReadArrayComponentHub(graph, loadHub.getValue(), loadHub); 582 graph.replaceFixed(loadHub, hub); 583 } 584 585 protected void lowerMonitorEnterNode(MonitorEnterNode monitorEnter, LoweringTool tool, StructuredGraph graph) { 586 ValueNode object = createNullCheckedValue(monitorEnter.object(), monitorEnter, tool); 587 ValueNode hub = graph.addOrUnique(LoadHubNode.create(object, tool.getStampProvider(), tool.getMetaAccess(), tool.getConstantReflection())); 588 RawMonitorEnterNode rawMonitorEnter = graph.add(new RawMonitorEnterNode(object, hub, monitorEnter.getMonitorId())); 589 rawMonitorEnter.setStateBefore(monitorEnter.stateBefore()); 590 rawMonitorEnter.setStateAfter(monitorEnter.stateAfter()); 591 graph.replaceFixedWithFixed(monitorEnter, rawMonitorEnter); 592 } 593 594 protected void lowerCompareAndSwapNode(UnsafeCompareAndSwapNode cas) { 595 
StructuredGraph graph = cas.graph();
    JavaKind valueKind = cas.getValueKind();

    // Convert expected/new values to their in-memory representation for valueKind
    // (narrowing sub-int kinds; oop compression handled by implicitStoreConvert).
    ValueNode expectedValue = implicitStoreConvert(graph, valueKind, cas.expected());
    ValueNode newValue = implicitStoreConvert(graph, valueKind, cas.newValue());

    AddressNode address = graph.unique(new OffsetAddressNode(cas.object(), cas.offset()));
    BarrierType barrierType = guessStoreBarrierType(cas.object(), expectedValue);
    // Logic CAS produces only a success/failure flag, not the previous memory value.
    LogicCompareAndSwapNode atomicNode = graph.add(new LogicCompareAndSwapNode(address, cas.getLocationIdentity(), expectedValue, newValue, barrierType));
    atomicNode.setStateAfter(cas.stateAfter());
    graph.replaceFixedWithFixed(cas, atomicNode);
}

/**
 * Lowers an {@link UnsafeCompareAndExchangeNode} to a {@link ValueCompareAndSwapNode}.
 * Unlike the logic variant above, the lowered node returns the value that was read from
 * memory, so the raw result is converted back to its stack representation before usages of
 * the original node are rerouted to it.
 */
protected void lowerCompareAndExchangeNode(UnsafeCompareAndExchangeNode cas) {
    StructuredGraph graph = cas.graph();
    JavaKind valueKind = cas.getValueKind();

    ValueNode expectedValue = implicitStoreConvert(graph, valueKind, cas.expected());
    ValueNode newValue = implicitStoreConvert(graph, valueKind, cas.newValue());

    AddressNode address = graph.unique(new OffsetAddressNode(cas.object(), cas.offset()));
    BarrierType barrierType = guessStoreBarrierType(cas.object(), expectedValue);
    ValueCompareAndSwapNode atomicNode = graph.add(new ValueCompareAndSwapNode(address, expectedValue, newValue, cas.getLocationIdentity(), barrierType));
    // Convert the raw memory-format result back to a stack value (sign/zero extension,
    // oop uncompression) before it replaces the original node's usages.
    ValueNode coercedNode = implicitLoadConvert(graph, valueKind, atomicNode, true);
    atomicNode.setStateAfter(cas.stateAfter());
    cas.replaceAtUsages(coercedNode);
    graph.replaceFixedWithFixed(cas, atomicNode);
}

/**
 * Lowers an {@link AtomicReadAndWriteNode} (atomic swap) to a
 * {@link LoweredAtomicReadAndWriteNode} addressed via (object, offset) with an explicit
 * {@link LIRKind} access kind derived from the target architecture.
 */
protected void lowerAtomicReadAndWriteNode(AtomicReadAndWriteNode n) {
    StructuredGraph graph = n.graph();
    JavaKind valueKind = n.getValueKind();

    ValueNode newValue = implicitStoreConvert(graph, valueKind, n.newValue());

    AddressNode address = graph.unique(new OffsetAddressNode(n.object(), n.offset()));
    BarrierType barrierType = guessStoreBarrierType(n.object(), n.newValue());
    LIRKind lirAccessKind = LIRKind.fromJavaKind(target.arch, valueKind);
    LoweredAtomicReadAndWriteNode memoryRead = graph.add(new LoweredAtomicReadAndWriteNode(address, n.getLocationIdentity(), newValue, lirAccessKind, barrierType));
    memoryRead.setStateAfter(n.stateAfter());

    ValueNode readValue = implicitLoadConvert(graph, valueKind, memoryRead);
    // The FrameState must point at the replacement node before the original is removed.
    n.stateAfter().replaceFirstInput(n, memoryRead);
    n.replaceAtUsages(readValue);
    graph.replaceFixedWithFixed(n, memoryRead);
}

/**
 * Lowers a {@link RawLoadNode} to a {@link ReadNode}. If the load carries a guard the read
 * is attached to it and may float below that guard; otherwise the read is forced fixed so
 * it cannot float above whatever test made the access safe.
 *
 * @param tool utility for performing the lowering
 */
protected void lowerUnsafeLoadNode(RawLoadNode load, LoweringTool tool) {
    StructuredGraph graph = load.graph();
    if (load instanceof GuardedUnsafeLoadNode) {
        GuardedUnsafeLoadNode guardedLoad = (GuardedUnsafeLoadNode) load;
        GuardingNode guard = guardedLoad.getGuard();
        if (guard == null) {
            // can float freely if the guard folded away
            ReadNode memoryRead = createUnsafeRead(graph, load, null);
            memoryRead.setForceFixed(false);
            graph.replaceFixedWithFixed(load, memoryRead);
        } else {
            // must be guarded, but flows below the guard
            ReadNode memoryRead = createUnsafeRead(graph, load, guard);
            graph.replaceFixedWithFixed(load, memoryRead);
        }
    } else {
        // never had a guarding condition so it must be fixed, creation of the read will force
        // it to be fixed
        ReadNode memoryRead = createUnsafeRead(graph, load, null);
        graph.replaceFixedWithFixed(load, memoryRead);
    }
}

/**
 * Builds the address for an unsafe access. A constant default (null) base means the offset
 * is an absolute address, so the base is dropped from the address node.
 */
protected AddressNode createUnsafeAddress(StructuredGraph graph, ValueNode object, ValueNode offset) {
    if (object.isConstant() && object.asConstant().isDefaultForKind()) {
        return graph.addOrUniqueWithInputs(OffsetAddressNode.create(offset));
    } else {
        return graph.unique(new OffsetAddressNode(object, offset));
    }
}

/**
 * Creates the {@link ReadNode} for an unsafe load, converts the raw result to its stack
 * representation (including boolean normalization), and reroutes the load's usages to the
 * converted value. The caller is responsible for splicing the returned read into the
 * fixed-node list in place of {@code load}.
 */
protected ReadNode createUnsafeRead(StructuredGraph graph, RawLoadNode load, GuardingNode guard) {
    boolean compressible = load.accessKind() == JavaKind.Object;
    JavaKind readKind = load.accessKind();
    Stamp loadStamp = loadStamp(load.stamp(NodeView.DEFAULT), readKind, compressible);
    AddressNode address = createUnsafeAddress(graph, load.object(), load.offset());
    ReadNode memoryRead = graph.add(new ReadNode(address, load.getLocationIdentity(), loadStamp, BarrierType.NONE));
    if (guard == null) {
        // An unsafe read must not float otherwise it may float above
        // a test guaranteeing the read is safe.
        memoryRead.setForceFixed(true);
    } else {
        memoryRead.setGuard(guard);
    }
    ValueNode readValue = performBooleanCoercionIfNecessary(implicitLoadConvert(graph, readKind, memoryRead, compressible), readKind);
    load.replaceAtUsages(readValue);
    return memoryRead;
}

/**
 * Lowers an {@link UnsafeMemoryLoadNode} (load from a raw, non-object address) to a forced
 * fixed {@link ReadNode}. Object kinds are not permitted here.
 */
protected void lowerUnsafeMemoryLoadNode(UnsafeMemoryLoadNode load) {
    StructuredGraph graph = load.graph();
    JavaKind readKind = load.getKind();
    assert readKind != JavaKind.Object;
    Stamp loadStamp = loadStamp(load.stamp(NodeView.DEFAULT), readKind, false);
    AddressNode address = graph.addOrUniqueWithInputs(OffsetAddressNode.create(load.getAddress()));
    ReadNode memoryRead = graph.add(new ReadNode(address, load.getLocationIdentity(), loadStamp, BarrierType.NONE));
    // An unsafe read must not float otherwise it may float above
    // a test guaranteeing the read is safe.
    memoryRead.setForceFixed(true);
    ValueNode readValue = performBooleanCoercionIfNecessary(implicitLoadConvert(graph, readKind, memoryRead, false), readKind);
    load.replaceAtUsages(readValue);
    graph.replaceFixedWithFixed(load, memoryRead);
}

/**
 * Normalizes a boolean read: any non-zero byte in memory must surface as {@code true}, so
 * the raw value is mapped through {@code (value != 0)} rather than used directly.
 */
private static ValueNode performBooleanCoercionIfNecessary(ValueNode readValue, JavaKind readKind) {
    if (readKind == JavaKind.Boolean) {
        StructuredGraph graph = readValue.graph();
        IntegerEqualsNode eq = graph.addOrUnique(new IntegerEqualsNode(readValue, ConstantNode.forInt(0, graph)));
        return graph.addOrUnique(new ConditionalNode(eq, ConstantNode.forBoolean(false, graph), ConstantNode.forBoolean(true, graph)));
    }
    return readValue;
}

/**
 * Lowers a {@link RawStoreNode} to a {@link WriteNode}, converting the value to its memory
 * representation and selecting a GC barrier via {@link #unsafeStoreBarrierType}.
 */
protected void lowerUnsafeStoreNode(RawStoreNode store) {
    StructuredGraph graph = store.graph();
    boolean compressible = store.value().getStackKind() == JavaKind.Object;
    JavaKind valueKind = store.accessKind();
    ValueNode value = implicitStoreConvert(graph, valueKind, store.value(), compressible);
    AddressNode address = createUnsafeAddress(graph, store.object(), store.offset());
    WriteNode write = graph.add(new WriteNode(address, store.getLocationIdentity(), value, unsafeStoreBarrierType(store)));
    write.setStateAfter(store.stateAfter());
    graph.replaceFixedWithFixed(store, write);
}

/**
 * Lowers an {@link UnsafeMemoryStoreNode} (store to a raw, non-object address) to a
 * {@link WriteNode} with no GC barrier; object values are not permitted here.
 */
protected void lowerUnsafeMemoryStoreNode(UnsafeMemoryStoreNode store) {
    StructuredGraph graph = store.graph();
    assert store.getValue().getStackKind() != JavaKind.Object;
    JavaKind valueKind = store.getKind();
    ValueNode value = implicitStoreConvert(graph, valueKind, store.getValue(), false);
    AddressNode address = graph.addOrUniqueWithInputs(OffsetAddressNode.create(store.getAddress()));
    WriteNode write = graph.add(new WriteNode(address, store.getLocationIdentity(), value, BarrierType.NONE));
    write.setStateAfter(store.stateAfter());
    graph.replaceFixedWithFixed(store, write);
}
/**
 * Lowers a {@link JavaReadNode} to a {@link ReadNode}, converting the raw memory value back
 * to its stack representation. Without a guard the read is forced fixed so it cannot float
 * above whatever test made the access safe.
 */
protected void lowerJavaReadNode(JavaReadNode read) {
    StructuredGraph graph = read.graph();
    JavaKind valueKind = read.getReadKind();
    Stamp loadStamp = loadStamp(read.stamp(NodeView.DEFAULT), valueKind, read.isCompressible());

    ReadNode memoryRead = graph.add(new ReadNode(read.getAddress(), read.getLocationIdentity(), loadStamp, read.getBarrierType()));
    GuardingNode guard = read.getGuard();
    ValueNode readValue = implicitLoadConvert(graph, valueKind, memoryRead, read.isCompressible());
    if (guard == null) {
        // An unsafe read must not float otherwise it may float above
        // a test guaranteeing the read is safe.
        memoryRead.setForceFixed(true);
    } else {
        memoryRead.setGuard(guard);
    }
    read.replaceAtUsages(readValue);
    graph.replaceFixed(read, memoryRead);
}

/**
 * Lowers a {@link JavaWriteNode} to a {@link WriteNode}, converting the value to its memory
 * representation and carrying over state, barrier and guard.
 */
protected void lowerJavaWriteNode(JavaWriteNode write) {
    StructuredGraph graph = write.graph();
    ValueNode value = implicitStoreConvert(graph, write.getWriteKind(), write.value(), write.isCompressible());
    WriteNode memoryWrite = graph.add(new WriteNode(write.getAddress(), write.getLocationIdentity(), value, write.getBarrierType()));
    memoryWrite.setStateAfter(write.stateAfter());
    graph.replaceFixedWithFixed(write, memoryWrite);
    memoryWrite.setGuard(write.getGuard());
}

/**
 * Materializes the virtual objects of a {@link CommitAllocationNode}: emits the allocations,
 * initializes their fields/elements, then performs locking and usage rerouting via
 * {@link #finishAllocatedObjects}. Runs only once guards have been fixed
 * ({@code FIXED_DEOPTS}).
 *
 * Initialization happens in two passes because virtual objects may reference each other
 * cyclically: the first pass writes all values whose allocation already exists (recording
 * the rest in {@code omittedValues}), the second pass fills in the omitted object
 * references once every allocation node has been created.
 */
@SuppressWarnings("try")
protected void lowerCommitAllocationNode(CommitAllocationNode commit, LoweringTool tool) {
    StructuredGraph graph = commit.graph();
    if (graph.getGuardsStage() == StructuredGraph.GuardsStage.FIXED_DEOPTS) {
        List<AbstractNewObjectNode> recursiveLowerings = new ArrayList<>();

        ValueNode[] allocations = new ValueNode[commit.getVirtualObjects().size()];
        BitSet omittedValues = new BitSet();
        int valuePos = 0;
        // Pass 1: create each allocation and write all immediately-available values.
        for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
            VirtualObjectNode virtual = commit.getVirtualObjects().get(objIndex);
            try (DebugCloseable nsp = graph.withNodeSourcePosition(virtual)) {
                int entryCount = virtual.entryCount();
                AbstractNewObjectNode newObject;
                if (virtual instanceof VirtualInstanceNode) {
                    newObject = graph.add(createNewInstanceFromVirtual(virtual));
                } else {
                    newObject = graph.add(createNewArrayFromVirtual(virtual, ConstantNode.forInt(entryCount, graph)));
                }

                recursiveLowerings.add(newObject);
                graph.addBeforeFixed(commit, newObject);
                allocations[objIndex] = newObject;
                for (int i = 0; i < entryCount; i++) {
                    ValueNode value = commit.getValues().get(valuePos);
                    if (value instanceof VirtualObjectNode) {
                        value = allocations[commit.getVirtualObjects().indexOf(value)];
                    }
                    if (value == null) {
                        // Forward reference to a not-yet-created allocation: defer to pass 2.
                        omittedValues.set(valuePos);
                    } else if (!(value.isConstant() && value.asConstant().isDefaultForKind())) {
                        // Constant.illegal is always the defaultForKind, so it is skipped
                        JavaKind valueKind = value.getStackKind();
                        JavaKind entryKind = virtual.entryKind(i);

                        // Truffle requires some leniency in terms of what can be put where:
                        assert valueKind.getStackKind() == entryKind.getStackKind() ||
                                        (valueKind == JavaKind.Long || valueKind == JavaKind.Double || (valueKind == JavaKind.Int && virtual instanceof VirtualArrayNode));
                        AddressNode address = null;
                        BarrierType barrierType = null;
                        if (virtual instanceof VirtualInstanceNode) {
                            ResolvedJavaField field = ((VirtualInstanceNode) virtual).field(i);
                            long offset = fieldOffset(field);
                            // A negative offset means the field has no location; skip it.
                            if (offset >= 0) {
                                address = createOffsetAddress(graph, newObject, offset);
                                barrierType = fieldInitializationBarrier(entryKind);
                            }
                        } else {
                            address = createOffsetAddress(graph, newObject, metaAccess.getArrayBaseOffset(entryKind) + i * metaAccess.getArrayIndexScale(entryKind));
                            barrierType = arrayInitializationBarrier(entryKind);
                        }
                        if (address != null) {
                            WriteNode write = new WriteNode(address, LocationIdentity.init(), implicitStoreConvert(graph, entryKind, value), barrierType);
                            graph.addAfterFixed(newObject, graph.add(write));
                        }
                    }
                    valuePos++;
                }
            }
        }
        valuePos = 0;

        // Pass 2: write the object references that were omitted in pass 1.
        for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
            VirtualObjectNode virtual = commit.getVirtualObjects().get(objIndex);
            try (DebugCloseable nsp = graph.withNodeSourcePosition(virtual)) {
                int entryCount = virtual.entryCount();
                ValueNode newObject = allocations[objIndex];
                for (int i = 0; i < entryCount; i++) {
                    if (omittedValues.get(valuePos)) {
                        ValueNode value = commit.getValues().get(valuePos);
                        assert value instanceof VirtualObjectNode;
                        ValueNode allocValue = allocations[commit.getVirtualObjects().indexOf(value)];
                        if (!(allocValue.isConstant() && allocValue.asConstant().isDefaultForKind())) {
                            assert virtual.entryKind(i) == JavaKind.Object && allocValue.getStackKind() == JavaKind.Object;
                            AddressNode address;
                            BarrierType barrierType;
                            if (virtual instanceof VirtualInstanceNode) {
                                VirtualInstanceNode virtualInstance = (VirtualInstanceNode) virtual;
                                address = createFieldAddress(graph, newObject, virtualInstance.field(i));
                                barrierType = BarrierType.IMPRECISE;
                            } else {
                                address = createArrayAddress(graph, newObject, virtual.entryKind(i), ConstantNode.forInt(i, graph));
                                barrierType = BarrierType.PRECISE;
                            }
                            if (address != null) {
                                WriteNode write = new WriteNode(address, LocationIdentity.init(), implicitStoreConvert(graph, JavaKind.Object, allocValue), barrierType);
                                graph.addBeforeFixed(commit, graph.add(write));
                            }
                        }
                    }
                    valuePos++;
                }
            }
        }

        finishAllocatedObjects(tool, commit, allocations);
        graph.removeFixed(commit);

        // The new-instance/new-array nodes created above must themselves be lowered.
        for (AbstractNewObjectNode recursiveLowering : recursiveLowerings) {
            recursiveLowering.lower(tool);
        }
    }

}

/**
 * Creates the allocation node for a virtual instance; {@code true} requests that the
 * allocation fill the object with its default values.
 */
public NewInstanceNode createNewInstanceFromVirtual(VirtualObjectNode virtual) {
    return new NewInstanceNode(virtual.type(), true);
}

/**
 * Creates the allocation node for a virtual array of the given length; {@code true}
 * requests default-value initialization.
 */
protected NewArrayNode createNewArrayFromVirtual(VirtualObjectNode virtual, ValueNode length) {
    return new NewArrayNode(((VirtualArrayNode) virtual).componentType(), length, true);
}

/**
 * Completes a commit: anchors each allocation, re-acquires any monitors recorded on the
 * virtual objects (in lock-depth order), reroutes remaining usages of the commit node, and
 * emits the allocation memory barrier.
 */
public void finishAllocatedObjects(LoweringTool tool, CommitAllocationNode commit, ValueNode[] allocations) {
    StructuredGraph graph = commit.graph();
    for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
        FixedValueAnchorNode anchor = graph.add(new FixedValueAnchorNode(allocations[objIndex]));
        allocations[objIndex] = anchor;
        graph.addBeforeFixed(commit, anchor);
    }
    /*
     * Note that the FrameState that is assigned to these MonitorEnterNodes isn't the correct
     * state. It will be the state from before the allocation occurred instead of a valid state
     * after the locking is performed. In practice this should be fine since these are newly
     * allocated objects. The bytecodes themselves permit allocating an object, doing a
     * monitorenter and then dropping all references to the object which would produce the same
     * state, though that would normally produce an IllegalMonitorStateException. In HotSpot
     * some form of fast path locking should always occur so the FrameState should never
     * actually be used.
     */
    ArrayList<MonitorEnterNode> enters = null;
    for (int objIndex = 0; objIndex < commit.getVirtualObjects().size(); objIndex++) {
        List<MonitorIdNode> locks = commit.getLocks(objIndex);
        if (locks.size() > 1) {
            // Ensure that the lock operations are performed in lock depth order
            ArrayList<MonitorIdNode> newList = new ArrayList<>(locks);
            newList.sort((a, b) -> Integer.compare(a.getLockDepth(), b.getLockDepth()));
            locks = newList;
        }
        int lastDepth = -1;
        for (MonitorIdNode monitorId : locks) {
            assert lastDepth < monitorId.getLockDepth();
            lastDepth = monitorId.getLockDepth();
            MonitorEnterNode enter = graph.add(new MonitorEnterNode(allocations[objIndex], monitorId));
            graph.addBeforeFixed(commit, enter);
            if (enters == null) {
                enters = new ArrayList<>();
            }
            enters.add(enter);
        }
    }
    for (Node usage : commit.usages().snapshot()) {
        if (usage instanceof AllocatedObjectNode) {
            AllocatedObjectNode addObject = (AllocatedObjectNode) usage;
            int index = commit.getVirtualObjects().indexOf(addObject.getVirtualObject());
            addObject.replaceAtUsagesAndDelete(allocations[index]);
        } else {
            // Remaining usages are memory edges; route them to the last monitor enter.
            assert enters != null;
            commit.replaceAtUsages(InputType.Memory, enters.get(enters.size() - 1));
        }
    }
    if (enters != null) {
        for (MonitorEnterNode enter : enters) {
            enter.lower(tool);
        }
    }
    assert commit.hasNoUsages();
    insertAllocationBarrier(commit, graph);
}

/**
 * Insert the required {@link MemoryBarriers#STORE_STORE} barrier for an allocation and also
 * include the {@link MemoryBarriers#LOAD_STORE} required for final fields if any final fields
 * are being written, as if {@link FinalFieldBarrierNode} were emitted.
 */
private static void insertAllocationBarrier(CommitAllocationNode commit, StructuredGraph graph) {
    int barrier = MemoryBarriers.STORE_STORE;
    outer: for (VirtualObjectNode vobj : commit.getVirtualObjects()) {
        for (ResolvedJavaField field : vobj.type().getInstanceFields(true)) {
            if (field.isFinal()) {
                barrier = barrier | MemoryBarriers.LOAD_STORE;
                break outer;
            }
        }
    }
    graph.addAfterFixed(commit, graph.add(new MembarNode(barrier, LocationIdentity.init())));
}

/**
 * GC barrier for a field load; reads never need one by default.
 *
 * @param field the field whose barrier type should be returned
 */
protected BarrierType fieldLoadBarrierType(ResolvedJavaField field) {
    return BarrierType.NONE;
}

/**
 * GC barrier for a field store: object stores need an imprecise (object-start) barrier.
 */
protected BarrierType fieldStoreBarrierType(ResolvedJavaField field) {
    if (field.getJavaKind() == JavaKind.Object) {
        return BarrierType.IMPRECISE;
    }
    return BarrierType.NONE;
}

/**
 * GC barrier for an array element store: object stores need a precise (exact-address)
 * barrier since the written slot is inside the array body.
 */
protected BarrierType arrayStoreBarrierType(JavaKind elementKind) {
    if (elementKind == JavaKind.Object) {
        return BarrierType.PRECISE;
    }
    return BarrierType.NONE;
}

/** GC barrier for initializing stores to instance fields of a fresh allocation. */
public BarrierType fieldInitializationBarrier(JavaKind entryKind) {
    return entryKind == JavaKind.Object ? BarrierType.IMPRECISE : BarrierType.NONE;
}

/** GC barrier for initializing stores to array elements of a fresh allocation. */
public BarrierType arrayInitializationBarrier(JavaKind entryKind) {
    return entryKind == JavaKind.Object ? BarrierType.PRECISE : BarrierType.NONE;
}

/**
 * GC barrier for an unsafe store: none if the node opted out of barriers, otherwise guessed
 * from the (possibly imprecise) static types of base object and value.
 */
private BarrierType unsafeStoreBarrierType(RawStoreNode store) {
    if (!store.needsBarrier()) {
        return BarrierType.NONE;
    }
    return guessStoreBarrierType(store.object(), store.value());
}

/**
 * Conservatively picks a write barrier from stamps alone: only object-into-object stores
 * need one, and an unknown or array-compatible base type must be treated as an array.
 */
private BarrierType guessStoreBarrierType(ValueNode object, ValueNode value) {
    if (value.getStackKind() == JavaKind.Object && object.getStackKind() == JavaKind.Object) {
        ResolvedJavaType type = StampTool.typeOrNull(object);
        // Array types must use a precise barrier, so if the type is unknown or is a supertype
        // of Object[] then treat it as an array.
        if (type == null || type.isArray() || type.isAssignableFrom(objectArrayType)) {
            return BarrierType.PRECISE;
        } else {
            return BarrierType.IMPRECISE;
        }
    }
    return BarrierType.NONE;
}

/** Returns the VM-specific byte offset of {@code field} within its holder. */
public abstract int fieldOffset(ResolvedJavaField field);

/** Location identity used for memory dependencies on a specific field. */
public FieldLocationIdentity fieldLocationIdentity(ResolvedJavaField field) {
    return new FieldLocationIdentity(field);
}

/** Returns the base node from which a static field is addressed. */
public abstract ValueNode staticFieldBase(StructuredGraph graph, ResolvedJavaField field);

/** Returns the VM-specific byte offset of the length word of an array. */
public abstract int arrayLengthOffset();

/** Computes the memory stamp for a load of {@code kind}, assuming a compressible value. */
public Stamp loadStamp(Stamp stamp, JavaKind kind) {
    return loadStamp(stamp, kind, true);
}

// Compressed oops apply only to object kinds that are both compressible and enabled by the VM.
private boolean useCompressedOops(JavaKind kind, boolean compressible) {
    return kind == JavaKind.Object && compressible && useCompressedOops;
}

/** Produces the narrow-oop stamp corresponding to an uncompressed object stamp. */
protected abstract Stamp loadCompressedStamp(ObjectStamp stamp);

/**
 * Computes the stamp of the value as it exists in memory: compressed for oops where
 * applicable, narrowed to 8/16 bits for sub-int kinds, otherwise unchanged.
 *
 * @param compressible whether the stamp should be compressible
 */
protected Stamp loadStamp(Stamp stamp, JavaKind kind, boolean compressible) {
    if (useCompressedOops(kind, compressible)) {
        return loadCompressedStamp((ObjectStamp) stamp);
    }

    switch (kind) {
        case Boolean:
        case Byte:
            return IntegerStamp.OPS.getNarrow().foldStamp(32, 8, stamp);
        case Char:
        case Short:
            return IntegerStamp.OPS.getNarrow().foldStamp(32, 16, stamp);
    }
    return stamp;
}

/** Load conversion assuming a compressible value; adds the result to {@code graph}. */
public final ValueNode implicitLoadConvert(StructuredGraph graph, JavaKind kind, ValueNode value) {
    return implicitLoadConvert(graph, kind, value, true);
}

/** Load conversion assuming a compressible value; result is not added to a graph. */
public ValueNode implicitLoadConvert(JavaKind kind, ValueNode value) {
    return implicitLoadConvert(kind, value, true);
}

/** Load conversion whose result is added to {@code graph} if it is a fresh node. */
protected final ValueNode implicitLoadConvert(StructuredGraph graph, JavaKind kind, ValueNode value, boolean compressible) {
    ValueNode ret = implicitLoadConvert(kind, value, compressible);
    if (!ret.isAlive()) {
        ret = graph.addOrUnique(ret);
    }
    return ret;
}

/** Creates the VM-specific compression/uncompression node for an oop. */
protected abstract ValueNode newCompressionNode(CompressionOp op, ValueNode value);

/**
 * Converts a raw memory value to its stack representation: uncompresses oops, sign-extends
 * byte/short and zero-extends boolean/char to 32 bits; other kinds pass through unchanged.
 *
 * @param compressible whether the convert should be compressible
 */
protected ValueNode implicitLoadConvert(JavaKind kind, ValueNode value, boolean compressible) {
    if (useCompressedOops(kind, compressible)) {
        return newCompressionNode(CompressionOp.Uncompress, value);
    }

    switch (kind) {
        case Byte:
        case Short:
            return new SignExtendNode(value, 32);
        case Boolean:
        case Char:
            return new ZeroExtendNode(value, 32);
    }
    return value;
}

/** Store conversion assuming a compressible value; adds the result to {@code graph}. */
public final ValueNode implicitStoreConvert(StructuredGraph graph, JavaKind kind, ValueNode value) {
    return implicitStoreConvert(graph, kind, value, true);
}

/** Store conversion assuming a compressible value; result is not added to a graph. */
public ValueNode implicitStoreConvert(JavaKind kind, ValueNode value) {
    return implicitStoreConvert(kind, value, true);
}

/** Store conversion whose result is added to {@code graph} if it is a fresh node. */
protected final ValueNode implicitStoreConvert(StructuredGraph graph, JavaKind kind, ValueNode value, boolean compressible) {
    ValueNode ret = implicitStoreConvert(kind, value, compressible);
    if (!ret.isAlive()) {
        ret = graph.addOrUnique(ret);
    }
    return ret;
}

/**
 * Converts a stack value to its memory representation: compresses oops, narrows
 * boolean/byte to 8 bits and char/short to 16 bits; other kinds pass through unchanged.
 *
 * @param compressible whether the convert should be compressible
 */
protected ValueNode implicitStoreConvert(JavaKind kind, ValueNode value, boolean compressible) {
    if (useCompressedOops(kind, compressible)) {
        return newCompressionNode(CompressionOp.Compress, value);
    }

    switch (kind) {
        case Boolean:
        case Byte:
            return new NarrowNode(value, 8);
        case Char:
        case Short:
            return new NarrowNode(value, 16);
    }
    return value;
}

/** Reads the hub (class pointer) of {@code object} in a VM-specific way. */
protected abstract ValueNode createReadHub(StructuredGraph graph, ValueNode object, LoweringTool tool);

/** Reads the component hub of an array hub in a VM-specific way. */
protected abstract ValueNode createReadArrayComponentHub(StructuredGraph graph, ValueNode arrayHub, FixedNode anchor);

/**
 * Clamps {@code index} into {@code [0, length - 1]} branchlessly, used as a speculative
 * execution (index masking) mitigation so an out-of-bounds index cannot address past the
 * array even transiently.
 */
protected ValueNode proxyIndex(AccessIndexedNode n, ValueNode index, ValueNode array, LoweringTool tool) {
    StructuredGraph graph = index.graph();
    ValueNode arrayLength = readOrCreateArrayLength(n, array, tool, graph);
    ValueNode lengthMinusOne = SubNode.create(arrayLength, ConstantNode.forInt(1), NodeView.DEFAULT);
    return branchlessMax(branchlessMin(index, lengthMinusOne, NodeView.DEFAULT), ConstantNode.forInt(0), NodeView.DEFAULT);
}

/**
 * Returns the bounds-check guard for an indexed access, creating one via an unsigned
 * {@code index < length} comparison if the node does not already carry one. Returns
 * {@code null} when the comparison is a tautology (no guard needed).
 */
protected GuardingNode getBoundsCheck(AccessIndexedNode n, ValueNode array, LoweringTool tool) {
    if (n.getBoundsCheck() != null) {
        return n.getBoundsCheck();
    }

    StructuredGraph graph = n.graph();
    ValueNode arrayLength = readOrCreateArrayLength(n, array, tool, graph);

    LogicNode boundsCheck = IntegerBelowNode.create(n.index(), arrayLength, NodeView.DEFAULT);
    if (boundsCheck.isTautology()) {
        return null;
    }
    return tool.createGuard(n, graph.addOrUniqueWithInputs(boundsCheck), BoundsCheckException, InvalidateReprofile);
}

/**
 * Obtains the array length: folded from a constant/known array when possible, otherwise as
 * an explicit length read; ensures the returned node is alive in {@code graph}.
 */
private ValueNode readOrCreateArrayLength(AccessIndexedNode n, ValueNode array, LoweringTool tool, StructuredGraph graph) {
    ValueNode arrayLength = readArrayLength(array, tool.getConstantReflection());
    if (arrayLength == null) {
        arrayLength = createReadArrayLength(array, n, tool);
    } else {
        arrayLength = arrayLength.isAlive() ? arrayLength : graph.addOrUniqueWithInputs(arrayLength);
    }
    return arrayLength;
}

/**
 * Creates a null-check guard for {@code object} before {@code before}, or returns
 * {@code null} if the stamp already proves non-nullness.
 */
protected GuardingNode createNullCheck(ValueNode object, FixedNode before, LoweringTool tool) {
    if (StampTool.isPointerNonNull(object)) {
        return null;
    }
    return tool.createGuard(before, before.graph().unique(IsNullNode.create(object)), NullCheckException, InvalidateReprofile, SpeculationLog.NO_SPECULATION, true, null);
}

/**
 * Returns {@code object} with a non-null stamp: the object itself if already proven
 * non-null, otherwise a {@link PiNode} pinned to a fresh null-check guard.
 */
protected ValueNode createNullCheckedValue(ValueNode object, FixedNode before, LoweringTool tool) {
    GuardingNode nullCheck = createNullCheck(object, before, tool);
    if (nullCheck == null) {
        return object;
    }
    return before.graph().maybeAddOrUnique(PiNode.create(object, (object.stamp(NodeView.DEFAULT)).join(StampFactory.objectNonNull()), (ValueNode) nullCheck));
}

/**
 * Recovers the element index from a lowered array address: subtracts the array base offset
 * from the offset and shifts right by log2 of the element scale.
 */
@Override
public ValueNode reconstructArrayIndex(JavaKind elementKind, AddressNode address) {
    StructuredGraph graph = address.graph();
    ValueNode offset = ((OffsetAddressNode) address).getOffset();

    int base = metaAccess.getArrayBaseOffset(elementKind);
    ValueNode scaledIndex = graph.unique(new SubNode(offset, ConstantNode.forIntegerStamp(offset.stamp(NodeView.DEFAULT), base, graph)));

    int shift = CodeUtil.log2(metaAccess.getArrayIndexScale(elementKind));
    ValueNode ret = graph.unique(new RightShiftNode(scaledIndex, ConstantNode.forInt(shift, graph)));
    return IntegerConvertNode.convert(ret, StampFactory.forKind(JavaKind.Int), graph, NodeView.DEFAULT);
}
}