/*
 * Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "ci/ciMethod.hpp"
#include "classfile/javaClasses.hpp"
#include "opto/callGenerator.hpp"
#include "opto/castnode.hpp"
#include "opto/convertnode.hpp"
#include "opto/graphKit.hpp"
#include "opto/intrinsicnode.hpp"
#include "opto/movenode.hpp"

class LibraryIntrinsic : public InlineCallGenerator {
  // Extend the set of intrinsics known to the runtime:
 private:
  bool             _is_virtual;            // true for a virtual call site
  bool             _does_virtual_dispatch; // the intrinsic body performs its own virtual dispatch
  int8_t           _predicates_count;      // Intrinsic is predicated by several conditions
  int8_t           _last_predicate;        // Last generated predicate
  vmIntrinsics::ID _intrinsic_id;

 public:
  LibraryIntrinsic(ciMethod* m, bool is_virtual, int predicates_count, bool does_virtual_dispatch, vmIntrinsics::ID id)
    : InlineCallGenerator(m),
      _is_virtual(is_virtual),
      _does_virtual_dispatch(does_virtual_dispatch),
      _predicates_count((int8_t)predicates_count),
      _last_predicate((int8_t)-1),
      _intrinsic_id(id)
  {
  }
  virtual bool is_intrinsic() const { return true; }
  virtual bool is_virtual()   const { return _is_virtual; }
  virtual bool is_predicated() const { return _predicates_count > 0; }
  virtual int  predicates_count() const { return _predicates_count; }
  virtual bool does_virtual_dispatch()   const { return _does_virtual_dispatch; }
  virtual JVMState* generate(JVMState* jvms);
  virtual Node* generate_predicate(JVMState* jvms, int predicate);
  vmIntrinsics::ID intrinsic_id() const { return _intrinsic_id; }
};


// Local helper class for LibraryIntrinsic:
class LibraryCallKit : public GraphKit {
 private:
  LibraryIntrinsic* _intrinsic;     // the library intrinsic being called
  Node*             _result;        // the result node, if any
  int               _reexecute_sp;  // the stack pointer when bytecode needs to be reexecuted

  const TypeOopPtr* sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr *adr_type);

 public:
  LibraryCallKit(JVMState* jvms, LibraryIntrinsic* intrinsic)
    : GraphKit(jvms),
      _intrinsic(intrinsic),
      _result(NULL)
  {
    // Check if this is a root compile.  In that case we don't have a caller.
    if (!jvms->has_method()) {
      _reexecute_sp = sp();
    } else {
      // Find out how many arguments the interpreter needs when deoptimizing
      // and save the stack pointer value so it can be used by uncommon_trap.
      // We find the argument count by looking at the declared signature.
      bool ignored_will_link;
      ciSignature* declared_signature = NULL;
      ciMethod* ignored_callee = caller()->get_method_at_bci(bci(), ignored_will_link, &declared_signature);
      const int nargs = declared_signature->arg_size_for_bc(caller()->java_code_at_bci(bci()));
      _reexecute_sp = sp() + nargs;  // "push" arguments back on stack
    }
  }

  virtual LibraryCallKit* is_LibraryCallKit() const { return (LibraryCallKit*)this; }

  ciMethod*         caller()    const    { return jvms()->method(); }
  int               bci()       const    { return jvms()->bci(); }
  LibraryIntrinsic* intrinsic() const    { return _intrinsic; }
  vmIntrinsics::ID  intrinsic_id() const { return _intrinsic->intrinsic_id(); }
  ciMethod*         callee()    const    { return _intrinsic->method(); }

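  // Attempt to generate the intrinsic body, or one of its guarding predicates;
  // invoked from LibraryIntrinsic::generate() and generate_predicate().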
  bool  try_to_inline(int predicate);
  Node* try_to_predicate(int predicate);

  void push_result() {
    // Push the result onto the stack.
    if (!stopped() && result() != NULL) {
      BasicType bt = result()->bottom_type()->basic_type();
      push_node(bt, result());
    }
  }

 private:
  void fatal_unexpected_iid(vmIntrinsics::ID iid) {
    fatal("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid));
  }

  void  set_result(Node* n) { assert(_result == NULL, "only set once"); _result = n; }
  void  set_result(RegionNode* region, PhiNode* value);
  Node*     result() { return _result; }

  virtual int reexecute_sp() { return _reexecute_sp; }

  // Helper functions to inline natives
  Node* generate_guard(Node* test, RegionNode* region, float true_prob);
  Node* generate_slow_guard(Node* test, RegionNode* region);
  Node* generate_fair_guard(Node* test, RegionNode* region);
  Node* generate_negative_guard(Node* index, RegionNode* region,
                                // resulting CastII of index:
                                Node* *pos_index = NULL);
  Node* generate_limit_guard(Node* offset, Node* subseq_length,
                             Node* array_length,
                             RegionNode* region);
  void  generate_string_range_check(Node* array, Node* offset,
                                    Node* length, bool char_count);
  Node* generate_current_thread(Node* &tls_output);
  Node* load_mirror_from_klass(Node* klass);
  Node* load_klass_from_mirror_common(Node* mirror, bool never_see_null,
                                      RegionNode* region, int null_path,
                                      int offset);
  Node* load_klass_from_mirror(Node* mirror, bool never_see_null,
                               RegionNode* region, int null_path) {
    int offset = java_lang_Class::klass_offset();
    return load_klass_from_mirror_common(mirror, never_see_null,
                                         region, null_path,
                                         offset);
  }
  Node* load_array_klass_from_mirror(Node* mirror, bool never_see_null,
                                     RegionNode* region, int null_path) {
    int offset = java_lang_Class::array_klass_offset();
    return load_klass_from_mirror_common(mirror, never_see_null,
                                         region, null_path,
                                         offset);
  }
  Node* generate_access_flags_guard(Node* kls,
                                    int modifier_mask, int modifier_bits,
                                    RegionNode* region);
  Node* generate_interface_guard(Node* kls, RegionNode* region);
  Node* generate_hidden_class_guard(Node* kls, RegionNode* region);
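  // Array-shape guards: each variant delegates to generate_array_guard_common(),
  // with the two booleans selecting object-array vs. any-array and whether the
  // test is negated (the "non_" variants).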
  Node* generate_array_guard(Node* kls, RegionNode* region) {
    return generate_array_guard_common(kls, region, false, false);
  }
  Node* generate_non_array_guard(Node* kls, RegionNode* region) {
    return generate_array_guard_common(kls, region, false, true);
  }
  Node* generate_objArray_guard(Node* kls, RegionNode* region) {
    return generate_array_guard_common(kls, region, true, false);
  }
  Node* generate_non_objArray_guard(Node* kls, RegionNode* region) {
    return generate_array_guard_common(kls, region, true, true);
  }
  Node* generate_array_guard_common(Node* kls, RegionNode* region,
                                    bool obj_array, bool not_array);
  Node* generate_virtual_guard(Node* obj_klass, RegionNode* slow_region);
  CallJavaNode* generate_method_call(vmIntrinsics::ID method_id,
                                     bool is_virtual = false, bool is_static = false);
  CallJavaNode* generate_method_call_static(vmIntrinsics::ID method_id) {
    return generate_method_call(method_id, false, true);
  }
  CallJavaNode* generate_method_call_virtual(vmIntrinsics::ID method_id) {
    return generate_method_call(method_id, true, false);
  }
  Node* load_field_from_object(Node* fromObj, const char* fieldName, const char* fieldTypeString,
                               bool is_exact, bool is_static, ciInstanceKlass* fromKls);
  Node* field_address_from_object(Node* fromObj, const char* fieldName, const char* fieldTypeString,
                                  bool is_exact, bool is_static, ciInstanceKlass* fromKls);

  Node* make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2, StrIntrinsicNode::ArgEnc ae);
  bool inline_string_compareTo(StrIntrinsicNode::ArgEnc ae);
  bool inline_string_indexOf(StrIntrinsicNode::ArgEnc ae);
  bool inline_string_indexOfI(StrIntrinsicNode::ArgEnc ae);
  Node* make_indexOf_node(Node* src_start, Node* src_count, Node* tgt_start, Node* tgt_count,
                          RegionNode* region, Node* phi, StrIntrinsicNode::ArgEnc ae);
  bool inline_string_indexOfChar();
  bool inline_string_equals(StrIntrinsicNode::ArgEnc ae);
  bool inline_string_toBytesU();
  bool inline_string_getCharsU();
  bool inline_string_copy(bool compress);
  bool inline_string_char_access(bool is_store);
  Node* round_double_node(Node* n);
  bool runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName);
  bool inline_math_native(vmIntrinsics::ID id);
  bool inline_math(vmIntrinsics::ID id);
  bool inline_double_math(vmIntrinsics::ID id);
  template <typename OverflowOp>
  bool inline_math_overflow(Node* arg1, Node* arg2);
  void inline_math_mathExact(Node* math, Node* test);
  bool inline_math_addExactI(bool is_increment);
  bool inline_math_addExactL(bool is_increment);
  bool inline_math_multiplyExactI();
  bool inline_math_multiplyExactL();
  bool inline_math_multiplyHigh();
  bool inline_math_negateExactI();
  bool inline_math_negateExactL();
  bool inline_math_subtractExactI(bool is_decrement);
  bool inline_math_subtractExactL(bool is_decrement);
  bool inline_min_max(vmIntrinsics::ID id);
  bool inline_notify(vmIntrinsics::ID id);
  Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
  // This returns Type::AnyPtr, RawPtr, or OopPtr.
  int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
  Node* make_unsafe_address(Node*& base, Node* offset, DecoratorSet decorators, BasicType type = T_ILLEGAL, bool can_cast = false);

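  // Memory ordering for Unsafe/VarHandle-style accesses; translated into
  // barrier decorators by mo_decorator_for_access_kind().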
  typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
  DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
  bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
  static bool klass_needs_init_guard(Node* kls);
  bool inline_unsafe_allocate();
  bool inline_unsafe_newArray(bool uninitialized);
  bool inline_unsafe_writeback0();
  bool inline_unsafe_writebackSync0(bool is_pre);
  bool inline_unsafe_copyMemory();
  bool inline_native_currentThread();

  bool inline_native_time_funcs(address method, const char* funcName);
#ifdef JFR_HAVE_INTRINSICS
  bool inline_native_classID();
  bool inline_native_getEventWriter();
#endif
  bool inline_native_Class_query(vmIntrinsics::ID id);
  bool inline_native_subtype_check();
  bool inline_native_getLength();
  bool inline_array_copyOf(bool is_copyOfRange);
  bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
  bool inline_preconditions_checkIndex();
  void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array);
  bool inline_native_clone(bool is_virtual);
  bool inline_native_Reflection_getCallerClass();
  // Helper function for inlining native object hash method
  bool inline_native_hashcode(bool is_virtual, bool is_static);
  bool inline_native_getClass();

  // Helper functions for inlining arraycopy
  bool inline_arraycopy();
  AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
                                                RegionNode* slow_region);
  JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
  void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms, int saved_reexecute_sp,
                                      uint new_idx);

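  // Kinds of atomic load-store intrinsics: get-and-add, get-and-set,
  // compare-and-swap (strong and weak), and compare-and-exchange.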
  typedef enum { LS_get_add, LS_get_set, LS_cmp_swap, LS_cmp_swap_weak, LS_cmp_exchange } LoadStoreKind;
  bool inline_unsafe_load_store(BasicType type, LoadStoreKind kind, AccessKind access_kind);
  bool inline_unsafe_fence(vmIntrinsics::ID id);
  bool inline_onspinwait();
  bool inline_fp_conversions(vmIntrinsics::ID id);
  bool inline_number_methods(vmIntrinsics::ID id);
  bool inline_reference_get();
  bool inline_Class_cast();
  bool inline_aescrypt_Block(vmIntrinsics::ID id);
  bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
  bool inline_electronicCodeBook_AESCrypt(vmIntrinsics::ID id);
  bool inline_counterMode_AESCrypt(vmIntrinsics::ID id);
  Node* inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting);
  Node* inline_electronicCodeBook_AESCrypt_predicate(bool decrypting);
  Node* inline_counterMode_AESCrypt_predicate();
  Node* get_key_start_from_aescrypt_object(Node* aescrypt_object);
  Node* get_original_key_start_from_aescrypt_object(Node* aescrypt_object);
  bool inline_ghash_processBlocks();
  bool inline_base64_encodeBlock();
  bool inline_digestBase_implCompress(vmIntrinsics::ID id);
  bool inline_digestBase_implCompressMB(int predicate);
  bool inline_digestBase_implCompressMB(Node* digestBaseObj, ciInstanceKlass* instklass,
                                        bool long_state, address stubAddr, const char *stubName,
                                        Node* src_start, Node* ofs, Node* limit);
  Node* get_state_from_digest_object(Node *digestBase_object);
  Node* get_long_state_from_digest_object(Node *digestBase_object);
  Node* inline_digestBase_implCompressMB_predicate(int predicate);
  bool inline_encodeISOArray();
  bool inline_updateCRC32();
  bool inline_updateBytesCRC32();
  bool inline_updateByteBufferCRC32();
  Node* get_table_from_crc32c_class(ciInstanceKlass *crc32c_class);
  bool inline_updateBytesCRC32C();
  bool inline_updateDirectByteBufferCRC32C();
  bool inline_updateBytesAdler32();
  bool inline_updateByteBufferAdler32();
  bool inline_multiplyToLen();
  bool inline_hasNegatives();
  bool inline_squareToLen();
  bool inline_mulAdd();
  bool inline_montgomeryMultiply();
  bool inline_montgomerySquare();
  bool inline_bigIntegerShift(bool isRightShift);
  bool inline_vectorizedMismatch();
  bool inline_fma(vmIntrinsics::ID id);
  bool inline_character_compare(vmIntrinsics::ID id);
  bool inline_fp_min_max(vmIntrinsics::ID id);

  bool inline_profileBoolean();
  bool inline_isCompileConstant();

  // Vector API support
  bool inline_vector_nary_operation(int n);
  bool inline_vector_broadcast_coerced();
  bool inline_vector_shuffle_to_vector();
  bool inline_vector_shuffle_iota();
  bool inline_vector_mem_operation(bool is_store);
  bool inline_vector_gather_scatter(bool is_scatter);
  bool inline_vector_reduction();
  bool inline_vector_test();
  bool inline_vector_blend();
  bool inline_vector_rearrange();
  bool inline_vector_compare();
  bool inline_vector_broadcast_int();
  bool inline_vector_convert();
  bool inline_vector_extract();
  bool inline_vector_insert();
  Node* box_vector(Node* in, const TypeInstPtr* vbox_type, BasicType bt, int num_elem);
  Node* unbox_vector(Node* in, const TypeInstPtr* vbox_type, BasicType bt, int num_elem, bool shuffle_to_vector = false);
  Node* shift_count(Node* cnt, int shift_op, BasicType bt, int num_elem);

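  // How a vector operation consumes its mask operand (if any); passed to
  // arch_supports_vector() when probing backend support.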
  enum VectorMaskUseType {
    VecMaskUseLoad,
    VecMaskUseStore,
    VecMaskUseAll,
    VecMaskNotUsed
  };

  bool arch_supports_vector(int op, int num_elem, BasicType type, VectorMaskUseType mask_use_type, bool has_scalar_args = false);

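  // On x86 with AVX2 or higher, request that the upper halves of the AVX
  // registers be cleared (vzeroupper) to avoid AVX-SSE transition penalties.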
  void clear_upper_avx() {
#ifdef X86
    if (UseAVX >= 2) {
      C->set_clear_upper_avx(true);
    }
#endif
  }
};
