369 return null_check_common(value, T_LONG);
370 }
371 // Throw an uncommon trap if a given value is __not__ null.
372 // Return the value cast to null, and be clever about dominating checks.
373 Node* null_assert(Node* value, BasicType type = T_OBJECT) {
374 return null_check_common(value, type, true);
375 }
376
377 // Null check oop. Return null-path control into (*null_control).
378 // Return a cast-not-null node which depends on the not-null control.
379 // If never_see_null, use an uncommon trap (*null_control sees a top).
380 // The cast is not valid along the null path; keep a copy of the original.
381 // If safe_for_replace, then we can replace the value with the cast
382 // in the parsing map (the cast is guaranteed to dominate the map)
383 Node* null_check_oop(Node* value, Node* *null_control,
384 bool never_see_null = false, bool safe_for_replace = false);
385
386 // Check the null_seen bit: true if profiling suggests obj was never
386 // observed null at this site (NOTE(review): presumably read from the
386 // profile data passed in — confirm against the definition).
387 bool seems_never_null(Node* obj, ciProfileData* data);
388
389 // Use the type profile to narrow an object type.
390 Node* maybe_cast_profiled_receiver(Node* not_null_obj,
391 ciProfileData* data,
392 ciKlass* require_klass);
393
394 // Cast obj to not-null on this path (do_replace_in_map: also swap the
394 // cast into the current parsing map so later uses see the narrowed value).
395 Node* cast_not_null(Node* obj, bool do_replace_in_map = true);
396 // Replace all occurrences of one node by another.
397 void replace_in_map(Node* old, Node* neww);
398
399 void push(Node* n) { map_not_null(); _map->set_stack(_map->_jvms, _sp++ , n); }
400 Node* pop() { map_not_null(); return _map->stack( _map->_jvms, --_sp ); }
401 Node* peek(int off = 0) { map_not_null(); return _map->stack( _map->_jvms, _sp - off - 1 ); }
402
403 void push_pair(Node* ldval) {
404 push(ldval);
405 push(top()); // the halfword is merely a placeholder
406 }
407 void push_pair_local(int i) {
408 // longs are stored in locals in "push" order
409 push( local(i+0) ); // the real value
410 assert(local(i+1) == top(), "");
411 push(top()); // halfword placeholder
412 }
758 RC_LEAF = 0 // null value: no flags set
759 };
760
761 // merge in all memory slices from new_mem, along the given path
762 void merge_memory(Node* new_mem, Node* region, int new_path);
763 void make_slow_call_ex(Node* call, ciInstanceKlass* ex_klass, bool separate_io_proj);
764
765 // Helper functions to build synchronizations
766 int next_monitor();
767 Node* insert_mem_bar(int opcode, Node* precedent = NULL);
768 Node* insert_mem_bar_volatile(int opcode, int alias_idx, Node* precedent = NULL);
769 // Optional 'precedent' is appended as an extra edge, to force ordering.
770 FastLockNode* shared_lock(Node* obj);
771 void shared_unlock(Node* box, Node* obj);
772
773 // helper functions for the fast path/slow path idioms
774 Node* fast_and_slow(Node* in, const Type *result_type, Node* null_result, IfNode* fast_test, Node* fast_result, address slow_call, const TypeFunc *slow_call_type, Node* slow_arg, Klass* ex_klass, Node* slow_result);
775
776 // Generate an instance-of idiom. Used by both the instance-of bytecode
777 // and the reflective instance-of call.
778 Node* gen_instanceof( Node *subobj, Node* superkls );
779
780 // Generate a check-cast idiom. Used by both the check-cast bytecode
781 // and the array-store bytecode
782 Node* gen_checkcast( Node *subobj, Node* superkls,
783 Node* *failure_control = NULL );
784
785 // Generate a subtyping check. Takes as input the subtype and supertype.
786 // Returns 2 values: sets the default control() to the true path and
787 // returns the false path. Only reads from constant memory taken from the
788 // default memory; does not write anything. It also doesn't take in an
789 // Object; if you wish to check an Object you need to load the Object's
790 // class prior to coming here.
791 Node* gen_subtype_check(Node* subklass, Node* superklass);
792
793 // Static parse-time type checking logic for gen_subtype_check:
793 // SSC_always_* are decided entirely at parse time; SSC_easy_test and
793 // SSC_full_test presumably still require a runtime check — see the
793 // definition of static_subtype_check for the exact criteria.
794 enum { SSC_always_false, SSC_always_true, SSC_easy_test, SSC_full_test };
795 int static_subtype_check(ciKlass* superk, ciKlass* subk);
796
797 // Exact type check used for predicted calls and casts.
798 // Rewrites (*casted_receiver) to be casted to the stronger type.
|
369 return null_check_common(value, T_LONG);
370 }
371 // Throw an uncommon trap if a given value is __not__ null.
372 // Return the value cast to null, and be clever about dominating checks.
373 Node* null_assert(Node* value, BasicType type = T_OBJECT) {
374 return null_check_common(value, type, true);
375 }
376
377 // Null check oop. Return null-path control into (*null_control).
378 // Return a cast-not-null node which depends on the not-null control.
379 // If never_see_null, use an uncommon trap (*null_control sees a top).
380 // The cast is not valid along the null path; keep a copy of the original.
381 // If safe_for_replace, then we can replace the value with the cast
382 // in the parsing map (the cast is guaranteed to dominate the map)
383 Node* null_check_oop(Node* value, Node* *null_control,
384 bool never_see_null = false, bool safe_for_replace = false);
385
386 // Check the null_seen bit: true if profiling suggests obj was never
386 // observed null at this site (NOTE(review): presumably read from the
386 // profile data passed in — confirm against the definition).
387 bool seems_never_null(Node* obj, ciProfileData* data);
388
389 // At a call, if profiling tells us there's a unique klass for the
390 // receiver, return it.
391 ciKlass* profile_has_unique_klass() {
392 ciCallProfile profile = method()->call_profile_at_bci(bci());
393 if (profile.count() >= 0 && // no cast failures here
394 profile.has_receiver(0) &&
395 profile.morphism() == 1) {
396 return profile.receiver(0);
397 }
398 return NULL;
399 }
400
401 // Record profiling data exact_kls for Node n with the type system
402 // so that it can propagate it (speculation)
403 Node* record_profile_for_speculation(Node* n, ciKlass* exact_kls);
404 // Record profiling data from receiver profiling at an invoke with
405 // the type system so that it can propagate it (speculation)
406 Node* record_profiled_receiver_for_speculation(Node* n);
407 // Record profiling data from argument profiling at an invoke with
408 // the type system so that it can propagate it (speculation)
409 void record_profiled_arguments_for_speculation(ciMethod* dest_method, Bytecodes::Code bc);
410 // Record profiling data from parameter profiling at a method entry with
411 // the type system so that it can propagate it (speculation)
412 void record_profiled_parameters_for_speculation();
413
414 // Use the type profile to narrow an object type.
415 Node* maybe_cast_profiled_receiver(Node* not_null_obj,
416 ciKlass* require_klass,
417 ciKlass* spec,
418 bool safe_for_replace);
419
420 // Cast obj to type and emit guard unless we had too many traps here already
421 Node* maybe_cast_profiled_obj(Node* obj,
422 ciKlass* type,
423 bool not_null = false);
424
425 // Cast obj to not-null on this path (do_replace_in_map: also swap the
425 // cast into the current parsing map so later uses see the narrowed value).
426 Node* cast_not_null(Node* obj, bool do_replace_in_map = true);
427 // Replace all occurrences of one node by another.
428 void replace_in_map(Node* old, Node* neww);
429
430 void push(Node* n) { map_not_null(); _map->set_stack(_map->_jvms, _sp++ , n); }
431 Node* pop() { map_not_null(); return _map->stack( _map->_jvms, --_sp ); }
432 Node* peek(int off = 0) { map_not_null(); return _map->stack( _map->_jvms, _sp - off - 1 ); }
433
434 void push_pair(Node* ldval) {
435 push(ldval);
436 push(top()); // the halfword is merely a placeholder
437 }
438 void push_pair_local(int i) {
439 // longs are stored in locals in "push" order
440 push( local(i+0) ); // the real value
441 assert(local(i+1) == top(), "");
442 push(top()); // halfword placeholder
443 }
789 RC_LEAF = 0 // null value: no flags set
790 };
791
792 // merge in all memory slices from new_mem, along the given path
793 void merge_memory(Node* new_mem, Node* region, int new_path);
794 void make_slow_call_ex(Node* call, ciInstanceKlass* ex_klass, bool separate_io_proj);
795
796 // Helper functions to build synchronizations
797 int next_monitor();
798 Node* insert_mem_bar(int opcode, Node* precedent = NULL);
799 Node* insert_mem_bar_volatile(int opcode, int alias_idx, Node* precedent = NULL);
800 // Optional 'precedent' is appended as an extra edge, to force ordering.
801 FastLockNode* shared_lock(Node* obj);
802 void shared_unlock(Node* box, Node* obj);
803
804 // helper functions for the fast path/slow path idioms
805 Node* fast_and_slow(Node* in, const Type *result_type, Node* null_result, IfNode* fast_test, Node* fast_result, address slow_call, const TypeFunc *slow_call_type, Node* slow_arg, Klass* ex_klass, Node* slow_result);
806
807 // Generate an instance-of idiom. Used by both the instance-of bytecode
808 // and the reflective instance-of call.
809 Node* gen_instanceof(Node *subobj, Node* superkls, bool safe_for_replace = false);
810
811 // Generate a check-cast idiom. Used by both the check-cast bytecode
812 // and the array-store bytecode
813 Node* gen_checkcast( Node *subobj, Node* superkls,
814 Node* *failure_control = NULL );
815
816 // Generate a subtyping check. Takes as input the subtype and supertype.
817 // Returns 2 values: sets the default control() to the true path and
818 // returns the false path. Only reads from constant memory taken from the
819 // default memory; does not write anything. It also doesn't take in an
820 // Object; if you wish to check an Object you need to load the Object's
821 // class prior to coming here.
822 Node* gen_subtype_check(Node* subklass, Node* superklass);
823
824 // Static parse-time type checking logic for gen_subtype_check:
824 // SSC_always_* are decided entirely at parse time; SSC_easy_test and
824 // SSC_full_test presumably still require a runtime check — see the
824 // definition of static_subtype_check for the exact criteria.
825 enum { SSC_always_false, SSC_always_true, SSC_easy_test, SSC_full_test };
826 int static_subtype_check(ciKlass* superk, ciKlass* subk);
827
828 // Exact type check used for predicted calls and casts.
829 // Rewrites (*casted_receiver) to be casted to the stronger type.
|