src/hotspot/share/code/compiledMethod.hpp
changeset 52385 5c679ec60888
parent 51591 9183040e34d8
child 52405 c0c6cdea32f1
--- a/src/hotspot/share/code/compiledMethod.hpp	52384:d6dc479bcdd3
+++ b/src/hotspot/share/code/compiledMethod.hpp	52385:5c679ec60888
@@ -145,10 +145,13 @@
 
   MarkForDeoptimizationStatus _mark_for_deoptimization_status; // Used for stack deoptimization
 
   bool _is_far_code; // Code is far from CodeCache.
                      // Have to use far call instructions to call it from code in CodeCache.
+
+  volatile uint8_t _is_unloading_state;      // Local state used to keep track of whether unloading is happening or not
+
   // set during construction
   unsigned int _has_unsafe_access:1;         // May fault due to unsafe access.
   unsigned int _has_method_handle_invokes:1; // Has this method MethodHandle invokes?
   unsigned int _lazy_critical_native:1;      // Lazy JNI critical native
   unsigned int _has_wide_vectors:1;          // Preserve wide vectors at safepoints
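For illustration, a minimal sketch of how a one-byte unloading state like the new _is_unloading_state field could be encoded (the names and bit layout below are assumptions for illustration; the actual encoding lives in compiledMethod.cpp, not in this header):

#include <cstdint>

// Hypothetical encoding: bit 0 caches the is_unloading() answer and the
// remaining bits record which unloading cycle the answer was computed in,
// so a stale answer can be recomputed lazily in the next cycle.
struct IsUnloadingStateSketch {
  static uint8_t encode(bool is_unloading, uint8_t unloading_cycle) {
    return (uint8_t)((is_unloading ? 1u : 0u) | (unloading_cycle << 1));
  }
  static bool    is_unloading(uint8_t state)    { return (state & 1u) != 0; }
  static uint8_t unloading_cycle(uint8_t state) { return (uint8_t)(state >> 1); }
};

Because the whole state fits in one byte, it can be read and published with a single access, which is presumably why the field is declared as a single volatile uint8_t.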
@@ -200,11 +203,11 @@
   virtual int   comp_level() const = 0;
   virtual int   compile_id() const = 0;
 
   virtual address verified_entry_point() const = 0;
   virtual void log_identity(xmlStream* log) const = 0;
-  virtual void log_state_change(oop cause = NULL) const = 0;
+  virtual void log_state_change() const = 0;
   virtual bool make_not_used() = 0;
   virtual bool make_not_entrant() = 0;
   virtual bool make_entrant() = 0;
   virtual address entry_point() const = 0;
   virtual bool make_zombie() = 0;
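For illustration, a small sketch of the state-change API after this hunk (class and stream names are placeholders, not HotSpot code): log_state_change no longer receives a cause oop, so an override just logs the transition itself:

#include <iostream>

struct XmlStreamStub {};  // placeholder for HotSpot's xmlStream

class CompiledMethodSketch {
 public:
  virtual ~CompiledMethodSketch() {}
  virtual void log_identity(XmlStreamStub* log) const = 0;
  virtual void log_state_change() const = 0;  // no 'oop cause' parameter anymore
  virtual bool make_not_entrant() = 0;
};

class NMethodSketch : public CompiledMethodSketch {
 public:
  void log_identity(XmlStreamStub*) const override {}
  void log_state_change() const override { std::cout << "made not entrant\n"; }
  bool make_not_entrant() override {
    log_state_change();  // callers no longer thread a cause oop through
    return true;
  }
};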
@@ -331,21 +334,17 @@
   // implicit exceptions support
   virtual address continuation_for_implicit_exception(address pc) { return NULL; }
 
   static address get_deopt_original_pc(const frame* fr);
 
-  // GC unloading support
-  // Cleans unloaded klasses and unloaded nmethods in inline caches
-  bool unload_nmethod_caches(bool parallel, bool class_unloading_occurred);
-
   // Inline cache support for class unloading and nmethod unloading
  private:
-  bool cleanup_inline_caches_impl(bool parallel, bool unloading_occurred, bool clean_all);
+  void cleanup_inline_caches_impl(bool unloading_occurred, bool clean_all);
  public:
-  bool cleanup_inline_caches(bool clean_all = false) {
+  void cleanup_inline_caches(bool clean_all) {
     // Serial version used by sweeper and whitebox test
-    return cleanup_inline_caches_impl(false, false, clean_all);
+    cleanup_inline_caches_impl(false, clean_all);
   }
 
   virtual void clear_inline_caches();
   void clear_ic_stubs();
 
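For illustration, a sketch of what the new inline-cache cleanup entry points imply for callers (standalone stand-in class, not HotSpot code): the parallel flag and the bool result are gone, and since the default argument was dropped, sweeper-style callers now pass clean_all explicitly:

// Stand-in mirroring the serial wrapper shown above.
class CompiledMethodCleanupSketch {
 public:
  void cleanup_inline_caches(bool clean_all) {
    cleanup_inline_caches_impl(false /* unloading_occurred */, clean_all);
  }
 private:
  void cleanup_inline_caches_impl(bool unloading_occurred, bool clean_all) {
    // walk relocations and clean inline caches; no bool result to propagate anymore
    (void)unloading_occurred; (void)clean_all;
  }
};

void sweep_one(CompiledMethodCleanupSketch* cm) {
  cm->cleanup_inline_caches(false /* clean_all */);  // was: cm->cleanup_inline_caches();
}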
@@ -371,55 +370,34 @@
   Method* attached_method_before_pc(address pc);
 
   virtual void metadata_do(void f(Metadata*)) = 0;
 
   // GC support
-
-  void set_unloading_next(CompiledMethod* next) { _unloading_next = next; }
-  CompiledMethod* unloading_next()              { return _unloading_next; }
-
  protected:
   address oops_reloc_begin() const;
+
  private:
   void static clean_ic_if_metadata_is_dead(CompiledIC *ic);
 
   void clean_ic_stubs();
 
  public:
-  virtual void do_unloading(BoolObjectClosure* is_alive);
-  //  The parallel versions are used by G1.
-  virtual bool do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred);
-  virtual void do_unloading_parallel_postponed();
-
-  static unsigned char global_unloading_clock()   { return _global_unloading_clock; }
-  static void increase_unloading_clock();
-
-  void set_unloading_clock(unsigned char unloading_clock);
-  unsigned char unloading_clock();
-
-protected:
-  virtual bool do_unloading_oops(address low_boundary, BoolObjectClosure* is_alive) = 0;
-#if INCLUDE_JVMCI
-  virtual bool do_unloading_jvmci() = 0;
-#endif
+  // GC unloading support
+  // Cleans unloaded klasses and unloaded nmethods in inline caches
+
+  bool is_unloading();
+
+  void unload_nmethod_caches(bool class_unloading_occurred);
+  void clear_unloading_state();
+  virtual void do_unloading(bool unloading_occurred) { }
 
 private:
-  // GC support to help figure out if an nmethod has been
-  // cleaned/unloaded by the current GC.
-  static unsigned char _global_unloading_clock;
-
-  volatile unsigned char _unloading_clock;   // Incremented after GC unloaded/cleaned the nmethod
-
   PcDesc* find_pc_desc(address pc, bool approximate) {
     return _pc_desc_container.find_pc_desc(pc, approximate, PcDescSearch(code_begin(), scopes_pcs_begin(), scopes_pcs_end()));
   }
 
 protected:
-  union {
-    // Used by G1 to chain nmethods.
-    CompiledMethod* _unloading_next;
-    // Used by non-G1 GCs to chain nmethods.
-    nmethod* _scavenge_root_link; // from CodeCache::scavenge_root_nmethods
-  };
+  // Used by some GCs to chain nmethods.
+  nmethod* _scavenge_root_link; // from CodeCache::scavenge_root_nmethods
 };
 
 #endif //SHARE_VM_CODE_COMPILEDMETHOD_HPP
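For illustration, a hedged sketch of how a GC pass might drive the reworked unloading interface (interface and helper names below are placeholders; the actual call sites are in the GC code, not in this header): dead methods are detected with is_unloading(), live ones have their caches cleaned via do_unloading/unload_nmethod_caches, and clear_unloading_state() resets the cached answer for the next cycle:

#include <vector>

// Placeholder interface mirroring the new virtuals; not HotSpot code.
struct CompiledMethodIface {
  virtual ~CompiledMethodIface() {}
  virtual bool is_unloading() = 0;                        // cached per-cycle answer
  virtual void do_unloading(bool unloading_occurred) = 0; // clean caches if still alive
  virtual void unlink() = 0;                              // hypothetical: remove a dead method
};

void walk_code_cache(std::vector<CompiledMethodIface*>& methods,
                     bool class_unloading_occurred) {
  for (CompiledMethodIface* cm : methods) {
    if (cm->is_unloading()) {
      cm->unlink();                                // dead: unlink instead of cleaning
    } else {
      cm->do_unloading(class_unloading_occurred);  // live: clean its inline caches etc.
    }
  }
}

This also matches the removal of the _unloading_next/_scavenge_root_link union above: with the clock-based parallel unloading machinery gone, only the _scavenge_root_link chaining field remains.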