src/hotspot/share/code/dependencyContext.cpp
changeset 59248 e92153ed8bdc
parent 59247 56bf71d64d51
child 59250 a6deb69743d4
equal deleted inserted replaced
59247:56bf71d64d51 59248:e92153ed8bdc
   298   }
   298   }
   299 }
   299 }
   300 
   300 
   301 // Relaxed accessors
   301 // Relaxed accessors
   // Relaxed (unordered) atomic setter for the head of this context's
   // dependent-nmethod list.
   // NOTE(review): diff view — old-revision line first, new-revision line
   // second. This changeset swaps Atomic::store's argument order from
   // (value, address) to (address, value); the stored value is unchanged.
   302 void DependencyContext::set_dependencies(nmethodBucket* b) {
   302 void DependencyContext::set_dependencies(nmethodBucket* b) {
   303   Atomic::store(b, _dependency_context_addr);
   303   Atomic::store(_dependency_context_addr, b);
   304 }
   304 }
   305 
   305 
   // Relaxed (unordered) atomic getter for the head of this context's
   // dependent-nmethod list.
   // NOTE(review): diff view — old/new revision lines are identical here;
   // Atomic::load's signature is untouched by this changeset.
   306 nmethodBucket* DependencyContext::dependencies() {
   306 nmethodBucket* DependencyContext::dependencies() {
   307   return Atomic::load(_dependency_context_addr);
   307   return Atomic::load(_dependency_context_addr);
   308 }
   308 }
   311 // and releasing of nmethodBucket entries will be deferred and placed on
   311 // and releasing of nmethodBucket entries will be deferred and placed on
   312 // a purge list to be deleted later.
   312 // a purge list to be deleted later.
   // Marks the start of a GC dependency-context cleaning phase. Per the
   // surrounding comment, while the published epoch is non-zero, releases of
   // nmethodBucket entries are deferred onto a purge list instead of being
   // deleted immediately.
   // NOTE(review): diff view — old-revision line first, new-revision line
   // second; only the Atomic::store argument order changes (value, address)
   // -> (address, value).
   313 void DependencyContext::cleaning_start() {
   313 void DependencyContext::cleaning_start() {
   // Must be called inside a safepoint operation (asserted below).
   314   assert(SafepointSynchronize::is_at_safepoint(), "must be");
   314   assert(SafepointSynchronize::is_at_safepoint(), "must be");
   // Bump the monotonic epoch counter, then publish the new (non-zero)
   // epoch with an atomic store so concurrent readers observe it.
   315   uint64_t epoch = ++_cleaning_epoch_monotonic;
   315   uint64_t epoch = ++_cleaning_epoch_monotonic;
   316   Atomic::store(epoch, &_cleaning_epoch);
   316   Atomic::store(&_cleaning_epoch, epoch);
   317 }
   317 }
   318 
   318 
   319 // The epilogue marks the end of dependency context cleanup by the GC,
   319 // The epilogue marks the end of dependency context cleanup by the GC,
   320 // and also makes subsequent releases of nmethodBuckets cause immediate
   320 // and also makes subsequent releases of nmethodBuckets cause immediate
   321 // deletion. It is okay to delay calling of cleaning_end() to a concurrent
   321 // deletion. It is okay to delay calling of cleaning_end() to a concurrent
   322 // phase, subsequent to the safepoint operation in which cleaning_start()
   322 // phase, subsequent to the safepoint operation in which cleaning_start()
   323 // was called. That allows dependency contexts to be cleaned concurrently.
   323 // was called. That allows dependency contexts to be cleaned concurrently.
   // Ends the cleaning phase by publishing epoch 0; per the comment above,
   // subsequent nmethodBucket releases then delete immediately rather than
   // being deferred. May run concurrently, after the safepoint that called
   // cleaning_start().
   // NOTE(review): diff view — old-revision line first, new-revision line
   // second; only the Atomic::store argument order changes.
   324 void DependencyContext::cleaning_end() {
   324 void DependencyContext::cleaning_end() {
   325   uint64_t epoch = 0;
   325   uint64_t epoch = 0;
   326   Atomic::store(epoch, &_cleaning_epoch);
   326   Atomic::store(&_cleaning_epoch, epoch);
   327 }
   327 }
   328 
   328 
   329 // This function skips over nmethodBuckets in the list corresponding to
   329 // This function skips over nmethodBuckets in the list corresponding to
   330 // nmethods that are is_unloading. This allows exposing a view of the
   330 // nmethods that are is_unloading. This allows exposing a view of the
   331 // dependents as-if they were already cleaned, despite being cleaned
   331 // dependents as-if they were already cleaned, despite being cleaned
   // Relaxed atomic load of this bucket's successor in the dependency list.
   // NOTE(review): diff view — old/new revision lines identical; Atomic::load
   // is untouched by this changeset.
   356 nmethodBucket* nmethodBucket::next() {
   356 nmethodBucket* nmethodBucket::next() {
   357   return Atomic::load(&_next);
   357   return Atomic::load(&_next);
   358 }
   358 }
   359 
   359 
   // Relaxed atomic store of this bucket's successor pointer.
   // NOTE(review): diff view — old-revision line first, new-revision line
   // second; Atomic::store argument order flips (value, address) ->
   // (address, value).
   360 void nmethodBucket::set_next(nmethodBucket* b) {
   360 void nmethodBucket::set_next(nmethodBucket* b) {
   361   Atomic::store(b, &_next);
   361   Atomic::store(&_next, b);
   362 }
   362 }
   363 
   363 
   // Relaxed atomic load of this bucket's successor on the deferred purge
   // list (see cleaning_start above).
   // NOTE(review): diff view — old/new revision lines identical here.
   364 nmethodBucket* nmethodBucket::purge_list_next() {
   364 nmethodBucket* nmethodBucket::purge_list_next() {
   365   return Atomic::load(&_purge_list_next);
   365   return Atomic::load(&_purge_list_next);
   366 }
   366 }
   367 
   367 
   // Relaxed atomic store of this bucket's successor on the deferred purge
   // list.
   // NOTE(review): diff view — old-revision line first, new-revision line
   // second; Atomic::store argument order flips (value, address) ->
   // (address, value).
   368 void nmethodBucket::set_purge_list_next(nmethodBucket* b) {
   368 void nmethodBucket::set_purge_list_next(nmethodBucket* b) {
   369   Atomic::store(b, &_purge_list_next);
   369   Atomic::store(&_purge_list_next, b);
   370 }
   370 }