src/share/vm/memory/metaspace.cpp

1414 size_t MetaspaceGC::inc_capacity_until_GC(size_t v) {
1415   assert_is_size_aligned(v, Metaspace::commit_alignment());
1416 
1417   return (size_t)Atomic::add_ptr(v, &_capacity_until_GC);
1418 }
1419 
1420 size_t MetaspaceGC::dec_capacity_until_GC(size_t v) {
1421   assert_is_size_aligned(v, Metaspace::commit_alignment());
1422 
1423   return (size_t)Atomic::add_ptr(-(intptr_t)v, &_capacity_until_GC);
1424 }
1425 
1426 void MetaspaceGC::initialize() {
1427   // Set the high-water mark to MaxMetaspaceSize during VM initialization since
1428   // we can't do a GC during initialization.
1429   _capacity_until_GC = MaxMetaspaceSize;
1430 }
1431 
1432 void MetaspaceGC::post_initialize() {
1433   // Reset the high-water mark once the VM initialization is done.
1434   _capacity_until_GC = MAX2(MetaspaceAux::committed_bytes(), (size_t)MetaspaceSize);
1435 }
1436 
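Note the (size_t) cast on MetaspaceSize in post_initialize(): MAX2 is a single-type template, committed_bytes() returns size_t, and the MetaspaceSize flag is a uintx, so on platforms where those are distinct types the call fails to deduce T without the cast. A compile-time sketch of the problem; the stub names and values are hypothetical:

#include <cstddef>

// Illustrative stand-in: HotSpot's uintx is a platform-dependent unsigned
// type that need not be the same type as size_t.
typedef unsigned long long uintx_t;

template <class T> T MAX2(T a, T b) { return (a > b) ? a : b; }

size_t  committed_bytes_stub() { return 20u << 20; }  // hypothetical: 20 MB committed
uintx_t metaspace_size_flag   = 21u << 20;            // hypothetical: -XX:MetaspaceSize=21m

size_t new_high_water_mark() {
  // MAX2(committed_bytes_stub(), metaspace_size_flag);
  //   ^ fails to compile when size_t != uintx_t: T is deduced as two different types.
  return MAX2(committed_bytes_stub(), (size_t)metaspace_size_flag);  // cast reconciles T
}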
1437 bool MetaspaceGC::can_expand(size_t word_size, bool is_class) {
1438   // Check if the compressed class space is full.
1439   if (is_class && Metaspace::using_class_space()) {
1440     size_t class_committed = MetaspaceAux::committed_bytes(Metaspace::ClassType);
1441     if (class_committed + word_size * BytesPerWord > CompressedClassSpaceSize) {
1442       return false;
1443     }
1444   }
1445 
1446   // Check if the user has imposed a limit on the metaspace memory.
1447   size_t committed_bytes = MetaspaceAux::committed_bytes();
1448   if (committed_bytes + word_size * BytesPerWord > MaxMetaspaceSize) {
1449     return false;
1450   }
1451 
1452   return true;
1453 }
1454 
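can_expand() gates growth on two independent budgets: the compressed class space for class-metadata requests, and the user-imposed MaxMetaspaceSize for everything. Both checks compute committed + request and compare against the limit; a defensive variant compares the request against the remaining headroom instead, which sidesteps unsigned wrap-around entirely. A sketch, not the HotSpot code:

#include <cstddef>
#include <cstdio>

// True if request_bytes still fits under limit_bytes given committed_bytes
// already committed. Never computes committed + request, so it cannot wrap.
static bool fits_under_limit(size_t committed_bytes,
                             size_t request_bytes,
                             size_t limit_bytes) {
  if (committed_bytes > limit_bytes) {
    return false;  // already at or over the limit
  }
  return request_bytes <= limit_bytes - committed_bytes;
}

int main() {
  // Assumed numbers: 96 MB committed, an 8 MB request, -XX:MaxMetaspaceSize=100m.
  std::printf("%d\n", fits_under_limit(96u << 20, 8u << 20, 100u << 20));  // prints 0
  return 0;
}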


1474 
1475   // Using committed_bytes() for used_after_gc is an overestimation, since the
1476   // chunk free lists are included in committed_bytes() and the memory in an
1477   // un-fragmented chunk free list is available for future allocations.
1478   // However, if the chunk free lists become fragmented, then the memory may
1479   // not be available for future allocations, and the memory is therefore "in use".
1480   // Including the chunk free lists in the definition of "in use" is therefore
1481   // necessary. Not including the chunk free lists can cause capacity_until_GC to
1482   // shrink below committed_bytes() and this has caused serious bugs in the past.
1483   const size_t used_after_gc = MetaspaceAux::committed_bytes();
1484   const size_t capacity_until_GC = MetaspaceGC::capacity_until_GC();
1485 
1486   const double minimum_free_percentage = MinMetaspaceFreeRatio / 100.0;
1487   const double maximum_used_percentage = 1.0 - minimum_free_percentage;
1488 
1489   const double min_tmp = used_after_gc / maximum_used_percentage;
1490   size_t minimum_desired_capacity =
1491     (size_t)MIN2(min_tmp, double(max_uintx));
1492   // Don't shrink below the initial metaspace size.
1493   minimum_desired_capacity = MAX2(minimum_desired_capacity,
1494                                   (size_t)MetaspaceSize);
1495 
1496   if (PrintGCDetails && Verbose) {
1497     gclog_or_tty->print_cr("\nMetaspaceGC::compute_new_size: ");
1498     gclog_or_tty->print_cr("  "
1499                   "  minimum_free_percentage: %6.2f"
1500                   "  maximum_used_percentage: %6.2f",
1501                   minimum_free_percentage,
1502                   maximum_used_percentage);
1503     gclog_or_tty->print_cr("  "
1504                   "   used_after_gc       : %6.1fKB",
1505                   used_after_gc / (double) K);
1506   }
1507 
1508 
1509   size_t shrink_bytes = 0;
1510   if (capacity_until_GC < minimum_desired_capacity) {
1511     // If the capacity below the metaspace HWM is less than the minimum
1512     // desired capacity, raise the HWM.
1513     size_t expand_bytes = minimum_desired_capacity - capacity_until_GC;
1514     expand_bytes = align_size_up(expand_bytes, Metaspace::commit_alignment());


1530                       new_capacity_until_GC / (double) K);
1531       }
1532     }
1533     return;
1534   }
1535 
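Concretely, MinMetaspaceFreeRatio demands that at least that percentage of capacity_until_GC be free after a GC, so the minimum acceptable capacity is used / (1 - ratio). A worked example with assumed inputs (60 MB committed after GC, the default -XX:MinMetaspaceFreeRatio=40):

#include <cstdio>

int main() {
  const double used_after_gc  = 60.0 * 1024 * 1024;    // assumed: 60 MB committed after GC
  const double min_free_ratio = 40.0 / 100.0;          // assumed: -XX:MinMetaspaceFreeRatio=40
  const double max_used_pct   = 1.0 - min_free_ratio;  // 0.60
  const double min_desired    = used_after_gc / max_used_pct;  // 100 MB
  std::printf("minimum_desired_capacity = %.1f MB\n", min_desired / (1024 * 1024));
  // With capacity_until_GC currently at, say, 80 MB (< 100 MB), the HWM is
  // raised by ~20 MB, rounded up to Metaspace::commit_alignment().
  return 0;
}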
1536   // No expansion; now see if we want to shrink.
1537   // We would never want to shrink more than this.
1538   assert(capacity_until_GC >= minimum_desired_capacity,
1539          err_msg(SIZE_FORMAT " >= " SIZE_FORMAT,
1540                  capacity_until_GC, minimum_desired_capacity));
1541   size_t max_shrink_bytes = capacity_until_GC - minimum_desired_capacity;
1542 
1543   // Should shrinking be considered?
1544   if (MaxMetaspaceFreeRatio < 100) {
1545     const double maximum_free_percentage = MaxMetaspaceFreeRatio / 100.0;
1546     const double minimum_used_percentage = 1.0 - maximum_free_percentage;
1547     const double max_tmp = used_after_gc / minimum_used_percentage;
1548     size_t maximum_desired_capacity = (size_t)MIN2(max_tmp, double(max_uintx));
1549     maximum_desired_capacity = MAX2(maximum_desired_capacity,
1550                                     (size_t)MetaspaceSize);
1551     if (PrintGCDetails && Verbose) {
1552       gclog_or_tty->print_cr("  "
1553                              "  maximum_free_percentage: %6.2f"
1554                              "  minimum_used_percentage: %6.2f",
1555                              maximum_free_percentage,
1556                              minimum_used_percentage);
1557       gclog_or_tty->print_cr("  "
1558                              "  minimum_desired_capacity: %6.1fKB"
1559                              "  maximum_desired_capacity: %6.1fKB",
1560                              minimum_desired_capacity / (double) K,
1561                              maximum_desired_capacity / (double) K);
1562     }
1563 
1564     assert(minimum_desired_capacity <= maximum_desired_capacity,
1565            "sanity check");
1566 
1567     if (capacity_until_GC > maximum_desired_capacity) {
1568       // Capacity too large, compute shrinking size
1569       shrink_bytes = capacity_until_GC - maximum_desired_capacity;
1570       // We don't want to shrink all the way back to initSize if people call
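The shrink side mirrors the expansion math: MaxMetaspaceFreeRatio caps how much of the capacity may sit free, so anything above used / (1 - ratio) is eligible for shrinking; the elided remainder of the function then caps the amount at max_shrink_bytes and damps it with a shrink factor so repeated GCs give memory back gradually. Continuing the assumed numbers from the previous sketch (60 MB used, the default -XX:MaxMetaspaceFreeRatio=70):

#include <cstdio>

int main() {
  const double used_after_gc  = 60.0 * 1024 * 1024;    // same assumed 60 MB as above
  const double max_free_ratio = 70.0 / 100.0;          // assumed: -XX:MaxMetaspaceFreeRatio=70
  const double min_used_pct   = 1.0 - max_free_ratio;  // 0.30
  const double max_desired    = used_after_gc / min_used_pct;  // 200 MB
  std::printf("maximum_desired_capacity = %.1f MB\n", max_desired / (1024 * 1024));
  // Only if capacity_until_GC exceeded 200 MB would shrink_bytes become
  // nonzero: shrink_bytes = capacity_until_GC - maximum_desired_capacity.
  return 0;
}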


3180     // If UseCompressedClassPointers is set then allocate the metaspace area
3181     // above the heap and above the CDS area (if it exists).
3182     if (using_class_space()) {
3183       if (UseSharedSpaces) {
3184         char* cds_end = (char*)(cds_address + cds_total);
3185         cds_end = (char *)align_ptr_up(cds_end, _reserve_alignment);
3186         allocate_metaspace_compressed_klass_ptrs(cds_end, cds_address);
3187       } else {
3188         char* base = (char*)align_ptr_up(Universe::heap()->reserved_region().end(), _reserve_alignment);
3189         allocate_metaspace_compressed_klass_ptrs(base, 0);
3190       }
3191     }
3192 #endif
3193 
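Both branches above round the candidate base address up to _reserve_alignment before reserving the compressed class space. The rounding is the standard power-of-two trick; a self-contained sketch (align_up here is illustrative shorthand for align_ptr_up):

#include <cassert>
#include <cstdint>
#include <cstdio>

// Round p up to the next multiple of alignment, which must be a power of two.
static char* align_up(char* p, uintptr_t alignment) {
  assert((alignment & (alignment - 1)) == 0 && "alignment must be a power of two");
  uintptr_t v = reinterpret_cast<uintptr_t>(p);
  return reinterpret_cast<char*>((v + alignment - 1) & ~(alignment - 1));
}

int main() {
  char* p = reinterpret_cast<char*>(0x1001);
  std::printf("%p\n", static_cast<void*>(align_up(p, 0x1000)));  // prints 0x2000
  return 0;
}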
3194     // Initialize these before initializing the VirtualSpaceList
3195     _first_chunk_word_size = InitialBootClassLoaderMetaspaceSize / BytesPerWord;
3196     _first_chunk_word_size = align_word_size_up(_first_chunk_word_size);
3197     // Make the first class chunk bigger than a medium chunk so it's not put
3198     // on the medium chunk list.  The next chunk will be small and progress
3199     // from there.  This size was calculated by running -version.
3200     _first_class_chunk_word_size = MIN2((uintx)MediumChunk*6,
3201                                         (CompressedClassSpaceSize/BytesPerWord)*2);
3202     _first_class_chunk_word_size = align_word_size_up(_first_class_chunk_word_size);
3203     // Arbitrarily set the initial virtual space to a multiple
3204     // of the boot class loader size.
3205     size_t word_size = VIRTUALSPACEMULTIPLIER * _first_chunk_word_size;
3206     word_size = align_size_up(word_size, Metaspace::reserve_alignment_words());
3207 
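For the default 1 GB CompressedClassSpaceSize the MIN2 clamp above never bites: MediumChunk*6 is far smaller than twice the class space in words, so it only matters when the class space is configured very small. A worked example with assumed constants (actual chunk sizes are platform-dependent):

#include <cstddef>
#include <cstdio>

int main() {
  const size_t BytesPerWord = 8;                    // assumed 64-bit
  const size_t MediumChunk  = 8 * 1024;             // assumed medium chunk size, in words
  const size_t ccs          = 1024u * 1024 * 1024;  // assumed -XX:CompressedClassSpaceSize=1g

  size_t first_class_chunk = MediumChunk * 6;       // 49,152 words
  const size_t cap = (ccs / BytesPerWord) * 2;      // 268,435,456 words
  if (cap < first_class_chunk) {
    first_class_chunk = cap;                        // the MIN2 clamp, only for tiny class spaces
  }
  std::printf("first class chunk = %zu words\n", first_class_chunk);
  return 0;
}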
3208     // Initialize the list of virtual spaces.
3209     _space_list = new VirtualSpaceList(word_size);
3210     _chunk_manager_metadata = new ChunkManager(SpecializedChunk, SmallChunk, MediumChunk);
3211 
3212     if (!_space_list->initialization_succeeded()) {
3213       vm_exit_during_initialization("Unable to setup metadata virtual space list.", NULL);
3214     }
3215   }
3216 
3217   _tracer = new MetaspaceTracer();
3218 }
3219 
3220 void Metaspace::post_initialize() {



