@@ -198,14 +198,12 @@ CodeBlob* CodeCache::allocate(int size, bool is_critical) {
   }
   maxCodeCacheUsed = MAX2(maxCodeCacheUsed, ((address)_heap->high_boundary() -
                           (address)_heap->low_boundary()) - unallocated_capacity());
-  verify_if_often();
   print_trace("allocation", cb, size);
   return cb;
 }
 
 void CodeCache::free(CodeBlob* cb) {
   assert_locked_or_safepoint(CodeCache_lock);
-  verify_if_often();
 
   print_trace("free", cb);
   if (cb->is_nmethod()) {
@@ -221,7 +219,6 @@ void CodeCache::free(CodeBlob* cb) {
 
   _heap->deallocate(cb);
 
-  verify_if_often();
   assert(_number_of_blobs >= 0, "sanity check");
 }
 
@@ -244,12 +241,6 @@ void CodeCache::commit(CodeBlob* cb) {
 }
 
 
-void CodeCache::flush() {
-  assert_locked_or_safepoint(CodeCache_lock);
-  Unimplemented();
-}
-
-
 // Iteration over CodeBlobs
 
 #define FOR_ALL_BLOBS(var) for (CodeBlob *var = first() ; var != NULL ; var = next(var) )
@@ -269,7 +260,7 @@ bool CodeCache::contains(void *p) {
 CodeBlob* CodeCache::find_blob(void* start) {
   CodeBlob* result = find_blob_unsafe(start);
   if (result == NULL) return NULL;
-  // We could potientially look up non_entrant methods
+  // We could potentially look up non_entrant methods
   guarantee(!result->is_zombie() || result->is_locked_by_vm() || is_error_reported(), "unsafe access to zombie method");
   return result;
 }
@@ -741,17 +732,26 @@ void CodeCache::report_codemem_full() {
   }
 }
 
+void CodeCache::print_memory_overhead() {
+  size_t wasted_bytes = 0;
+  CodeBlob *cb;
+  for (cb = first(); cb != NULL; cb = next(cb)) {
+    HeapBlock* heap_block = ((HeapBlock*)cb) - 1;
+    wasted_bytes += heap_block->length() * CodeCacheSegmentSize - cb->size();
+  }
+  // Print bytes that are allocated in the freelist
+  ttyLocker ttl;
+  tty->print_cr("Number of elements in freelist: %d", freelist_length());
+  tty->print_cr("Allocated in freelist: %dkB", bytes_allocated_in_freelist()/K);
+  tty->print_cr("Unused bytes in CodeBlobs: %dkB", (int)(wasted_bytes/K));
+  tty->print_cr("Segment map size: %dkB", allocated_segments()/K); // 1 byte per segment
+}
+
 // ------------------------------------------------------------------------------------------------
 // Non-product version
 
 #ifndef PRODUCT
 
-void CodeCache::verify_if_often() {
-  if (VerifyCodeCacheOften) {
-    _heap->verify();
-  }
-}
-
 void CodeCache::print_trace(const char* event, CodeBlob* cb, int size) {
   if (PrintCodeCache2) {  // Need to add a new flag
     ResourceMark rm;
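The print_memory_overhead() routine added above relies on the code heap handing out space in whole segments: each CodeBlob is preceded by a HeapBlock header whose length() is a segment count, so the per-blob slack is the segment-rounded allocation minus the blob's actual size. What follows is a minimal standalone sketch of that rounding arithmetic only, with a made-up segment size standing in for the CodeCacheSegmentSize flag; none of it is part of the patch.

    #include <cstdio>
    #include <cstddef>

    // Hypothetical stand-in for the CodeCacheSegmentSize granularity.
    static const size_t kSegmentSize = 128;

    // A blob's allocation is rounded up to whole segments, so the slack is
    // (segments * segment_size) - blob_size, mirroring the loop body in
    // CodeCache::print_memory_overhead().
    static size_t wasted_for_blob(size_t blob_size) {
      size_t segments = (blob_size + kSegmentSize - 1) / kSegmentSize;
      return segments * kSegmentSize - blob_size;
    }

    int main() {
      const size_t blob_sizes[] = { 100, 128, 1000, 4097 };  // made-up blob sizes
      size_t wasted_bytes = 0;
      for (size_t i = 0; i < sizeof(blob_sizes) / sizeof(blob_sizes[0]); i++) {
        wasted_bytes += wasted_for_blob(blob_sizes[i]);
      }
      printf("Unused bytes in CodeBlobs: %zu\n", wasted_bytes);
      return 0;
    }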
@@ -774,7 +774,7 @@ void CodeCache::print_internals() {
   int nmethodUnloaded = 0;
   int nmethodJava = 0;
   int nmethodNative = 0;
-  int maxCodeSize = 0;
+  int max_nm_size = 0;
   ResourceMark rm;
 
   CodeBlob *cb;
@@ -798,13 +798,11 @@ void CodeCache::print_internals() {
       if (nm->is_not_entrant()) { nmethodNotEntrant++; }
       if (nm->is_zombie()) { nmethodZombie++; }
       if (nm->is_unloaded()) { nmethodUnloaded++; }
-      if (nm->is_native_method()) { nmethodNative++; }
+      if (nm->method() != NULL && nm->is_native_method()) { nmethodNative++; }
 
       if (nm->method() != NULL && nm->is_java_method()) {
         nmethodJava++;
-        if (nm->insts_size() > maxCodeSize) {
-          maxCodeSize = nm->insts_size();
-        }
+        max_nm_size = MAX2(max_nm_size, nm->size());
       }
     } else if (cb->is_runtime_stub()) {
       runtimeStubCount++;
@@ -820,18 +818,19 @@ void CodeCache::print_internals() {
   }
 
   int bucketSize = 512;
-  int bucketLimit = maxCodeSize / bucketSize + 1;
+  int bucketLimit = max_nm_size / bucketSize + 1;
   int *buckets = NEW_C_HEAP_ARRAY(int, bucketLimit, mtCode);
-  memset(buckets,0, sizeof(int) * bucketLimit);
+  memset(buckets, 0, sizeof(int) * bucketLimit);
 
   for (cb = first(); cb != NULL; cb = next(cb)) {
     if (cb->is_nmethod()) {
       nmethod* nm = (nmethod*)cb;
       if (nm->is_java_method()) {
-        buckets[nm->insts_size() / bucketSize]++;
-      }
+        buckets[nm->size() / bucketSize]++;
+      }
     }
   }
+
   tty->print_cr("Code Cache Entries (total of %d)",total);
   tty->print_cr("-------------------------------------------------");
   tty->print_cr("nmethods: %d",nmethodCount);
@@ -858,6 +857,7 @@ void CodeCache::print_internals() {
   }
 
   FREE_C_HEAP_ARRAY(int, buckets, mtCode);
+  print_memory_overhead();
 }
 
 #endif // !PRODUCT