GitHub Repository: PojavLauncherTeam/mobile
Path: blob/master/src/hotspot/share/services/memReporter.cpp
/*
 * Copyright (c) 2012, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */
#include "precompiled.hpp"
#include "memory/allocation.hpp"
#include "memory/metaspace.hpp"
#include "memory/metaspaceUtils.hpp"
#include "services/mallocTracker.hpp"
#include "services/memReporter.hpp"
#include "services/threadStackTracker.hpp"
#include "services/virtualMemoryTracker.hpp"
#include "utilities/globalDefinitions.hpp"

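// These classes back the Native Memory Tracking (NMT) summary, detail and
// diff reports. The reports are typically requested from a running VM, for
// example (usage shown for illustration only):
//   java -XX:NativeMemoryTracking=summary ...
//   jcmd <pid> VM.native_memory summary scale=KB
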
size_t MemReporterBase::reserved_total(const MallocMemory* malloc, const VirtualMemory* vm) const {
  return malloc->malloc_size() + malloc->arena_size() + vm->reserved();
}

size_t MemReporterBase::committed_total(const MallocMemory* malloc, const VirtualMemory* vm) const {
  return malloc->malloc_size() + malloc->arena_size() + vm->committed();
}

void MemReporterBase::print_total(size_t reserved, size_t committed) const {
  const char* scale = current_scale();
  output()->print("reserved=" SIZE_FORMAT "%s, committed=" SIZE_FORMAT "%s",
    amount_in_current_scale(reserved), scale, amount_in_current_scale(committed), scale);
}

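// print_malloc() emits one parenthesized malloc entry: the malloc'd amount in
// the current scale, an optional NMT category name, and the allocation count.
// Illustrative output (values made up):
//   (malloc=1429KB type=Class #3452)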
void MemReporterBase::print_malloc(size_t amount, size_t count, MEMFLAGS flag) const {
  const char* scale = current_scale();
  outputStream* out = output();
  const char* alloc_type = (flag == mtThreadStack) ? "" : "malloc=";

  if (flag != mtNone) {
    out->print("(%s" SIZE_FORMAT "%s type=%s", alloc_type,
      amount_in_current_scale(amount), scale, NMTUtil::flag_to_name(flag));
  } else {
    out->print("(%s" SIZE_FORMAT "%s", alloc_type,
      amount_in_current_scale(amount), scale);
  }

  if (count > 0) {
    out->print(" #" SIZE_FORMAT "", count);
  }

  out->print(")");
}

void MemReporterBase::print_virtual_memory(size_t reserved, size_t committed) const {
  const char* scale = current_scale();
  output()->print("(mmap: reserved=" SIZE_FORMAT "%s, committed=" SIZE_FORMAT "%s)",
    amount_in_current_scale(reserved), scale, amount_in_current_scale(committed), scale);
}

void MemReporterBase::print_malloc_line(size_t amount, size_t count) const {
  output()->print("%28s", " ");
  print_malloc(amount, count);
  output()->print_cr(" ");
}

void MemReporterBase::print_virtual_memory_line(size_t reserved, size_t committed) const {
  output()->print("%28s", " ");
  print_virtual_memory(reserved, committed);
  output()->print_cr(" ");
}

void MemReporterBase::print_arena_line(size_t amount, size_t count) const {
  const char* scale = current_scale();
  output()->print_cr("%27s (arena=" SIZE_FORMAT "%s #" SIZE_FORMAT ")", " ",
    amount_in_current_scale(amount), scale, count);
}

void MemReporterBase::print_virtual_memory_region(const char* type, address base, size_t size) const {
  const char* scale = current_scale();
  output()->print("[" PTR_FORMAT " - " PTR_FORMAT "] %s " SIZE_FORMAT "%s",
    p2i(base), p2i(base + size), type, amount_in_current_scale(size), scale);
}

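// MemSummaryReporter walks every NMT category and prints one block per
// category after the overall total. Illustrative shape of the output
// (numbers made up):
//
//   Native Memory Tracking:
//
//   Total: reserved=1498158KB, committed=74869KB
//   -                 Java Heap (reserved=1048576KB, committed=40960KB)
//                               (mmap: reserved=1048576KB, committed=40960KB)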
void MemSummaryReporter::report() {
  outputStream* out = output();
  size_t total_reserved_amount = _malloc_snapshot->total() +
    _vm_snapshot->total_reserved();
  size_t total_committed_amount = _malloc_snapshot->total() +
    _vm_snapshot->total_committed();

  // Overall total
  out->print_cr("\nNative Memory Tracking:\n");

  if (scale() > 1) {
    out->print_cr("(Omitting categories weighting less than 1%s)", current_scale());
    out->cr();
  }

  out->print("Total: ");
  print_total(total_reserved_amount, total_committed_amount);
  out->print("\n");

  // Summary by memory type
  for (int index = 0; index < mt_number_of_types; index ++) {
    MEMFLAGS flag = NMTUtil::index_to_flag(index);
    // thread stack is reported as part of thread category
    if (flag == mtThreadStack) continue;
    MallocMemory* malloc_memory = _malloc_snapshot->by_type(flag);
    VirtualMemory* virtual_memory = _vm_snapshot->by_type(flag);

    report_summary_of_type(flag, malloc_memory, virtual_memory);
  }
}

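// Prints one category block. Thread stacks are folded into the "Thread"
// category and NMT's own malloc headers into the "NMT" category before the
// totals are printed. Illustrative block (values made up):
//   -                    Thread (reserved=20217KB, committed=20217KB)
//                               (thread #20)
//                               (stack: reserved=20152KB, committed=20152KB)
//                               (malloc=63KB #103)
//                               (arena=2KB #37)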
void MemSummaryReporter::report_summary_of_type(MEMFLAGS flag,
  MallocMemory* malloc_memory, VirtualMemory* virtual_memory) {

  size_t reserved_amount = reserved_total (malloc_memory, virtual_memory);
  size_t committed_amount = committed_total(malloc_memory, virtual_memory);

  // Count thread's native stack in "Thread" category
  if (flag == mtThread) {
    if (ThreadStackTracker::track_as_vm()) {
      const VirtualMemory* thread_stack_usage =
        (const VirtualMemory*)_vm_snapshot->by_type(mtThreadStack);
      reserved_amount += thread_stack_usage->reserved();
      committed_amount += thread_stack_usage->committed();
    } else {
      const MallocMemory* thread_stack_usage =
        (const MallocMemory*)_malloc_snapshot->by_type(mtThreadStack);
      reserved_amount += thread_stack_usage->malloc_size();
      committed_amount += thread_stack_usage->malloc_size();
    }
  } else if (flag == mtNMT) {
    // Count malloc headers in "NMT" category
    reserved_amount += _malloc_snapshot->malloc_overhead()->size();
    committed_amount += _malloc_snapshot->malloc_overhead()->size();
  }

  if (amount_in_current_scale(reserved_amount) > 0) {
    outputStream* out = output();
    const char* scale = current_scale();
    out->print("-%26s (", NMTUtil::flag_to_name(flag));
    print_total(reserved_amount, committed_amount);
    out->print_cr(")");

    if (flag == mtClass) {
      // report class count
      out->print_cr("%27s (classes #" SIZE_FORMAT ")",
        " ", (_instance_class_count + _array_class_count));
      out->print_cr("%27s ( instance classes #" SIZE_FORMAT ", array classes #" SIZE_FORMAT ")",
        " ", _instance_class_count, _array_class_count);
    } else if (flag == mtThread) {
      if (ThreadStackTracker::track_as_vm()) {
        const VirtualMemory* thread_stack_usage =
          _vm_snapshot->by_type(mtThreadStack);
        // report thread count
        out->print_cr("%27s (thread #" SIZE_FORMAT ")", " ", ThreadStackTracker::thread_count());
        out->print("%27s (stack: ", " ");
        print_total(thread_stack_usage->reserved(), thread_stack_usage->committed());
      } else {
        MallocMemory* thread_stack_memory = _malloc_snapshot->by_type(mtThreadStack);
        const char* scale = current_scale();
        // report thread count
        assert(ThreadStackTracker::thread_count() == 0, "Not used");
        out->print_cr("%27s (thread #" SIZE_FORMAT ")", " ", thread_stack_memory->malloc_count());
        out->print("%27s (Stack: " SIZE_FORMAT "%s", " ",
          amount_in_current_scale(thread_stack_memory->malloc_size()), scale);
      }
      out->print_cr(")");
    }

    // report malloc'd memory
    if (amount_in_current_scale(malloc_memory->malloc_size()) > 0) {
      // We don't know how many arena chunks are in use, so don't report the count
      size_t count = (flag == mtChunk) ? 0 : malloc_memory->malloc_count();
      print_malloc_line(malloc_memory->malloc_size(), count);
    }

    if (amount_in_current_scale(virtual_memory->reserved()) > 0) {
      print_virtual_memory_line(virtual_memory->reserved(), virtual_memory->committed());
    }

    if (amount_in_current_scale(malloc_memory->arena_size()) > 0) {
      print_arena_line(malloc_memory->arena_size(), malloc_memory->arena_count());
    }

    if (flag == mtNMT &&
      amount_in_current_scale(_malloc_snapshot->malloc_overhead()->size()) > 0) {
      out->print_cr("%27s (tracking overhead=" SIZE_FORMAT "%s)", " ",
        amount_in_current_scale(_malloc_snapshot->malloc_overhead()->size()), scale);
    } else if (flag == mtClass) {
      // Metadata information
      report_metadata(Metaspace::NonClassType);
      if (Metaspace::using_class_space()) {
        report_metadata(Metaspace::ClassType);
      }
    }
    out->print_cr(" ");
  }
}

void MemSummaryReporter::report_metadata(Metaspace::MetadataType type) const {
  assert(type == Metaspace::NonClassType || type == Metaspace::ClassType,
    "Invalid metadata type");
  const char* name = (type == Metaspace::NonClassType) ?
    "Metadata: " : "Class space:";

  outputStream* out = output();
  const char* scale = current_scale();
  const MetaspaceStats stats = MetaspaceUtils::get_statistics(type);

  size_t waste = stats.committed() - stats.used();
  float waste_percentage = stats.committed() > 0 ? (((float)waste * 100)/stats.committed()) : 0.0f;

  out->print_cr("%27s ( %s)", " ", name);
  out->print("%27s ( ", " ");
  print_total(stats.reserved(), stats.committed());
  out->print_cr(")");
  out->print_cr("%27s ( used=" SIZE_FORMAT "%s)", " ", amount_in_current_scale(stats.used()), scale);
  out->print_cr("%27s ( waste=" SIZE_FORMAT "%s =%2.2f%%)", " ", amount_in_current_scale(waste),
    scale, waste_percentage);
}

void MemDetailReporter::report_detail() {
  // Start detail report
  outputStream* out = output();
  out->print_cr("Details:\n");

  int num_omitted =
    report_malloc_sites() +
    report_virtual_memory_allocation_sites();
  if (num_omitted > 0) {
    assert(scale() > 1, "sanity");
    out->print_cr("(%d call sites weighting less than 1%s each omitted.)",
      num_omitted, current_scale());
    out->cr();
  }
}

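// Each reported malloc site is a native call stack followed by a summary of
// what that stack allocated. Illustrative entry (frames and values made up):
//   <native call stack, one frame per line>
//                                 (malloc=1398KB type=Arena Chunk #512)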
int MemDetailReporter::report_malloc_sites() {
  MallocSiteIterator malloc_itr = _baseline.malloc_sites(MemBaseline::by_size);
  if (malloc_itr.is_empty()) return 0;

  outputStream* out = output();

  const MallocSite* malloc_site;
  int num_omitted = 0;
  while ((malloc_site = malloc_itr.next()) != NULL) {
    // Don't report free sites; does not count toward omitted count.
    if (malloc_site->size() == 0) {
      continue;
    }
    // Don't report if site has allocated less than one unit of whatever our scale is
    if (scale() > 1 && amount_in_current_scale(malloc_site->size()) == 0) {
      num_omitted ++;
      continue;
    }
    const NativeCallStack* stack = malloc_site->call_stack();
    stack->print_on(out);
    out->print("%29s", " ");
    MEMFLAGS flag = malloc_site->flag();
    assert(NMTUtil::flag_is_valid(flag) && flag != mtNone,
      "Must have a valid memory type");
    print_malloc(malloc_site->size(), malloc_site->count(), flag);
    out->print_cr("\n");
  }
  return num_omitted;
}

int MemDetailReporter::report_virtual_memory_allocation_sites() {
  VirtualMemorySiteIterator virtual_memory_itr =
    _baseline.virtual_memory_sites(MemBaseline::by_size);

  if (virtual_memory_itr.is_empty()) return 0;

  outputStream* out = output();
  const VirtualMemoryAllocationSite* virtual_memory_site;
  int num_omitted = 0;
  while ((virtual_memory_site = virtual_memory_itr.next()) != NULL) {
    // Don't report free sites; does not count toward omitted count.
    if (virtual_memory_site->reserved() == 0) {
      continue;
    }
    // Don't report if site has reserved less than one unit of whatever our scale is
    if (scale() > 1 && amount_in_current_scale(virtual_memory_site->reserved()) == 0) {
      num_omitted++;
      continue;
    }
    const NativeCallStack* stack = virtual_memory_site->call_stack();
    stack->print_on(out);
    out->print("%28s (", " ");
    print_total(virtual_memory_site->reserved(), virtual_memory_site->committed());
    MEMFLAGS flag = virtual_memory_site->flag();
    if (flag != mtNone) {
      out->print(" Type=%s", NMTUtil::flag_to_name(flag));
    }
    out->print_cr(")\n");
  }
  return num_omitted;
}

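// The virtual memory map lists every reserved region in base-address order,
// each followed by its committed sub-regions. Illustrative entry (addresses
// and values made up):
//   [0x00000000c0000000 - 0x0000000100000000] reserved 1048576KB for Java Heap from
//       <reserving call stack>
//       [0x00000000c0000000 - 0x00000000c2800000] committed 40960KB from
//           <committing call stack>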
void MemDetailReporter::report_virtual_memory_map() {
  // Virtual memory map always in base address order
  VirtualMemoryAllocationIterator itr = _baseline.virtual_memory_allocations();
  const ReservedMemoryRegion* rgn;

  output()->print_cr("Virtual memory map:");
  while ((rgn = itr.next()) != NULL) {
    report_virtual_memory_region(rgn);
  }
}

void MemDetailReporter::report_virtual_memory_region(const ReservedMemoryRegion* reserved_rgn) {
  assert(reserved_rgn != NULL, "NULL pointer");

  // Don't report if size is too small
  if (amount_in_current_scale(reserved_rgn->size()) == 0) return;

  outputStream* out = output();
  const char* scale = current_scale();
  const NativeCallStack* stack = reserved_rgn->call_stack();
  bool all_committed = reserved_rgn->size() == reserved_rgn->committed_size();
  const char* region_type = (all_committed ? "reserved and committed" : "reserved");
  out->print_cr(" ");
  print_virtual_memory_region(region_type, reserved_rgn->base(), reserved_rgn->size());
  out->print(" for %s", NMTUtil::flag_to_name(reserved_rgn->flag()));
  if (stack->is_empty()) {
    out->print_cr(" ");
  } else {
    out->print_cr(" from");
    stack->print_on(out, 4);
  }

  if (all_committed) {
    CommittedRegionIterator itr = reserved_rgn->iterate_committed_regions();
    const CommittedMemoryRegion* committed_rgn = itr.next();
    if (committed_rgn->size() == reserved_rgn->size() && committed_rgn->call_stack()->equals(*stack)) {
      // One region spanning the entire reserved region, with the same stack trace.
      // Don't print this region because the "reserved and committed" line above
      // already indicates that the region is committed.
      assert(itr.next() == NULL, "Unexpectedly more than one regions");
      return;
    }
  }

  CommittedRegionIterator itr = reserved_rgn->iterate_committed_regions();
  const CommittedMemoryRegion* committed_rgn;
  while ((committed_rgn = itr.next()) != NULL) {
    // Don't report if size is too small
    if (amount_in_current_scale(committed_rgn->size()) == 0) continue;
    stack = committed_rgn->call_stack();
    out->print("\n\t");
    print_virtual_memory_region("committed", committed_rgn->base(), committed_rgn->size());
    if (stack->is_empty()) {
      out->print_cr(" ");
    } else {
      out->print_cr(" from");
      stack->print_on(out, 12);
    }
  }
}

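// The diff reporters compare the current baseline against an earlier one and
// print the same report shapes, annotated with signed deltas where values
// changed. Illustrative summary line (values made up):
//   -                    Thread (reserved=20345KB +128KB, committed=20345KB +128KB)
//                               (thread #21 +1)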
void MemSummaryDiffReporter::report_diff() {
  outputStream* out = output();
  out->print_cr("\nNative Memory Tracking:\n");

  if (scale() > 1) {
    out->print_cr("(Omitting categories weighting less than 1%s)", current_scale());
    out->cr();
  }

  // Overall diff
  out->print("Total: ");
  print_virtual_memory_diff(_current_baseline.total_reserved_memory(),
    _current_baseline.total_committed_memory(), _early_baseline.total_reserved_memory(),
    _early_baseline.total_committed_memory());

  out->print_cr("\n");

  // Summary diff by memory type
  for (int index = 0; index < mt_number_of_types; index ++) {
    MEMFLAGS flag = NMTUtil::index_to_flag(index);
    // thread stack is reported as part of thread category
    if (flag == mtThreadStack) continue;
    diff_summary_of_type(flag,
      _early_baseline.malloc_memory(flag),
      _early_baseline.virtual_memory(flag),
      _early_baseline.metaspace_stats(),
      _current_baseline.malloc_memory(flag),
      _current_baseline.virtual_memory(flag),
      _current_baseline.metaspace_stats());
  }
}

void MemSummaryDiffReporter::print_malloc_diff(size_t current_amount, size_t current_count,
  size_t early_amount, size_t early_count, MEMFLAGS flags) const {
  const char* scale = current_scale();
  outputStream* out = output();
  const char* alloc_type = (flags == mtThread) ? "" : "malloc=";

  out->print("%s" SIZE_FORMAT "%s", alloc_type, amount_in_current_scale(current_amount), scale);
  // Report type only if it is valid and not under "thread" category
  if (flags != mtNone && flags != mtThread) {
    out->print(" type=%s", NMTUtil::flag_to_name(flags));
  }

  long amount_diff = diff_in_current_scale(current_amount, early_amount);
  if (amount_diff != 0) {
    out->print(" %+ld%s", amount_diff, scale);
  }
  if (current_count > 0) {
    out->print(" #" SIZE_FORMAT "", current_count);
    if (current_count != early_count) {
      out->print(" %+d", (int)(current_count - early_count));
    }
  }
}

void MemSummaryDiffReporter::print_arena_diff(size_t current_amount, size_t current_count,
  size_t early_amount, size_t early_count) const {
  const char* scale = current_scale();
  outputStream* out = output();
  out->print("arena=" SIZE_FORMAT "%s", amount_in_current_scale(current_amount), scale);
  if (diff_in_current_scale(current_amount, early_amount) != 0) {
    out->print(" %+ld", diff_in_current_scale(current_amount, early_amount));
  }

  out->print(" #" SIZE_FORMAT "", current_count);
  if (current_count != early_count) {
    out->print(" %+d", (int)(current_count - early_count));
  }
}

void MemSummaryDiffReporter::print_virtual_memory_diff(size_t current_reserved, size_t current_committed,
  size_t early_reserved, size_t early_committed) const {
  const char* scale = current_scale();
  outputStream* out = output();
  out->print("reserved=" SIZE_FORMAT "%s", amount_in_current_scale(current_reserved), scale);
  long reserved_diff = diff_in_current_scale(current_reserved, early_reserved);
  if (reserved_diff != 0) {
    out->print(" %+ld%s", reserved_diff, scale);
  }

  out->print(", committed=" SIZE_FORMAT "%s", amount_in_current_scale(current_committed), scale);
  long committed_diff = diff_in_current_scale(current_committed, early_committed);
  if (committed_diff != 0) {
    out->print(" %+ld%s", committed_diff, scale);
  }
}

void MemSummaryDiffReporter::diff_summary_of_type(MEMFLAGS flag,
  const MallocMemory* early_malloc, const VirtualMemory* early_vm,
  const MetaspaceCombinedStats& early_ms,
  const MallocMemory* current_malloc, const VirtualMemory* current_vm,
  const MetaspaceCombinedStats& current_ms) const {

  outputStream* out = output();
  const char* scale = current_scale();

  // Total reserved and committed memory in current baseline
  size_t current_reserved_amount = reserved_total (current_malloc, current_vm);
  size_t current_committed_amount = committed_total(current_malloc, current_vm);

  // Total reserved and committed memory in early baseline
  size_t early_reserved_amount = reserved_total(early_malloc, early_vm);
  size_t early_committed_amount = committed_total(early_malloc, early_vm);

  // Adjust virtual memory total
  if (flag == mtThread) {
    const VirtualMemory* early_thread_stack_usage =
      _early_baseline.virtual_memory(mtThreadStack);
    const VirtualMemory* current_thread_stack_usage =
      _current_baseline.virtual_memory(mtThreadStack);

    early_reserved_amount += early_thread_stack_usage->reserved();
    early_committed_amount += early_thread_stack_usage->committed();

    current_reserved_amount += current_thread_stack_usage->reserved();
    current_committed_amount += current_thread_stack_usage->committed();
  } else if (flag == mtNMT) {
    early_reserved_amount += _early_baseline.malloc_tracking_overhead();
    early_committed_amount += _early_baseline.malloc_tracking_overhead();

    current_reserved_amount += _current_baseline.malloc_tracking_overhead();
    current_committed_amount += _current_baseline.malloc_tracking_overhead();
  }

  if (amount_in_current_scale(current_reserved_amount) > 0 ||
    diff_in_current_scale(current_reserved_amount, early_reserved_amount) != 0) {

    // print summary line
    out->print("-%26s (", NMTUtil::flag_to_name(flag));
    print_virtual_memory_diff(current_reserved_amount, current_committed_amount,
      early_reserved_amount, early_committed_amount);
    out->print_cr(")");

    // detail lines
    if (flag == mtClass) {
      // report class count
      out->print("%27s (classes #" SIZE_FORMAT "", " ", _current_baseline.class_count());
      int class_count_diff = (int)(_current_baseline.class_count() -
        _early_baseline.class_count());
      if (_current_baseline.class_count() != _early_baseline.class_count()) {
        out->print(" %+d", (int)(_current_baseline.class_count() - _early_baseline.class_count()));
      }
      out->print_cr(")");

      out->print("%27s ( instance classes #" SIZE_FORMAT, " ", _current_baseline.instance_class_count());
      if (_current_baseline.instance_class_count() != _early_baseline.instance_class_count()) {
        out->print(" %+d", (int)(_current_baseline.instance_class_count() - _early_baseline.instance_class_count()));
      }
      out->print(", array classes #" SIZE_FORMAT, _current_baseline.array_class_count());
      if (_current_baseline.array_class_count() != _early_baseline.array_class_count()) {
        out->print(" %+d", (int)(_current_baseline.array_class_count() - _early_baseline.array_class_count()));
      }
      out->print_cr(")");

    } else if (flag == mtThread) {
      // report thread count
      out->print("%27s (thread #" SIZE_FORMAT "", " ", _current_baseline.thread_count());
      int thread_count_diff = (int)(_current_baseline.thread_count() -
        _early_baseline.thread_count());
      if (thread_count_diff != 0) {
        out->print(" %+d", thread_count_diff);
      }
      out->print_cr(")");

      out->print("%27s (stack: ", " ");
      if (ThreadStackTracker::track_as_vm()) {
        // report thread stack
        const VirtualMemory* current_thread_stack =
          _current_baseline.virtual_memory(mtThreadStack);
        const VirtualMemory* early_thread_stack =
          _early_baseline.virtual_memory(mtThreadStack);

        print_virtual_memory_diff(current_thread_stack->reserved(), current_thread_stack->committed(),
          early_thread_stack->reserved(), early_thread_stack->committed());
      } else {
        const MallocMemory* current_thread_stack =
          _current_baseline.malloc_memory(mtThreadStack);
        const MallocMemory* early_thread_stack =
          _early_baseline.malloc_memory(mtThreadStack);

        print_malloc_diff(current_thread_stack->malloc_size(), current_thread_stack->malloc_count(),
          early_thread_stack->malloc_size(), early_thread_stack->malloc_count(), flag);
      }
      out->print_cr(")");
    }

    // Report malloc'd memory
    size_t current_malloc_amount = current_malloc->malloc_size();
    size_t early_malloc_amount = early_malloc->malloc_size();
    if (amount_in_current_scale(current_malloc_amount) > 0 ||
      diff_in_current_scale(current_malloc_amount, early_malloc_amount) != 0) {
      out->print("%28s(", " ");
      print_malloc_diff(current_malloc_amount, (flag == mtChunk) ? 0 : current_malloc->malloc_count(),
        early_malloc_amount, early_malloc->malloc_count(), mtNone);
      out->print_cr(")");
    }

    // Report virtual memory
    if (amount_in_current_scale(current_vm->reserved()) > 0 ||
      diff_in_current_scale(current_vm->reserved(), early_vm->reserved()) != 0) {
      out->print("%27s (mmap: ", " ");
      print_virtual_memory_diff(current_vm->reserved(), current_vm->committed(),
        early_vm->reserved(), early_vm->committed());
      out->print_cr(")");
    }

    // Report arena memory
    if (amount_in_current_scale(current_malloc->arena_size()) > 0 ||
      diff_in_current_scale(current_malloc->arena_size(), early_malloc->arena_size()) != 0) {
      out->print("%28s(", " ");
      print_arena_diff(current_malloc->arena_size(), current_malloc->arena_count(),
        early_malloc->arena_size(), early_malloc->arena_count());
      out->print_cr(")");
    }

    // Report native memory tracking overhead
    if (flag == mtNMT) {
      size_t current_tracking_overhead = amount_in_current_scale(_current_baseline.malloc_tracking_overhead());
      size_t early_tracking_overhead = amount_in_current_scale(_early_baseline.malloc_tracking_overhead());

      out->print("%27s (tracking overhead=" SIZE_FORMAT "%s", " ",
        amount_in_current_scale(_current_baseline.malloc_tracking_overhead()), scale);

      long overhead_diff = diff_in_current_scale(_current_baseline.malloc_tracking_overhead(),
        _early_baseline.malloc_tracking_overhead());
      if (overhead_diff != 0) {
        out->print(" %+ld%s", overhead_diff, scale);
      }
      out->print_cr(")");
    } else if (flag == mtClass) {
      print_metaspace_diff(current_ms, early_ms);
    }
    out->print_cr(" ");
  }
}

void MemSummaryDiffReporter::print_metaspace_diff(const MetaspaceCombinedStats& current_ms,
  const MetaspaceCombinedStats& early_ms) const {
  print_metaspace_diff("Metadata", current_ms.non_class_space_stats(), early_ms.non_class_space_stats());
  if (Metaspace::using_class_space()) {
    print_metaspace_diff("Class space", current_ms.class_space_stats(), early_ms.class_space_stats());
  }
}

void MemSummaryDiffReporter::print_metaspace_diff(const char* header,
  const MetaspaceStats& current_stats,
  const MetaspaceStats& early_stats) const {
  outputStream* out = output();
  const char* scale = current_scale();

  out->print_cr("%27s: ( %s)", " ", header);
  out->print("%27s ( ", " ");
  print_virtual_memory_diff(current_stats.reserved(),
    current_stats.committed(),
    early_stats.reserved(),
    early_stats.committed());
  out->print_cr(")");

  long diff_used = diff_in_current_scale(current_stats.used(),
    early_stats.used());

  size_t current_waste = current_stats.committed() - current_stats.used();
  size_t early_waste = early_stats.committed() - early_stats.used();
  long diff_waste = diff_in_current_scale(current_waste, early_waste);

  // Diff used
  out->print("%27s ( used=" SIZE_FORMAT "%s", " ",
    amount_in_current_scale(current_stats.used()), scale);
  if (diff_used != 0) {
    out->print(" %+ld%s", diff_used, scale);
  }
  out->print_cr(")");

  // Diff waste
  const float waste_percentage = current_stats.committed() == 0 ? 0.0f :
    (current_waste * 100.0f) / current_stats.committed();
  out->print("%27s ( waste=" SIZE_FORMAT "%s =%2.2f%%", " ",
    amount_in_current_scale(current_waste), scale, waste_percentage);
  if (diff_waste != 0) {
    out->print(" %+ld%s", diff_waste, scale);
  }
  out->print_cr(")");
}

void MemDetailDiffReporter::report_diff() {
  MemSummaryDiffReporter::report_diff();
  diff_malloc_sites();
  diff_virtual_memory_sites();
}

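// Both baselines expose their allocation sites pre-sorted by call site (and,
// for malloc, by type), so the diff below is a merge walk over the two sorted
// iterators: advance whichever side compares lower, and when both point at
// the same call stack, emit a single combined diff entry.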
void MemDetailDiffReporter::diff_malloc_sites() const {
  MallocSiteIterator early_itr = _early_baseline.malloc_sites(MemBaseline::by_site_and_type);
  MallocSiteIterator current_itr = _current_baseline.malloc_sites(MemBaseline::by_site_and_type);

  const MallocSite* early_site = early_itr.next();
  const MallocSite* current_site = current_itr.next();

  while (early_site != NULL || current_site != NULL) {
    if (early_site == NULL) {
      new_malloc_site(current_site);
      current_site = current_itr.next();
    } else if (current_site == NULL) {
      old_malloc_site(early_site);
      early_site = early_itr.next();
    } else {
      int compVal = current_site->call_stack()->compare(*early_site->call_stack());
      if (compVal < 0) {
        new_malloc_site(current_site);
        current_site = current_itr.next();
      } else if (compVal > 0) {
        old_malloc_site(early_site);
        early_site = early_itr.next();
      } else {
        diff_malloc_site(early_site, current_site);
        early_site = early_itr.next();
        current_site = current_itr.next();
      }
    }
  }
}

void MemDetailDiffReporter::diff_virtual_memory_sites() const {
  VirtualMemorySiteIterator early_itr = _early_baseline.virtual_memory_sites(MemBaseline::by_site);
  VirtualMemorySiteIterator current_itr = _current_baseline.virtual_memory_sites(MemBaseline::by_site);

  const VirtualMemoryAllocationSite* early_site = early_itr.next();
  const VirtualMemoryAllocationSite* current_site = current_itr.next();

  while (early_site != NULL || current_site != NULL) {
    if (early_site == NULL) {
      new_virtual_memory_site(current_site);
      current_site = current_itr.next();
    } else if (current_site == NULL) {
      old_virtual_memory_site(early_site);
      early_site = early_itr.next();
    } else {
      int compVal = current_site->call_stack()->compare(*early_site->call_stack());
      if (compVal < 0) {
        new_virtual_memory_site(current_site);
        current_site = current_itr.next();
      } else if (compVal > 0) {
        old_virtual_memory_site(early_site);
        early_site = early_itr.next();
      } else {
        diff_virtual_memory_site(early_site, current_site);
        early_site = early_itr.next();
        current_site = current_itr.next();
      }
    }
  }
}

void MemDetailDiffReporter::new_malloc_site(const MallocSite* malloc_site) const {
  diff_malloc_site(malloc_site->call_stack(), malloc_site->size(), malloc_site->count(),
    0, 0, malloc_site->flag());
}

void MemDetailDiffReporter::old_malloc_site(const MallocSite* malloc_site) const {
  diff_malloc_site(malloc_site->call_stack(), 0, 0, malloc_site->size(),
    malloc_site->count(), malloc_site->flag());
}

void MemDetailDiffReporter::diff_malloc_site(const MallocSite* early,
  const MallocSite* current) const {
  if (early->flag() != current->flag()) {
    // If malloc site type changed, treat it as deallocation of old type and
    // allocation of new type.
    old_malloc_site(early);
    new_malloc_site(current);
  } else {
    diff_malloc_site(current->call_stack(), current->size(), current->count(),
      early->size(), early->count(), early->flag());
  }
}

void MemDetailDiffReporter::diff_malloc_site(const NativeCallStack* stack, size_t current_size,
  size_t current_count, size_t early_size, size_t early_count, MEMFLAGS flags) const {
  outputStream* out = output();

  assert(stack != NULL, "NULL stack");

  if (diff_in_current_scale(current_size, early_size) == 0) {
    return;
  }

  stack->print_on(out);
  out->print("%28s (", " ");
  print_malloc_diff(current_size, current_count,
    early_size, early_count, flags);

  out->print_cr(")\n");
}

void MemDetailDiffReporter::new_virtual_memory_site(const VirtualMemoryAllocationSite* site) const {
  diff_virtual_memory_site(site->call_stack(), site->reserved(), site->committed(), 0, 0, site->flag());
}

void MemDetailDiffReporter::old_virtual_memory_site(const VirtualMemoryAllocationSite* site) const {
  diff_virtual_memory_site(site->call_stack(), 0, 0, site->reserved(), site->committed(), site->flag());
}

void MemDetailDiffReporter::diff_virtual_memory_site(const VirtualMemoryAllocationSite* early,
  const VirtualMemoryAllocationSite* current) const {
  assert(early->flag() == current->flag(), "Should be the same");
  diff_virtual_memory_site(current->call_stack(), current->reserved(), current->committed(),
    early->reserved(), early->committed(), current->flag());
}

void MemDetailDiffReporter::diff_virtual_memory_site(const NativeCallStack* stack, size_t current_reserved,
  size_t current_committed, size_t early_reserved, size_t early_committed, MEMFLAGS flag) const {
  outputStream* out = output();

  // no change
  if (diff_in_current_scale(current_reserved, early_reserved) == 0 &&
    diff_in_current_scale(current_committed, early_committed) == 0) {
    return;
  }

  stack->print_on(out);
  out->print("%28s (mmap: ", " ");
  print_virtual_memory_diff(current_reserved, current_committed,
    early_reserved, early_committed);

  if (flag != mtNone) {
    out->print(" Type=%s", NMTUtil::flag_to_name(flag));
  }

  out->print_cr(")\n");
}