#ifndef JEMALLOC_INTERNAL_PROF_EXTERNS_H
#define JEMALLOC_INTERNAL_PROF_EXTERNS_H

#include "jemalloc/internal/mutex.h"

extern malloc_mutex_t bt2gctx_mtx;

extern bool opt_prof;
extern bool opt_prof_active;
extern bool opt_prof_thread_active_init;
extern size_t opt_lg_prof_sample;    /* Mean bytes between samples. */
extern ssize_t opt_lg_prof_interval; /* lg(prof_interval). */
extern bool opt_prof_gdump;          /* High-water memory dumping. */
extern bool opt_prof_final;          /* Final profile dumping. */
extern bool opt_prof_leak;           /* Dump leak summary at exit. */
extern bool opt_prof_accum;          /* Report cumulative bytes. */
extern char opt_prof_prefix[
    /* Minimize memory bloat for non-prof builds. */
#ifdef JEMALLOC_PROF
    PATH_MAX +
#endif
    1];

/* Accessed via prof_active_[gs]et{_unlocked,}(). */
extern bool prof_active;

/* Accessed via prof_gdump_[gs]et{_unlocked,}(). */
extern bool prof_gdump_val;

/*
 * Profile dump interval, measured in bytes allocated.  Each arena triggers a
 * profile dump when it reaches this threshold.  The effect is that the
 * interval between profile dumps averages prof_interval, though the actual
 * interval between dumps will tend to be sporadic, and the interval will be a
 * maximum of approximately (prof_interval * narenas).
 */
extern uint64_t prof_interval;

/*
 * Initialized as opt_lg_prof_sample, and potentially modified during profiling
 * resets.
 */
extern size_t lg_prof_sample;

void prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated);
void prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
    prof_tctx_t *tctx);
void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx);
void bt_init(prof_bt_t *bt, void **vec);
void prof_backtrace(prof_bt_t *bt);
prof_tctx_t *prof_lookup(tsd_t *tsd, prof_bt_t *bt);
#ifdef JEMALLOC_JET
size_t prof_tdata_count(void);
size_t prof_bt_count(void);
#endif
typedef int (prof_dump_open_t)(bool, const char *);
extern prof_dump_open_t *JET_MUTABLE prof_dump_open;

typedef bool (prof_dump_header_t)(tsdn_t *, bool, const prof_cnt_t *);
extern prof_dump_header_t *JET_MUTABLE prof_dump_header;
#ifdef JEMALLOC_JET
void prof_cnt_all(uint64_t *curobjs, uint64_t *curbytes, uint64_t *accumobjs,
    uint64_t *accumbytes);
#endif
bool prof_accum_init(tsdn_t *tsdn, prof_accum_t *prof_accum);
void prof_idump(tsdn_t *tsdn);
bool prof_mdump(tsd_t *tsd, const char *filename);
void prof_gdump(tsdn_t *tsdn);
prof_tdata_t *prof_tdata_init(tsd_t *tsd);
prof_tdata_t *prof_tdata_reinit(tsd_t *tsd, prof_tdata_t *tdata);
void prof_reset(tsd_t *tsd, size_t lg_sample);
void prof_tdata_cleanup(tsd_t *tsd);
bool prof_active_get(tsdn_t *tsdn);
bool prof_active_set(tsdn_t *tsdn, bool active);
const char *prof_thread_name_get(tsd_t *tsd);
int prof_thread_name_set(tsd_t *tsd, const char *thread_name);
bool prof_thread_active_get(tsd_t *tsd);
bool prof_thread_active_set(tsd_t *tsd, bool active);
bool prof_thread_active_init_get(tsdn_t *tsdn);
bool prof_thread_active_init_set(tsdn_t *tsdn, bool active_init);
bool prof_gdump_get(tsdn_t *tsdn);
bool prof_gdump_set(tsdn_t *tsdn, bool active);
void prof_boot0(void);
void prof_boot1(void);
bool prof_boot2(tsd_t *tsd);
void prof_prefork0(tsdn_t *tsdn);
void prof_prefork1(tsdn_t *tsdn);
void prof_postfork_parent(tsdn_t *tsdn);
void prof_postfork_child(tsdn_t *tsdn);
void prof_sample_threshold_update(prof_tdata_t *tdata);

#endif /* JEMALLOC_INTERNAL_PROF_EXTERNS_H */