diff --git a/ddprof-lib/src/main/cpp/flightRecorder.cpp b/ddprof-lib/src/main/cpp/flightRecorder.cpp index cefbc476b..7b04c514b 100644 --- a/ddprof-lib/src/main/cpp/flightRecorder.cpp +++ b/ddprof-lib/src/main/cpp/flightRecorder.cpp @@ -17,6 +17,7 @@ #include "incbin.h" #include "jfrMetadata.h" #include "jniHelper.h" +#include "lookup.h" #include "os.h" #include "profiler.h" #include "rustDemangler.h" @@ -44,386 +45,6 @@ static const char *const SETTING_RING[] = {NULL, "kernel", "user", "any"}; static const char *const SETTING_CSTACK[] = {NULL, "no", "fp", "dwarf", "lbr"}; -SharedLineNumberTable::~SharedLineNumberTable() { - // Always attempt to deallocate if we have a valid pointer - // JVMTI spec requires that memory allocated by GetLineNumberTable - // must be freed with Deallocate - if (_ptr != nullptr) { - jvmtiEnv *jvmti = VM::jvmti(); - if (jvmti != nullptr) { - jvmtiError err = jvmti->Deallocate((unsigned char *)_ptr); - // If Deallocate fails, log it for debugging (this could indicate a JVM bug) - // JVMTI_ERROR_ILLEGAL_ARGUMENT means the memory wasn't allocated by JVMTI - // which would be a serious bug in GetLineNumberTable - if (err != JVMTI_ERROR_NONE) { - TEST_LOG("Unexpected error while deallocating linenumber table: %d", err); - } - } else { - TEST_LOG("WARNING: Cannot deallocate line number table - JVMTI is null"); - } - // Decrement counter whenever destructor runs (symmetric with increment at creation) - Counters::decrement(LINE_NUMBER_TABLES); - } -} - -void Lookup::fillNativeMethodInfo(MethodInfo *mi, const char *name, - const char *lib_name) { - mi->_class = _classes->lookup(""); - // TODO return the library name once we figured out how to cooperate with the - // backend - // if (lib_name == NULL) { - // mi->_class = _classes->lookup(""); - // } else if (lib_name[0] == '[' && lib_name[1] != 0) { - // mi->_class = _classes->lookup(lib_name + 1, strlen(lib_name) - - // 2); - // } else { - // mi->_class = _classes->lookup(lib_name); - // } - - 
mi->_modifiers = 0x100; - mi->_line_number_table = nullptr; - - if (name[0] == '_' && name[1] == 'Z') { - int status; - char *demangled = abi::__cxa_demangle(name, NULL, NULL, &status); - if (demangled != NULL) { - cutArguments(demangled); - mi->_sig = _symbols.lookup("()L;"); - mi->_type = FRAME_CPP; - - // Rust legacy demangling - if (RustDemangler::is_probably_rust_legacy(demangled)) { - std::string rust_demangled = RustDemangler::demangle(demangled); - mi->_name = _symbols.lookup(rust_demangled.c_str()); - } else { - mi->_name = _symbols.lookup(demangled); - } - free(demangled); - return; - } - } - - size_t len = strlen(name); - if (len >= 4 && strcmp(name + len - 4, "_[k]") == 0) { - mi->_name = _symbols.lookup(name, len - 4); - mi->_sig = _symbols.lookup("(Lk;)L;"); - mi->_type = FRAME_KERNEL; - } else { - mi->_name = _symbols.lookup(name); - mi->_sig = _symbols.lookup("()L;"); - mi->_type = FRAME_NATIVE; - } -} - -void Lookup::fillRemoteFrameInfo(MethodInfo *mi, const RemoteFrameInfo *rfi) { - // Store build-id in the class name field - mi->_class = _classes->lookup(rfi->build_id); - - // Store PC offset in hex format in the signature field - char offset_hex[32]; - snprintf(offset_hex, sizeof(offset_hex), "0x%" PRIxPTR, rfi->pc_offset); - mi->_sig = _symbols.lookup(offset_hex); - - // Use same modifiers as regular native frames (0x100 = ACC_NATIVE for consistency) - mi->_modifiers = 0x100; - // Use FRAME_NATIVE_REMOTE type to indicate remote symbolication - mi->_type = FRAME_NATIVE_REMOTE; - mi->_line_number_table = nullptr; - - // Method name indicates need for remote symbolication - mi->_name = _symbols.lookup(""); -} - -void Lookup::cutArguments(char *func) { - char *p = strrchr(func, ')'); - if (p == NULL) - return; - - int balance = 1; - while (--p > func) { - if (*p == '(' && --balance == 0) { - *p = 0; - return; - } else if (*p == ')') { - balance++; - } - } -} - -void Lookup::fillJavaMethodInfo(MethodInfo *mi, jmethodID method, - bool first_time) { - 
JNIEnv *jni = VM::jni(); - if (jni->PushLocalFrame(64) != 0) { - return; - } - jvmtiEnv *jvmti = VM::jvmti(); - - jvmtiPhase phase; - jclass method_class = NULL; - // invariant: these strings must remain null, or be assigned by JVMTI - char *class_name = nullptr; - char *method_name = nullptr; - char *method_sig = nullptr; - u32 class_name_id = 0; - u32 method_name_id = 0; - u32 method_sig_id = 0; - - jint line_number_table_size = 0; - jvmtiLineNumberEntry *line_number_table = NULL; - - jvmti->GetPhase(&phase); - if ((phase & (JVMTI_PHASE_START | JVMTI_PHASE_LIVE)) != 0) { - bool entry = false; - if (VMMethod::check_jmethodID(method) && - jvmti->GetMethodDeclaringClass(method, &method_class) == 0 && - // GetMethodDeclaringClass may return a jclass wrapping a stale/garbage oop when the class was - // unloaded between sample capture and dump (TOCTOU race with class unloading). Guard against - // null handles before calling GetClassSignature. - method_class != NULL && - // On some older versions of J9, the JVMTI call to GetMethodDeclaringClass will return OK = 0, but when a - // classloader is unloaded they free all JNIIDs. This means that anyone holding on to a jmethodID is - // pointing to corrupt data and the behaviour is undefined. - // The behaviour is adjusted so that when asgct() is used or if `-XX:+KeepJNIIDs` is specified, - // when a classloader is unloaded, the jmethodIDs are not freed, but instead marked as -1. - // The check below mitigates these crashes on J9. 
- (!VM::isOpenJ9() || method_class != reinterpret_cast(-1)) && - jvmti->GetClassSignature(method_class, &class_name, NULL) == 0 && - jvmti->GetMethodName(method, &method_name, &method_sig, NULL) == 0) { - - if (first_time) { - jvmtiError line_table_error = jvmti->GetLineNumberTable(method, &line_number_table_size, - &line_number_table); - // Defensive: if GetLineNumberTable failed, clean up any potentially allocated memory - // Some buggy JVMTI implementations might allocate despite returning an error - if (line_table_error != JVMTI_ERROR_NONE) { - if (line_number_table != nullptr) { - // Try to deallocate to prevent leak from buggy JVM - jvmti->Deallocate((unsigned char *)line_number_table); - } - line_number_table = nullptr; - line_number_table_size = 0; - } - } - - // Check if the frame is Thread.run or inherits from it - if (strncmp(method_name, "run", 4) == 0 && - strncmp(method_sig, "()V", 3) == 0) { - jclass Thread_class = jni->FindClass("java/lang/Thread"); - jclass Class_class = jni->FindClass("java/lang/Class"); - if (Thread_class != nullptr && Class_class != nullptr) { - jmethodID equals = jni->GetMethodID(Class_class, - "equals", "(Ljava/lang/Object;)Z"); - if (equals != nullptr) { - jclass klass = method_class; - do { - entry = jni->CallBooleanMethod(Thread_class, equals, klass); - if (jniExceptionCheck(jni)) { - entry = false; - break; - } - if (entry) { - break; - } - } while ((klass = jni->GetSuperclass(klass)) != NULL); - } - } - // Clear any exceptions from the reflection calls above - jniExceptionCheck(jni); - } else if (strncmp(method_name, "main", 5) == 0 && - strncmp(method_sig, "(Ljava/lang/String;)V", 21)) { - // public static void main(String[] args) - 'public static' translates - // to modifier bits 0 and 3, hence check for '9' - entry = true; - } - - // maybe we should store the lookups below in initialisation-time - // constants... 
- if (has_prefix(class_name, - "Ljdk/internal/reflect/GeneratedConstructorAccessor")) { - class_name_id = _classes->lookup( - "jdk/internal/reflect/GeneratedConstructorAccessor"); - method_name_id = - _symbols.lookup("Object " - "jdk.internal.reflect.GeneratedConstructorAccessor." - "newInstance(Object[])"); - method_sig_id = _symbols.lookup(method_sig); - } else if (has_prefix(class_name, - "Lsun/reflect/GeneratedConstructorAccessor")) { - class_name_id = - _classes->lookup("sun/reflect/GeneratedConstructorAccessor"); - method_name_id = _symbols.lookup( - "Object " - "sun.reflect.GeneratedConstructorAccessor.newInstance(Object[])"); - method_sig_id = _symbols.lookup(method_sig); - } else if (has_prefix(class_name, - "Ljdk/internal/reflect/GeneratedMethodAccessor")) { - class_name_id = - _classes->lookup("jdk/internal/reflect.GeneratedMethodAccessor"); - method_name_id = - _symbols.lookup("Object " - "jdk.internal.reflect.GeneratedMethodAccessor." - "invoke(Object, Object[])"); - method_sig_id = _symbols.lookup(method_sig); - } else if (has_prefix(class_name, - "Lsun/reflect/GeneratedMethodAccessor")) { - class_name_id = _classes->lookup("sun/reflect/GeneratedMethodAccessor"); - method_name_id = _symbols.lookup( - "Object sun.reflect.GeneratedMethodAccessor.invoke(Object, " - "Object[])"); - method_sig_id = _symbols.lookup(method_sig); - } else if (has_prefix(class_name, "Ljava/lang/invoke/LambdaForm$")) { - const int lambdaFormPrefixLength = - strlen("Ljava/lang/invoke/LambdaForm$"); - // we want to normalise to java/lang/invoke/LambdaForm$MH, - // java/lang/invoke/LambdaForm$DMH, java/lang/invoke/LambdaForm$BMH, - if (has_prefix(class_name + lambdaFormPrefixLength, "MH")) { - class_name_id = _classes->lookup("java/lang/invoke/LambdaForm$MH"); - } else if (has_prefix(class_name + lambdaFormPrefixLength, "BMH")) { - class_name_id = _classes->lookup("java/lang/invoke/LambdaForm$BMH"); - } else if (has_prefix(class_name + lambdaFormPrefixLength, "DMH")) { - 
class_name_id = _classes->lookup("java/lang/invoke/LambdaForm$DMH"); - } else { - // don't recognise the suffix, so don't normalise - class_name_id = - _classes->lookup(class_name + 1, strlen(class_name) - 2); - } - method_name_id = _symbols.lookup(method_name); - method_sig_id = _symbols.lookup(method_sig); - } else { - class_name_id = - _classes->lookup(class_name + 1, strlen(class_name) - 2); - method_name_id = _symbols.lookup(method_name); - method_sig_id = _symbols.lookup(method_sig); - } - } else { - Counters::increment(JMETHODID_SKIPPED); - class_name_id = _classes->lookup(""); - method_name_id = _symbols.lookup("jvmtiError"); - method_sig_id = _symbols.lookup("()L;"); - } - - mi->_class = class_name_id; - mi->_name = method_name_id; - mi->_sig = method_sig_id; - mi->_type = FRAME_INTERPRETED; - mi->_is_entry = entry; - if (line_number_table != nullptr) { - mi->_line_number_table = std::make_shared( - line_number_table_size, line_number_table); - // Increment counter for tracking live line number tables - Counters::increment(LINE_NUMBER_TABLES); - } - - // strings are null or came from JVMTI - if (method_name) { - jvmti->Deallocate((unsigned char *)method_name); - } - if (method_sig) { - jvmti->Deallocate((unsigned char *)method_sig); - } - if (class_name) { - jvmti->Deallocate((unsigned char *)class_name); - } - } - jni->PopLocalFrame(NULL); -} - -MethodInfo *Lookup::resolveMethod(ASGCT_CallFrame &frame) { - static const char* UNKNOWN = "unknown"; - unsigned long key; - jint bci = frame.bci; - - jmethodID method = frame.method_id; - if (method == nullptr) { - key = MethodMap::makeKey(UNKNOWN); - } else if (bci == BCI_ERROR || bci == BCI_NATIVE_FRAME) { - key = MethodMap::makeKey(frame.native_function_name); - } else if (bci == BCI_NATIVE_FRAME_REMOTE) { - key = MethodMap::makeKey(frame.packed_remote_frame); - } else { - FrameTypeId frame_type = FrameType::decode(bci); - assert(frame_type == FRAME_INTERPRETED || frame_type == FRAME_JIT_COMPILED || - 
frame_type == FRAME_INLINED || frame_type == FRAME_C1_COMPILED || - VM::isOpenJ9()); // OpenJ9 may have bugs that produce invalid frame types - key = MethodMap::makeKey(method); - } - - MethodInfo *mi = &(*_method_map)[key]; - - if (!mi->_mark) { - mi->_mark = true; - bool first_time = mi->_key == 0; - if (first_time) { - mi->_key = _method_map->size() + 1; // avoid zero key - } - if (method == nullptr) { - fillNativeMethodInfo(mi, UNKNOWN, nullptr); - } else if (bci == BCI_ERROR) { - fillNativeMethodInfo(mi, (const char *)method, nullptr); - } else if (bci == BCI_NATIVE_FRAME) { - const char *name = (const char *)method; - fillNativeMethodInfo(mi, name, - Profiler::instance()->getLibraryName(name)); - } else if (bci == BCI_NATIVE_FRAME_REMOTE) { - // Unpack remote symbolication data using utility struct - // Layout: pc_offset (44 bits) | mark (3 bits) | lib_index (15 bits) - unsigned long packed_remote_frame = frame.packed_remote_frame; - uintptr_t pc_offset = Profiler::RemoteFramePacker::unpackPcOffset(packed_remote_frame); - [[maybe_unused]] char mark = Profiler::RemoteFramePacker::unpackMark(packed_remote_frame); - uint32_t lib_index = Profiler::RemoteFramePacker::unpackLibIndex(packed_remote_frame); - - TEST_LOG("Unpacking remote frame: packed=0x%zx, pc_offset=0x%lx, mark=%d, lib_index=%u", - packed_remote_frame, pc_offset, (int)mark, lib_index); - - // Lookup library by index to get build_id - // Note: This is called during JFR serialization with lockAll() held (see Profiler::dump), - // so the library array is stable - no concurrent dlopen_hook calls can modify it. 
- CodeCache* lib = Libraries::instance()->getLibraryByIndex(lib_index); - if (lib != nullptr && lib->hasBuildId() && Profiler::instance()->isRemoteSymbolication()) { - TEST_LOG("Found library: %s, build_id=%s", lib->name(), lib->buildId()); - // Remote symbolication: defer to backend - RemoteFrameInfo rfi(lib->buildId(), pc_offset, lib_index); - fillRemoteFrameInfo(mi, &rfi); - } else if (lib != nullptr) { - // Locally unsymbolized: render as [libname+0xoffset] - char name_buf[256]; - const char* s = lib->name(); - const char* basename = strrchr(s, '/'); - if (basename) basename++; else basename = s; - snprintf(name_buf, sizeof(name_buf), "[%s+0x%" PRIxPTR "]", basename, pc_offset); - fillNativeMethodInfo(mi, name_buf, nullptr); - } else { - TEST_LOG("WARNING: Library lookup failed for index %u", lib_index); - fillNativeMethodInfo(mi, "unknown_library", nullptr); - } - } else { - fillJavaMethodInfo(mi, method, first_time); - } - } - - return mi; -} - -u32 Lookup::getPackage(const char *class_name) { - const char *package = strrchr(class_name, '/'); - if (package == NULL) { - return 0; - } - if (package[1] >= '0' && package[1] <= '9') { - // Seems like a hidden or anonymous class, e.g. 
com/example/Foo/0x012345 - do { - if (package == class_name) - return 0; - } while (*--package != '/'); - } - if (class_name[0] == '[') { - class_name = strchr(class_name, 'L') + 1; - } - return _packages.lookup(class_name, package - class_name); -} - -u32 Lookup::getSymbol(const char *name) { return _symbols.lookup(name); } - char *Recording::_agent_properties = NULL; char *Recording::_jvm_args = NULL; char *Recording::_jvm_flags = NULL; diff --git a/ddprof-lib/src/main/cpp/flightRecorder.h b/ddprof-lib/src/main/cpp/flightRecorder.h index e9aa3cde1..b5e28507d 100644 --- a/ddprof-lib/src/main/cpp/flightRecorder.h +++ b/ddprof-lib/src/main/cpp/flightRecorder.h @@ -22,6 +22,7 @@ #include "frame.h" #include "jfrMetadata.h" #include "log.h" +#include "methodInfo.h" #include "mutex.h" #include "objectSampler.h" #include "threadFilter.h" @@ -55,92 +56,6 @@ struct CpuTimes { CpuTime proc; CpuTime total; }; - -class SharedLineNumberTable { -public: - int _size; - void *_ptr; - - SharedLineNumberTable(int size, void *ptr) : _size(size), _ptr(ptr) {} - ~SharedLineNumberTable(); -}; - -class MethodInfo { -public: - MethodInfo() - : _mark(false), _is_entry(false), _referenced(false), _age(0), _key(0), _class(0), - _name(0), _sig(0), _modifiers(0), _line_number_table(nullptr), _type() {} - - bool _mark; - bool _is_entry; - bool _referenced; // Tracked during writeStackTraces() for cleanup - int _age; // Consecutive chunks without reference (0 = recently used) - u32 _key; - u32 _class; - u32 _name; - u32 _sig; - jint _modifiers; - std::shared_ptr _line_number_table; - FrameTypeId _type; - - jint getLineNumber(jint bci) { - // if the shared pointer is not pointing to the line number table, consider - // size 0 - if (!_line_number_table || _line_number_table->_size == 0) { - return 0; - } - - int i = 1; - while (i < _line_number_table->_size && - bci >= ((jvmtiLineNumberEntry *)_line_number_table->_ptr)[i] - .start_location) { - i++; - } - return ((jvmtiLineNumberEntry 
*)_line_number_table->_ptr)[i - 1] - .line_number; - } - - bool isHidden() { - // 0x1400 = ACC_SYNTHETIC(0x1000) | ACC_BRIDGE(0x0040) - return _modifiers == 0 || (_modifiers & 0x1040); - } -}; - -// MethodMap's key can be derived from 3 sources: -// 1) jmethodID for Java methods -// 2) void* address for native method names -// 3) Encoded RemoteFrameInfo -// The values of the keys are potentially overlapping, so we use -// the highest 2 bits to distinguish them. -// 00 - jmethodID -// 10 - void* address -// 01 - RemoteFrameInfo -class MethodMap : public std::map { -public: - static constexpr unsigned long ADDRESS_MARK = 0x8000000000000000ULL; - static constexpr unsigned long REMOTE_FRAME_MARK = 0x4000000000000000ULL; - static constexpr unsigned long KEY_TYPE_MASK = ADDRESS_MARK | REMOTE_FRAME_MARK; - - MethodMap() {} - - static unsigned long makeKey(jmethodID method) { - unsigned long key = (unsigned long)method; - assert((key & KEY_TYPE_MASK) == 0); - return key; - } - - static unsigned long makeKey(const char* addr) { - unsigned long key = (unsigned long)addr; - assert((key & KEY_TYPE_MASK) == 0); - return (key | ADDRESS_MARK); - } - - static unsigned long makeKey(unsigned long packed_remote_frame) { - unsigned long key = packed_remote_frame; - assert((key & KEY_TYPE_MASK) == 0); - return (key | REMOTE_FRAME_MARK);} -}; - class Recording { friend ObjectSampler; friend Profiler; @@ -297,35 +212,6 @@ class Recording { private: void cleanupUnreferencedMethods(); }; - -class Lookup { -public: - Recording *_rec; - MethodMap *_method_map; - Dictionary *_classes; - Dictionary _packages; - Dictionary _symbols; - -private: - void fillNativeMethodInfo(MethodInfo *mi, const char *name, - const char *lib_name); - void fillRemoteFrameInfo(MethodInfo *mi, const RemoteFrameInfo *rfi); - void cutArguments(char *func); - void fillJavaMethodInfo(MethodInfo *mi, jmethodID method, bool first_time); - bool has_prefix(const char *str, const char *prefix) const { - return strncmp(str, 
prefix, strlen(prefix)) == 0; - } - -public: - Lookup(Recording *rec, MethodMap *method_map, Dictionary *classes) - : _rec(rec), _method_map(method_map), _classes(classes), _packages(), - _symbols() {} - - MethodInfo *resolveMethod(ASGCT_CallFrame &frame); - u32 getPackage(const char *class_name); - u32 getSymbol(const char *name); -}; - class FlightRecorder { friend Profiler; diff --git a/ddprof-lib/src/main/cpp/frame.h b/ddprof-lib/src/main/cpp/frame.h index dbd27c2e0..6694dbaa2 100644 --- a/ddprof-lib/src/main/cpp/frame.h +++ b/ddprof-lib/src/main/cpp/frame.h @@ -1,6 +1,9 @@ #ifndef _FRAME_H #define _FRAME_H +#include +#include "vmEntry.h" + enum FrameTypeId { FRAME_INTERPRETED = 0, FRAME_JIT_COMPILED = 1, @@ -10,12 +13,14 @@ enum FrameTypeId { FRAME_KERNEL = 5, FRAME_C1_COMPILED = 6, FRAME_NATIVE_REMOTE = 7, // Native frame with remote symbolication (build-id + pc-offset) - FRAME_TYPE_MAX = FRAME_NATIVE_REMOTE // Maximum valid frame type + FRAME_INTERPRETED_METHOD = 8, + FRAME_TYPE_MAX = FRAME_INTERPRETED_METHOD // Maximum valid frame type }; class FrameType { public: static inline int encode(int type, int bci) { + assert((type != FRAME_INTERPRETED_METHOD || VM::isHotspot()) && "FRAME_INTERPRETED_METHOD is only valid for hotspot"); return (1 << 24) | (type << 25) | (bci & 0xffffff); } diff --git a/ddprof-lib/src/main/cpp/hotspot/classloader.h b/ddprof-lib/src/main/cpp/hotspot/classloader.h new file mode 100644 index 000000000..c886c041b --- /dev/null +++ b/ddprof-lib/src/main/cpp/hotspot/classloader.h @@ -0,0 +1,19 @@ +/* + * Copyright The async-profiler authors + * Copyright 2026, Datadog, Inc. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef _HOTSPOT_CLASSLOADER_H +#define _HOTSPOT_CLASSLOADER_H + +#include "hotspot/vmStructs.h" + +class VMClassLoader { +public: + // Does the method belong to a class that is loaded by the bootstrap class loader? + static inline bool isLoadedByBootstrapClassLoader(const VMMethod* method); +}; + +#endif // _HOTSPOT_CLASSLOADER_H + diff --git a/ddprof-lib/src/main/cpp/hotspot/classloader.inline.h b/ddprof-lib/src/main/cpp/hotspot/classloader.inline.h new file mode 100644 index 000000000..60db04d76 --- /dev/null +++ b/ddprof-lib/src/main/cpp/hotspot/classloader.inline.h @@ -0,0 +1,38 @@ +/* + * Copyright The async-profiler authors + * Copyright 2026, Datadog, Inc. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef _HOTSPOT_CLASSLOADER_INLINE_H +#define _HOTSPOT_CLASSLOADER_INLINE_H + +#include "hotspot/classloader.h" + +#include + +bool VMClassLoader::isLoadedByBootstrapClassLoader(const VMMethod* method) { + if (method == nullptr) { + return false; + } + + VMKlass* method_klass = method->methodHolderSafe(); + if (method_klass == nullptr) { + return false; + } + + VMClassLoaderData* cld = method_klass->classLoaderDataSafe(); + if (cld == nullptr) { + return false; + } + + // java/lang/Object must be loaded by bootstrap class loader + VMKlass* obj_klass = VMClasses::obj_klass(); + assert(obj_klass != nullptr && "VMClasses not yet initialized"); + assert(obj_klass->classLoaderData() != nullptr && "Object class has no class loader data"); + + return cld == obj_klass->classLoaderData(); +} + +#endif // _HOTSPOT_CLASSLOADER_INLINE_H + diff --git a/ddprof-lib/src/main/cpp/hotspot/hotspotSupport.cpp b/ddprof-lib/src/main/cpp/hotspot/hotspotSupport.cpp index 50bf0d6d0..35c9ee92e 100644 --- a/ddprof-lib/src/main/cpp/hotspot/hotspotSupport.cpp +++ b/ddprof-lib/src/main/cpp/hotspot/hotspotSupport.cpp @@ -7,6 +7,7 @@ #include #include #include "asyncSampleMutex.h" +#include "hotspot/classloader.inline.h" +#include
"hotspot/hotspotSupport.h" #include "hotspot/jitCodeCache.h" #include "hotspot/vmStructs.inline.h" @@ -30,11 +31,31 @@ static bool isAddressInCode(const void *pc, bool include_stubs = true) { } static jmethodID getMethodId(VMMethod* method) { + if (method == nullptr) { + return nullptr; + } + + jmethodID method_id = nullptr; if (!inDeadZone(method) && aligned((uintptr_t)method) && SafeAccess::isReadableRange(method, VMMethod::type_size())) { - return method->validatedId(); + method_id = method->validatedId(); } - return NULL; + + return method_id; +} + +static void printMethod(VMMethod* m) { + if (m == nullptr) { + TEST_LOG("*** Method == nullptr"); + } + VMConstMethod* const_method = m->constMethod(); + VMSymbol* name_sym = const_method->name(); + VMSymbol* sig_sym = const_method->signature(); + VMKlass* klass = m->methodHolder(); + VMSymbol* klass_sym = klass->name(); + + TEST_LOG("*** Method class: %.*s method: %.*s %.*s", klass_sym->length(), klass_sym->body(), + name_sym->length(), name_sym->body(), sig_sym->length(), sig_sym->body()); } /** @@ -79,13 +100,10 @@ inline EventType eventTypeFromBCI(jint bci_type) { static void fillFrameTypes(ASGCT_CallFrame *frames, int num_frames, VMNMethod *nmethod) { if (nmethod->isNMethod() && nmethod->isAlive()) { VMMethod *method = nmethod->method(); - if (method == NULL) { - return; - } - jmethodID current_method_id = method->id(); + jmethodID current_method_id = getMethodId(method); if (current_method_id == NULL) { - return; + return; } // Mark current_method as COMPILED and frames above current_method as @@ -116,6 +134,12 @@ static void fillFrameTypes(ASGCT_CallFrame *frames, int num_frames, VMNMethod *n } } +static inline void fillFrame(ASGCT_CallFrame& frame, int bci, const VMMethod* method) { + assert(method != nullptr); + frame.bci = FrameType::encode(FRAME_INTERPRETED_METHOD, bci); + frame.method = static_cast(method); +} + static ucontext_t empty_ucontext{}; #ifdef NDEBUG @@ -403,15 +427,18 @@ 
__attribute__((no_sanitize("address"))) int HotspotSupport::walkVM(void* ucontex bool is_plausible_interpreter_frame = StackWalkValidation::isPlausibleInterpreterFrame(fp, sp, bcp_offset); if (is_plausible_interpreter_frame) { - VMMethod* method = ((VMMethod**)fp)[InterpreterFrame::method_offset]; + VMMethod* method = VMMethod::cast_or_null(((void**)fp)[InterpreterFrame::method_offset]); jmethodID method_id = getMethodId(method); - if (method_id != NULL) { + if (method_id != NULL || VMClassLoader::isLoadedByBootstrapClassLoader(method)) { Counters::increment(WALKVM_JAVA_FRAME_OK); const char* bytecode_start = method->bytecode(); const char* bcp = ((const char**)fp)[bcp_offset]; int bci = bytecode_start == NULL || bcp < bytecode_start ? 0 : bcp - bytecode_start; - fillFrame(frames[depth++], FRAME_INTERPRETED, bci, method_id); - + if (method_id != nullptr) { + fillFrame(frames[depth++], FRAME_INTERPRETED, bci, method_id); + } else { + fillFrame(frames[depth++], bci, method); + } sp = ((uintptr_t*)fp)[InterpreterFrame::sender_sp_offset]; pc = stripPointer(((void**)fp)[FRAME_PC_SLOT]); fp = *(uintptr_t*)fp; @@ -420,12 +447,15 @@ __attribute__((no_sanitize("address"))) int HotspotSupport::walkVM(void* ucontex } if (depth == 0) { - VMMethod* method = (VMMethod*)frame.method(); + VMMethod* method = VMMethod::cast_or_null((const void*)frame.method()); jmethodID method_id = getMethodId(method); - if (method_id != NULL) { + if (method_id != NULL || VMClassLoader::isLoadedByBootstrapClassLoader(method)) { Counters::increment(WALKVM_JAVA_FRAME_OK); - fillFrame(frames[depth++], FRAME_INTERPRETED, 0, method_id); - + if (method_id != nullptr) { + fillFrame(frames[depth++], FRAME_INTERPRETED, 0, method_id); + } else { + fillFrame(frames[depth++], 0, method); + } if (is_plausible_interpreter_frame) { pc = stripPointer(((void**)fp)[FRAME_PC_SLOT]); sp = frame.senderSP(); @@ -460,7 +490,13 @@ __attribute__((no_sanitize("address"))) int HotspotSupport::walkVM(void* ucontex 
Counters::increment(WALKVM_JAVA_FRAME_OK); int level = nm->level(); FrameTypeId type = details && level >= 1 && level <= 3 ? FRAME_C1_COMPILED : FRAME_JIT_COMPILED; - fillFrame(frames[depth++], type, 0, nm->method()->id()); + VMMethod* method = VMMethod::cast_or_null((const void*)frame.method()); + jmethodID method_id = getMethodId(method); + if (method_id != nullptr) { + fillFrame(frames[depth++], type, 0, method_id); + } else if (VMClassLoader::isLoadedByBootstrapClassLoader(method)) { + fillFrame(frames[depth++], 0, method); + } if (nm->isFrameCompleteAt(pc)) { if (depth == 1 && frame.unwindEpilogue(nm, (uintptr_t&)pc, sp, fp)) { @@ -477,7 +513,13 @@ __attribute__((no_sanitize("address"))) int HotspotSupport::walkVM(void* ucontex type = scope_offset > 0 ? FRAME_INLINED : level >= 1 && level <= 3 ? FRAME_C1_COMPILED : FRAME_JIT_COMPILED; } - fillFrame(frames[depth++], type, scope.bci(), scope.method()->id()); + VMMethod* method = scope.method(); + jmethodID method_id = getMethodId(method); + if (method_id != nullptr) { + fillFrame(frames[depth++], type, scope.bci(), method_id); + } else if (VMClassLoader::isLoadedByBootstrapClassLoader(method)) { + fillFrame(frames[depth++], scope.bci(), scope.method()); + } } while (scope_offset > 0 && depth < max_depth); } @@ -586,9 +628,11 @@ __attribute__((no_sanitize("address"))) int HotspotSupport::walkVM(void* ucontex } else if (mark == MARK_COMPILER_ENTRY && features.comp_task && vm_thread != NULL) { // Insert current compile task as a pseudo Java frame VMMethod* method = vm_thread->compiledMethod(); - jmethodID method_id = method != NULL ? 
method->id() : NULL; + jmethodID method_id = getMethodId(method); if (method_id != NULL) { fillFrame(frames[depth++], FRAME_JIT_COMPILED, 0, method_id); + } else if (VMClassLoader::isLoadedByBootstrapClassLoader(method)) { + fillFrame(frames[depth++], 0, method); } } else if (mark == MARK_THREAD_ENTRY) { // Thread entry point detected via pre-computed mark - this is the root frame @@ -629,9 +673,9 @@ __attribute__((no_sanitize("address"))) int HotspotSupport::walkVM(void* ucontex // In HotSpot, lastJavaFP is non-zero only for interpreter frames; // compiled frames record FP=0 in the anchor. if (StackWalkValidation::isPlausibleInterpreterFrame(recovery_fp, recovery_sp, bcp_offset)) { - VMMethod* method = ((VMMethod**)recovery_fp)[InterpreterFrame::method_offset]; + VMMethod* method = VMMethod::cast_or_null(((VMMethod**)recovery_fp)[InterpreterFrame::method_offset]); jmethodID method_id = getMethodId(method); - if (method_id != NULL) { + if (method_id != nullptr || VMClassLoader::isLoadedByBootstrapClassLoader(method)) { anchor = NULL; prev_native_pc = NULL; if (depth > 0 && depth + 1 < actual_max_depth) { @@ -641,8 +685,11 @@ __attribute__((no_sanitize("address"))) int HotspotSupport::walkVM(void* ucontex const char* bytecode_start = method->bytecode(); const char* bcp = ((const char**)recovery_fp)[bcp_offset]; int bci = bytecode_start == NULL || bcp < bytecode_start ? 
0 : bcp - bytecode_start; - fillFrame(frames[depth++], FRAME_INTERPRETED, bci, method_id); - + if (method_id != nullptr) { + fillFrame(frames[depth++], FRAME_INTERPRETED, bci, method_id); + } else { + fillFrame(frames[depth++], bci, method); + } sp = ((uintptr_t*)recovery_fp)[InterpreterFrame::sender_sp_offset]; pc = stripPointer(((void**)recovery_fp)[FRAME_PC_SLOT]); fp = *(uintptr_t*)recovery_fp; @@ -789,7 +836,7 @@ __attribute__((no_sanitize("address"))) int HotspotSupport::walkVM(void* ucontex if (StackWalkValidation::isPlausibleInterpreterFrame(anchor_fp, anchor_sp, bcp_offset)) { VMMethod* method = ((VMMethod**)anchor_fp)[InterpreterFrame::method_offset]; jmethodID method_id = getMethodId(method); - if (method_id != NULL) { + if (method_id != nullptr || VMClassLoader::isLoadedByBootstrapClassLoader(method)) { Counters::increment(WALKVM_ANCHOR_FALLBACK); Counters::increment(WALKVM_JAVA_FRAME_OK); anchor = NULL; @@ -798,7 +845,11 @@ __attribute__((no_sanitize("address"))) int HotspotSupport::walkVM(void* ucontex const char* bytecode_start = method->bytecode(); const char* bcp = ((const char**)anchor_fp)[bcp_offset]; int bci = bytecode_start == NULL || bcp < bytecode_start ? 
0 : bcp - bytecode_start; - fillFrame(frames[depth++], FRAME_INTERPRETED, bci, method_id); + if (method_id != nullptr) { + fillFrame(frames[depth++], FRAME_INTERPRETED, bci, method_id); + } else { + fillFrame(frames[depth++], bci, method); + } sp = ((uintptr_t*)anchor_fp)[InterpreterFrame::sender_sp_offset]; pc = stripPointer(((void**)anchor_fp)[FRAME_PC_SLOT]); fp = *(uintptr_t*)anchor_fp; @@ -988,7 +1039,7 @@ int HotspotSupport::getJavaTraceAsync(void *ucontext, ASGCT_CallFrame *frames, if (nmethod != NULL && nmethod->isNMethod() && nmethod->isAlive()) { VMMethod *method = nmethod->method(); if (method != NULL) { - jmethodID method_id = method->id(); + jmethodID method_id = getMethodId(method); if (method_id != NULL) { max_depth -= makeFrame(trace.frames++, 0, method_id); } @@ -1092,7 +1143,6 @@ int HotspotSupport::getJavaTraceAsync(void *ucontext, ASGCT_CallFrame *frames, return trace.frames - frames + 1; } - int HotspotSupport::walkJavaStack(StackWalkRequest& request) { CStack cstack = Profiler::instance()->cstackMode(); StackWalkFeatures features = Profiler::instance()->stackWalkFeatures(); @@ -1136,3 +1186,135 @@ int HotspotSupport::walkJavaStack(StackWalkRequest& request) { } return java_frames; } + +static void patchClassLoaderData(JNIEnv* jni, jclass klass) { + bool needs_patch = VM::hotspot_version() == 8; + if (needs_patch) { + // Workaround for JVM bug https://bugs.openjdk.org/browse/JDK-8062116 + // Preallocate space for jmethodIDs at the beginning of the list (rather than at the end) + // This is relevant only for JDK 8 - later versions do not have this bug + if (VMStructs::hasClassLoaderData()) { + VMKlass *vmklass = VMKlass::fromJavaClass(jni, klass); + int method_count = vmklass->methodCount(); + if (method_count > 0) { + VMClassLoaderData *cld = vmklass->classLoaderData(); + cld->lock(); + for (int i = 0; i < method_count; i += MethodList::SIZE) { + *cld->methodList() = new MethodList(*cld->methodList()); + } + cld->unlock(); + } + } + } +} + 
+constexpr const char* LAMBDA_PREFIX = "Ljava/lang/invoke/LambdaForm$"; +constexpr const char* FFM_PREFIX = "Ljdk/internal/foreign/abi/"; +static bool isLambdaClass(const char* signature) { + return strncmp(signature, LAMBDA_PREFIX, strlen(LAMBDA_PREFIX)) == 0 || + strstr(signature, "$$Lambda.") != nullptr || + strstr(signature, "$$Lambda$") != nullptr || + strstr(signature, ".lambda$") != nullptr || + strncmp(signature, FFM_PREFIX, strlen(FFM_PREFIX)) == 0; +} + +bool HotspotSupport::loadMethodIDsImpl(jvmtiEnv *jvmti, JNIEnv *jni, jclass klass) { + jobject cl; + // Hotspot only: loaded by bootstrap class loader, which is never unloaded, + // we use Method instead. + if (jvmti->GetClassLoader(klass, &cl) == JVMTI_ERROR_NONE && cl == nullptr) { + char* signature_ptr = nullptr; + if (jvmti->GetClassSignature(klass, &signature_ptr, nullptr) == JVMTI_ERROR_NONE) { + // Lambda classes, even loaded by bootstrap class loader, can be unloaded, + // fallback to jmethodID + if (!isLambdaClass(signature_ptr)) { + jvmti->Deallocate((unsigned char*)signature_ptr); + return false; + } + } + if (signature_ptr != nullptr) { + jvmti->Deallocate((unsigned char*)signature_ptr); + } + } + if (cl != nullptr) { + jni->DeleteLocalRef(cl); + } + patchClassLoaderData(jni, klass); + return JVMSupport::loadMethodIDsImpl(jvmti, jni, klass); +} + +jmethodID HotspotSupport::resolve(const void* method) { + assert(VM::isHotspot()); + assert(method != nullptr); + VMMethod* vm_method = VMMethod::cast_or_null(method); + if (vm_method == nullptr) { + return nullptr; + } + + // May have been populated by following code + jmethodID method_id = vm_method->validatedId(); + if (method_id != nullptr) { + return method_id; + } + + VMConstMethod* const_method = vm_method->constMethodSafe(); + if (const_method == nullptr) { + return nullptr; + } + + VMSymbol* name_sym = const_method->name(); + VMSymbol* sig_sym = const_method->signature(); + VMKlass* klass = vm_method->methodHolderSafe(); + + if (name_sym ==
nullptr || sig_sym == nullptr || klass == nullptr) { + return nullptr; + } + + VMSymbol* klass_sym = klass->name(); + if (klass_sym == nullptr) { + return nullptr; + } + + char* method_name = (char*)malloc(name_sym->length() + 1); + char* method_signature = (char*)malloc(sig_sym->length() + 1); + int klass_name_len = klass_sym->length(); + char* klass_name = (char*)malloc(klass_name_len + 1); + if (method_name !=nullptr && method_signature != nullptr && klass_name != nullptr) { + memcpy(method_name, name_sym->body(), name_sym->length()); + method_name[name_sym->length()] = '\0'; + memcpy(method_signature, sig_sym->body(), sig_sym->length()); + method_signature[sig_sym->length()] = '\0'; + memcpy(klass_name, klass_sym->body(), klass_name_len); + klass_name[klass_name_len] = '\0'; + + JNIEnv *jni = VM::jni(); + jclass clz = jni->FindClass(klass_name); + if (clz == nullptr) { + jni->ExceptionClear(); + } else { + method_id = jni->GetMethodID(clz, method_name, method_signature); + if (method_id == nullptr) { + jni->ExceptionClear(); + method_id = jni->GetStaticMethodID(clz, method_name, method_signature); + if (method_id == nullptr) { + jni->ExceptionClear(); + } + } + } + } +// TEST_LOG("Resolved: %s: %s %s", klass_name, method_name, method_signature); + + free(method_name); + free(method_signature); + free(klass_name); + + + return method_id; +} + +void JNICALL HotspotSupport::NativeMethodBind(jvmtiEnv *jvmti, JNIEnv *jni, jthread thread, + jmethodID method, void *address, + void **new_address_ptr) { + VMStructs::NativeMethodBind(jvmti, jni, thread, method, address, new_address_ptr); +} + diff --git a/ddprof-lib/src/main/cpp/hotspot/hotspotSupport.h b/ddprof-lib/src/main/cpp/hotspot/hotspotSupport.h index 47245d30a..f155d25ac 100644 --- a/ddprof-lib/src/main/cpp/hotspot/hotspotSupport.h +++ b/ddprof-lib/src/main/cpp/hotspot/hotspotSupport.h @@ -12,9 +12,14 @@ #include "stackFrame.h" #include "stackWalker.h" +#include +#include + class ProfiledThread; class 
HotspotSupport { + friend class JVMSupport; + private: static int walkVM(void* ucontext, ASGCT_CallFrame* frames, int max_depth, StackWalkFeatures features, EventType event_type, @@ -27,9 +32,11 @@ class HotspotSupport { int max_depth, StackContext *java_ctx, bool *truncated); + static bool loadMethodIDsImpl(jvmtiEnv *jvmti, JNIEnv *jni, jclass klass); public: static void checkFault(ProfiledThread* thrd = nullptr); static int walkJavaStack(StackWalkRequest& request); + static inline bool canUnwind(const StackFrame& frame, const void*& pc) { return HotspotStackFrame::unwindAtomicStub(frame, pc); } @@ -37,6 +44,12 @@ class HotspotSupport { static inline bool isJitCode(const void* p) { return JitCodeCache::isJitCode(p); } + + static jmethodID resolve(const void* method); + + static void JNICALL NativeMethodBind(jvmtiEnv *jvmti, JNIEnv *jni, + jthread thread, jmethodID method, + void *address, void **new_address_ptr); }; #endif // _HOTSPOT_HOTSPOTSUPPORT_H diff --git a/ddprof-lib/src/main/cpp/hotspot/vmStructs.cpp b/ddprof-lib/src/main/cpp/hotspot/vmStructs.cpp index 69b97c302..4f24af4eb 100644 --- a/ddprof-lib/src/main/cpp/hotspot/vmStructs.cpp +++ b/ddprof-lib/src/main/cpp/hotspot/vmStructs.cpp @@ -225,7 +225,6 @@ void VMStructs::init_type_sizes() { continue; \ } - void VMStructs::init_constants() { // Int constants uintptr_t entry = readSymbol("gHotSpotVMIntConstants"); @@ -245,7 +244,6 @@ void VMStructs::init_constants() { // Special case _frame_entry_frame_call_wrapper_offset *= sizeof(uintptr_t); - // Long constants entry = readSymbol("gHotSpotVMLongConstants"); stride = readSymbol("gHotSpotVMLongConstantEntryArrayStride"); @@ -264,10 +262,8 @@ void VMStructs::init_constants() { } } } - #undef READ_CONSTANT - #ifdef DEBUG void VMStructs::verify_offsets() { int hotspot_version = VM::hotspot_version(); diff --git a/ddprof-lib/src/main/cpp/hotspot/vmStructs.h b/ddprof-lib/src/main/cpp/hotspot/vmStructs.h index bd4d66efd..10f4873dc 100644 --- 
a/ddprof-lib/src/main/cpp/hotspot/vmStructs.h +++ b/ddprof-lib/src/main/cpp/hotspot/vmStructs.h @@ -47,6 +47,18 @@ inline T* cast_to(const void* ptr) { return reinterpret_cast(const_cast(ptr)); } +template +inline T* cast_or_null(const void* ptr) { + assert(VM::isHotspot()); // This should only be used in HotSpot-specific code + assert(T::type_size() > 0); // Ensure type size has been initialized + if(ptr == nullptr || SafeAccess::isReadableRange(ptr, T::type_size())) { + return reinterpret_cast(const_cast(ptr)); + } else { + return nullptr; + } +} + + #define TYPE_SIZE_NAME(name) _##name##_size // MATCH_SYMBOLS macro expands into a string list, that is consumed by matchAny() method @@ -71,12 +83,15 @@ inline T* cast_to(const void* ptr) { class name : VMStructs { \ public: \ static uint64_t type_size() { return TYPE_SIZE_NAME(name); } \ - static name * cast(const void* ptr) { return cast_to(ptr); } \ + static name * cast(const void* ptr) { return ::cast_to(ptr); } \ + static name * cast_or_null(const void* ptr) { return ::cast_or_null(ptr); } \ static name * cast_raw(const void* ptr) { return (name *)ptr; } \ static name * load_then_cast(const void* ptr) { \ - assert(ptr != nullptr); \ - return cast(*(const void**)ptr); } - + if (ptr == nullptr) return nullptr; \ + return cast(*(const void**)ptr); } \ + static name * safe_load_then_cast(const void* ptr) { \ + if (ptr == nullptr) return nullptr; \ + return cast(SafeAccess::loadPtr((void**)ptr, nullptr)); } #define DECLARE_END }; /** @@ -104,7 +119,7 @@ inline T* cast_to(const void* ptr) { * For example: * f(VMClassLoaderData, MATCH_SYMBOLS("ClassLoaderData")) -> * if (matchAny((char*)[] {"ClassLoaderData", nullptr})) { - * _ClassLoaderData_size = size; + * _VMClassLoaderData_size = size; * continue; * } * @@ -117,16 +132,17 @@ inline T* cast_to(const void* ptr) { */ #define DECLARE_TYPES_DO(f) \ - f(VMClassLoaderData, MATCH_SYMBOLS("ClassLoaderData")) \ - f(VMConstantPool, MATCH_SYMBOLS("ConstantPool")) \ - 
f(VMConstMethod, MATCH_SYMBOLS("ConstMethod")) \ - f(VMFlag, MATCH_SYMBOLS("JVMFlag", "Flag")) \ - f(VMJavaFrameAnchor, MATCH_SYMBOLS("JavaFrameAnchor")) \ - f(VMKlass, MATCH_SYMBOLS("Klass")) \ - f(VMMethod, MATCH_SYMBOLS("Method")) \ - f(VMNMethod, MATCH_SYMBOLS("nmethod")) \ - f(VMSymbol, MATCH_SYMBOLS("Symbol")) \ - f(VMThread, MATCH_SYMBOLS("Thread")) + f(VMClassLoaderData, MATCH_SYMBOLS("ClassLoaderData")) \ + f(VMConstantPool, MATCH_SYMBOLS("ConstantPool")) \ + f(VMConstMethod, MATCH_SYMBOLS("ConstMethod")) \ + f(VMFlag, MATCH_SYMBOLS("JVMFlag", "Flag")) \ + f(VMJavaFrameAnchor, MATCH_SYMBOLS("JavaFrameAnchor")) \ + f(VMKlass, MATCH_SYMBOLS("Klass")) \ + f(VMMethod, MATCH_SYMBOLS("Method")) \ + f(VMNMethod, MATCH_SYMBOLS("nmethod")) \ + f(VMSymbol, MATCH_SYMBOLS("Symbol")) \ + f(VMThread, MATCH_SYMBOLS("Thread")) \ + f(VMClasses, MATCH_SYMBOLS("vmClasses", "SystemDictionary")) // ContinuationEntry type. Only exported via gHotSpotVMTypes starting in // JDK 27 (JDK-8378985); there is no mangled-symbol fallback for its size. 
@@ -202,6 +218,8 @@ typedef void* address; type_begin(VMConstMethod, MATCH_SYMBOLS("ConstMethod")) \ field(_constmethod_constants_offset, offset, MATCH_SYMBOLS("_constants")) \ field(_constmethod_idnum_offset, offset, MATCH_SYMBOLS("_method_idnum")) \ + field(_constmethod_name_index_offset, offset, MATCH_SYMBOLS("_name_index")) \ + field(_constmethod_sig_index_offset, offset, MATCH_SYMBOLS("_signature_index")) \ type_end() \ type_begin(VMConstantPool, MATCH_SYMBOLS("ConstantPool")) \ field(_pool_holder_offset, offset, MATCH_SYMBOLS("_pool_holder")) \ @@ -214,6 +232,8 @@ typedef void* address; type_end() \ type_begin(VMClassLoaderData, MATCH_SYMBOLS("ClassLoaderData")) \ field(_class_loader_data_next_offset, offset, MATCH_SYMBOLS("_next")) \ + field_with_version(_class_loader_data_has_class_mirror_holder_offset, offset, 17, MAX_VERSION, MATCH_SYMBOLS("_has_class_mirror_holder")) \ + field_with_version(_class_loader_data_is_anonymous_offset, offset, 11, 11, MATCH_SYMBOLS("_is_anonymous")) \ type_end() \ type_begin(VMJavaClass, MATCH_SYMBOLS("java_lang_Class")) \ field(_klass_offset_addr, address, MATCH_SYMBOLS("_klass_offset")) \ @@ -283,7 +303,7 @@ typedef void* address; field(_vs_high_offset, offset, MATCH_SYMBOLS("_high")) \ type_end() \ type_begin(VMStubRoutine, MATCH_SYMBOLS("StubRoutines")) \ - field(_call_stub_return_addr, address, MATCH_SYMBOLS("_call_stub_return_address")) \ + field(_call_stub_return_addr, address, MATCH_SYMBOLS("_call_stub_return_address")) \ type_end() \ type_begin(VMGrowableArray, MATCH_SYMBOLS("GrowableArrayBase", "GenericGrowableArray")) \ field(_array_len_offset, offset, MATCH_SYMBOLS("_len")) \ @@ -306,6 +326,9 @@ typedef void* address; field(_narrow_klass_base_addr, address, MATCH_SYMBOLS("_narrow_klass._base", "_base")) \ field(_narrow_klass_shift_addr, address, MATCH_SYMBOLS("_narrow_klass._shift", "_shift")) \ field(_collected_heap_addr, address, MATCH_SYMBOLS("_collectedHeap")) \ + type_end() \ + type_begin(VMClasses, 
MATCH_SYMBOLS("vmClasses", "SystemDictionary")) \ + field(_obj_class_addr, address, MATCH_SYMBOLS("_klasses[static_cast(vmClassID::Object_klass_knum)]", "_well_known_klasses[SystemDictionary::Object_klass_knum]")) \ type_end() /** @@ -440,6 +463,12 @@ class VMStructs { return ptr; } + const char* at(int offset) const { + const char* ptr = (const char*)this + offset; + assert(crashProtectionActive() || SafeAccess::isReadable(ptr)); + return ptr; + } + static bool goodPtr(const void* ptr) { return (uintptr_t)ptr >= 0x1000 && ((uintptr_t)ptr & (sizeof(uintptr_t) - 1)) == 0; } @@ -626,6 +655,9 @@ DECLARE(VMClassLoaderData) MethodList** methodList() { return (MethodList**) at(sizeof(uintptr_t) * 6 + 8); } + + inline bool hasClassMirrorHolder() const; + inline bool isAnonymous() const; DECLARE_END DECLARE(VMKlass) @@ -665,11 +697,16 @@ DECLARE(VMKlass) return VMSymbol::load_then_cast(at(_klass_name_offset)); } - VMClassLoaderData* classLoaderData() { + VMClassLoaderData* classLoaderData() const { assert(_class_loader_data_offset >= 0); return VMClassLoaderData::load_then_cast(at(_class_loader_data_offset)); } + VMClassLoaderData* classLoaderDataSafe() const { + assert(_class_loader_data_offset >= 0); + return VMClassLoaderData::safe_load_then_cast(at(_class_loader_data_offset)); + } + int methodCount() { assert(_methods_offset >= 0); int* methods = *(int**) at(_methods_offset); @@ -863,16 +900,33 @@ DECLARE(VMThread) DECLARE_END -DECLARE(VMConstMethod) +DECLARE(VMConstantPool) +public: + inline VMKlass* holder() const; + inline VMKlass* holderSafe() const; + + inline VMSymbol* symbolAt(u16 index) const; + inline VMSymbol* symbolAtSafe(u16 index) const; +private: + inline intptr_t* base() const; DECLARE_END +DECLARE(VMConstMethod) +public: + inline VMConstantPool* constants() const; + inline VMConstantPool* constantsSafe() const; + inline VMSymbol* name() const; + inline VMSymbol* signature() const; +private: + inline u16 nameIndex() const; + inline u16 signatureIndex() 
const; +DECLARE_END DECLARE(VMMethod) - private: +private: static bool check_jmethodID_J9(jmethodID id); static bool check_jmethodID_hotspot(jmethodID id); - - public: +public: jmethodID id(); // Performs extra validation when VMMethod comes from incomplete frame @@ -891,7 +945,11 @@ DECLARE(VMMethod) return *(const char**) at(_method_constmethod_offset) + VMConstMethod::type_size(); } - inline VMNMethod* code(); + inline VMConstMethod* constMethod() const; + inline VMConstMethod* constMethodSafe() const; + inline VMNMethod* code() const; + inline VMKlass* methodHolder() const; + inline VMKlass* methodHolderSafe() const; static bool check_jmethodID(jmethodID id); DECLARE_END @@ -1033,6 +1091,11 @@ DECLARE(VMNMethod) int findScopeOffset(const void* pc); DECLARE_END +DECLARE(VMClasses) +public: + static inline VMKlass* obj_klass(); +DECLARE_END + class CodeHeap : VMStructs { private: static bool contains(char* heap, const void* pc) { diff --git a/ddprof-lib/src/main/cpp/hotspot/vmStructs.inline.h b/ddprof-lib/src/main/cpp/hotspot/vmStructs.inline.h index 03a81fea6..9c78c20a8 100644 --- a/ddprof-lib/src/main/cpp/hotspot/vmStructs.inline.h +++ b/ddprof-lib/src/main/cpp/hotspot/vmStructs.inline.h @@ -10,6 +10,8 @@ #include "hotspot/vmStructs.h" #include "jvmThread.h" +#include + VMThread* VMThread::current() { return VMThread::cast(JVMThread::current()); } @@ -36,6 +38,21 @@ void** VMThread::vtable() { return *(void***)this; } +VMMethod* VMThread::compiledMethod() { + if (!isJavaThread(this)) return NULL; + assert(_comp_method_offset >= 0); + assert(_comp_env_offset >= 0); + assert(_comp_task_offset >= 0); + const char* env = *(const char**) at(_comp_env_offset); + if (env != NULL) { + const char* task = *(const char**) (env + _comp_task_offset); + if (task != NULL) { + return VMMethod::load_then_cast((const void*)(task + _comp_method_offset)); + } + } + return NULL; +} + // This thread is considered a JavaThread if at least 2 of the selected 3 vtable entries // match 
those of a known JavaThread (which is either application thread or AttachListener). // Indexes were carefully chosen to work on OpenJDK 8 to 25, both product an debug builds. @@ -46,25 +63,106 @@ bool VMThread::hasJavaThreadVtable() { (SafeAccess::load(&vtbl[5]) == _java_thread_vtbl[5]) >= 2; } -VMNMethod* VMMethod::code() { +bool VMClassLoaderData::isAnonymous() const { + if (_class_loader_data_is_anonymous_offset >= 0) { + return *(bool*) at(_class_loader_data_is_anonymous_offset); + } + return false; +} + +bool VMClassLoaderData::hasClassMirrorHolder() const { + if (_class_loader_data_has_class_mirror_holder_offset >= 0) { + return *(bool*) at(_class_loader_data_has_class_mirror_holder_offset); + } + return false; +} + +VMKlass* VMConstantPool::holder() const { + assert(_pool_holder_offset >= 0); + return VMKlass::load_then_cast(at(_pool_holder_offset)); +} + +VMKlass* VMConstantPool::holderSafe() const { + assert(_pool_holder_offset >= 0); + return VMKlass::safe_load_then_cast(at(_pool_holder_offset)); +} + +VMSymbol* VMConstantPool::symbolAt(u16 index) const { + return VMSymbol::cast(*(void**)&base()[index]); +} + +VMSymbol* VMConstantPool::symbolAtSafe(u16 index) const { + return VMSymbol::cast_or_null(*(void**)&base()[index]); +} + +intptr_t* VMConstantPool::base() const { + assert(_VMConstantPool_size > 0); + return (intptr_t*)(((char*)this) + _VMConstantPool_size); +} + +VMConstMethod* VMMethod::constMethod() const { + return VMConstMethod::load_then_cast(at(_method_constmethod_offset)); +} + +VMConstMethod* VMMethod::constMethodSafe() const { + return VMConstMethod::safe_load_then_cast(at(_method_constmethod_offset)); +} + +VMNMethod* VMMethod::code() const { assert(_method_code_offset >= 0); const void* code_ptr = *(const void**) at(_method_code_offset); return VMNMethod::cast(code_ptr); } -VMMethod* VMThread::compiledMethod() { - if (!isJavaThread(this)) return NULL; - assert(_comp_method_offset >= 0); - assert(_comp_env_offset >= 0); - 
assert(_comp_task_offset >= 0); - const char* env = *(const char**) at(_comp_env_offset); - if (env != NULL) { - const char* task = *(const char**) (env + _comp_task_offset); - if (task != NULL) { - return VMMethod::load_then_cast((const void*)(task + _comp_method_offset)); - } - } - return NULL; +VMKlass* VMMethod::methodHolder() const { + VMConstMethod* constMthd = constMethod(); + VMConstantPool* pool = constMthd->constants(); + VMKlass* holder = pool->holder(); + return holder; +} + +VMKlass* VMMethod::methodHolderSafe() const { + VMConstMethod* constMthd = constMethodSafe(); + if (constMthd == nullptr) return nullptr; + + VMConstantPool* pool = constMthd->constantsSafe(); + if (pool == nullptr) return nullptr; + + return pool->holderSafe(); +} + +VMConstantPool* VMConstMethod::constants() const { + return VMConstantPool::load_then_cast(at(_constmethod_constants_offset)); +} + +VMConstantPool* VMConstMethod::constantsSafe() const { + return VMConstantPool::safe_load_then_cast(at(_constmethod_constants_offset)); +} + +u16 VMConstMethod::nameIndex() const { + assert(_constmethod_name_index_offset >= 0 && "Invalid name index"); + return *(u16*)at(_constmethod_name_index_offset); +} + +u16 VMConstMethod::signatureIndex() const { + assert(_constmethod_sig_index_offset >= 0 && "Invalid signature index"); + return *(u16*)at(_constmethod_sig_index_offset); +} + +VMSymbol* VMConstMethod::name() const { + VMConstantPool* cpool = constants(); + u16 name_index = nameIndex(); + return cpool->symbolAtSafe(name_index); +} + +VMSymbol* VMConstMethod::signature() const { + VMConstantPool* cpool = constants(); + u16 sig_index = signatureIndex(); + return cpool->symbolAtSafe(sig_index); +} + +VMKlass* VMClasses::obj_klass() { + return VMKlass::load_then_cast(_obj_class_addr); } #endif // _HOTSPOT_VMSTRUCTS_INLINE_H diff --git a/ddprof-lib/src/main/cpp/j9/j9Support.h b/ddprof-lib/src/main/cpp/j9/j9Support.h index 7031fb8f5..2e67dc42d 100644 --- 
a/ddprof-lib/src/main/cpp/j9/j9Support.h +++ b/ddprof-lib/src/main/cpp/j9/j9Support.h @@ -20,6 +20,7 @@ #include +#include "frame.h" #include "log.h" #include "vmEntry.h" diff --git a/ddprof-lib/src/main/cpp/jvmSupport.cpp b/ddprof-lib/src/main/cpp/jvmSupport.cpp index 6e3f5bc3a..e665347cc 100644 --- a/ddprof-lib/src/main/cpp/jvmSupport.cpp +++ b/ddprof-lib/src/main/cpp/jvmSupport.cpp @@ -12,9 +12,8 @@ #include "thread.h" #include "vmEntry.h" -#include "hotspot/hotspotSupport.h" - #include +#include int JVMSupport::walkJavaStack(StackWalkRequest& request) { if (VM::isHotspot()) { @@ -57,3 +56,60 @@ int JVMSupport::asyncGetCallTrace(ASGCT_CallFrame *frames, int max_depth, void* Profiler::instance()->incFailure(-trace.num_frames); return makeFrame(frames, BCI_ERROR, err_string); } + +void JVMSupport::loadAllMethodIDs(jvmtiEnv *jvmti, JNIEnv *jni) { + jint class_count = 0; + jclass *classes = nullptr; + int loaded_count = 0; + + if (jvmti->GetLoadedClasses(&class_count, &classes) == JVMTI_ERROR_NONE) { + for (int i = 0; i < class_count; i++) { + if(loadMethodIDs(jvmti, jni, classes[i])) { + loaded_count++; + } + } + jvmti->Deallocate((unsigned char *)classes); + } + TEST_LOG("Preloaded jmethodIDs for %d/%d classes", loaded_count, class_count); +} + +bool JVMSupport::loadMethodIDs(jvmtiEnv *jvmti, JNIEnv *jni, jclass klass) { + if (VM::isHotspot()) { + return HotspotSupport::loadMethodIDsImpl(jvmti, jni, klass); + } else { + return loadMethodIDsImpl(jvmti, jni, klass); + } + +} + +bool JVMSupport::loadMethodIDsImpl(jvmtiEnv *jvmti, JNIEnv *jni, jclass klass) { + // CRITICAL: GetClassMethods must be called to preallocate jmethodIDs for AsyncGetCallTrace. + // AGCT operates in signal handlers where lock acquisition is forbidden, so jmethodIDs must + // exist before profiling encounters them. Without preallocation, AGCT cannot identify methods + // in stack traces, breaking profiling functionality. 
+ // + // JVM-internal allocation: This triggers JVM to allocate jmethodIDs internally, which persist + // until class unload. High class churn causes significant memory growth, but this is inherent + // to AGCT architecture and necessary for signal-safe profiling. + // + // See: https://mostlynerdless.de/blog/2023/07/17/jmethodids-in-profiling-a-tale-of-nightmares/ + jint method_count; + jmethodID *methods; + if (jvmti->GetClassMethods(klass, &method_count, &methods) == JVMTI_ERROR_NONE) { + jvmti->Deallocate((unsigned char *)methods); + return true; + } + return false; +} + +// JVMTI callbacks +void JNICALL JVMSupport::ClassPrepare(jvmtiEnv *jvmti, JNIEnv *jni, jthread thread, + jclass klass) { + loadMethodIDs(jvmti, jni, klass); +} + +void JNICALL JVMSupport::ClassLoad(jvmtiEnv *jvmti, JNIEnv *jni, jthread thread, + jclass klass) { + // Needed only for AsyncGetCallTrace support +} + diff --git a/ddprof-lib/src/main/cpp/jvmSupport.h b/ddprof-lib/src/main/cpp/jvmSupport.h index 1cd387511..5912eaf4c 100644 --- a/ddprof-lib/src/main/cpp/jvmSupport.h +++ b/ddprof-lib/src/main/cpp/jvmSupport.h @@ -6,8 +6,10 @@ #ifndef _JVMSUPPORT_H #define _JVMSUPPORT_H +#include "hotspot/hotspotSupport.h" #include "stackFrame.h" #include "stackWalker.h" +#include "vmEntry.h" // Stack recovery techniques used to workaround AsyncGetCallTrace flaws. // Can be disabled with 'safemode' option. 
@@ -20,13 +22,37 @@ enum StackRecovery { PROBE_SP = 0x100, }; - class JVMSupport { + friend class HotspotSupport; + static int asyncGetCallTrace(ASGCT_CallFrame *frames, int max_depth, void* ucontext); + + // J9 and Zing shared implementation + static bool loadMethodIDsImpl(jvmtiEnv *jvmti, JNIEnv *jni, jclass klass); + public: static int walkJavaStack(StackWalkRequest& request); static inline bool canUnwind(const StackFrame& frame, const void*& pc); static inline bool isJitCode(const void* pc); + + static void loadAllMethodIDs(jvmtiEnv *jvmti, JNIEnv *jni); + static bool loadMethodIDs(jvmtiEnv *jvmti, JNIEnv *jni, jclass klass); + + // Resolve method pointer to jmethodID + static jmethodID resolve(const void* method) { + if (VM::isHotspot()) { + return HotspotSupport::resolve(method); + } else { + assert(false && "Should not reach here"); + return nullptr; + } + } + + // JVMTI callback + static void JNICALL ClassPrepare(jvmtiEnv *jvmti, JNIEnv *jni, jthread thread, + jclass klass); + static void JNICALL ClassLoad(jvmtiEnv *jvmti, JNIEnv *jni, jthread thread, + jclass klass); }; #endif // _JVMSUPPORT_H diff --git a/ddprof-lib/src/main/cpp/lookup.cpp b/ddprof-lib/src/main/cpp/lookup.cpp new file mode 100644 index 000000000..203feb72e --- /dev/null +++ b/ddprof-lib/src/main/cpp/lookup.cpp @@ -0,0 +1,408 @@ +/* + * Copyright The async-profiler authors + * Copyright 2026, Datadog, Inc. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + + +#include "lookup.h" + +#include +#include +#include + +#include "codeCache.h" +#include "common.h" +#include "counters.h" +#include "jniHelper.h" +#include "jvmSupport.h" +#include "libraries.h" +#include "methodInfo.h" +#include "profiler.h" +#include "rustDemangler.h" + +#include "hotspot/vmStructs.inline.h" + +void Lookup::fillNativeMethodInfo(MethodInfo *mi, const char *name, + const char *lib_name) { + mi->_class = _classes->lookup(""); + // TODO return the library name once we figured out how to cooperate with the + // backend + // if (lib_name == NULL) { + // mi->_class = _classes->lookup(""); + // } else if (lib_name[0] == '[' && lib_name[1] != 0) { + // mi->_class = _classes->lookup(lib_name + 1, strlen(lib_name) - + // 2); + // } else { + // mi->_class = _classes->lookup(lib_name); + // } + + mi->_modifiers = 0x100; + mi->_line_number_table = nullptr; + + if (name[0] == '_' && name[1] == 'Z') { + int status; + char *demangled = abi::__cxa_demangle(name, NULL, NULL, &status); + if (demangled != NULL) { + cutArguments(demangled); + mi->_sig = _symbols.lookup("()L;"); + mi->_type = FRAME_CPP; + + // Rust legacy demangling + if (RustDemangler::is_probably_rust_legacy(demangled)) { + std::string rust_demangled = RustDemangler::demangle(demangled); + mi->_name = _symbols.lookup(rust_demangled.c_str()); + } else { + mi->_name = _symbols.lookup(demangled); + } + free(demangled); + return; + } + } + + size_t len = strlen(name); + if (len >= 4 && strcmp(name + len - 4, "_[k]") == 0) { + mi->_name = _symbols.lookup(name, len - 4); + mi->_sig = _symbols.lookup("(Lk;)L;"); + mi->_type = FRAME_KERNEL; + } else { + mi->_name = _symbols.lookup(name); + mi->_sig = _symbols.lookup("()L;"); + mi->_type = FRAME_NATIVE; + } +} + +void Lookup::fillRemoteFrameInfo(MethodInfo *mi, const RemoteFrameInfo *rfi) { + // Store build-id in the class name field + mi->_class = _classes->lookup(rfi->build_id); + + // Store PC offset in hex 
format in the signature field + char offset_hex[32]; + snprintf(offset_hex, sizeof(offset_hex), "0x%" PRIxPTR, rfi->pc_offset); + mi->_sig = _symbols.lookup(offset_hex); + + // Use same modifiers as regular native frames (0x100 = ACC_NATIVE for consistency) + mi->_modifiers = 0x100; + // Use FRAME_NATIVE_REMOTE type to indicate remote symbolication + mi->_type = FRAME_NATIVE_REMOTE; + mi->_line_number_table = nullptr; + + // Method name indicates need for remote symbolication + mi->_name = _symbols.lookup(""); +} + +void Lookup::cutArguments(char *func) { + char *p = strrchr(func, ')'); + if (p == NULL) + return; + + int balance = 1; + while (--p > func) { + if (*p == '(' && --balance == 0) { + *p = 0; + return; + } else if (*p == ')') { + balance++; + } + } +} + +void Lookup::fillJavaMethodInfo(MethodInfo *mi, jmethodID method, + bool first_time) { + JNIEnv *jni = VM::jni(); + if (jni->PushLocalFrame(64) != 0) { + return; + } + jvmtiEnv *jvmti = VM::jvmti(); + + jvmtiPhase phase; + jclass method_class = NULL; + // invariant: these strings must remain null, or be assigned by JVMTI + char *class_name = nullptr; + char *method_name = nullptr; + char *method_sig = nullptr; + + jint line_number_table_size = 0; + jvmtiLineNumberEntry *line_number_table = NULL; + + jvmti->GetPhase(&phase); + if ((phase & (JVMTI_PHASE_START | JVMTI_PHASE_LIVE)) != 0) { + if (VMMethod::check_jmethodID(method) && + jvmti->GetMethodDeclaringClass(method, &method_class) == 0 && + // GetMethodDeclaringClass may return a jclass wrapping a stale/garbage oop when the class was + // unloaded between sample capture and dump (TOCTOU race with class unloading). Guard against + // null handles before calling GetClassSignature. + method_class != NULL && + // On some older versions of J9, the JVMTI call to GetMethodDeclaringClass will return OK = 0, but when a + // classloader is unloaded they free all JNIIDs. 
This means that anyone holding on to a jmethodID is + // pointing to corrupt data and the behaviour is undefined. + // The behaviour is adjusted so that when asgct() is used or if `-XX:+KeepJNIIDs` is specified, + // when a classloader is unloaded, the jmethodIDs are not freed, but instead marked as -1. + // The check below mitigates these crashes on J9. + (!VM::isOpenJ9() || method_class != reinterpret_cast(-1)) && + jvmti->GetClassSignature(method_class, &class_name, NULL) == JVMTI_ERROR_NONE && + jvmti->GetMethodName(method, &method_name, &method_sig, NULL) == JVMTI_ERROR_NONE) { + if (first_time) { + // Populate modifier (async profiler has this call) + if (jvmti->GetMethodModifiers(method, &mi->_modifiers) != JVMTI_ERROR_NONE) { + mi->_modifiers = 0; + } + jvmtiError line_table_error = jvmti->GetLineNumberTable(method, &line_number_table_size, + &line_number_table); + // Defensive: if GetLineNumberTable failed, clean up any potentially allocated memory + // Some buggy JVMTI implementations might allocate despite returning an error + if (line_table_error != JVMTI_ERROR_NONE) { + if (line_number_table != nullptr) { + // Try to deallocate to prevent leak from buggy JVM + jvmti->Deallocate((unsigned char *)line_number_table); + } + line_number_table = nullptr; + line_number_table_size = 0; + } + } + + fillMethodInfo(mi, method_class, class_name, method_name, method_sig, line_number_table_size, line_number_table); + + // strings are null or came from JVMTI + if (method_name) { + jvmti->Deallocate((unsigned char *)method_name); + } + if (method_sig) { + jvmti->Deallocate((unsigned char *)method_sig); + } + if (class_name) { + jvmti->Deallocate((unsigned char *)class_name); + } + } else { + Counters::increment(JMETHODID_SKIPPED); + mi->_class = _classes->lookup(""); + mi->_name = _symbols.lookup("jvmtiError"); + mi->_sig = _symbols.lookup("()L;"); + mi->_type = FRAME_INTERPRETED; + mi->_is_entry = false; + } + } + jni->PopLocalFrame(NULL); +} + +void 
Lookup::fillMethodInfo(MethodInfo *mi, jclass method_class, char* class_name, char* method_name, char* method_sig, jint line_number_table_size, jvmtiLineNumberEntry* line_number_table) { + bool entry = false; + u32 class_name_id = 0; + u32 method_name_id = 0; + u32 method_sig_id = 0; + + JNIEnv *jni = VM::jni(); + if (jni == nullptr) { + return; + } + + // Check if the frame is Thread.run or inherits from it + if (strncmp(method_name, "run", 4) == 0 && + strncmp(method_sig, "()V", 3) == 0) { + jclass Thread_class = jni->FindClass("java/lang/Thread"); + jclass Class_class = jni->FindClass("java/lang/Class"); + if (Thread_class != nullptr && Class_class != nullptr) { + jmethodID equals = jni->GetMethodID(Class_class, + "equals", "(Ljava/lang/Object;)Z"); + if (equals != nullptr) { + jclass klass = method_class; + do { + entry = jni->CallBooleanMethod(Thread_class, equals, klass); + if (jniExceptionCheck(jni)) { + entry = false; + break; + } + if (entry) { + break; + } + } while ((klass = jni->GetSuperclass(klass)) != NULL); + } + } + // Clear any exceptions from the reflection calls above + jniExceptionCheck(jni); + } else if (strncmp(method_name, "main", 5) == 0 && + strncmp(method_sig, "(Ljava/lang/String;)V", 21) == 0) { + // public static void main(String[] args) - 'public static' translates + // to modifier bits 0 and 3, hence check for '9' + entry = true; + } + + // maybe we should store the lookups below in initialisation-time + // constants... + if (has_prefix(class_name, + "Ljdk/internal/reflect/GeneratedConstructorAccessor")) { + class_name_id = _classes->lookup( + "jdk/internal/reflect/GeneratedConstructorAccessor"); + method_name_id = + _symbols.lookup("Object " + "jdk.internal.reflect.GeneratedConstructorAccessor." 
+ "newInstance(Object[])"); + method_sig_id = _symbols.lookup(method_sig); + } else if (has_prefix(class_name, + "Lsun/reflect/GeneratedConstructorAccessor")) { + class_name_id = + _classes->lookup("sun/reflect/GeneratedConstructorAccessor"); + method_name_id = _symbols.lookup( + "Object " + "sun.reflect.GeneratedConstructorAccessor.newInstance(Object[])"); + method_sig_id = _symbols.lookup(method_sig); + } else if (has_prefix(class_name, + "Ljdk/internal/reflect/GeneratedMethodAccessor")) { + class_name_id = + _classes->lookup("jdk/internal/reflect.GeneratedMethodAccessor"); + method_name_id = + _symbols.lookup("Object " + "jdk.internal.reflect.GeneratedMethodAccessor." + "invoke(Object, Object[])"); + method_sig_id = _symbols.lookup(method_sig); + } else if (has_prefix(class_name, + "Lsun/reflect/GeneratedMethodAccessor")) { + class_name_id = _classes->lookup("sun/reflect/GeneratedMethodAccessor"); + method_name_id = _symbols.lookup( + "Object sun.reflect.GeneratedMethodAccessor.invoke(Object, " + "Object[])"); + method_sig_id = _symbols.lookup(method_sig); + } else if (has_prefix(class_name, "Ljava/lang/invoke/LambdaForm$")) { + const int lambdaFormPrefixLength = + strlen("Ljava/lang/invoke/LambdaForm$"); + // we want to normalise to java/lang/invoke/LambdaForm$MH, + // java/lang/invoke/LambdaForm$DMH, java/lang/invoke/LambdaForm$BMH, + if (has_prefix(class_name + lambdaFormPrefixLength, "MH")) { + class_name_id = _classes->lookup("java/lang/invoke/LambdaForm$MH"); + } else if (has_prefix(class_name + lambdaFormPrefixLength, "BMH")) { + class_name_id = _classes->lookup("java/lang/invoke/LambdaForm$BMH"); + } else if (has_prefix(class_name + lambdaFormPrefixLength, "DMH")) { + class_name_id = _classes->lookup("java/lang/invoke/LambdaForm$DMH"); + } else { + // don't recognise the suffix, so don't normalise + class_name_id = + _classes->lookup(class_name + 1, strlen(class_name) - 2); + } + method_name_id = _symbols.lookup(method_name); + method_sig_id = 
_symbols.lookup(method_sig); + } else { + class_name_id = + _classes->lookup(class_name + 1, strlen(class_name) - 2); + method_name_id = _symbols.lookup(method_name); + method_sig_id = _symbols.lookup(method_sig); + } + + mi->_class = class_name_id; + mi->_name = method_name_id; + mi->_sig = method_sig_id; + mi->_type = FRAME_INTERPRETED; + mi->_is_entry = entry; + if (line_number_table != nullptr) { + mi->_line_number_table = std::make_shared( + line_number_table_size, line_number_table); + // Increment counter for tracking live line number tables + Counters::increment(LINE_NUMBER_TABLES); + } +} + +MethodInfo *Lookup::resolveMethod(ASGCT_CallFrame &frame) { + static const char* UNKNOWN = "unknown"; + unsigned long key; + jint bci = frame.bci; + FrameTypeId frame_type = FrameType::decode(bci); + jmethodID method_id = frame.method_id; + + // Resolve this frame into FRAME_INTERPRETED + if (frame_type == FRAME_INTERPRETED_METHOD) { + method_id = JVMSupport::resolve(frame.method); + frame.bci = FrameType::encode(FRAME_INTERPRETED, frame.bci); + frame.method_id = method_id; + frame_type = FRAME_INTERPRETED; + } + + if (method_id == nullptr) { + TEST_LOG("Unknown: frameType = %d, bci = %d", (int)frame_type, bci); + key = MethodMap::makeKey(UNKNOWN); + } else if (bci == BCI_ERROR || bci == BCI_NATIVE_FRAME) { + key = MethodMap::makeKey(frame.native_function_name); + } else if (bci == BCI_NATIVE_FRAME_REMOTE) { + key = MethodMap::makeKey(frame.packed_remote_frame); + } else { + assert(frame_type == FRAME_INTERPRETED || frame_type == FRAME_JIT_COMPILED || + frame_type == FRAME_INLINED || frame_type == FRAME_C1_COMPILED || + VM::isOpenJ9()); // OpenJ9 may have bugs that produce invalid frame types + key = MethodMap::makeKey(method_id); + } + + MethodInfo *mi = &(*_method_map)[key]; + + if (!mi->_mark) { + mi->_mark = true; + bool first_time = mi->_key == 0; + if (first_time) { + mi->_key = _method_map->size() + 1; // avoid zero key + } + if (method_id == nullptr) { + 
fillNativeMethodInfo(mi, UNKNOWN, nullptr); + } else if (bci == BCI_ERROR) { + fillNativeMethodInfo(mi, (const char *)method_id, nullptr); + } else if (bci == BCI_NATIVE_FRAME) { + const char *name = (const char *)method_id; + fillNativeMethodInfo(mi, name, + Profiler::instance()->getLibraryName(name)); + } else if (bci == BCI_NATIVE_FRAME_REMOTE) { + // Unpack remote symbolication data using utility struct + // Layout: pc_offset (44 bits) | mark (3 bits) | lib_index (15 bits) + unsigned long packed_remote_frame = frame.packed_remote_frame; + uintptr_t pc_offset = Profiler::RemoteFramePacker::unpackPcOffset(packed_remote_frame); + [[maybe_unused]] char mark = Profiler::RemoteFramePacker::unpackMark(packed_remote_frame); + uint32_t lib_index = Profiler::RemoteFramePacker::unpackLibIndex(packed_remote_frame); + + TEST_LOG("Unpacking remote frame: packed=0x%zx, pc_offset=0x%lx, mark=%d, lib_index=%u", + packed_remote_frame, pc_offset, (int)mark, lib_index); + + // Lookup library by index to get build_id + // Note: This is called during JFR serialization with lockAll() held (see Profiler::dump), + // so the library array is stable - no concurrent dlopen_hook calls can modify it. 
+ CodeCache* lib = Libraries::instance()->getLibraryByIndex(lib_index); + if (lib != nullptr && lib->hasBuildId() && Profiler::instance()->isRemoteSymbolication()) { + TEST_LOG("Found library: %s, build_id=%s", lib->name(), lib->buildId()); + // Remote symbolication: defer to backend + RemoteFrameInfo rfi(lib->buildId(), pc_offset, lib_index); + fillRemoteFrameInfo(mi, &rfi); + } else if (lib != nullptr) { + // Locally unsymbolized: render as [libname+0xoffset] + char name_buf[256]; + const char* s = lib->name(); + const char* basename = strrchr(s, '/'); + if (basename) basename++; else basename = s; + snprintf(name_buf, sizeof(name_buf), "[%s+0x%" PRIxPTR "]", basename, pc_offset); + fillNativeMethodInfo(mi, name_buf, nullptr); + } else { + TEST_LOG("WARNING: Library lookup failed for index %u", lib_index); + fillNativeMethodInfo(mi, "unknown_library", nullptr); + } + } else { + fillJavaMethodInfo(mi, method_id, first_time); + } + } + + return mi; +} + +u32 Lookup::getPackage(const char *class_name) { + const char *package = strrchr(class_name, '/'); + if (package == NULL) { + return 0; + } + if (package[1] >= '0' && package[1] <= '9') { + // Seems like a hidden or anonymous class, e.g. com/example/Foo/0x012345 + do { + if (package == class_name) + return 0; + } while (*--package != '/'); + } + if (class_name[0] == '[') { + class_name = strchr(class_name, 'L') + 1; + } + return _packages.lookup(class_name, package - class_name); +} + +u32 Lookup::getSymbol(const char *name) { + return _symbols.lookup(name); +} diff --git a/ddprof-lib/src/main/cpp/lookup.h b/ddprof-lib/src/main/cpp/lookup.h new file mode 100644 index 000000000..f084c7edd --- /dev/null +++ b/ddprof-lib/src/main/cpp/lookup.h @@ -0,0 +1,48 @@ +/* + * Copyright The async-profiler authors + * Copyright 2026, Datadog, Inc. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef _LOOKUP_H +#define _LOOKUP_H + +#include "dictionary.h" +#include "vmEntry.h" + +class MethodInfo; +class MethodMap; +class Recording; +class RemoteFrameInfo; + +class Lookup { +public: + Recording *_rec; + MethodMap *_method_map; + Dictionary *_classes; + Dictionary _packages; + Dictionary _symbols; + +private: + void fillNativeMethodInfo(MethodInfo *mi, const char *name, + const char *lib_name); + void fillRemoteFrameInfo(MethodInfo *mi, const RemoteFrameInfo *rfi); + void cutArguments(char *func); + bool has_prefix(const char *str, const char *prefix) const { + return strncmp(str, prefix, strlen(prefix)) == 0; + } + + void fillJavaMethodInfo(MethodInfo *mi, jmethodID method, bool first_time); + void fillMethodInfo(MethodInfo *mi, jclass method_class, char* class_name, char* method_name, char* method_sig, + jint line_number_table_size, jvmtiLineNumberEntry* line_number_table); +public: + Lookup(Recording *rec, MethodMap *method_map, Dictionary *classes) + : _rec(rec), _method_map(method_map), _classes(classes), _packages(), + _symbols() {} + + MethodInfo *resolveMethod(ASGCT_CallFrame &frame); + u32 getPackage(const char *class_name); + u32 getSymbol(const char *name); +}; + +#endif // _LOOKUP_H diff --git a/ddprof-lib/src/main/cpp/methodInfo.cpp b/ddprof-lib/src/main/cpp/methodInfo.cpp new file mode 100644 index 000000000..702378561 --- /dev/null +++ b/ddprof-lib/src/main/cpp/methodInfo.cpp @@ -0,0 +1,35 @@ +/* + * Copyright The async-profiler authors + * Copyright 2026, Datadog, Inc. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#include "methodInfo.h" + +#include + +#include "common.h" +#include "counters.h" +#include "vmEntry.h" + +SharedLineNumberTable::~SharedLineNumberTable() { + // Always attempt to deallocate if we have a valid pointer + // JVMTI spec requires that memory allocated by GetLineNumberTable + // must be freed with Deallocate + if (_ptr != nullptr) { + jvmtiEnv *jvmti = VM::jvmti(); + if (jvmti != nullptr) { + jvmtiError err = jvmti->Deallocate((unsigned char *)_ptr); + // If Deallocate fails, log it for debugging (this could indicate a JVM bug) + // JVMTI_ERROR_ILLEGAL_ARGUMENT means the memory wasn't allocated by JVMTI + // which would be a serious bug in GetLineNumberTable + if (err != JVMTI_ERROR_NONE) { + TEST_LOG("Unexpected error while deallocating linenumber table: %d", err); + } + } else { + TEST_LOG("WARNING: Cannot deallocate line number table - JVMTI is null"); + } + // Decrement counter whenever destructor runs (symmetric with increment at creation) + Counters::decrement(LINE_NUMBER_TABLES); + } +} diff --git a/ddprof-lib/src/main/cpp/methodInfo.h b/ddprof-lib/src/main/cpp/methodInfo.h new file mode 100644 index 000000000..9b2c45138 --- /dev/null +++ b/ddprof-lib/src/main/cpp/methodInfo.h @@ -0,0 +1,112 @@ +/* + * Copyright The async-profiler authors + * Copyright 2026, Datadog, Inc. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef _METHODINFO_H +#define _METHODINFO_H + +#include +#include +#include +#include +#include + +#include "arch.h" +#include "frame.h" + +class SharedLineNumberTable { +public: + int _size; + void *_ptr; + + SharedLineNumberTable(int size, void *ptr) : _size(size), _ptr(ptr) {} + ~SharedLineNumberTable(); +}; + +class MethodInfo { +public: + MethodInfo() + : _mark(false), _is_entry(false), _referenced(false), _age(0), _key(0), _class(0), + _name(0), _sig(0), _modifiers(0), _line_number_table(nullptr), _type() {} + + bool _mark; + bool _is_entry; + bool _referenced; // Tracked during writeStackTraces() for cleanup + int _age; // Consecutive chunks without reference (0 = recently used) + u32 _key; + u32 _class; + u32 _name; + u32 _sig; + jint _modifiers; + std::shared_ptr<SharedLineNumberTable> _line_number_table; + FrameTypeId _type; + + jint getLineNumber(jint bci) { + // if the shared pointer is not pointing to the line number table, consider + // size 0 + if (!_line_number_table || _line_number_table->_size == 0) { + return 0; + } + + int i = 1; + while (i < _line_number_table->_size && + bci >= ((jvmtiLineNumberEntry *)_line_number_table->_ptr)[i] + .start_location) { + i++; + } + return ((jvmtiLineNumberEntry *)_line_number_table->_ptr)[i - 1] + .line_number; + } + + bool isHidden() { + // 0x1040 = ACC_SYNTHETIC(0x1000) | ACC_BRIDGE(0x0040) + return _modifiers == 0 || (_modifiers & 0x1040); + } +}; + +// MethodMap's key can be derived from 4 sources: +// 1) jmethodID for Java methods +// 2) void* address for native method names +// 3) Encoded RemoteFrameInfo +// 4) Direct Method* pointer (Hotspot only), marked with top bits 11 (METHOD_MARK) +// The values of the keys are potentially overlapping, so we use +// the highest 2 bits to distinguish them. 
+// 00 - jmethodID +// 10 - void* address +// 01 - RemoteFrameInfo +class MethodMap : public std::map { +public: + static constexpr unsigned long ADDRESS_MARK = 0x8000000000000000ULL; + static constexpr unsigned long REMOTE_FRAME_MARK = 0x4000000000000000ULL; + static constexpr unsigned long METHOD_MARK = 0xc000000000000000ULL; + static constexpr unsigned long KEY_TYPE_MASK = ADDRESS_MARK | REMOTE_FRAME_MARK | METHOD_MARK; + + MethodMap() {} + + static unsigned long makeKey(jmethodID method) { + unsigned long key = (unsigned long)method; + assert((key & KEY_TYPE_MASK) == 0); + return key; + } + + static unsigned long makeKey(const char* addr) { + unsigned long key = (unsigned long)addr; + assert((key & KEY_TYPE_MASK) == 0); + return (key | ADDRESS_MARK); + } + + static unsigned long makeKey(unsigned long packed_remote_frame) { + unsigned long key = packed_remote_frame; + assert((key & KEY_TYPE_MASK) == 0); + return (key | REMOTE_FRAME_MARK); + } + + static unsigned long makeKey(const void* method) { + unsigned long key = reinterpret_cast(method); + assert((key & KEY_TYPE_MASK) == 0); + return (key | METHOD_MARK); + } +}; + +#endif // _METHODINFO_H diff --git a/ddprof-lib/src/main/cpp/stackWalker.inline.h b/ddprof-lib/src/main/cpp/stackWalker.inline.h index d8399242c..b4764b1bf 100644 --- a/ddprof-lib/src/main/cpp/stackWalker.inline.h +++ b/ddprof-lib/src/main/cpp/stackWalker.inline.h @@ -7,6 +7,7 @@ #ifndef _STACKWALKER_INLINE_H #define _STACKWALKER_INLINE_H +#include "frame.h" #include "stackWalker.h" #include "safeAccess.h" diff --git a/ddprof-lib/src/main/cpp/vmEntry.cpp b/ddprof-lib/src/main/cpp/vmEntry.cpp index 76fd49c68..402adfa4c 100644 --- a/ddprof-lib/src/main/cpp/vmEntry.cpp +++ b/ddprof-lib/src/main/cpp/vmEntry.cpp @@ -10,6 +10,7 @@ #include "context.h" #include "j9/j9Support.h" #include "jniHelper.h" +#include "jvmSupport.h" #include "jvmThread.h" #include "libraries.h" #include "log.h" @@ -18,6 +19,7 @@ #include "safeAccess.h" #include 
"hotspot/vmStructs.h" #include "hotspot/jitCodeCache.h" +#include "hotspot/hotspotSupport.h" #include #include #include @@ -427,41 +429,48 @@ bool VM::initProfilerBridge(JavaVM *vm, bool attach) { _jvmti->AddCapabilities(&capabilities); jvmtiEventCallbacks callbacks = {0}; + callbacks.VMInit = VMInit; callbacks.VMDeath = VMDeath; - callbacks.ClassLoad = ClassLoad; - callbacks.ClassPrepare = ClassPrepare; - callbacks.CompiledMethodLoad = JitCodeCache::CompiledMethodLoad; - callbacks.DynamicCodeGenerated = JitCodeCache::DynamicCodeGenerated; + callbacks.ClassLoad = JVMSupport::ClassLoad; + callbacks.ClassPrepare = JVMSupport::ClassPrepare; callbacks.ThreadStart = Profiler::ThreadStart; callbacks.ThreadEnd = Profiler::ThreadEnd; callbacks.SampledObjectAlloc = ObjectSampler::SampledObjectAlloc; callbacks.GarbageCollectionFinish = LivenessTracker::GarbageCollectionFinish; - callbacks.NativeMethodBind = VMStructs::NativeMethodBind; + + // Hotspot only + callbacks.CompiledMethodLoad = JitCodeCache::CompiledMethodLoad; + callbacks.DynamicCodeGenerated = JitCodeCache::DynamicCodeGenerated; + callbacks.NativeMethodBind = HotspotSupport::NativeMethodBind; + _jvmti->SetEventCallbacks(&callbacks, sizeof(callbacks)); _jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_VM_DEATH, NULL); _jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_CLASS_LOAD, NULL); _jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_CLASS_PREPARE, NULL); - _jvmti->SetEventNotificationMode(JVMTI_ENABLE, - JVMTI_EVENT_DYNAMIC_CODE_GENERATED, NULL); - _jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_NATIVE_METHOD_BIND, - NULL); - - if (hotspot_version() == 0 || !CodeHeap::available()) { - // Workaround for JDK-8173361: avoid CompiledMethodLoad events when possible + // Hotspot only + if (isHotspot()) { _jvmti->SetEventNotificationMode(JVMTI_ENABLE, - JVMTI_EVENT_COMPILED_METHOD_LOAD, NULL); - } else { - // DebugNonSafepoints is automatically enabled with CompiledMethodLoad, - // 
otherwise we set the flag manually - VMFlag* f = VMFlag::find("DebugNonSafepoints", {VMFlag::Type::Bool}); - if (f != NULL && f->isDefault()) { - f->set(1); + JVMTI_EVENT_DYNAMIC_CODE_GENERATED, NULL); + _jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_NATIVE_METHOD_BIND, + NULL); + if (hotspot_version() == 0 || !CodeHeap::available()) { + // Workaround for JDK-8173361: avoid CompiledMethodLoad events when possible + _jvmti->SetEventNotificationMode(JVMTI_ENABLE, + JVMTI_EVENT_COMPILED_METHOD_LOAD, NULL); + } else { + // DebugNonSafepoints is automatically enabled with CompiledMethodLoad, + // otherwise we set the flag manually + VMFlag* f = VMFlag::find("DebugNonSafepoints", {VMFlag::Type::Bool}); + if (f != NULL && f->isDefault()) { + f->set(1); + } } } + // if the user sets -XX:+UseAdaptiveGCBoundary we will just disable the // profiler to avoid the risk of crashing flag was made obsolete (inert) in 15 // (see JDK-8228991) and removed in 16 (see JDK-8231560) @@ -478,7 +487,7 @@ bool VM::initProfilerBridge(JavaVM *vm, bool attach) { functions->RetransformClasses = RetransformClassesHook; if (attach) { - loadAllMethodIDs(_jvmti, jni()); + JVMSupport::loadAllMethodIDs(_jvmti, jni()); _jvmti->GenerateEvents(JVMTI_EVENT_DYNAMIC_CODE_GENERATED); _jvmti->GenerateEvents(JVMTI_EVENT_COMPILED_METHOD_LOAD); } else { @@ -527,57 +536,9 @@ void *VM::getLibraryHandle(const char *name) { return RTLD_DEFAULT; } -void VM::loadMethodIDs(jvmtiEnv *jvmti, JNIEnv *jni, jclass klass) { - bool needs_patch = VM::hotspot_version() == 8; - if (needs_patch) { - // Workaround for JVM bug https://bugs.openjdk.org/browse/JDK-8062116 - // Preallocate space for jmethodIDs at the beginning of the list (rather than at the end) - // This is relevant only for JDK 8 - later versions do not have this bug - if (VMStructs::hasClassLoaderData()) { - VMKlass *vmklass = VMKlass::fromJavaClass(jni, klass); - int method_count = vmklass->methodCount(); - if (method_count > 0) { - VMClassLoaderData 
*cld = vmklass->classLoaderData(); - cld->lock(); - for (int i = 0; i < method_count; i += MethodList::SIZE) { - *cld->methodList() = new MethodList(*cld->methodList()); - } - cld->unlock(); - } - } - } - - // CRITICAL: GetClassMethods must be called to preallocate jmethodIDs for AsyncGetCallTrace. - // AGCT operates in signal handlers where lock acquisition is forbidden, so jmethodIDs must - // exist before profiling encounters them. Without preallocation, AGCT cannot identify methods - // in stack traces, breaking profiling functionality. - // - // JVM-internal allocation: This triggers JVM to allocate jmethodIDs internally, which persist - // until class unload. High class churn causes significant memory growth, but this is inherent - // to AGCT architecture and necessary for signal-safe profiling. - // - // See: https://mostlynerdless.de/blog/2023/07/17/jmethodids-in-profiling-a-tale-of-nightmares/ - jint method_count; - jmethodID *methods; - if (jvmti->GetClassMethods(klass, &method_count, &methods) == 0) { - jvmti->Deallocate((unsigned char *)methods); - } -} - -void VM::loadAllMethodIDs(jvmtiEnv *jvmti, JNIEnv *jni) { - jint class_count; - jclass *classes; - if (jvmti->GetLoadedClasses(&class_count, &classes) == 0) { - for (int i = 0; i < class_count; i++) { - loadMethodIDs(jvmti, jni, classes[i]); - } - jvmti->Deallocate((unsigned char *)classes); - } -} - void JNICALL VM::VMInit(jvmtiEnv* jvmti, JNIEnv* jni, jthread thread) { ready(jvmti, jni); - loadAllMethodIDs(jvmti, jni); + JVMSupport::loadAllMethodIDs(jvmti, jni); // initialize the heap usage tracking only after the VM is ready HeapUsage::initJMXUsage(VM::jni()); @@ -604,7 +565,7 @@ VM::RedefineClassesHook(jvmtiEnv *jvmti, jint class_count, JNIEnv *env = jni(); for (int i = 0; i < class_count; i++) { if (class_definitions[i].klass != NULL) { - loadMethodIDs(jvmti, env, class_definitions[i].klass); + JVMSupport::loadMethodIDs(jvmti, env, class_definitions[i].klass); } } } @@ -621,7 +582,7 @@ jvmtiError 
VM::RetransformClassesHook(jvmtiEnv *jvmti, jint class_count, JNIEnv *env = jni(); for (int i = 0; i < class_count; i++) { if (classes[i] != NULL) { - loadMethodIDs(jvmti, env, classes[i]); + JVMSupport::loadMethodIDs(jvmti, env, classes[i]); } } } diff --git a/ddprof-lib/src/main/cpp/vmEntry.h b/ddprof-lib/src/main/cpp/vmEntry.h index 6be4c87bb..e793fcdb8 100644 --- a/ddprof-lib/src/main/cpp/vmEntry.h +++ b/ddprof-lib/src/main/cpp/vmEntry.h @@ -12,7 +12,6 @@ #include "arch.h" #include "codeCache.h" -#include "frame.h" #ifdef __clang__ #define DLLEXPORT __attribute__((visibility("default"))) @@ -76,6 +75,7 @@ typedef struct _asgct_callframe { jmethodID method_id; unsigned long packed_remote_frame; // packed RemoteFrameInfo data const char* native_function_name; + const void* method; // Hotspot only, direct pointer to JVM method }; } ASGCT_CallFrame; @@ -136,8 +136,6 @@ class VM { static void ready(jvmtiEnv *jvmti, JNIEnv *jni); static void applyPatch(char *func, const char *patch, const char *end_patch); static void *getLibraryHandle(const char *name); - static void loadMethodIDs(jvmtiEnv *jvmti, JNIEnv *jni, jclass klass); - static void loadAllMethodIDs(jvmtiEnv *jvmti, JNIEnv *jni); static bool initShared(JavaVM *vm); @@ -193,15 +191,6 @@ class VM { static void JNICALL VMInit(jvmtiEnv *jvmti, JNIEnv *jni, jthread thread); static void JNICALL VMDeath(jvmtiEnv *jvmti, JNIEnv *jni); - static void JNICALL ClassLoad(jvmtiEnv *jvmti, JNIEnv *jni, jthread thread, - jclass klass) { - // Needed only for AsyncGetCallTrace support - } - - static void JNICALL ClassPrepare(jvmtiEnv *jvmti, JNIEnv *jni, jthread thread, - jclass klass) { - loadMethodIDs(jvmti, jni, klass); - } static jvmtiError JNICALL RedefineClassesHook(jvmtiEnv *jvmti, jint class_count, diff --git a/ddprof-test/src/test/java/com/datadoghq/profiler/ContendedCallTraceStorageTest.java b/ddprof-test/src/test/java/com/datadoghq/profiler/ContendedCallTraceStorageTest.java index 80da81c91..a0685a71a 100644 --- 
a/ddprof-test/src/test/java/com/datadoghq/profiler/ContendedCallTraceStorageTest.java +++ b/ddprof-test/src/test/java/com/datadoghq/profiler/ContendedCallTraceStorageTest.java @@ -114,7 +114,6 @@ private List measureContention() throws Exception { recordings.add(tempDump); Thread.sleep(500); } - // Wait for all allocation threads to finish finishLatch.await(); diff --git a/ddprof-test/src/test/java/com/datadoghq/profiler/context/TagContextTest.java b/ddprof-test/src/test/java/com/datadoghq/profiler/context/TagContextTest.java index 9b8fe4404..21b82ac55 100644 --- a/ddprof-test/src/test/java/com/datadoghq/profiler/context/TagContextTest.java +++ b/ddprof-test/src/test/java/com/datadoghq/profiler/context/TagContextTest.java @@ -67,6 +67,7 @@ public void test() throws InterruptedException { IMemberAccessor stacktraceAccessor = JdkAttributes.STACK_TRACE_STRING.getAccessor(wallclockSamples.getType()); for (IItem sample : wallclockSamples) { String stacktrace = stacktraceAccessor.getMember(sample); +System.out.println(stacktrace); if (!stacktrace.contains("sleep")) { // we don't know the context has been set for sure until the sleep has started continue; @@ -82,7 +83,6 @@ public void test() throws InterruptedException { droppedSamplesWeight += weight; continue; } - String tag = tag1Accessor.getMember(sample); weightsByTagValue.computeIfAbsent(tag, v -> new AtomicLong()) .addAndGet(weight); @@ -92,6 +92,7 @@ public void test() throws InterruptedException { long sum = 0; long[] weights = new long[strings.length]; System.out.println("Found tag values: " + weightsByTagValue.keySet()); + System.out.println("Strings: " + strings.length); for (int i = 0; i < strings.length; i++) { AtomicLong weight = weightsByTagValue.get(strings[i]); assertNotNull(weight, "Weight for " + strings[i] + " not found. 
Found: " + weightsByTagValue.keySet()); diff --git a/ddprof-test/src/test/java/com/datadoghq/profiler/wallclock/BaseContextWallClockTest.java b/ddprof-test/src/test/java/com/datadoghq/profiler/wallclock/BaseContextWallClockTest.java index 2c9f5e464..3e2f169c1 100644 --- a/ddprof-test/src/test/java/com/datadoghq/profiler/wallclock/BaseContextWallClockTest.java +++ b/ddprof-test/src/test/java/com/datadoghq/profiler/wallclock/BaseContextWallClockTest.java @@ -100,6 +100,10 @@ void test(AbstractProfilerTest test, boolean assertContext, String cstack) throw IMemberAccessor modeAccessor = THREAD_EXECUTION_MODE.getAccessor(wallclockSamples.getType()); for (IItem sample : wallclockSamples) { String stackTrace = frameAccessor.getMember(sample); + System.out.println("StackTrace:"); + System.out.println(stackTrace); + System.out.println(); + long spanId = spanIdAccessor.getMember(sample).longValue(); long rootSpanId = rootSpanIdAccessor.getMember(sample).longValue(); long weight = weightAccessor.getMember(sample).longValue(); diff --git a/ddprof-test/src/test/java/com/datadoghq/profiler/wallclock/CollapsingSleepTest.java b/ddprof-test/src/test/java/com/datadoghq/profiler/wallclock/CollapsingSleepTest.java index ff362085f..3c9b749fc 100644 --- a/ddprof-test/src/test/java/com/datadoghq/profiler/wallclock/CollapsingSleepTest.java +++ b/ddprof-test/src/test/java/com/datadoghq/profiler/wallclock/CollapsingSleepTest.java @@ -27,6 +27,8 @@ public void testSleep() { stopProfiler(); IItemCollection events = verifyEvents("datadog.MethodSample"); assertTrue(events.hasItems()); + + System.out.println("Weight: " + events.getAggregate(Aggregators.sum(WEIGHT)).longValue() + " count = " + events.getAggregate(Aggregators.count()).longValue()); assertTrue(events.getAggregate(Aggregators.sum(WEIGHT)).longValue() > 700); assertTrue(events.getAggregate(Aggregators.count()).longValue() > 9); }