/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Dalvik-specific side of debugger support.  (The JDWP code is intended to
 * be relatively generic.)
 */
#ifndef ART_RUNTIME_DEBUGGER_H_
#define ART_RUNTIME_DEBUGGER_H_

#include <pthread.h>

#include <map>
#include <set>
#include <string>
#include <vector>

#include "jdwp/jdwp.h"
#include "jni.h"
#include "jvalue.h"
#include "object_callbacks.h"
#include "thread_state.h"

namespace art {
namespace mirror {
class ArtField;
class ArtMethod;
class Class;
class Object;
class Throwable;
}  // namespace mirror
class AllocRecord;
class ObjectRegistry;
class ScopedObjectAccessUnchecked;
class Thread;
class ThrowLocation;

/*
 * Invoke-during-breakpoint support.
 */
struct DebugInvokeReq {
  DebugInvokeReq()
      : ready(false), invoke_needed(false),
        receiver(NULL), thread(NULL), klass(NULL), method(NULL),
        arg_count(0), arg_values(NULL), options(0), error(JDWP::ERR_NONE),
        result_tag(JDWP::JT_VOID), exception(0),
        lock("a DebugInvokeReq lock", kBreakpointInvokeLock),
        cond("a DebugInvokeReq condition variable", lock) {
  }

  /* boolean; only set when we're in the tail end of an event handler */
  bool ready;

  /* boolean; set if the JDWP thread wants this thread to do work */
  bool invoke_needed;

  /* request */
  mirror::Object* receiver;      /* not used for ClassType.InvokeMethod */
  mirror::Object* thread;
  mirror::Class* klass;
  mirror::ArtMethod* method;
  uint32_t arg_count;
  uint64_t* arg_values;   /* will be NULL if arg_count_ == 0 */
  uint32_t options;

  /* result */
  JDWP::JdwpError error;
  JDWP::JdwpTag result_tag;
  JValue result_value;
  JDWP::ObjectId exception;

  /* condition variable to wait on while the method executes */
  Mutex lock DEFAULT_MUTEX_ACQUIRED_AFTER;
  ConditionVariable cond GUARDED_BY(lock);

  void VisitRoots(RootCallback* callback, void* arg, uint32_t tid, RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void Clear();

 private:
  DISALLOW_COPY_AND_ASSIGN(DebugInvokeReq);
};
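// A rough sketch of the intended hand-off (illustration only; see Dbg::InvokeMethod and
// Dbg::ExecuteMethod below for the actual protocol): the JDWP thread fills in the request
// fields, resumes the target thread, and waits on 'cond' under 'lock'; the target thread
// performs the call at the tail end of its event handler, stores
// result_tag/result_value/exception and signals 'cond'. On the JDWP side this amounts to
// something like:
//
//   MutexLock mu(self, req->lock);   // hypothetical RAII guard around 'lock'
//   while (req->invoke_needed) {
//     req->cond.Wait(self);          // woken once the result has been filled in
//   }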
// Thread local data-structure that holds fields for controlling single-stepping.
struct SingleStepControl {
  SingleStepControl()
      : is_active(false), step_size(JDWP::SS_MIN), step_depth(JDWP::SD_INTO),
        method(nullptr), stack_depth(0) {
  }

  // Are we single-stepping right now?
  bool is_active;

  // See JdwpStepSize and JdwpStepDepth for details.
  JDWP::JdwpStepSize step_size;
  JDWP::JdwpStepDepth step_depth;

  // The location this single-step was initiated from.
  // A single-step is initiated in a suspended thread. We save here the current method and the
  // set of DEX pcs associated to the source line number where the suspension occurred.
  // This is used to support SD_INTO and SD_OVER single-step depths so we detect when a
  // single-step causes the execution of an instruction in a different method or at a different
  // line number.
  mirror::ArtMethod* method;
  std::set<uint32_t> dex_pcs;

  // The stack depth when this single-step was initiated. This is used to support SD_OVER and
  // SD_OUT single-step depths.
  int stack_depth;

  void VisitRoots(RootCallback* callback, void* arg, uint32_t tid, RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool ContainsDexPc(uint32_t dex_pc) const;

  void Clear();

 private:
  DISALLOW_COPY_AND_ASSIGN(SingleStepControl);
};

// TODO rename to InstrumentationRequest.
class DeoptimizationRequest {
 public:
  enum Kind {
    kNothing,                   // no action.
    kRegisterForEvent,          // start listening for instrumentation event.
    kUnregisterForEvent,        // stop listening for instrumentation event.
    kFullDeoptimization,        // deoptimize everything.
    kFullUndeoptimization,      // undeoptimize everything.
    kSelectiveDeoptimization,   // deoptimize one method.
    kSelectiveUndeoptimization  // undeoptimize one method.
  };

  DeoptimizationRequest() : kind_(kNothing), instrumentation_event_(0), method_(nullptr) {}

  DeoptimizationRequest(const DeoptimizationRequest& other)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : kind_(other.kind_), instrumentation_event_(other.instrumentation_event_) {
    // Create a new JNI global reference for the method.
    SetMethod(other.Method());
  }

  mirror::ArtMethod* Method() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SetMethod(mirror::ArtMethod* m) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Name 'Kind()' would collide with the above enum name.
  Kind GetKind() const {
    return kind_;
  }

  void SetKind(Kind kind) {
    kind_ = kind;
  }

  uint32_t InstrumentationEvent() const {
    return instrumentation_event_;
  }

  void SetInstrumentationEvent(uint32_t instrumentation_event) {
    instrumentation_event_ = instrumentation_event;
  }

 private:
  Kind kind_;

  // TODO we could use a union to hold the instrumentation_event and the method since they are
  // respectively only meaningful for kRegisterForEvent/kUnregisterForEvent and
  // kSelectiveDeoptimization/kSelectiveUndeoptimization.

  // Event to start or stop listening to. Only for kRegisterForEvent and kUnregisterForEvent.
  uint32_t instrumentation_event_;

  // Method for selective deoptimization.
  jmethodID method_;
};
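// A minimal usage sketch (illustration only, not prescribed by this header): installing a
// breakpoint in a single method might queue a request along the lines of
//
//   DeoptimizationRequest req;
//   req.SetKind(DeoptimizationRequest::kSelectiveDeoptimization);
//   req.SetMethod(target_method);   // 'target_method' is a placeholder mirror::ArtMethod*
//   Dbg::RequestDeoptimization(req);
//
// whereas kRegisterForEvent/kUnregisterForEvent requests would carry an instrumentation event
// set via SetInstrumentationEvent() instead of a method.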
class Dbg {
 public:
  class TypeCache {
   public:
    // Returns a weak global for the input type. Deduplicates.
    jobject Add(mirror::Class* t) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_,
                                                        Locks::alloc_tracker_lock_);
    // Clears the type cache and deletes all the weak global refs.
    void Clear() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_,
                                       Locks::alloc_tracker_lock_);

   private:
    std::multimap<int32_t, jobject> objects_;
  };

  static bool ParseJdwpOptions(const std::string& options);
  static void SetJdwpAllowed(bool allowed);

  static void StartJdwp();
  static void StopJdwp();

  // Invoked by the GC in case we need to keep DDMS informed.
  static void GcDidFinish() LOCKS_EXCLUDED(Locks::mutator_lock_);

  // Return the DebugInvokeReq for the current thread.
  static DebugInvokeReq* GetInvokeReq();

  static Thread* GetDebugThread();
  static void ClearWaitForEventThread();

  /*
   * Enable/disable breakpoints and step modes.  Used to provide a heads-up
   * when the debugger attaches.
   */
  static void Connected();
  static void GoActive()
      LOCKS_EXCLUDED(Locks::breakpoint_lock_, Locks::deoptimization_lock_, Locks::mutator_lock_);
  static void Disconnected() LOCKS_EXCLUDED(Locks::deoptimization_lock_, Locks::mutator_lock_);
  static void Disposed();

  // Returns true if we're actually debugging with a real debugger, false if it's
  // just DDMS (or nothing at all).
  static bool IsDebuggerActive();

  // Returns true if we had -Xrunjdwp or -agentlib:jdwp= on the command line.
  static bool IsJdwpConfigured();

  static bool IsDisposed();

  /*
   * Time, in milliseconds, since the last debugger activity.  Does not
   * include DDMS activity.  Returns -1 if there has been no activity.
   * Returns 0 if we're in the middle of handling a debugger request.
   */
  static int64_t LastDebuggerActivity();

  static void UndoDebuggerSuspensions();

  /*
   * Class, Object, Array
   */
  static std::string GetClassName(JDWP::RefTypeId id)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static std::string GetClassName(mirror::Class* klass)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetClassObject(JDWP::RefTypeId id, JDWP::ObjectId& class_object_id)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetSuperclass(JDWP::RefTypeId id, JDWP::RefTypeId& superclass_id)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetClassLoader(JDWP::RefTypeId id, JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetModifiers(JDWP::RefTypeId id, JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetReflectedType(JDWP::RefTypeId class_id, JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void GetClassList(std::vector<JDWP::RefTypeId>& classes)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetClassInfo(JDWP::RefTypeId class_id, JDWP::JdwpTypeTag* pTypeTag,
                                      uint32_t* pStatus, std::string* pDescriptor)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void FindLoadedClassBySignature(const char* descriptor, std::vector<JDWP::RefTypeId>& ids)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetReferenceType(JDWP::ObjectId object_id, JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetSignature(JDWP::RefTypeId ref_type_id, std::string* signature)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetSourceFile(JDWP::RefTypeId ref_type_id, std::string& source_file)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetObjectTag(JDWP::ObjectId object_id, uint8_t& tag)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static size_t GetTagWidth(JDWP::JdwpTag tag);
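  // (Illustration only: GetTagWidth is expected to return the JDWP wire size of a value with
  // the given tag, e.g. 4 for JDWP::JT_INT and 8 for JDWP::JT_LONG.)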
  static JDWP::JdwpError GetArrayLength(JDWP::ObjectId array_id, int& length)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError OutputArray(JDWP::ObjectId array_id, int offset, int count,
                                     JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError SetArrayElements(JDWP::ObjectId array_id, int offset, int count,
                                          JDWP::Request& request)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static JDWP::ObjectId CreateString(const std::string& str)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError CreateObject(JDWP::RefTypeId class_id, JDWP::ObjectId& new_object)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError CreateArrayObject(JDWP::RefTypeId array_class_id, uint32_t length,
                                           JDWP::ObjectId& new_array)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  //
  // Event filtering.
  //
  static bool MatchThread(JDWP::ObjectId expected_thread_id, Thread* event_thread)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static bool MatchLocation(const JDWP::JdwpLocation& expected_location,
                            const JDWP::EventLocation& event_location)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static bool MatchType(mirror::Class* event_class, JDWP::RefTypeId class_id)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static bool MatchField(JDWP::RefTypeId expected_type_id, JDWP::FieldId expected_field_id,
                         mirror::ArtField* event_field)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static bool MatchInstance(JDWP::ObjectId expected_instance_id, mirror::Object* event_instance)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  //
  // Monitors.
  //
  static JDWP::JdwpError GetMonitorInfo(JDWP::ObjectId object_id, JDWP::ExpandBuf* reply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetOwnedMonitors(JDWP::ObjectId thread_id,
                                          std::vector<JDWP::ObjectId>& monitors,
                                          std::vector<uint32_t>& stack_depths)
      LOCKS_EXCLUDED(Locks::thread_list_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetContendedMonitor(JDWP::ObjectId thread_id,
                                             JDWP::ObjectId& contended_monitor)
      LOCKS_EXCLUDED(Locks::thread_list_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  //
  // Heap.
  //
  static JDWP::JdwpError GetInstanceCounts(const std::vector<JDWP::RefTypeId>& class_ids,
                                           std::vector<uint64_t>& counts)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetInstances(JDWP::RefTypeId class_id, int32_t max_count,
                                      std::vector<JDWP::ObjectId>& instances)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetReferringObjects(JDWP::ObjectId object_id, int32_t max_count,
                                             std::vector<JDWP::ObjectId>& referring_objects)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError DisableCollection(JDWP::ObjectId object_id)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError EnableCollection(JDWP::ObjectId object_id)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError IsCollected(JDWP::ObjectId object_id, bool& is_collected)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void DisposeObject(JDWP::ObjectId object_id, uint32_t reference_count)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  //
  // Methods and fields.
  //
  static std::string GetMethodName(JDWP::MethodId method_id)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError OutputDeclaredFields(JDWP::RefTypeId ref_type_id, bool with_generic,
                                              JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError OutputDeclaredMethods(JDWP::RefTypeId ref_type_id, bool with_generic,
                                               JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError OutputDeclaredInterfaces(JDWP::RefTypeId ref_type_id,
                                                  JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void OutputLineTable(JDWP::RefTypeId ref_type_id, JDWP::MethodId method_id,
                              JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void OutputVariableTable(JDWP::RefTypeId ref_type_id, JDWP::MethodId id, bool with_generic,
                                  JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void OutputMethodReturnValue(JDWP::MethodId method_id, const JValue* return_value,
                                      JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void OutputFieldValue(JDWP::FieldId field_id, const JValue* field_value,
                               JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetBytecodes(JDWP::RefTypeId class_id, JDWP::MethodId method_id,
                                      std::vector<uint8_t>& bytecodes)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static std::string GetFieldName(JDWP::FieldId field_id)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpTag GetFieldBasicTag(JDWP::FieldId field_id)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpTag GetStaticFieldBasicTag(JDWP::FieldId field_id)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetFieldValue(JDWP::ObjectId object_id, JDWP::FieldId field_id,
                                       JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError SetFieldValue(JDWP::ObjectId object_id, JDWP::FieldId field_id,
                                       uint64_t value, int width)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetStaticFieldValue(JDWP::RefTypeId ref_type_id, JDWP::FieldId field_id,
                                             JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError SetStaticFieldValue(JDWP::FieldId field_id, uint64_t value, int width)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static JDWP::JdwpError StringToUtf8(JDWP::ObjectId string_id, std::string* str)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void OutputJValue(JDWP::JdwpTag tag, const JValue* return_value, JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  /*
   * Thread, ThreadGroup, Frame
   */
  static JDWP::JdwpError GetThreadName(JDWP::ObjectId thread_id, std::string& name)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_);
  static JDWP::JdwpError GetThreadGroup(JDWP::ObjectId thread_id, JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_);
  static JDWP::JdwpError GetThreadGroupName(JDWP::ObjectId thread_group_id,
                                            JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetThreadGroupParent(JDWP::ObjectId thread_group_id,
                                              JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetThreadGroupChildren(JDWP::ObjectId thread_group_id,
                                                JDWP::ExpandBuf* pReply)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::ObjectId GetSystemThreadGroupId()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static JDWP::JdwpThreadStatus ToJdwpThreadStatus(ThreadState state);
  static JDWP::JdwpError GetThreadStatus(JDWP::ObjectId thread_id,
                                         JDWP::JdwpThreadStatus* pThreadStatus,
                                         JDWP::JdwpSuspendStatus* pSuspendStatus)
      LOCKS_EXCLUDED(Locks::thread_list_lock_);
  static JDWP::JdwpError GetThreadDebugSuspendCount(JDWP::ObjectId thread_id,
                                                    JDWP::ExpandBuf* pReply)
      LOCKS_EXCLUDED(Locks::thread_list_lock_,
                     Locks::thread_suspend_count_lock_);
  // static void WaitForSuspend(JDWP::ObjectId thread_id);

  // Fills 'thread_ids' with the threads in the given thread group. If thread_group_id == 0,
  // returns all threads.
  static void GetThreads(mirror::Object* thread_group, std::vector<JDWP::ObjectId>* thread_ids)
      LOCKS_EXCLUDED(Locks::thread_list_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static JDWP::JdwpError GetThreadFrameCount(JDWP::ObjectId thread_id, size_t& result)
      LOCKS_EXCLUDED(Locks::thread_list_lock_);
  static JDWP::JdwpError GetThreadFrames(JDWP::ObjectId thread_id, size_t start_frame,
                                         size_t frame_count, JDWP::ExpandBuf* buf)
      LOCKS_EXCLUDED(Locks::thread_list_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static JDWP::ObjectId GetThreadSelfId() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::ObjectId GetThreadId(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void SuspendVM()
      LOCKS_EXCLUDED(Locks::thread_list_lock_,
                     Locks::thread_suspend_count_lock_);
  static void ResumeVM();
  static JDWP::JdwpError SuspendThread(JDWP::ObjectId thread_id, bool request_suspension = true)
      LOCKS_EXCLUDED(Locks::mutator_lock_,
                     Locks::thread_list_lock_,
                     Locks::thread_suspend_count_lock_);

  static void ResumeThread(JDWP::ObjectId thread_id)
      LOCKS_EXCLUDED(Locks::thread_list_lock_,
                     Locks::thread_suspend_count_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void SuspendSelf();

  static JDWP::JdwpError GetThisObject(JDWP::ObjectId thread_id, JDWP::FrameId frame_id,
                                       JDWP::ObjectId* result)
      LOCKS_EXCLUDED(Locks::thread_list_lock_,
                     Locks::thread_suspend_count_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError GetLocalValue(JDWP::ObjectId thread_id, JDWP::FrameId frame_id, int slot,
                                       JDWP::JdwpTag tag, uint8_t* buf, size_t expectedLen)
      LOCKS_EXCLUDED(Locks::thread_list_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static JDWP::JdwpError SetLocalValue(JDWP::ObjectId thread_id, JDWP::FrameId frame_id, int slot,
                                       JDWP::JdwpTag tag, uint64_t value, size_t width)
      LOCKS_EXCLUDED(Locks::thread_list_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static JDWP::JdwpError Interrupt(JDWP::ObjectId thread_id)
      LOCKS_EXCLUDED(Locks::thread_list_lock_);

  /*
   * Debugger notification
   */
  enum {
    kBreakpoint   = 0x01,
    kSingleStep   = 0x02,
    kMethodEntry  = 0x04,
    kMethodExit   = 0x08,
  };
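  // These values are bit flags; UpdateDebugger() below receives them OR'd together in its
  // 'event_flags' argument (for example, a location that is both a breakpoint and the current
  // single-step target could report kBreakpoint | kSingleStep). The combination shown here is
  // only an illustration.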
  static void PostFieldAccessEvent(mirror::ArtMethod* m, int dex_pc, mirror::Object* this_object,
                                   mirror::ArtField* f)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void PostFieldModificationEvent(mirror::ArtMethod* m, int dex_pc,
                                         mirror::Object* this_object, mirror::ArtField* f,
                                         const JValue* field_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void PostException(const ThrowLocation& throw_location, mirror::ArtMethod* catch_method,
                            uint32_t catch_dex_pc, mirror::Throwable* exception)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void PostThreadStart(Thread* t)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void PostThreadDeath(Thread* t)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void PostClassPrepare(mirror::Class* c)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void UpdateDebugger(Thread* thread, mirror::Object* this_object,
                             mirror::ArtMethod* method, uint32_t new_dex_pc,
                             int event_flags, const JValue* return_value)
      LOCKS_EXCLUDED(Locks::breakpoint_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Records the deoptimization request in the queue.
  static void RequestDeoptimization(const DeoptimizationRequest& req)
      LOCKS_EXCLUDED(Locks::deoptimization_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Support delayed full undeoptimization requests. This is currently only used for single-step
  // events.
  static void DelayFullUndeoptimization() LOCKS_EXCLUDED(Locks::deoptimization_lock_);
  static void ProcessDelayedFullUndeoptimizations()
      LOCKS_EXCLUDED(Locks::deoptimization_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Manage deoptimization after updating the JDWP events list. Suspends all threads, processes
  // each request and finally resumes all threads.
  static void ManageDeoptimization()
      LOCKS_EXCLUDED(Locks::deoptimization_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Breakpoints.
  static void WatchLocation(const JDWP::JdwpLocation* pLoc, DeoptimizationRequest* req)
      LOCKS_EXCLUDED(Locks::breakpoint_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void UnwatchLocation(const JDWP::JdwpLocation* pLoc, DeoptimizationRequest* req)
      LOCKS_EXCLUDED(Locks::breakpoint_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Single-stepping.
  static JDWP::JdwpError ConfigureStep(JDWP::ObjectId thread_id, JDWP::JdwpStepSize size,
                                       JDWP::JdwpStepDepth depth)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void UnconfigureStep(JDWP::ObjectId thread_id)
      LOCKS_EXCLUDED(Locks::thread_list_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static JDWP::JdwpError InvokeMethod(JDWP::ObjectId thread_id, JDWP::ObjectId object_id,
                                      JDWP::RefTypeId class_id, JDWP::MethodId method_id,
                                      uint32_t arg_count, uint64_t* arg_values,
                                      JDWP::JdwpTag* arg_types, uint32_t options,
                                      JDWP::JdwpTag* pResultTag, uint64_t* pResultValue,
                                      JDWP::ObjectId* pExceptObj)
      LOCKS_EXCLUDED(Locks::thread_list_lock_,
                     Locks::thread_suspend_count_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void ExecuteMethod(DebugInvokeReq* pReq);
  /*
   * DDM support.
   */
  static void DdmSendThreadNotification(Thread* t, uint32_t type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void DdmSetThreadNotification(bool enable)
      LOCKS_EXCLUDED(Locks::thread_list_lock_);
  static bool DdmHandlePacket(JDWP::Request& request, uint8_t** pReplyBuf, int* pReplyLen);
  static void DdmConnected() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void DdmDisconnected() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void DdmSendChunk(uint32_t type, const std::vector<uint8_t>& bytes)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void DdmSendChunk(uint32_t type, size_t len, const uint8_t* buf)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void DdmSendChunkV(uint32_t type, const iovec* iov, int iov_count)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void VisitRoots(RootCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  /*
   * Recent allocation tracking support.
   */
  static void RecordAllocation(mirror::Class* type, size_t byte_count)
      LOCKS_EXCLUDED(Locks::alloc_tracker_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void SetAllocTrackingEnabled(bool enabled) LOCKS_EXCLUDED(Locks::alloc_tracker_lock_);
  static bool IsAllocTrackingEnabled() {
    return recent_allocation_records_ != nullptr;
  }
  static jbyteArray GetRecentAllocations()
      LOCKS_EXCLUDED(Locks::alloc_tracker_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static size_t HeadIndex() EXCLUSIVE_LOCKS_REQUIRED(Locks::alloc_tracker_lock_);
  static void DumpRecentAllocations() LOCKS_EXCLUDED(Locks::alloc_tracker_lock_);

  enum HpifWhen {
    HPIF_WHEN_NEVER = 0,
    HPIF_WHEN_NOW = 1,
    HPIF_WHEN_NEXT_GC = 2,
    HPIF_WHEN_EVERY_GC = 3
  };
  static int DdmHandleHpifChunk(HpifWhen when)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  enum HpsgWhen {
    HPSG_WHEN_NEVER = 0,
    HPSG_WHEN_EVERY_GC = 1,
  };
  enum HpsgWhat {
    HPSG_WHAT_MERGED_OBJECTS = 0,
    HPSG_WHAT_DISTINCT_OBJECTS = 1,
  };
  static bool DdmHandleHpsgNhsgChunk(HpsgWhen when, HpsgWhat what, bool native);

  static void DdmSendHeapInfo(HpifWhen reason)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void DdmSendHeapSegments(bool native)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static ObjectRegistry* GetObjectRegistry() {
    return gRegistry;
  }

  static JDWP::JdwpTag TagFromObject(const ScopedObjectAccessUnchecked& soa, mirror::Object* o)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static JDWP::JdwpTypeTag GetTypeTag(mirror::Class* klass)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static JDWP::FieldId ToFieldId(const mirror::ArtField* f)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void SetJdwpLocation(JDWP::JdwpLocation* location, mirror::ArtMethod* m, uint32_t dex_pc)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  static void DdmBroadcast(bool connect) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void PostThreadStartOrStop(Thread*, uint32_t)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void PostLocationEvent(mirror::ArtMethod* method, int pcOffset,
                                mirror::Object* thisPtr, int eventFlags,
                                const JValue* return_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void ProcessDeoptimizationRequest(const DeoptimizationRequest& request)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void RequestDeoptimizationLocked(const DeoptimizationRequest& req)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::deoptimization_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static AllocRecord* recent_allocation_records_ PT_GUARDED_BY(Locks::alloc_tracker_lock_);
  static size_t alloc_record_max_ GUARDED_BY(Locks::alloc_tracker_lock_);
  static size_t alloc_record_head_ GUARDED_BY(Locks::alloc_tracker_lock_);
  static size_t alloc_record_count_ GUARDED_BY(Locks::alloc_tracker_lock_);

  static ObjectRegistry* gRegistry;

  // Deoptimization requests to be processed each time the event list is updated. This is used when
  // registering and unregistering events so we do not deoptimize while holding the event list
  // lock.
  // TODO rename to instrumentation_requests.
  static std::vector<DeoptimizationRequest> deoptimization_requests_ GUARDED_BY(Locks::deoptimization_lock_);

  // Count the number of events requiring full deoptimization. When the counter is > 0, everything
  // is deoptimized, otherwise everything is undeoptimized.
  // Note: we fully deoptimize on the first event only (when the counter is set to 1). We fully
  // undeoptimize when the last event is unregistered (when the counter is set to 0).
  static size_t full_deoptimization_event_count_ GUARDED_BY(Locks::deoptimization_lock_);

  // Count the number of full undeoptimization requests delayed to the next resume or the end of
  // the debug session.
  static size_t delayed_full_undeoptimization_count_ GUARDED_BY(Locks::deoptimization_lock_);

  static size_t* GetReferenceCounterForEvent(uint32_t instrumentation_event);

  // Weak global type cache, TODO improve this.
  static TypeCache type_cache_ GUARDED_BY(Locks::alloc_tracker_lock_);

  // Instrumentation event reference counters.
  // TODO we could use an array instead of having all these dedicated counters. Instrumentation
  // events are bits of a mask so we could convert them to array indices.
  static size_t dex_pc_change_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static size_t method_enter_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static size_t method_exit_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static size_t field_read_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static size_t field_write_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static size_t exception_catch_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static uint32_t instrumentation_events_ GUARDED_BY(Locks::mutator_lock_);

  friend class AllocRecord;  // For type_cache_ with proper annotalysis.
  DISALLOW_COPY_AND_ASSIGN(Dbg);
};

#define CHUNK_TYPE(_name) \
    static_cast<uint32_t>((_name)[0] << 24 | (_name)[1] << 16 | (_name)[2] << 8 | (_name)[3])
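// For example, CHUNK_TYPE("THDE") evaluates to 0x54484445, i.e. the four ASCII characters packed
// big-endian into a uint32_t, which matches the 'type' argument taken by the DdmSendChunk()
// methods above. ("THDE" is just a sample four-character tag used for illustration.)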

}  // namespace art

#endif  // ART_RUNTIME_DEBUGGER_H_