1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
4 * Copyright 2016 Mozilla Foundation
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
10 * http://www.apache.org/licenses/LICENSE-2.0
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
19 #include "wasm/WasmDebug.h"
21 #include "mozilla/BinarySearch.h"
23 #include "debugger/Debugger.h"
25 #include "jit/MacroAssembler.h"
26 #include "js/ColumnNumber.h" // JS::WasmFunctionIndex
27 #include "wasm/WasmJS.h"
28 #include "wasm/WasmStubs.h"
29 #include "wasm/WasmValidate.h"
31 #include "gc/GCContext-inl.h"
32 #include "wasm/WasmInstance-inl.h"
35 using namespace js::jit
;
36 using namespace js::wasm
;
38 using mozilla::BinarySearchIf
;
40 DebugState::DebugState(const Code
& code
, const Module
& module
)
43 enterFrameTrapsEnabled_(false),
44 enterAndLeaveFrameTrapsCounter_(0) {
45 MOZ_RELEASE_ASSERT(code
.metadata().debugEnabled
);
46 MOZ_RELEASE_ASSERT(code
.hasTier(Tier::Debug
));
49 void DebugState::trace(JSTracer
* trc
) {
50 for (auto iter
= breakpointSites_
.iter(); !iter
.done(); iter
.next()) {
51 WasmBreakpointSite
* site
= iter
.get().value();
56 void DebugState::finalize(JS::GCContext
* gcx
) {
57 for (auto iter
= breakpointSites_
.iter(); !iter
.done(); iter
.next()) {
58 WasmBreakpointSite
* site
= iter
.get().value();
63 static const CallSite
* SlowCallSiteSearchByOffset(const MetadataTier
& metadata
,
65 for (const CallSite
& callSite
: metadata
.callSites
) {
66 if (callSite
.lineOrBytecode() == offset
&&
67 callSite
.kind() == CallSiteDesc::Breakpoint
) {
74 bool DebugState::getLineOffsets(size_t lineno
, Vector
<uint32_t>* offsets
) {
75 const CallSite
* callsite
=
76 SlowCallSiteSearchByOffset(metadata(Tier::Debug
), lineno
);
77 return !(callsite
&& !offsets
->append(lineno
));
80 bool DebugState::getAllColumnOffsets(Vector
<ExprLoc
>* offsets
) {
81 for (const CallSite
& callSite
: metadata(Tier::Debug
).callSites
) {
82 if (callSite
.kind() != CallSite::Breakpoint
) {
85 uint32_t offset
= callSite
.lineOrBytecode();
86 if (!offsets
->emplaceBack(
88 JS::WasmFunctionIndex::DefaultBinarySourceColumnNumberOneOrigin
,
96 bool DebugState::getOffsetLocation(uint32_t offset
, uint32_t* lineno
,
97 JS::LimitedColumnNumberOneOrigin
* column
) {
98 if (!SlowCallSiteSearchByOffset(metadata(Tier::Debug
), offset
)) {
102 *column
= JS::LimitedColumnNumberOneOrigin(
103 JS::WasmFunctionIndex::DefaultBinarySourceColumnNumberOneOrigin
);
107 bool DebugState::stepModeEnabled(uint32_t funcIndex
) const {
108 return stepperCounters_
.lookup(funcIndex
).found();
111 bool DebugState::incrementStepperCount(JSContext
* cx
, Instance
* instance
,
112 uint32_t funcIndex
) {
113 StepperCounters::AddPtr p
= stepperCounters_
.lookupForAdd(funcIndex
);
115 MOZ_ASSERT(p
->value() > 0);
120 if (!stepperCounters_
.add(p
, funcIndex
, 1)) {
121 ReportOutOfMemory(cx
);
125 enableDebuggingForFunction(instance
, funcIndex
);
126 enableDebugTrap(instance
);
131 void DebugState::decrementStepperCount(JS::GCContext
* gcx
, Instance
* instance
,
132 uint32_t funcIndex
) {
133 const CodeRange
& codeRange
=
134 codeRanges(Tier::Debug
)[funcToCodeRangeIndex(funcIndex
)];
135 MOZ_ASSERT(codeRange
.isFunction());
137 MOZ_ASSERT(!stepperCounters_
.empty());
138 StepperCounters::Ptr p
= stepperCounters_
.lookup(funcIndex
);
144 stepperCounters_
.remove(p
);
146 bool anyStepping
= !stepperCounters_
.empty();
147 bool anyBreakpoints
= !breakpointSites_
.empty();
148 bool anyEnterAndLeave
= enterAndLeaveFrameTrapsCounter_
> 0;
150 bool keepDebugging
= false;
151 for (const CallSite
& callSite
: callSites(Tier::Debug
)) {
152 if (callSite
.kind() != CallSite::Breakpoint
) {
155 uint32_t offset
= callSite
.returnAddressOffset();
156 if (codeRange
.begin() <= offset
&& offset
<= codeRange
.end()) {
157 keepDebugging
= keepDebugging
|| breakpointSites_
.has(offset
);
161 if (!keepDebugging
&& !anyEnterAndLeave
) {
162 disableDebuggingForFunction(instance
, funcIndex
);
163 if (!anyStepping
&& !anyBreakpoints
) {
164 disableDebugTrap(instance
);
169 bool DebugState::hasBreakpointTrapAtOffset(uint32_t offset
) {
170 return SlowCallSiteSearchByOffset(metadata(Tier::Debug
), offset
);
173 void DebugState::toggleBreakpointTrap(JSRuntime
* rt
, Instance
* instance
,
174 uint32_t offset
, bool enabled
) {
175 const CallSite
* callSite
=
176 SlowCallSiteSearchByOffset(metadata(Tier::Debug
), offset
);
180 size_t debugTrapOffset
= callSite
->returnAddressOffset();
182 const ModuleSegment
& codeSegment
= code_
->segment(Tier::Debug
);
183 const CodeRange
* codeRange
=
184 code_
->lookupFuncRange(codeSegment
.base() + debugTrapOffset
);
185 MOZ_ASSERT(codeRange
);
187 uint32_t funcIndex
= codeRange
->funcIndex();
188 if (stepperCounters_
.lookup(funcIndex
)) {
189 return; // no need to toggle when step mode is enabled
192 bool anyEnterAndLeave
= enterAndLeaveFrameTrapsCounter_
> 0;
193 bool anyStepping
= !stepperCounters_
.empty();
194 bool anyBreakpoints
= !breakpointSites_
.empty();
197 enableDebuggingForFunction(instance
, funcIndex
);
198 enableDebugTrap(instance
);
199 } else if (!anyEnterAndLeave
) {
200 disableDebuggingForFunction(instance
, funcIndex
);
201 if (!anyStepping
&& !anyBreakpoints
) {
202 disableDebugTrap(instance
);
207 WasmBreakpointSite
* DebugState::getBreakpointSite(uint32_t offset
) const {
208 WasmBreakpointSiteMap::Ptr p
= breakpointSites_
.lookup(offset
);
216 WasmBreakpointSite
* DebugState::getOrCreateBreakpointSite(JSContext
* cx
,
219 WasmBreakpointSite
* site
;
221 WasmBreakpointSiteMap::AddPtr p
= breakpointSites_
.lookupForAdd(offset
);
223 site
= cx
->new_
<WasmBreakpointSite
>(instance
->object(), offset
);
228 if (!breakpointSites_
.add(p
, offset
, site
)) {
230 ReportOutOfMemory(cx
);
234 AddCellMemory(instance
->object(), sizeof(WasmBreakpointSite
),
235 MemoryUse::BreakpointSite
);
237 toggleBreakpointTrap(cx
->runtime(), instance
, offset
, true);
244 bool DebugState::hasBreakpointSite(uint32_t offset
) {
245 return breakpointSites_
.has(offset
);
248 void DebugState::destroyBreakpointSite(JS::GCContext
* gcx
, Instance
* instance
,
250 WasmBreakpointSiteMap::Ptr p
= breakpointSites_
.lookup(offset
);
252 gcx
->delete_(instance
->objectUnbarriered(), p
->value(),
253 MemoryUse::BreakpointSite
);
254 breakpointSites_
.remove(p
);
255 toggleBreakpointTrap(gcx
->runtime(), instance
, offset
, false);
258 void DebugState::clearBreakpointsIn(JS::GCContext
* gcx
,
259 WasmInstanceObject
* instance
,
260 js::Debugger
* dbg
, JSObject
* handler
) {
261 MOZ_ASSERT(instance
);
263 // Breakpoints hold wrappers in the instance's compartment for the handler.
264 // Make sure we don't try to search for the unwrapped handler.
265 MOZ_ASSERT_IF(handler
, instance
->compartment() == handler
->compartment());
267 if (breakpointSites_
.empty()) {
270 for (WasmBreakpointSiteMap::Enum
e(breakpointSites_
); !e
.empty();
272 WasmBreakpointSite
* site
= e
.front().value();
273 MOZ_ASSERT(site
->instanceObject
== instance
);
276 for (Breakpoint
* bp
= site
->firstBreakpoint(); bp
; bp
= nextbp
) {
277 nextbp
= bp
->nextInSite();
278 MOZ_ASSERT(bp
->site
== site
);
279 if ((!dbg
|| bp
->debugger
== dbg
) &&
280 (!handler
|| bp
->getHandler() == handler
)) {
284 if (site
->isEmpty()) {
285 gcx
->delete_(instance
, site
, MemoryUse::BreakpointSite
);
291 void DebugState::enableDebuggingForFunction(Instance
* instance
,
292 uint32_t funcIndex
) {
293 instance
->setDebugFilter(funcIndex
, true);
296 void DebugState::disableDebuggingForFunction(Instance
* instance
,
297 uint32_t funcIndex
) {
298 instance
->setDebugFilter(funcIndex
, false);
301 void DebugState::enableDebugTrap(Instance
* instance
) {
302 instance
->setDebugTrapHandler(code_
->segment(Tier::Debug
).base() +
303 metadata(Tier::Debug
).debugTrapOffset
);
306 void DebugState::disableDebugTrap(Instance
* instance
) {
307 instance
->setDebugTrapHandler(nullptr);
310 void DebugState::adjustEnterAndLeaveFrameTrapsState(JSContext
* cx
,
313 MOZ_ASSERT_IF(!enabled
, enterAndLeaveFrameTrapsCounter_
> 0);
315 bool wasEnabled
= enterAndLeaveFrameTrapsCounter_
> 0;
316 enterAndLeaveFrameTrapsCounter_
+= enabled
? 1 : -1;
317 bool stillEnabled
= enterAndLeaveFrameTrapsCounter_
> 0;
318 if (wasEnabled
== stillEnabled
) {
322 MOZ_RELEASE_ASSERT(&instance
->metadata() == &metadata());
323 uint32_t numFuncs
= metadata().debugNumFuncs();
325 MOZ_ASSERT(enterAndLeaveFrameTrapsCounter_
> 0);
326 for (uint32_t funcIdx
= 0; funcIdx
< numFuncs
; funcIdx
++) {
327 enableDebuggingForFunction(instance
, funcIdx
);
329 enableDebugTrap(instance
);
331 MOZ_ASSERT(enterAndLeaveFrameTrapsCounter_
== 0);
332 bool anyEnabled
= false;
333 for (uint32_t funcIdx
= 0; funcIdx
< numFuncs
; funcIdx
++) {
334 // For each function, disable the bit if nothing else is going on. This
335 // means determining if there's stepping or breakpoints.
336 bool mustLeaveEnabled
= stepperCounters_
.lookup(funcIdx
).found();
337 for (auto iter
= breakpointSites_
.iter();
338 !iter
.done() && !mustLeaveEnabled
; iter
.next()) {
339 WasmBreakpointSite
* site
= iter
.get().value();
340 const CallSite
* callSite
=
341 SlowCallSiteSearchByOffset(metadata(Tier::Debug
), site
->offset
);
343 size_t debugTrapOffset
= callSite
->returnAddressOffset();
344 const ModuleSegment
& codeSegment
= code_
->segment(Tier::Debug
);
345 const CodeRange
* codeRange
=
346 code_
->lookupFuncRange(codeSegment
.base() + debugTrapOffset
);
347 MOZ_ASSERT(codeRange
);
348 mustLeaveEnabled
= codeRange
->funcIndex() == funcIdx
;
351 if (mustLeaveEnabled
) {
354 disableDebuggingForFunction(instance
, funcIdx
);
358 disableDebugTrap(instance
);
363 void DebugState::ensureEnterFrameTrapsState(JSContext
* cx
, Instance
* instance
,
365 if (enterFrameTrapsEnabled_
== enabled
) {
369 adjustEnterAndLeaveFrameTrapsState(cx
, instance
, enabled
);
371 enterFrameTrapsEnabled_
= enabled
;
374 bool DebugState::debugGetLocalTypes(uint32_t funcIndex
, ValTypeVector
* locals
,
376 StackResults
* stackResults
) {
377 const TypeContext
& types
= *metadata().types
;
378 const FuncType
& funcType
= metadata().debugFuncType(funcIndex
);
379 const ValTypeVector
& args
= funcType
.args();
380 const ValTypeVector
& results
= funcType
.results();
381 ResultType
resultType(ResultType::Vector(results
));
382 *argsLength
= args
.length();
383 *stackResults
= ABIResultIter::HasStackResults(resultType
)
384 ? StackResults::HasStackResults
385 : StackResults::NoStackResults
;
386 if (!locals
->appendAll(args
)) {
390 // Decode local var types from wasm binary function body.
391 const CodeRange
& range
=
392 codeRanges(Tier::Debug
)[funcToCodeRangeIndex(funcIndex
)];
393 // In wasm, the Code points to the function start via funcLineOrBytecode.
394 size_t offsetInModule
= range
.funcLineOrBytecode();
395 Decoder
d(bytecode().begin() + offsetInModule
, bytecode().end(),
397 /* error = */ nullptr);
398 return DecodeValidatedLocalEntries(types
, d
, locals
);
401 bool DebugState::getGlobal(Instance
& instance
, uint32_t globalIndex
,
402 MutableHandleValue vp
) {
403 const GlobalDesc
& global
= metadata().globals
[globalIndex
];
405 if (global
.isConstant()) {
406 LitVal value
= global
.constantValue();
407 switch (value
.type().kind()) {
409 vp
.set(Int32Value(value
.i32()));
412 // Just display as a Number; it's ok if we lose some precision
413 vp
.set(NumberValue((double)value
.i64()));
416 vp
.set(NumberValue(JS::CanonicalizeNaN(value
.f32())));
419 vp
.set(NumberValue(JS::CanonicalizeNaN(value
.f64())));
422 // It's possible to do better. We could try some kind of hashing
423 // scheme, to make the pointer recognizable without revealing it.
424 vp
.set(MagicValue(JS_OPTIMIZED_OUT
));
427 // Debugger must be updated to handle this, and should be updated to
428 // handle i64 in any case.
429 vp
.set(MagicValue(JS_OPTIMIZED_OUT
));
432 MOZ_CRASH("Global constant type");
437 void* dataPtr
= instance
.data() + global
.offset();
438 if (global
.isIndirect()) {
439 dataPtr
= *static_cast<void**>(dataPtr
);
441 switch (global
.type().kind()) {
443 vp
.set(Int32Value(*static_cast<int32_t*>(dataPtr
)));
447 // Just display as a Number; it's ok if we lose some precision
448 vp
.set(NumberValue((double)*static_cast<int64_t*>(dataPtr
)));
452 vp
.set(NumberValue(JS::CanonicalizeNaN(*static_cast<float*>(dataPtr
))));
456 vp
.set(NumberValue(JS::CanonicalizeNaN(*static_cast<double*>(dataPtr
))));
460 // Just hide it. See above.
461 vp
.set(MagicValue(JS_OPTIMIZED_OUT
));
464 case ValType::V128
: {
465 // Just hide it. See above.
466 vp
.set(MagicValue(JS_OPTIMIZED_OUT
));
470 MOZ_CRASH("Global variable type");
477 bool DebugState::getSourceMappingURL(JSContext
* cx
,
478 MutableHandleString result
) const {
481 for (const CustomSection
& customSection
: module_
->customSections()) {
482 const Bytes
& sectionName
= customSection
.name
;
483 if (strlen(SourceMappingURLSectionName
) != sectionName
.length() ||
484 memcmp(SourceMappingURLSectionName
, sectionName
.begin(),
485 sectionName
.length()) != 0) {
489 // Parse found "SourceMappingURL" custom section.
490 Decoder
d(customSection
.payload
->begin(), customSection
.payload
->end(), 0,
491 /* error = */ nullptr);
493 if (!d
.readVarU32(&nchars
)) {
494 return true; // ignoring invalid section data
496 const uint8_t* chars
;
497 if (!d
.readBytes(nchars
, &chars
) || d
.currentPosition() != d
.end()) {
498 return true; // ignoring invalid section data
501 JS::UTF8Chars
utf8Chars(reinterpret_cast<const char*>(chars
), nchars
);
502 JSString
* str
= JS_NewStringCopyUTF8N(cx
, utf8Chars
);
510 // Check presence of "SourceMap:" HTTP response header.
511 char* sourceMapURL
= metadata().sourceMapURL
.get();
512 if (sourceMapURL
&& strlen(sourceMapURL
)) {
513 JS::UTF8Chars
utf8Chars(sourceMapURL
, strlen(sourceMapURL
));
514 JSString
* str
= JS_NewStringCopyUTF8N(cx
, utf8Chars
);
523 void DebugState::addSizeOfMisc(MallocSizeOf mallocSizeOf
,
524 Metadata::SeenSet
* seenMetadata
,
525 Code::SeenSet
* seenCode
, size_t* code
,
526 size_t* data
) const {
527 code_
->addSizeOfMiscIfNotSeen(mallocSizeOf
, seenMetadata
, seenCode
, code
,
529 module_
->addSizeOfMisc(mallocSizeOf
, seenMetadata
, seenCode
, code
, data
);