xref: /haiku/src/system/kernel/debug/tracing.cpp (revision 1b80286772b529a3d6de3bbeb0720c62e6a32fed)
1 /*
2  * Copyright 2008, Ingo Weinhold, ingo_weinhold@gmx.de.
3  * Copyright 2008-2009, Axel Dörfler, axeld@pinc-software.de.
4  * Distributed under the terms of the MIT License.
5  */
6 
7 
8 #include <tracing.h>
9 
10 #include <stdarg.h>
11 #include <stdlib.h>
12 
13 #include <arch/debug.h>
14 #include <debug.h>
15 #include <elf.h>
16 #include <int.h>
17 #include <kernel.h>
18 #include <team.h>
19 #include <thread.h>
20 #include <util/AutoLock.h>
21 
22 
// Captured kernel stack trace, stored inline in the tracing buffer.
// `return_addresses` is a trailing variable-length array (GCC zero-length
// array extension); `depth` holds the number of valid entries.
struct tracing_stack_trace {
	int32	depth;
	addr_t	return_addresses[0];
};
27 
28 
29 #if ENABLE_TRACING
30 
31 //#define TRACE_TRACING
32 #ifdef TRACE_TRACING
33 #	define TRACE(x) dprintf_no_syslog x
34 #else
35 #	define TRACE(x) ;
36 #endif
37 
38 
// Flags stored in trace_entry::flags.
enum {
	WRAP_ENTRY			= 0x01,	// marks the dummy entry at the end of the
								// buffer that wraps iteration to sBuffer
	ENTRY_INITIALIZED	= 0x02,	// TraceEntry construction has completed
	BUFFER_ENTRY		= 0x04,	// raw data buffer, not a TraceEntry object
	FILTER_MATCH		= 0x08	// entry matched the filter of the last
								// backward "traced" invocation
};
45 
46 
static const size_t kTraceOutputBufferSize = 10240;
static const size_t kBufferSize = MAX_TRACE_SIZE / 4;
	// number of trace_entry slots (entry sizes are counted in 4-byte units)

static trace_entry* sBuffer;			// start of the ring buffer
static trace_entry* sFirstEntry;		// oldest entry still in the buffer
static trace_entry* sAfterLastEntry;	// first free slot after the newest entry
static uint32 sEntries;					// non-buffer entries currently stored
static uint32 sWritten;					// total entries ever written
static spinlock sLock;					// protects the buffer structures
static char* sTraceOutputBuffer;		// scratch buffer for dumping entries
57 
58 
/*!	Returns the entry following \a entry in the ring buffer, transparently
	skipping over the wrap entry at the end of the buffer. Returns \c NULL
	when \a entry was the last one (i.e. the successor is sAfterLastEntry).
*/
static trace_entry*
next_entry(trace_entry* entry)
{
	entry += entry->size;
	if ((entry->flags & WRAP_ENTRY) != 0)
		entry = sBuffer;

	if (entry == sAfterLastEntry)
		return NULL;

	return entry;
}
71 
72 
/*!	Returns the entry preceding \a entry in the ring buffer, or \c NULL if
	\a entry is the oldest one. When \a entry is at the buffer start, the
	logical predecessor is the wrap entry at the buffer end: sBuffer's
	previous_size holds the distance from the wrap entry to the buffer end,
	and the wrap entry's own previous_size then leads to the real
	predecessor.
*/
static trace_entry*
previous_entry(trace_entry* entry)
{
	if (entry == sFirstEntry)
		return NULL;

	if (entry == sBuffer) {
		// beginning of buffer -- previous entry is a wrap entry
		entry = sBuffer + kBufferSize - entry->previous_size;
	}

	return entry - entry->previous_size;
}
86 
87 
/*!	Releases the oldest entry in the buffer, destroying it if it is a fully
	constructed TraceEntry. Returns \c false if the entry is still being
	constructed and thus cannot be freed yet. Caller must hold sLock.
*/
static bool
free_first_entry()
{
	TRACE(("  skip start %p, %lu*4 bytes\n", sFirstEntry, sFirstEntry->size));

	trace_entry* newFirst = next_entry(sFirstEntry);

	if (sFirstEntry->flags & BUFFER_ENTRY) {
		// a buffer entry -- just skip it
	} else if (sFirstEntry->flags & ENTRY_INITIALIZED) {
		// fully initialized TraceEntry -- destroy it
		TraceEntry::FromTraceEntry(sFirstEntry)->~TraceEntry();
		sEntries--;
	} else {
		// Not fully initialized TraceEntry. We can't free it, since
		// its constructor might still write into the memory and
		// overwrite data of the entry we're going to allocate.
		// We can't do anything until this entry can be discarded.
		return false;
	}

	if (newFirst == NULL) {
		// everything is freed -- practically this can't happen, if
		// the buffer is large enough to hold three max-sized entries
		sFirstEntry = sAfterLastEntry = sBuffer;
		TRACE(("free_first_entry(): all entries freed!\n"));
	} else
		sFirstEntry = newFirst;

	return true;
}
119 
120 
121 /*!	Makes sure we have needed * 4 bytes of memory at sAfterLastEntry.
122 	Returns \c false, if unable to free that much.
123 */
/*!	Makes sure we have needed * 4 bytes of memory at sAfterLastEntry.
	Frees the oldest entries (wrapping around the ring buffer if necessary)
	until the space is available. Caller must hold sLock.
	Returns \c false, if unable to free that much.
*/
static bool
make_space(size_t needed)
{
	// we need space for sAfterLastEntry, too (in case we need to wrap around
	// later)
	needed++;

	// If there's not enough space (free or occupied) after sAfterLastEntry,
	// we free all entries in that region and wrap around.
	if (sAfterLastEntry + needed > sBuffer + kBufferSize) {
		TRACE(("make_space(%lu), wrapping around: after last: %p\n", needed,
			sAfterLastEntry));

		// Free all entries after sAfterLastEntry and one more at the beginning
		// of the buffer.
		while (sFirstEntry > sAfterLastEntry) {
			if (!free_first_entry())
				return false;
		}
		if (sAfterLastEntry != sBuffer && !free_first_entry())
			return false;

		// just in case free_first_entry() freed the very last existing entry
		if (sAfterLastEntry == sBuffer)
			return true;

		// mark as wrap entry and actually wrap around; sBuffer's
		// previous_size records the distance back to the wrap entry
		trace_entry* wrapEntry = sAfterLastEntry;
		wrapEntry->size = 0;
		wrapEntry->flags = WRAP_ENTRY;
		sAfterLastEntry = sBuffer;
		sAfterLastEntry->previous_size = sBuffer + kBufferSize - wrapEntry;
	}

	if (sFirstEntry <= sAfterLastEntry) {
		// buffer is empty or the space after sAfterLastEntry is unoccupied
		return true;
	}

	// free the first entries, until there's enough space
	size_t space = sFirstEntry - sAfterLastEntry;

	if (space < needed) {
		TRACE(("make_space(%lu), left %ld\n", needed, space));
	}

	while (space < needed) {
		space += sFirstEntry->size;

		if (!free_first_entry())
			return false;
	}

	TRACE(("  out: start %p, entries %ld\n", sFirstEntry, sEntries));

	return true;
}
181 
182 
/*!	Allocates a trace entry of \a size bytes (including the trace_entry
	header) with the given \a flags from the ring buffer, evicting old
	entries as necessary. Returns \c NULL if tracing is not initialized,
	\a size is invalid, or space could not be made. Interrupt-safe (takes
	sLock with interrupts disabled).
*/
static trace_entry*
allocate_entry(size_t size, uint16 flags)
{
	// size must fit the 16-bit trace_entry::size field (in 4-byte units)
	if (sAfterLastEntry == NULL || size == 0 || size >= 65532)
		return NULL;

	InterruptsSpinLocker _(sLock);

	size = (size + 3) >> 2;
		// 4 byte aligned, don't store the lower 2 bits

	TRACE(("allocate_entry(%lu), start %p, end %p, buffer %p\n", size * 4,
		sFirstEntry, sAfterLastEntry, sBuffer));

	if (!make_space(size))
		return NULL;

	trace_entry* entry = sAfterLastEntry;
	entry->size = size;
	entry->flags = flags;
	sAfterLastEntry += size;
	sAfterLastEntry->previous_size = size;

	if (!(flags & BUFFER_ENTRY))
		sEntries++;

	TRACE(("  entry: %p, end %p, start %p, entries %ld\n", entry,
		sAfterLastEntry, sFirstEntry, sEntries));

	return entry;
}
214 
215 
216 #endif	// ENABLE_TRACING
217 
218 
219 // #pragma mark -
220 
221 
222 TraceOutput::TraceOutput(char* buffer, size_t bufferSize, uint32 flags)
223 	: fBuffer(buffer),
224 	  fCapacity(bufferSize),
225 	  fFlags(flags)
226 {
227 	Clear();
228 }
229 
230 
231 void
232 TraceOutput::Clear()
233 {
234 	if (fCapacity > 0)
235 		fBuffer[0] = '\0';
236 	fSize = 0;
237 }
238 
239 
240 void
241 TraceOutput::Print(const char* format,...)
242 {
243 #if ENABLE_TRACING
244 	if (IsFull())
245 		return;
246 
247 	va_list args;
248 	va_start(args, format);
249 	fSize += vsnprintf(fBuffer + fSize, fCapacity - fSize, format, args);
250 	va_end(args);
251 #endif
252 }
253 
254 
/*!	Prints the given captured stack trace, one frame per line, resolving
	each return address to a symbol via the kernel ELF symbol tables where
	possible.
*/
void
TraceOutput::PrintStackTrace(tracing_stack_trace* stackTrace)
{
#if ENABLE_TRACING
	if (stackTrace == NULL || stackTrace->depth <= 0)
		return;

	for (int32 i = 0; i < stackTrace->depth; i++) {
		addr_t address = stackTrace->return_addresses[i];

		const char* symbol;
		const char* imageName;
		bool exactMatch;
		addr_t baseAddress;

		if (elf_debug_lookup_symbol_address(address, &baseAddress, &symbol,
				&imageName, &exactMatch) == B_OK) {
			Print("  %p  %s + 0x%lx (%s)%s\n", (void*)address, symbol,
				address - baseAddress, imageName,
				exactMatch ? "" : " (nearest)");
		} else
			Print("  %p\n", (void*)address);
	}
#endif
}
280 
281 
// Records the timestamp of the last dumped entry; used by
// AbstractTraceEntry::Dump() for TRACE_OUTPUT_DIFF_TIME output.
void
TraceOutput::SetLastEntryTime(bigtime_t time)
{
	fLastEntryTime = time;
}


// Returns the timestamp recorded by SetLastEntryTime().
bigtime_t
TraceOutput::LastEntryTime() const
{
	return fLastEntryTime;
}
294 
295 
296 //	#pragma mark -
297 
298 
// Base class construction/destruction is trivial; subclasses call
// Initialized() once fully constructed (see below).
TraceEntry::TraceEntry()
{
}


TraceEntry::~TraceEntry()
{
}
307 
308 
/*!	Prints a human-readable representation of the entry; the base
	implementation just prints the entry's address.
*/
void
TraceEntry::Dump(TraceOutput& out)
{
#if ENABLE_TRACING
	// to be overridden by subclasses
	out.Print("ENTRY %p", this);
#endif
}


// To be overridden by subclasses that captured a stack trace; the base
// implementation prints nothing.
void
TraceEntry::DumpStackTrace(TraceOutput& out)
{
}


/*!	Marks the entry as fully constructed. Until this is called the entry
	cannot be evicted from the ring buffer (see free_first_entry()).
*/
void
TraceEntry::Initialized()
{
#if ENABLE_TRACING
	ToTraceEntry()->flags |= ENTRY_INITIALIZED;
	sWritten++;
#endif
}
333 
334 
335 void*
336 TraceEntry::operator new(size_t size, const std::nothrow_t&) throw()
337 {
338 #if ENABLE_TRACING
339 	trace_entry* entry = allocate_entry(size + sizeof(trace_entry), 0);
340 	return entry != NULL ? entry + 1 : NULL;
341 #endif
342 	return NULL;
343 }
344 
345 
346 //	#pragma mark -
347 
348 
349 AbstractTraceEntry::AbstractTraceEntry()
350 {
351 	struct thread* thread = thread_get_current_thread();
352 	if (thread != NULL) {
353 		fThread = thread->id;
354 		if (thread->team)
355 			fTeam = thread->team->id;
356 	}
357 	fTime = system_time();
358 }
359 
AbstractTraceEntry::~AbstractTraceEntry()
{
}
363 
364 
/*!	Prints the standard "[thread(:team)] time: " prefix, then delegates the
	payload to AddDump(). With TRACE_OUTPUT_DIFF_TIME set, the time printed
	is the delta to the previously dumped entry.
*/
void
AbstractTraceEntry::Dump(TraceOutput& out)
{
	bigtime_t time = (out.Flags() & TRACE_OUTPUT_DIFF_TIME)
		? fTime - out.LastEntryTime()
		: fTime;

	if (out.Flags() & TRACE_OUTPUT_TEAM_ID)
		out.Print("[%6ld:%6ld] %10Ld: ", fThread, fTeam, time);
	else
		out.Print("[%6ld] %10Ld: ", fThread, time);

	AddDump(out);

	// remember the absolute time for the next entry's diff-time output
	out.SetLastEntryTime(fTime);
}
381 
382 
// Hook for subclasses to print their payload; the base implementation
// prints nothing.
void
AbstractTraceEntry::AddDump(TraceOutput& out)
{
}
387 
388 
389 //	#pragma mark -
390 
391 
392 #if ENABLE_TRACING
393 
// Trace entry created by ktrace_printf(): stores a copy of the (kernel)
// message in the tracing buffer, optionally with a stack trace.
class KernelTraceEntry : public AbstractTraceEntry {
	public:
		KernelTraceEntry(const char* message)
		{
			// copy at most 256 bytes of the kernel-space message
			fMessage = alloc_tracing_buffer_strcpy(message, 256, false);

#if KTRACE_PRINTF_STACK_TRACE
			// skip one frame so the trace starts at our caller
			fStackTrace = capture_tracing_stack_trace(
				KTRACE_PRINTF_STACK_TRACE, 1, false);
#endif
			Initialized();
		}

		virtual void AddDump(TraceOutput& out)
		{
			out.Print("kern: %s", fMessage);
		}

#if KTRACE_PRINTF_STACK_TRACE
		virtual void DumpStackTrace(TraceOutput& out)
		{
			out.PrintStackTrace(fStackTrace);
		}
#endif

	private:
		char*	fMessage;
#if KTRACE_PRINTF_STACK_TRACE
		tracing_stack_trace* fStackTrace;
#endif
};
425 
426 
// Trace entry created by _user_ktrace_output(): like KernelTraceEntry, but
// the message is copied from userland (with address validation).
class UserTraceEntry : public AbstractTraceEntry {
	public:
		UserTraceEntry(const char* message)
		{
			// copy at most 256 bytes from the user-space message
			fMessage = alloc_tracing_buffer_strcpy(message, 256, true);

#if KTRACE_PRINTF_STACK_TRACE
			// skip one frame so the trace starts at our caller
			fStackTrace = capture_tracing_stack_trace(
				KTRACE_PRINTF_STACK_TRACE, 1, false);
#endif
			Initialized();
		}

		virtual void AddDump(TraceOutput& out)
		{
			out.Print("user: %s", fMessage);
		}

#if KTRACE_PRINTF_STACK_TRACE
		virtual void DumpStackTrace(TraceOutput& out)
		{
			out.PrintStackTrace(fStackTrace);
		}
#endif

	private:
		char*	fMessage;
#if KTRACE_PRINTF_STACK_TRACE
		tracing_stack_trace* fStackTrace;
#endif
};
458 
459 #endif	// ENABLE_TRACING
460 
461 
462 //	#pragma mark - trace filters
463 
464 
TraceFilter::~TraceFilter()
{
}


// Base implementation matches nothing; subclasses override.
bool
TraceFilter::Filter(const TraceEntry* entry, LazyTraceOutput& out)
{
	return false;
}
475 
476 
477 
478 class ThreadTraceFilter : public TraceFilter {
479 public:
480 	virtual bool Filter(const TraceEntry* _entry, LazyTraceOutput& out)
481 	{
482 		const AbstractTraceEntry* entry
483 			= dynamic_cast<const AbstractTraceEntry*>(_entry);
484 		return (entry != NULL && entry->Thread() == fThread);
485 	}
486 };
487 
488 
489 class TeamTraceFilter : public TraceFilter {
490 public:
491 	virtual bool Filter(const TraceEntry* _entry, LazyTraceOutput& out)
492 	{
493 		const AbstractTraceEntry* entry
494 			= dynamic_cast<const AbstractTraceEntry*>(_entry);
495 		return (entry != NULL && entry->Team() == fTeam);
496 	}
497 };
498 
499 
// Matches entries whose dumped text contains the substring fString.
class PatternTraceFilter : public TraceFilter {
public:
	virtual bool Filter(const TraceEntry* entry, LazyTraceOutput& out)
	{
		return strstr(out.DumpEntry(entry), fString) != NULL;
	}
};
507 
508 
// Matches entries whose dumped text contains fValue formatted in decimal.
class DecimalPatternTraceFilter : public TraceFilter {
public:
	virtual bool Filter(const TraceEntry* entry, LazyTraceOutput& out)
	{
		// TODO: this is *very* slow
		char buffer[64];
		snprintf(buffer, sizeof(buffer), "%Ld", fValue);
		return strstr(out.DumpEntry(entry), buffer) != NULL;
	}
};
519 
// Matches entries whose dumped text contains fValue formatted in hex.
class HexPatternTraceFilter : public TraceFilter {
public:
	virtual bool Filter(const TraceEntry* entry, LazyTraceOutput& out)
	{
		// TODO: this is *very* slow
		char buffer[64];
		snprintf(buffer, sizeof(buffer), "%Lx", fValue);
		return strstr(out.DumpEntry(entry), buffer) != NULL;
	}
};
530 
// Matches entries whose dumped text contains the string pointed to by
// fValue; the string may live in kernel or user space.
class StringPatternTraceFilter : public TraceFilter {
public:
	virtual bool Filter(const TraceEntry* entry, LazyTraceOutput& out)
	{
		if (IS_KERNEL_ADDRESS(fValue))
			return strstr(out.DumpEntry(entry), (const char*)fValue) != NULL;

		// user-space string: copy it into a local buffer first
		// TODO: this is *very* slow
		char buffer[64];
		user_strlcpy(buffer, (const char*)fValue, sizeof(buffer));
		return strstr(out.DumpEntry(entry), buffer) != NULL;
	}
};
544 
// Logical negation of its single sub-filter.
class NotTraceFilter : public TraceFilter {
public:
	virtual bool Filter(const TraceEntry* entry, LazyTraceOutput& out)
	{
		return !fSubFilters.first->Filter(entry, out);
	}
};
552 
553 
// Logical conjunction of two sub-filters (short-circuiting).
class AndTraceFilter : public TraceFilter {
public:
	virtual bool Filter(const TraceEntry* entry, LazyTraceOutput& out)
	{
		return fSubFilters.first->Filter(entry, out)
			&& fSubFilters.second->Filter(entry, out);
	}
};


// Logical disjunction of two sub-filters (short-circuiting).
class OrTraceFilter : public TraceFilter {
public:
	virtual bool Filter(const TraceEntry* entry, LazyTraceOutput& out)
	{
		return fSubFilters.first->Filter(entry, out)
			|| fSubFilters.second->Filter(entry, out);
	}
};
572 
573 
/*!	Recursive-descent parser for the prefix filter expression language of
	the "traced" debugger command (e.g. "and #foo thread 42"). Filters are
	placement-new constructed into a fixed-size array (no heap allocation
	-- this runs in the kernel debugger), so at most MAX_FILTERS nodes can
	appear in one expression. The root of the parsed expression is always
	fFilters[0].
*/
class TraceFilterParser {
public:
	static TraceFilterParser* Default()
	{
		return &sParser;
	}

	// Parses the given tokens; returns true iff the whole token list forms
	// exactly one valid expression.
	bool Parse(int argc, const char* const* argv)
	{
		fTokens = argv;
		fTokenCount = argc;
		fTokenIndex = 0;
		fFilterCount = 0;

		TraceFilter* filter = _ParseExpression();
		return fTokenIndex == fTokenCount && filter != NULL;
	}

	// Returns the root filter of the last successful Parse().
	TraceFilter* Filter()
	{
		return &fFilters[0];
	}

private:
	// Parses one expression starting at the current token; returns NULL on
	// syntax error, too many filters, or unexpected end of input.
	TraceFilter* _ParseExpression()
	{
		const char* token = _NextToken();
		if (!token) {
			// unexpected end of expression
			return NULL;
		}

		if (fFilterCount == MAX_FILTERS) {
			// too many filters
			return NULL;
		}

		if (token[0] == '#') {
			// "#<pattern>": substring match
			TraceFilter* filter = new(&fFilters[fFilterCount++])
				PatternTraceFilter;
			filter->fString = token + 1;
			return filter;
		} else if (token[0] == 'd' && token[1] == '#') {
			// "d#<expr>": match decimal rendering of the value
			TraceFilter* filter = new(&fFilters[fFilterCount++])
				DecimalPatternTraceFilter;
			filter->fValue = parse_expression(token + 2);
			return filter;
		} else if (token[0] == 'x' && token[1] == '#') {
			// "x#<expr>": match hex rendering of the value
			TraceFilter* filter = new(&fFilters[fFilterCount++])
				HexPatternTraceFilter;
			filter->fValue = parse_expression(token + 2);
			return filter;
		} else if (token[0] == 's' && token[1] == '#') {
			// "s#<expr>": expr evaluates to a string address to match
			TraceFilter* filter = new(&fFilters[fFilterCount++])
				StringPatternTraceFilter;
			filter->fValue = parse_expression(token + 2);
			return filter;
		} else if (strcmp(token, "not") == 0) {
			TraceFilter* filter = new(&fFilters[fFilterCount++]) NotTraceFilter;
			if ((filter->fSubFilters.first = _ParseExpression()) != NULL)
				return filter;
			return NULL;
		} else if (strcmp(token, "and") == 0) {
			TraceFilter* filter = new(&fFilters[fFilterCount++]) AndTraceFilter;
			if ((filter->fSubFilters.first = _ParseExpression()) != NULL
				&& (filter->fSubFilters.second = _ParseExpression()) != NULL) {
				return filter;
			}
			return NULL;
		} else if (strcmp(token, "or") == 0) {
			TraceFilter* filter = new(&fFilters[fFilterCount++]) OrTraceFilter;
			if ((filter->fSubFilters.first = _ParseExpression()) != NULL
				&& (filter->fSubFilters.second = _ParseExpression()) != NULL) {
				return filter;
			}
			return NULL;
		} else if (strcmp(token, "thread") == 0) {
			const char* arg = _NextToken();
			if (arg == NULL) {
				// unexpected end of expression
				return NULL;
			}

			TraceFilter* filter = new(&fFilters[fFilterCount++])
				ThreadTraceFilter;
			filter->fThread = strtol(arg, NULL, 0);
			return filter;
		} else if (strcmp(token, "team") == 0) {
			const char* arg = _NextToken();
			if (arg == NULL) {
				// unexpected end of expression
				return NULL;
			}

			TraceFilter* filter = new(&fFilters[fFilterCount++])
				TeamTraceFilter;
			filter->fTeam = strtol(arg, NULL, 0);
			return filter;
		} else {
			// invalid token
			return NULL;
		}
	}

	// Returns the most recently consumed token, or NULL if none.
	const char* _CurrentToken() const
	{
		if (fTokenIndex >= 1 && fTokenIndex <= fTokenCount)
			return fTokens[fTokenIndex - 1];
		return NULL;
	}

	// Consumes and returns the next token, or NULL at end of input.
	const char* _NextToken()
	{
		if (fTokenIndex >= fTokenCount)
			return NULL;
		return fTokens[fTokenIndex++];
	}

private:
	enum { MAX_FILTERS = 32 };

	const char* const*			fTokens;
	int							fTokenCount;
	int							fTokenIndex;
	TraceFilter					fFilters[MAX_FILTERS];
	int							fFilterCount;

	static TraceFilterParser	sParser;
};
703 
704 
705 TraceFilterParser TraceFilterParser::sParser;
706 
707 
708 //	#pragma mark -
709 
710 
711 #if ENABLE_TRACING
712 
713 
/*!	Advances the iterator to the next non-buffer entry and returns it, or
	\c NULL when the end is reached. Index 0 means "before the first
	entry", so the first call positions on the oldest entry.
*/
TraceEntry*
TraceEntryIterator::Next()
{
	if (fIndex == 0) {
		fEntry = _NextNonBufferEntry(sFirstEntry);
		fIndex = 1;
	} else if (fEntry != NULL) {
		fEntry = _NextNonBufferEntry(next_entry(fEntry));
		fIndex++;
	}

	return Current();
}
727 
728 
/*!	Moves the iterator to the previous non-buffer entry and returns it, or
	\c NULL when moving before the first entry. Index sEntries + 1 means
	"after the last entry", from where we step back starting at
	sAfterLastEntry.
*/
TraceEntry*
TraceEntryIterator::Previous()
{
	if (fIndex == (int32)sEntries + 1)
		fEntry = sAfterLastEntry;

	if (fEntry != NULL) {
		fEntry = _PreviousNonBufferEntry(previous_entry(fEntry));
		fIndex--;
	}

	return Current();
}
742 
743 
/*!	Moves the iterator to the 1-based \a index and returns the entry there.
	Out-of-range indices park the iterator before the first (index 0) or
	after the last (sEntries + 1) entry and return \c NULL. Iterates from
	whichever of {current position, start, end} is closest.
*/
TraceEntry*
TraceEntryIterator::MoveTo(int32 index)
{
	if (index == fIndex)
		return Current();

	if (index <= 0 || index > (int32)sEntries) {
		fIndex = (index <= 0 ? 0 : sEntries + 1);
		fEntry = NULL;
		return NULL;
	}

	// get the shortest iteration path
	int32 distance = index - fIndex;
	int32 direction = distance < 0 ? -1 : 1;
	distance *= direction;

	// starting over from the beginning may be shorter ...
	if (index < distance) {
		distance = index;
		direction = 1;
		fEntry = NULL;
		fIndex = 0;
	}
	// ... or from the end
	if ((int32)sEntries + 1 - fIndex < distance) {
		distance = sEntries + 1 - fIndex;
		direction = -1;
		fEntry = NULL;
		fIndex = sEntries + 1;
	}

	// iterate to the index
	if (direction < 0) {
		while (fIndex != index)
			Previous();
	} else {
		while (fIndex != index)
			Next();
	}

	return Current();
}
785 
786 
// Skips forward over BUFFER_ENTRY (raw data) entries; returns the next real
// entry or NULL.
trace_entry*
TraceEntryIterator::_NextNonBufferEntry(trace_entry* entry)
{
	while (entry != NULL && (entry->flags & BUFFER_ENTRY) != 0)
		entry = next_entry(entry);

	return entry;
}


// Skips backward over BUFFER_ENTRY (raw data) entries; returns the previous
// real entry or NULL.
trace_entry*
TraceEntryIterator::_PreviousNonBufferEntry(trace_entry* entry)
{
	while (entry != NULL && (entry->flags & BUFFER_ENTRY) != 0)
		entry = previous_entry(entry);

	return entry;
}
805 
806 
/*!	Implementation of the "traced" debugger command. Parses the command
	line (output flags, "forward"/"backward" continuation, start/count/
	range, and an optional filter expression), then dumps the matching
	trace entries. State is kept in function-local statics so that a
	subsequent "forward"/"backward" (or empty-line) invocation continues
	where the previous one stopped -- continuation is refused when entries
	were written in the meantime. \a wrapperFilter, if given, wraps the
	parsed filter (used by subsystem-specific traced commands).
	Returns B_KDEBUG_CONT when the invocation is continuable, 0 otherwise.
*/
int
dump_tracing_internal(int argc, char** argv, WrapperTraceFilter* wrapperFilter)
{
	int argi = 1;

	// variables in which we store our state to be continuable
	static int32 _previousCount = 0;
	static bool _previousHasFilter = false;
	static bool _previousPrintStackTrace = false;
	static int32 _previousMaxToCheck = 0;
	static int32 _previousFirstChecked = 1;
	static int32 _previousLastChecked = -1;
	static int32 _previousDirection = 1;
	static uint32 _previousWritten = 0;
	static uint32 _previousEntries = 0;
	static uint32 _previousOutputFlags = 0;
	static TraceEntryIterator iterator;

	// Note: start and index are Pascal-like indices (i.e. in [1, sEntries]).
	int32 start = 0;	// special index: print the last count entries
	int32 count = 0;
	int32 maxToCheck = 0;
	int32 cont = 0;

	bool hasFilter = false;
	bool printStackTrace = false;

	// parse the output option flags
	uint32 outputFlags = 0;
	while (argi < argc) {
		if (strcmp(argv[argi], "--difftime") == 0) {
			outputFlags |= TRACE_OUTPUT_DIFF_TIME;
			argi++;
		} else if (strcmp(argv[argi], "--printteam") == 0) {
			outputFlags |= TRACE_OUTPUT_TEAM_ID;
			argi++;
		} else if (strcmp(argv[argi], "--stacktrace") == 0) {
			printStackTrace = true;
			argi++;
		} else
			break;
	}

	// explicit continuation request, or (no arguments at all) continue in
	// the direction of the last invocation
	if (argi < argc) {
		if (strcmp(argv[argi], "forward") == 0) {
			cont = 1;
			argi++;
		} else if (strcmp(argv[argi], "backward") == 0) {
			cont = -1;
			argi++;
		}
	} else
		cont = _previousDirection;

	if (cont != 0) {
		if (argi < argc) {
			print_debugger_command_usage(argv[0]);
			return 0;
		}
		if (sWritten == 0 || sWritten != _previousWritten
			|| sEntries != _previousEntries) {
			kprintf("Can't continue iteration. \"%s\" has not been invoked "
				"before, or there were new entries written since the last "
				"invocation.\n", argv[0]);
			return 0;
		}
	}

	// get start, count, maxToCheck
	int32* params[3] = { &start, &count, &maxToCheck };
	for (int i = 0; i < 3 && !hasFilter && argi < argc; i++) {
		if (strcmp(argv[argi], "filter") == 0) {
			hasFilter = true;
			argi++;
		} else if (argv[argi][0] == '#') {
			hasFilter = true;
		} else {
			*params[i] = parse_expression(argv[argi]);
			argi++;
		}
	}

	// filter specification
	if (argi < argc) {
		hasFilter = true;
		if (strcmp(argv[argi], "filter") == 0)
			argi++;

		if (!TraceFilterParser::Default()->Parse(argc - argi, argv + argi)) {
			print_debugger_command_usage(argv[0]);
			return 0;
		}
	}

	int32 direction;
	int32 firstToCheck;
	int32 lastToCheck;

	if (cont != 0) {
		// get values from the previous iteration
		direction = cont;
		count = _previousCount;
		maxToCheck = _previousMaxToCheck;
		hasFilter = _previousHasFilter;
		outputFlags = _previousOutputFlags;
		printStackTrace = _previousPrintStackTrace;

		if (direction < 0)
			start = _previousFirstChecked - 1;
		else
			start = _previousLastChecked + 1;
	} else {
		// defaults for count and maxToCheck
		if (count == 0)
			count = 30;
		if (maxToCheck == 0 || !hasFilter)
			maxToCheck = count;
		else if (maxToCheck < 0)
			maxToCheck = sEntries;

		// determine iteration direction
		direction = (start <= 0 || count < 0 ? -1 : 1);

		// validate count and maxToCheck
		if (count < 0)
			count = -count;
		if (maxToCheck < 0)
			maxToCheck = -maxToCheck;
		if (maxToCheck > (int32)sEntries)
			maxToCheck = sEntries;
		if (count > maxToCheck)
			count = maxToCheck;

		// validate start
		if (start <= 0 || start > (int32)sEntries)
			start = max_c(1, sEntries);
	}

	if (direction < 0) {
		firstToCheck = max_c(1, start - maxToCheck + 1);
		lastToCheck = start;
	} else {
		firstToCheck = start;
		lastToCheck = min_c((int32)sEntries, start + maxToCheck - 1);
	}

	// reset the iterator, if something changed in the meantime
	if (sWritten == 0 || sWritten != _previousWritten
		|| sEntries != _previousEntries) {
		iterator.Reset();
	}

	LazyTraceOutput out(sTraceOutputBuffer, kTraceOutputBufferSize,
		outputFlags);

	bool markedMatching = false;
	int32 firstToDump = firstToCheck;
	int32 lastToDump = lastToCheck;

	TraceFilter* filter = NULL;
	if (hasFilter)
		filter = TraceFilterParser::Default()->Filter();

	if (wrapperFilter != NULL) {
		wrapperFilter->Init(filter, direction, cont != 0);
		filter = wrapperFilter;
	}

	if (direction < 0 && filter && lastToCheck - firstToCheck >= count) {
		// iteration direction is backwards
		markedMatching = true;

		// From the last entry to check iterate backwards to check filter
		// matches. Entries are marked with FILTER_MATCH, so the forward
		// dump loop below knows which ones to print.
		int32 matching = 0;

		// move to the entry after the last entry to check
		iterator.MoveTo(lastToCheck + 1);

		// iterate backwards
		firstToDump = -1;
		lastToDump = -1;
		while (iterator.Index() > firstToCheck) {
			TraceEntry* entry = iterator.Previous();
			if ((entry->Flags() & ENTRY_INITIALIZED) != 0) {
				out.Clear();
				if (filter->Filter(entry, out)) {
					entry->ToTraceEntry()->flags |= FILTER_MATCH;
					if (lastToDump == -1)
						lastToDump = iterator.Index();
					firstToDump = iterator.Index();

					matching++;
					if (matching >= count)
						break;
				} else
					entry->ToTraceEntry()->flags &= ~FILTER_MATCH;
			}
		}

		firstToCheck = iterator.Index();

		// iterate to the previous entry, so that the next loop starts at the
		// right one
		iterator.Previous();
	}

	out.SetLastEntryTime(0);

	// set the iterator to the entry before the first one to dump
	iterator.MoveTo(firstToDump - 1);

	// dump the entries matching the filter in the range
	// [firstToDump, lastToDump]
	int32 dumped = 0;

	while (TraceEntry* entry = iterator.Next()) {
		int32 index = iterator.Index();
		if (index < firstToDump)
			continue;
		if (index > lastToDump || dumped >= count) {
			if (direction > 0)
				lastToCheck = index - 1;
			break;
		}

		if ((entry->Flags() & ENTRY_INITIALIZED) != 0) {
			out.Clear();
			if (filter &&  (markedMatching
					? (entry->Flags() & FILTER_MATCH) == 0
					: !filter->Filter(entry, out))) {
				continue;
			}

			// don't print trailing new line
			const char* dump = out.DumpEntry(entry);
			int len = strlen(dump);
			if (len > 0 && dump[len - 1] == '\n')
				len--;

			kprintf("%5ld. %.*s\n", index, len, dump);

			if (printStackTrace) {
				out.Clear();
				entry->DumpStackTrace(out);
				if (out.Size() > 0)
					kputs(out.Buffer());
			}
		} else if (!filter)
			kprintf("%5ld. ** uninitialized entry **\n", index);

		dumped++;
	}

	kprintf("printed %ld entries within range %ld to %ld (%ld of %ld total, "
		"%ld ever)\n", dumped, firstToCheck, lastToCheck,
		lastToCheck - firstToCheck + 1, sEntries, sWritten);

	// store iteration state
	_previousCount = count;
	_previousMaxToCheck = maxToCheck;
	_previousHasFilter = hasFilter;
	_previousPrintStackTrace = printStackTrace;
	_previousFirstChecked = firstToCheck;
	_previousLastChecked = lastToCheck;
	_previousDirection = direction;
	_previousWritten = sWritten;
	_previousEntries = sEntries;
	_previousOutputFlags = outputFlags;

	return cont != 0 ? B_KDEBUG_CONT : 0;
}
1078 
1079 
// Debugger command hook for "traced" (no wrapper filter).
static int
dump_tracing_command(int argc, char** argv)
{
	return dump_tracing_internal(argc, argv, NULL);
}
1085 
1086 
1087 #endif	// ENABLE_TRACING
1088 
1089 
/*!	Allocates \a size bytes of raw storage from the tracing buffer (as a
	BUFFER_ENTRY, i.e. not a TraceEntry object). The memory lives until the
	ring buffer evicts it. Returns \c NULL on failure or when tracing is
	disabled.
*/
extern "C" uint8*
alloc_tracing_buffer(size_t size)
{
#if	ENABLE_TRACING
	trace_entry* entry = allocate_entry(size + sizeof(trace_entry),
		BUFFER_ENTRY);
	if (entry == NULL)
		return NULL;

	// usable memory starts right after the entry header
	return (uint8*)(entry + 1);
#else
	return NULL;
#endif
}
1104 
1105 
/*!	Copies \a size bytes from \a source into newly allocated tracing buffer
	memory. If \a user is \c true, \a source is validated and copied as a
	userland address. Returns \c NULL on allocation or copy failure (the
	allocated buffer entry is then simply left to be recycled by the ring
	buffer).
*/
uint8*
alloc_tracing_buffer_memcpy(const void* source, size_t size, bool user)
{
	if (user && !IS_USER_ADDRESS(source))
		return NULL;

	uint8* buffer = alloc_tracing_buffer(size);
	if (buffer == NULL)
		return NULL;

	if (user) {
		if (user_memcpy(buffer, source, size) != B_OK)
			return NULL;
	} else
		memcpy(buffer, source, size);

	return buffer;
}
1124 
1125 
1126 char*
1127 alloc_tracing_buffer_strcpy(const char* source, size_t maxSize, bool user)
1128 {
1129 	if (source == NULL || maxSize == 0)
1130 		return NULL;
1131 
1132 	if (user && !IS_USER_ADDRESS(source))
1133 		return NULL;
1134 
1135 	// limit maxSize to the actual source string len
1136 	if (user) {
1137 		ssize_t size = user_strlcpy(NULL, source, 0);
1138 			// there's no user_strnlen()
1139 		if (size < 0)
1140 			return 0;
1141 		maxSize = min_c(maxSize, (size_t)size + 1);
1142 	} else
1143 		maxSize = strnlen(source, maxSize - 1) + 1;
1144 
1145 	char* buffer = (char*)alloc_tracing_buffer(maxSize);
1146 	if (buffer == NULL)
1147 		return NULL;
1148 
1149 	if (user) {
1150 		if (user_strlcpy(buffer, source, maxSize) < B_OK)
1151 			return NULL;
1152 	} else
1153 		strlcpy(buffer, source, maxSize);
1154 
1155 	return buffer;
1156 }
1157 
1158 
/*!	Captures up to \a maxCount return addresses of the current stack into a
	tracing_stack_trace allocated from the tracing buffer, skipping
	\a skipFrames frames plus this function itself. With \a userOnly only
	userland frames are captured. Returns \c NULL when tracing is disabled,
	allocation fails, or interrupts are disabled (see TODO below).
*/
tracing_stack_trace*
capture_tracing_stack_trace(int32 maxCount, int32 skipFrames, bool userOnly)
{
#if	ENABLE_TRACING
	// TODO: page_fault_exception() doesn't allow us to gracefully handle
	// a bad address in the stack trace, if interrupts are disabled.
	if (!are_interrupts_enabled())
		return NULL;

	tracing_stack_trace* stackTrace
		= (tracing_stack_trace*)alloc_tracing_buffer(
			sizeof(tracing_stack_trace) + maxCount * sizeof(addr_t));

	if (stackTrace != NULL) {
		// skipFrames + 1 to also skip this function's own frame
		stackTrace->depth = arch_debug_get_stack_trace(
			stackTrace->return_addresses, maxCount, 0, skipFrames + 1,
			userOnly);
	}

	return stackTrace;
#else
	return NULL;
#endif
}
1183 
1184 
/*!	Public entry point for subsystem-specific traced commands; forwards to
	dump_tracing_internal() with the given wrapper filter.
*/
int
dump_tracing(int argc, char** argv, WrapperTraceFilter* wrapperFilter)
{
#if	ENABLE_TRACING
	return dump_tracing_internal(argc, argv, wrapperFilter);
#else
	return 0;
#endif
}
1194 
1195 
// Acquires the tracing buffer spinlock, preventing concurrent entry
// allocation/eviction. Pair with unlock_tracing_buffer().
void
lock_tracing_buffer()
{
#if ENABLE_TRACING
	acquire_spinlock(&sLock);
#endif
}


// Releases the tracing buffer spinlock acquired by lock_tracing_buffer().
void
unlock_tracing_buffer()
{
#if ENABLE_TRACING
	release_spinlock(&sLock);
#endif
}
1212 
1213 
1214 extern "C" status_t
1215 tracing_init(void)
1216 {
1217 #if	ENABLE_TRACING
1218 	area_id area = create_area("tracing log", (void**)&sTraceOutputBuffer,
1219 		B_ANY_KERNEL_ADDRESS, MAX_TRACE_SIZE + kTraceOutputBufferSize,
1220 		B_FULL_LOCK, B_KERNEL_READ_AREA | B_KERNEL_WRITE_AREA);
1221 	if (area < B_OK)
1222 		return area;
1223 
1224 	sBuffer = (trace_entry*)(sTraceOutputBuffer + kTraceOutputBufferSize);
1225 	sFirstEntry = sBuffer;
1226 	sAfterLastEntry = sBuffer;
1227 
1228 	add_debugger_command_etc("traced", &dump_tracing_command,
1229 		"Dump recorded trace entries",
1230 		"[ --printteam ] [ --difftime ] [ --stacktrace ] "
1231 			"(\"forward\" | \"backward\") "
1232 			"| ([ <start> [ <count> [ <range> ] ] ] "
1233 			"[ #<pattern> | (\"filter\" <filter>) ])\n"
1234 		"Prints recorded trace entries. If \"backward\" or \"forward\" is\n"
1235 		"specified, the command continues where the previous invocation left\n"
1236 		"off, i.e. printing the previous respectively next entries (as many\n"
1237 		"as printed before). In this case the command is continuable, that is\n"
1238 		"afterwards entering an empty line in the debugger will reinvoke it.\n"
1239 		"If no arguments are given, the command continues in the direction\n"
1240 		"of the last invocation.\n"
1241 		"--printteam  - enables printing the entries' team IDs.\n"
1242 		"--difftime   - print difference times for all but the first entry.\n"
1243 		"--stacktrace - print stack traces for entries that captured one.\n"
1244 		"  <start>    - The base index of the entries to print. Depending on\n"
1245 		"               whether the iteration direction is forward or\n"
1246 		"               backward this will be the first or last entry printed\n"
1247 		"               (potentially, if a filter is specified). The index of\n"
1248 		"               the first entry in the trace buffer is 1. If 0 is\n"
1249 		"               specified, the last <count> recorded entries are\n"
1250 		"               printed (iteration direction is backward). Defaults \n"
1251 		"               to 0.\n"
1252 		"  <count>    - The number of entries to be printed. Defaults to 30.\n"
1253 		"               If negative, the -<count> entries before and\n"
1254 		"               including <start> will be printed.\n"
1255 		"  <range>    - Only relevant if a filter is specified. Specifies the\n"
1256 		"               number of entries to be filtered -- depending on the\n"
1257 		"               iteration direction the entries before or after\n"
1258 		"               <start>. If more than <count> entries match the\n"
1259 		"               filter, only the first (forward) or last (backward)\n"
1260 		"               <count> matching entries will be printed. If 0 is\n"
1261 		"               specified <range> will be set to <count>. If -1,\n"
1262 		"               <range> will be set to the number of recorded\n"
1263 		"               entries.\n"
1264 		"  <pattern>  - If specified only entries containing this string are\n"
1265 		"               printed.\n"
1266 		"  <filter>   - If specified only entries matching this filter\n"
1267 		"               expression are printed. The expression can consist of\n"
1268 		"               prefix operators \"not\", \"and\", \"or\", and\n"
1269 		"               filters \"'thread' <thread>\" (matching entries\n"
1270 		"               with the given thread ID), \"'team' <team>\"\n"
1271 						"(matching entries with the given team ID), and\n"
1272 		"               \"#<pattern>\" (matching entries containing the given\n"
1273 		"               string).\n", 0);
1274 #endif	// ENABLE_TRACING
1275 	return B_OK;
1276 }
1277 
1278 
/*!	printf()-style kernel tracing: formats the message into a stack buffer
	(truncated to 256 bytes) and records it as a KernelTraceEntry. No-op
	when tracing is disabled.
*/
void
ktrace_printf(const char *format, ...)
{
#if	ENABLE_TRACING
	va_list list;
	va_start(list, format);

	char buffer[256];
	vsnprintf(buffer, sizeof(buffer), format, list);

	va_end(list);

	// allocated from the tracing buffer; NULL result is intentionally
	// ignored (best-effort tracing)
	new(nothrow) KernelTraceEntry(buffer);
#endif	// ENABLE_TRACING
}
1294 
1295 
/*!	Syscall backend: records a userland-supplied message as a
	UserTraceEntry (the entry itself validates and copies the user
	address). No-op when tracing is disabled.
*/
void
_user_ktrace_output(const char *message)
{
#if	ENABLE_TRACING
	new(nothrow) UserTraceEntry(message);
#endif	// ENABLE_TRACING
}
1303 
1304