/* sbin/hammer/cmd_show.c */
/*
 * Copyright (c) 2008 The DragonFly Project.  All rights reserved.
 *
 * This code is derived from software contributed to The DragonFly Project
 * by Matthew Dillon <dillon@backplane.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of The DragonFly Project nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific, prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
#include <libutil.h>

#include "hammer.h"

#define FLAG_TOOFARLEFT		0x0001
#define FLAG_TOOFARRIGHT	0x0002
#define FLAG_BADTYPE		0x0004
#define FLAG_BADCHILDPARENT	0x0008
#define FLAG_BADMIRRORTID	0x0010
struct {
	struct hammer_base_elm base;
	int limit;	/* # of fields to test */
	int filter;	/* filter type (default -1) */
	int obfuscate;	/* obfuscate direntry name */
	int indent;	/* use depth indentation */
	struct zone_stat *stats;
} opt;
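/*
 * Note: opt.base and opt.limit hold the search key parsed from the command
 * line argument by init_btree_search() (limit = number of colon-separated
 * fields supplied); filter, obfuscate and indent are the flags passed down
 * from hammer_cmd_show().
 */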
static __inline void print_btree(hammer_off_t node_offset);
static __inline void print_subtree(hammer_btree_elm_t elm);
static void print_btree_node(hammer_off_t node_offset,
			hammer_tid_t mirror_tid,
			hammer_base_elm_t left_bound,
			hammer_base_elm_t right_bound);
static int test_node_count(hammer_node_ondisk_t node, char *badmp);
static void print_btree_elm(hammer_node_ondisk_t node, hammer_off_t node_offset,
			hammer_btree_elm_t elm,
			hammer_base_elm_t left_bound,
			hammer_base_elm_t right_bound,
			const char *ext);
static int get_elm_flags(hammer_node_ondisk_t node, hammer_off_t node_offset,
			hammer_btree_elm_t elm,
			hammer_base_elm_t left_bound,
			hammer_base_elm_t right_bound);
static int test_lr(hammer_btree_elm_t elm,
			hammer_base_elm_t left_bound,
			hammer_base_elm_t right_bound);
static int test_rbn_lr(hammer_btree_elm_t elm,
			hammer_base_elm_t left_bound,
			hammer_base_elm_t right_bound);
static void print_bigblock_fill(hammer_off_t offset);
static const char *check_data_crc(hammer_btree_elm_t elm);
static uint32_t get_buf_crc(hammer_off_t buf_offset, int32_t buf_len);
static void print_record(hammer_btree_elm_t elm);
static int init_btree_search(const char *arg);
static int test_btree_search(hammer_btree_elm_t elm);
static int test_btree_match(hammer_btree_elm_t elm);
static int test_btree_out_of_range(hammer_btree_elm_t elm);
static void hexdump_record(const void *ptr, int length, const char *hdr);

static int num_bad_node = 0;
static int num_bad_elm = 0;
static int num_bad_rec = 0;
static int depth;
#define _X	"\t"
static const char* _indents[] = {
	"",
	_X,
	_X _X,
	_X _X _X,
	_X _X _X _X,
	_X _X _X _X _X,
	_X _X _X _X _X _X,
	_X _X _X _X _X _X _X,
	_X _X _X _X _X _X _X _X,
	_X _X _X _X _X _X _X _X _X,
	_X _X _X _X _X _X _X _X _X _X,
	_X _X _X _X _X _X _X _X _X _X _X,
	_X _X _X _X _X _X _X _X _X _X _X _X,
	_X _X _X _X _X _X _X _X _X _X _X _X _X,
	_X _X _X _X _X _X _X _X _X _X _X _X _X _X,
	_X _X _X _X _X _X _X _X _X _X _X _X _X _X _X,
	_X _X _X _X _X _X _X _X _X _X _X _X _X _X _X _X,
	/* deep enough */
};

#define INDENT _indents[opt.indent ? depth : 0]
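/*
 * Note: when depth indentation is enabled (opt.indent), INDENT expands to
 * one tab per current B-Tree depth; otherwise it is always the empty string.
 * The _indents table above covers depths 0 through 16.
 */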
void
hammer_cmd_show(const char *arg, int filter, int obfuscate, int indent)
{
	struct volume_info *volume;
	struct hammer_volume_ondisk *ondisk;
	struct hammer_blockmap *blockmap;
	struct zone_stat *stats = NULL;
	int zone;

	AssertOnFailure = (DebugOpt != 0);

	if (VerboseOpt)
		stats = hammer_init_zone_stat_bits();

	volume = get_root_volume();
	ondisk = volume->ondisk;
	if (QuietOpt < 3) {
		printf("Volume header\tnext_tid=%016jx\n",
		       (uintmax_t)ondisk->vol0_next_tid);
		printf("\t\tbufoffset=%016jx\n",
		       (uintmax_t)ondisk->vol_buf_beg);
		for (zone = 0; zone < HAMMER_MAX_ZONES; ++zone) {
			blockmap = ondisk->vol0_blockmap + zone;
			printf("\t\tzone %d\tnext_offset=%016jx\n",
				zone, blockmap->next_offset);
		}
	}
	rel_volume(volume);

	bzero(&opt, sizeof(opt));
	opt.filter = filter;
	opt.obfuscate = obfuscate;
	opt.indent = indent;
	opt.stats = stats;

	if (init_btree_search(arg) > 0) {
		printf("arg=\"%s\"", arg);
		if (opt.limit > 0)
			printf(" lo=%08x", opt.base.localization);
		if (opt.limit > 1)
			printf(" obj=%016jx", (uintmax_t)opt.base.obj_id);
		if (opt.limit > 2)
			printf(" rt=%02x", opt.base.rec_type);
		if (opt.limit > 3)
			printf(" key=%016jx", (uintmax_t)opt.base.key);
		if (opt.limit > 4)
			printf(" tid=%016jx", (uintmax_t)opt.base.create_tid);
		printf("\n");
	}
	print_btree(ondisk->vol0_btree_root);

	if (VerboseOpt) {
		hammer_print_zone_stat(stats);
		hammer_cleanup_zone_stat(stats);
	}

	if (num_bad_node || VerboseOpt) {
		printf("%d bad nodes\n", num_bad_node);
	}
	if (num_bad_elm || VerboseOpt) {
		printf("%d bad elms\n", num_bad_elm);
	}
	if (num_bad_rec || VerboseOpt) {
		printf("%d bad records\n", num_bad_rec);
	}
}
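/*
 * Typical invocation (illustrative; the device path is only an example):
 *
 *	hammer -f /dev/da1s1a show
 *	hammer -f /dev/da1s1a show lo:objid:rectype:key:tid
 *
 * where the optional argument is the colon-separated hex search key parsed
 * by init_btree_search() below.
 */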
static __inline
void
print_btree(hammer_off_t node_offset)
{
	depth = -1;
	print_btree_node(node_offset, HAMMER_MAX_TID, NULL, NULL);
	assert(depth == -1);
}

static __inline
void
print_subtree(hammer_btree_elm_t elm)
{
	print_btree_node(elm->internal.subtree_offset,
		elm->internal.mirror_tid, &elm[0].base, &elm[1].base);
}
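/*
 * Note on the node dump below: an internal element's own base (&elm[0].base)
 * and the next element's base (&elm[1].base) bound its subtree, so they are
 * passed down as the child node's left/right bounds.  print_btree_node()
 * prefixes each NODE line with two status characters: the first is 'B' when
 * the node is bad (CRC mismatch, unreadable, bad mirror_tid or bad element
 * count), the second identifies the reason ('I' read error, 'M' mirror_tid
 * too large, 'C' bad element count, 'U' unknown node type); a space means
 * good.
 */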
static void
print_btree_node(hammer_off_t node_offset,
		hammer_tid_t mirror_tid,
		hammer_base_elm_t left_bound, hammer_base_elm_t right_bound)
{
	struct buffer_info *buffer = NULL;
	hammer_node_ondisk_t node;
	hammer_btree_elm_t elm;
	int i;
	char badc = ' ';	/* good */
	char badm = ' ';	/* good */
	const char *ext;

	depth++;
	node = get_node(node_offset, &buffer);

	if (node == NULL) {
		badc = 'B';
		badm = 'I';
	} else {
		if (crc32(&node->crc + 1, HAMMER_BTREE_CRCSIZE) != node->crc)
			badc = 'B';
		if (node->mirror_tid > mirror_tid) {
			badc = 'B';
			badm = 'M';
		}
		if (test_node_count(node, &badm) == -1) {
			badc = 'B';
			assert(badm != ' ');
		}
	}

	if (badm != ' ' || badc != ' ')	/* not good */
		++num_bad_node;

	printf("%s%c%c NODE %016jx ",
	       INDENT, badc, badm, (uintmax_t)node_offset);
	printf("cnt=%02d p=%016jx type=%c depth=%d mirror=%016jx",
	       node->count,
	       (uintmax_t)node->parent,
	       (node->type ? node->type : '?'),
	       depth,
	       (uintmax_t)node->mirror_tid);
	if (QuietOpt < 3) {
		printf(" fill=");
		print_bigblock_fill(node_offset);
	}
	printf(" {\n");

	if (VerboseOpt)
		hammer_add_zone_stat(opt.stats, node_offset, sizeof(*node));

	for (i = 0; i < node->count; ++i) {
		elm = &node->elms[i];
		ext = NULL;
		if (opt.limit) {
			switch (node->type) {
			case HAMMER_BTREE_TYPE_INTERNAL:
				if (!test_btree_out_of_range(elm))
					ext = "*";
				break;
			case HAMMER_BTREE_TYPE_LEAF:
				if (test_btree_match(elm))
					ext = "*";
				break;
			}
		}
		print_btree_elm(node, node_offset,
				elm, left_bound, right_bound, ext);
	}
	if (node->type == HAMMER_BTREE_TYPE_INTERNAL) {
		assert(i == node->count);	/* boundary */
		elm = &node->elms[i];
		print_btree_elm(node, node_offset,
				elm, left_bound, right_bound, NULL);
	}
	printf("%s }\n", INDENT);

	if (node->type == HAMMER_BTREE_TYPE_INTERNAL) {
		for (i = 0; i < node->count; ++i) {
			elm = &node->elms[i];
			if (opt.limit && opt.filter) {
				if (test_btree_out_of_range(elm))
					continue;
			}
			if (elm->internal.subtree_offset) {
				print_subtree(elm);
				/*
				 * Cause show to do normal iteration after
				 * seeking to the lo:objid:rectype:key:tid
				 * by default
				 */
				if (opt.limit && opt.filter == -1) /* default */
					opt.filter = 0;
			}
		}
	}
	rel_buffer(buffer);
	depth--;
}
static int
test_node_count(hammer_node_ondisk_t node, char *badmp)
{
	hammer_node_ondisk_t parent_node;
	struct buffer_info *buffer = NULL;
	int maxcount;

	maxcount = hammer_node_max_elements(node->type);

	if (maxcount == -1) {
		*badmp = 'U';
		return(-1);
	} else if (node->count > maxcount) {
		*badmp = 'C';
		return(-1);
	} else if (node->count == 0) {
		parent_node = get_node(node->parent, &buffer);
		if (parent_node->count != 1) {
			*badmp = 'C';
			rel_buffer(buffer);
			return(-1);
		}
		rel_buffer(buffer);
	}

	return(0);
}
static __inline
int
is_root_btree_beg(uint8_t type, int i, hammer_btree_elm_t elm)
{
	/*
	 * elm->base.btype depends on what the original node had
	 * so it could be anything but HAMMER_BTREE_TYPE_NONE.
	 */
	return (type == HAMMER_BTREE_TYPE_INTERNAL &&
		i == 0 &&
		elm->base.localization == 0 &&
		elm->base.obj_id == (int64_t)-0x8000000000000000LL &&
		elm->base.key == (int64_t)-0x8000000000000000LL &&
		elm->base.create_tid == 1 &&
		elm->base.delete_tid == 1 &&
		elm->base.rec_type == 0 &&
		elm->base.obj_type == 0 &&
		elm->base.btype != HAMMER_BTREE_TYPE_NONE);
}

static __inline
int
is_root_btree_end(uint8_t type, int i, hammer_btree_elm_t elm)
{
	return (type == HAMMER_BTREE_TYPE_INTERNAL &&
		i != 0 &&
		elm->base.localization == 0xFFFFFFFFU &&
		elm->base.obj_id == 0x7FFFFFFFFFFFFFFFLL &&
		elm->base.key == 0x7FFFFFFFFFFFFFFFLL &&
		elm->base.create_tid == 0xFFFFFFFFFFFFFFFFULL &&
		elm->base.delete_tid == 0 &&
		elm->base.rec_type == 0xFFFFU &&
		elm->base.obj_type == 0 &&
		elm->base.btype == HAMMER_BTREE_TYPE_NONE);
}
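/*
 * Per-element flag legend for the ELM/RBN lines printed below: the leading
 * character is 'G' (good) or 'B' (bad), followed by single-letter flags
 * 'L' (too far left of the left bound), 'R' (too far right of the right
 * bound), 'T' (bad element type), 'C' (bad child/parent linkage or bad data
 * reference) and 'M' (bad mirror_tid).  The second output line shows 'd' for
 * a deleted element, or '>'/'<' when the element is the synthetic begin/end
 * boundary inherited from a root split.
 */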
static
void
print_btree_elm(hammer_node_ondisk_t node, hammer_off_t node_offset,
		hammer_btree_elm_t elm,
		hammer_base_elm_t left_bound,
		hammer_base_elm_t right_bound,
		const char *ext)
{
	char flagstr[8] = { 0, '-', '-', '-', '-', '-', '-', 0 };
	char deleted;
	char rootelm;
	const char *label;
	int flags;
	int i = ((char*)elm - (char*)node) / (int)sizeof(*elm) - 1;

	flags = get_elm_flags(node, node_offset, elm, left_bound, right_bound);
	flagstr[0] = flags ? 'B' : 'G';
	if (flags & FLAG_TOOFARLEFT)
		flagstr[2] = 'L';
	if (flags & FLAG_TOOFARRIGHT)
		flagstr[3] = 'R';
	if (flags & FLAG_BADTYPE)
		flagstr[4] = 'T';
	if (flags & FLAG_BADCHILDPARENT)
		flagstr[5] = 'C';
	if (flags & FLAG_BADMIRRORTID)
		flagstr[6] = 'M';
	if (flagstr[0] == 'B')
		++num_bad_elm;

	/*
	 * Check if elm is derived from root split
	 */
	if (is_root_btree_beg(node->type, i, elm))
		rootelm = '>';
	else if (is_root_btree_end(node->type, i, elm))
		rootelm = '<';
	else
		rootelm = ' ';

	if (elm->base.delete_tid)
		deleted = 'd';
	else
		deleted = ' ';

	if (node->type == HAMMER_BTREE_TYPE_INTERNAL && node->count == i)
		label = "RBN";
	else
		label = "ELM";

	printf("%s%s %s %2d %c ",
	       INDENT, flagstr, label, i, hammer_elm_btype(elm));
	printf("lo=%08x obj=%016jx rt=%02x key=%016jx tid=%016jx\n",
	       elm->base.localization,
	       (uintmax_t)elm->base.obj_id,
	       elm->base.rec_type,
	       (uintmax_t)elm->base.key,
	       (uintmax_t)elm->base.create_tid);
	printf("%s %c del=%016jx ot=%02x",
	       INDENT,
	       (rootelm == ' ' ? deleted : rootelm),
	       (uintmax_t)elm->base.delete_tid,
	       elm->base.obj_type);

	switch(node->type) {
	case HAMMER_BTREE_TYPE_INTERNAL:
		printf(" suboff=%016jx",
		       (uintmax_t)elm->internal.subtree_offset);
		if (QuietOpt < 3) {
			printf(" mirror=%016jx",
			       (uintmax_t)elm->internal.mirror_tid);
		}
		if (ext)
			printf(" %s", ext);
		break;
	case HAMMER_BTREE_TYPE_LEAF:
		switch(elm->base.btype) {
		case HAMMER_BTREE_TYPE_RECORD:
			printf(" dataoff=%016jx/%d",
			       (uintmax_t)elm->leaf.data_offset,
			       elm->leaf.data_len);
			if (QuietOpt < 3) {
				const char *p = check_data_crc(elm);
				printf(" crc=%08x", elm->leaf.data_crc);
				if (p) {
					printf(" error=%s", p);
					++num_bad_rec;
				}
				printf(" fill=");
				print_bigblock_fill(elm->leaf.data_offset);
			}
			if (QuietOpt < 2)
				print_record(elm);
			if (VerboseOpt)
				hammer_add_zone_stat(opt.stats,
					elm->leaf.data_offset,
					elm->leaf.data_len);
			break;
		default:
			printf(" badtype=%d", elm->base.btype);
			break;
		}
		if (ext)
			printf(" %s", ext);
		break;
	}
	printf("\n");
}
static
int
get_elm_flags(hammer_node_ondisk_t node, hammer_off_t node_offset,
		hammer_btree_elm_t elm,
		hammer_base_elm_t left_bound,
		hammer_base_elm_t right_bound)
{
	hammer_off_t child_offset;
	int flags = 0;
	int i = ((char*)elm - (char*)node) / (int)sizeof(*elm) - 1;

	switch(node->type) {
	case HAMMER_BTREE_TYPE_INTERNAL:
		child_offset = elm->internal.subtree_offset;
		if (elm->internal.mirror_tid > node->mirror_tid)
			flags |= FLAG_BADMIRRORTID;

		if (i == node->count) {
			if (child_offset != 0)
				flags |= FLAG_BADCHILDPARENT;
			switch(elm->base.btype) {
			case HAMMER_BTREE_TYPE_NONE:
				flags |= test_rbn_lr(elm, left_bound, right_bound);
				break;
			default:
				flags |= FLAG_BADTYPE;
				break;
			}
		} else {
			if (child_offset == 0) {
				flags |= FLAG_BADCHILDPARENT;
			} else {
				struct buffer_info *buffer = NULL;
				hammer_node_ondisk_t subnode;
				subnode = get_node(child_offset, &buffer);
				if (subnode == NULL)
					flags |= FLAG_BADCHILDPARENT;
				else if (subnode->parent != node_offset)
					flags |= FLAG_BADCHILDPARENT;
				rel_buffer(buffer);
			}
			switch(elm->base.btype) {
			case HAMMER_BTREE_TYPE_INTERNAL:
			case HAMMER_BTREE_TYPE_LEAF:
				flags |= test_lr(elm, left_bound, right_bound);
				break;
			default:
				flags |= FLAG_BADTYPE;
				break;
			}
		}
		break;
	case HAMMER_BTREE_TYPE_LEAF:
		if (elm->leaf.data_offset == 0) {
			flags |= FLAG_BADCHILDPARENT;
		}
		if (elm->leaf.data_len == 0) {
			flags |= FLAG_BADCHILDPARENT;
		}

		if (node->mirror_tid == 0 &&
		    !(node->parent == 0 && node->count == 2)) {
			flags |= FLAG_BADMIRRORTID;
		}
		if (elm->base.create_tid && node->mirror_tid &&
		    elm->base.create_tid > node->mirror_tid) {
			flags |= FLAG_BADMIRRORTID;
		}
		if (elm->base.delete_tid && node->mirror_tid &&
		    elm->base.delete_tid > node->mirror_tid) {
			flags |= FLAG_BADMIRRORTID;
		}
		switch(elm->base.btype) {
		case HAMMER_BTREE_TYPE_RECORD:
			flags |= test_lr(elm, left_bound, right_bound);
			break;
		default:
			flags |= FLAG_BADTYPE;
			break;
		}
		break;
	default:
		flags |= FLAG_BADTYPE;
		break;
	}
	return(flags);
}
static
int
test_lr(hammer_btree_elm_t elm,
	hammer_base_elm_t left_bound, hammer_base_elm_t right_bound)
{
	if (left_bound == NULL || right_bound == NULL)
		return(0);
	if (hammer_btree_cmp(&elm->base, left_bound) < 0)
		return(FLAG_TOOFARLEFT);
	if (hammer_btree_cmp(&elm->base, right_bound) >= 0)
		return(FLAG_TOOFARRIGHT);
	return(0);
}

static
int
test_rbn_lr(hammer_btree_elm_t rbn,
	hammer_base_elm_t left_bound, hammer_base_elm_t right_bound)
{
	if (left_bound == NULL || right_bound == NULL)
		return(0);
	if (hammer_btree_cmp(&rbn->base, left_bound) < 0)
		return(FLAG_TOOFARLEFT);
	if (hammer_btree_cmp(&rbn->base, right_bound) > 0)
		return(FLAG_TOOFARRIGHT);
	return(0);
}
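/*
 * Note the asymmetry above: an ordinary element must sort strictly below the
 * node's right bound (>= is flagged too far right), while the right boundary
 * element (RBN) of an internal node is allowed to equal the right bound, so
 * only a strictly greater key (>) is flagged.
 */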
static
void
print_bigblock_fill(hammer_off_t offset)
{
	struct hammer_blockmap_layer1 layer1;
	struct hammer_blockmap_layer2 layer2;
	int fill;
	int error;

	blockmap_lookup(offset, &layer1, &layer2, &error);
	printf("z%d:v%d:%d:%d:%lu=",
	       HAMMER_ZONE_DECODE(offset),
	       HAMMER_VOL_DECODE(offset),
	       HAMMER_BLOCKMAP_LAYER1_INDEX(offset),
	       HAMMER_BLOCKMAP_LAYER2_INDEX(offset),
	       offset & HAMMER_BIGBLOCK_MASK64);

	if (error) {
		printf("B%d", error);
	} else {
		fill = layer2.bytes_free * 100 / HAMMER_BIGBLOCK_SIZE;
		fill = 100 - fill;
		printf("%d%%", fill);
	}
}
/*
 * Check the generic crc on a data element.  Inode record types are
 * special in that some of their fields are not CRCed.
 *
 * Also check that the zone is valid.
 */
static
const char *
check_data_crc(hammer_btree_elm_t elm)
{
	struct buffer_info *data_buffer;
	hammer_off_t data_offset;
	hammer_off_t buf_offset;
	int32_t data_len;
	uint32_t crc;
	int error;
	char *ptr;
	static char bo[5];

	data_offset = elm->leaf.data_offset;
	data_len = elm->leaf.data_len;
	data_buffer = NULL;
	if (data_offset == 0 || data_len == 0)
		return("ZO");	/* zero offset or length */

	error = 0;
	buf_offset = blockmap_lookup(data_offset, NULL, NULL, &error);
	if (error) {
		bzero(bo, sizeof(bo));
		snprintf(bo, sizeof(bo), "BO%d", -error);
		return(bo);
	}

	crc = 0;
	switch (elm->leaf.base.rec_type) {
	case HAMMER_RECTYPE_INODE:
		/* this should always match */
		if (data_len == sizeof(struct hammer_inode_data)) {
			ptr = get_buffer_data(buf_offset, &data_buffer, 0);
			crc = crc32(ptr, HAMMER_INODE_CRCSIZE);
			rel_buffer(data_buffer);
		}
		break;
	default:
		crc = get_buf_crc(buf_offset, data_len);
		break;
	}

	if (crc == 0)
		return("Bx");	/* bad crc */
	if (crc != elm->leaf.data_crc)
		return("BX");	/* bad crc */
	return(NULL);		/* success */
}
static
uint32_t
get_buf_crc(hammer_off_t buf_offset, int32_t buf_len)
{
	struct buffer_info *data_buffer = NULL;
	int32_t len;
	uint32_t crc = 0;
	char *ptr;

	while (buf_len) {
		ptr = get_buffer_data(buf_offset, &data_buffer, 0);
		len = HAMMER_BUFSIZE - ((int)buf_offset & HAMMER_BUFMASK);
		if (len > buf_len)
			len = (int)buf_len;
		assert(len <= HAMMER_BUFSIZE);
		crc = crc32_ext(ptr, len, crc);
		buf_len -= len;
		buf_offset += len;
	}
	rel_buffer(data_buffer);

	return(crc);
}
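/*
 * Note: the record data may span multiple HAMMER buffers, so the loop above
 * walks it one buffer-aligned chunk at a time and accumulates the CRC with
 * crc32_ext() rather than CRCing a single contiguous mapping.
 */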
static
void
print_config(char *cfgtxt)
{
	char *token;

	printf("\n%s%17s", INDENT, "");
	printf("config text=\"\n");
	if (cfgtxt != NULL) {
		while((token = strsep(&cfgtxt, "\r\n")) != NULL)
			if (strlen(token))
				printf("%s%17s %s\n",
					INDENT, "", token);
	}
	printf("%s%17s \"", INDENT, "");
}
static
void
print_record(hammer_btree_elm_t elm)
{
	struct buffer_info *data_buffer;
	hammer_off_t data_offset;
	int32_t data_len;
	hammer_data_ondisk_t data;
	uint32_t status;
	char *str1 = NULL;
	char *str2 = NULL;

	data_offset = elm->leaf.data_offset;
	data_len = elm->leaf.data_len;
	assert(data_offset != 0);
	assert(data_len != 0);

	data_buffer = NULL;
	data = get_buffer_data(data_offset, &data_buffer, 0);
	assert(data != NULL);

	switch(elm->leaf.base.rec_type) {
	case HAMMER_RECTYPE_UNKNOWN:
		printf("\n%s%17s", INDENT, "");
		printf("unknown");
		break;
	case HAMMER_RECTYPE_INODE:
		printf("\n%s%17s", INDENT, "");
		printf("inode size=%jd nlinks=%jd",
		       (intmax_t)data->inode.size,
		       (intmax_t)data->inode.nlinks);
		if (QuietOpt < 1) {
			printf(" mode=%05o uflags=%08x caps=%02x",
			       data->inode.mode,
			       data->inode.uflags,
			       data->inode.cap_flags);
			printf(" pobjid=%016jx ot=%02x\n",
			       (uintmax_t)data->inode.parent_obj_id,
			       data->inode.obj_type);
			printf("%s%17s", INDENT, "");
			printf(" ctime=%016jx mtime=%016jx atime=%016jx",
			       (uintmax_t)data->inode.ctime,
			       (uintmax_t)data->inode.mtime,
			       (uintmax_t)data->inode.atime);
			if (data->inode.ext.symlink[0])
				printf(" symlink=\"%s\"",
					data->inode.ext.symlink);
		}
		break;
	case HAMMER_RECTYPE_DIRENTRY:
		data_len -= HAMMER_ENTRY_NAME_OFF;
		printf("\n%s%17s", INDENT, "");
		printf("dir-entry ino=%016jx lo=%08x",
		       (uintmax_t)data->entry.obj_id,
		       data->entry.localization);
		if (!opt.obfuscate)
			printf(" name=\"%*.*s\"",
			       data_len, data_len, data->entry.name);
		break;
	case HAMMER_RECTYPE_FIX:
		switch(elm->leaf.base.key) {
		case HAMMER_FIXKEY_SYMLINK:
			data_len -= HAMMER_SYMLINK_NAME_OFF;
			printf("\n%s%17s", INDENT, "");
			printf("fix-symlink name=\"%*.*s\"",
			       data_len, data_len, data->symlink.name);
			break;
		}
		break;
	case HAMMER_RECTYPE_PFS:
		printf("\n%s%17s", INDENT, "");
		printf("pfs sync_beg_tid=%016jx sync_end_tid=%016jx\n",
			(intmax_t)data->pfsd.sync_beg_tid,
			(intmax_t)data->pfsd.sync_end_tid);
		uuid_to_string(&data->pfsd.shared_uuid, &str1, &status);
		uuid_to_string(&data->pfsd.unique_uuid, &str2, &status);
		printf("%17s", "");
		printf(" shared_uuid=%s\n", str1);
		printf("%17s", "");
		printf(" unique_uuid=%s\n", str2);
		printf("%17s", "");
		printf(" mirror_flags=%08x label=\"%s\"",
			data->pfsd.mirror_flags, data->pfsd.label);
		if (data->pfsd.snapshots[0])
			printf(" snapshots=\"%s\"", data->pfsd.snapshots);
		free(str1);
		free(str2);
		break;
	case HAMMER_RECTYPE_SNAPSHOT:
		printf("\n%s%17s", INDENT, "");
		printf("snapshot tid=%016jx label=\"%s\"",
			(intmax_t)data->snap.tid, data->snap.label);
		break;
	case HAMMER_RECTYPE_CONFIG:
		if (VerboseOpt > 2) {
			char *p = strdup(data->config.text);
			print_config(p);
			free(p);
		}
		break;
	case HAMMER_RECTYPE_DATA:
		if (VerboseOpt > 3) {
			printf("\n");
			hexdump_record(data, data_len, "\t\t ");
		}
		break;
	case HAMMER_RECTYPE_EXT:
	case HAMMER_RECTYPE_DB:
		if (VerboseOpt > 2) {
			printf("\n");
			hexdump_record(data, data_len, "\t\t ");
		}
		break;
	default:
		assert(0);
		break;
	}
	rel_buffer(data_buffer);
}
/*
 * HAMMER userspace only supports buffer sizes up to HAMMER_BUFSIZE,
 * which is 16KB.  Passing a record data length larger than 16KB to
 * hexdump(3) is invalid even if the leaf node elm says >16KB data.
 */
static void
hexdump_record(const void *ptr, int length, const char *hdr)
{
	int data_len = length;

	if (data_len > HAMMER_BUFSIZE)	/* XXX */
		data_len = HAMMER_BUFSIZE;
	hexdump(ptr, data_len, hdr, 0);

	if (length > data_len)
		printf("%s....\n", hdr);
}
static __inline
unsigned long
_strtoul(const char *p, int base)
{
	unsigned long retval;

	errno = 0;	/* clear */
	retval = strtoul(p, NULL, base);
	if (errno == ERANGE && retval == ULONG_MAX)
		err(1, "strtoul");
	return retval;
}

static __inline
unsigned long long
_strtoull(const char *p, int base)
{
	unsigned long long retval;

	errno = 0;	/* clear */
	retval = strtoull(p, NULL, base);
	if (errno == ERANGE && retval == ULLONG_MAX)
		err(1, "strtoull");
	return retval;
}
static int
init_btree_search(const char *arg)
{
	char *s, *p;
	int i = 0;

	bzero(&opt.base, sizeof(opt.base));
	opt.limit = 0;

	if (arg == NULL)
		return(-1);
	if (strcmp(arg, "none") == 0)
		return(-1);

	s = strdup(arg);
	if (s == NULL)
		return(-1);

	while ((p = s) != NULL) {
		if ((s = strchr(s, ':')) != NULL)
			*s++ = 0;
		if (++i == 1) {
			opt.base.localization = _strtoul(p, 16);
		} else if (i == 2) {
			opt.base.obj_id = _strtoull(p, 16);
		} else if (i == 3) {
			opt.base.rec_type = _strtoul(p, 16);
		} else if (i == 4) {
			opt.base.key = _strtoull(p, 16);
		} else if (i == 5) {
			opt.base.create_tid = _strtoull(p, 16);
			break;
		}
	}
	opt.limit = i;
	free(s);

	return(i);
}
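/*
 * Example (illustrative values): an argument of "1:000000000000beef" parses
 * as localization=0x1 and obj_id=0xbeef, so init_btree_search() returns 2
 * and opt.limit is set to 2; the remaining key fields stay zero and are
 * ignored by test_btree_search() below.
 */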
static int
test_btree_search(hammer_btree_elm_t elm)
{
	hammer_base_elm_t base1 = &elm->base;
	hammer_base_elm_t base2 = &opt.base;
	int limit = opt.limit;

	if (base1->localization < base2->localization)
		return(-1);
	if (base1->localization > base2->localization)
		return(1);
	if (limit == 1)
		return(0);	/* ignore below */

	if (base1->obj_id < base2->obj_id)
		return(-2);
	if (base1->obj_id > base2->obj_id)
		return(2);
	if (limit == 2)
		return(0);	/* ignore below */

	if (base1->rec_type < base2->rec_type)
		return(-3);
	if (base1->rec_type > base2->rec_type)
		return(3);
	if (limit == 3)
		return(0);	/* ignore below */

	if (base1->key < base2->key)
		return(-4);
	if (base1->key > base2->key)
		return(4);
	if (limit == 4)
		return(0);	/* ignore below */

	if (base1->create_tid == 0) {
		if (base2->create_tid == 0)
			return(0);
		return(5);
	}
	if (base2->create_tid == 0)
		return(-5);
	if (base1->create_tid < base2->create_tid)
		return(-5);
	if (base1->create_tid > base2->create_tid)
		return(5);
	return(0);
}
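/*
 * Return-value convention for test_btree_search(): 0 means the element
 * matches the search key in every field the user supplied (up to opt.limit);
 * a negative value -n means the element sorts before the key and a positive
 * value n means it sorts after, where n identifies the first differing field
 * (1=localization, 2=obj_id, 3=rec_type, 4=key, 5=create_tid).  A create_tid
 * of 0 is treated here as greater than any non-zero tid.
 */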
static __inline
int
test_btree_match(hammer_btree_elm_t elm)
{
	if (test_btree_search(elm) == 0)
		return(1);
	return(0);
}

static
int
test_btree_out_of_range(hammer_btree_elm_t elm)
{
	if (test_btree_search(elm) > 0)
		return(1);	/* conditions < this elm */

	if (opt.limit >= 5) {
		if (test_btree_search(elm + 1) <= 0)
			return(1);	/* next elm <= conditions */
	} else {
		if (test_btree_search(elm + 1) < 0)
			return(1);	/* next elm < conditions */
	}
	return(0);
}
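/*
 * In the UNDO FIFO dump below, one line is printed per FIFO record header;
 * a trailing '>' marks rootmap->first_offset (start of the live UNDO range)
 * and '<' marks rootmap->next_offset (end of the live range).
 */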
/*
 * Dump the UNDO FIFO
 */
void
hammer_cmd_show_undo(void)
{
	struct volume_info *volume;
	hammer_blockmap_t rootmap;
	hammer_off_t scan_offset;
	hammer_fifo_any_t head;
	struct buffer_info *data_buffer = NULL;
	int64_t bytes;

	volume = get_root_volume();
	rootmap = &volume->ondisk->vol0_blockmap[HAMMER_ZONE_UNDO_INDEX];
	if (rootmap->first_offset <= rootmap->next_offset)
		bytes = rootmap->next_offset - rootmap->first_offset;
	else
		bytes = rootmap->alloc_offset - rootmap->first_offset +
			(rootmap->next_offset & HAMMER_OFF_LONG_MASK);

	printf("Volume header UNDO %016jx-%016jx/%016jx\n",
		(intmax_t)rootmap->first_offset,
		(intmax_t)rootmap->next_offset,
		(intmax_t)rootmap->alloc_offset);
	printf("UNDO map is %jdMB\n",
		(intmax_t)((rootmap->alloc_offset & HAMMER_OFF_LONG_MASK) /
			   (1024 * 1024)));
	printf("UNDO being used is %jdB\n", (intmax_t)bytes);

	scan_offset = HAMMER_ZONE_ENCODE(HAMMER_ZONE_UNDO_INDEX, 0);
	while (scan_offset < rootmap->alloc_offset) {
		head = get_buffer_data(scan_offset, &data_buffer, 0);
		printf("%016jx ", scan_offset);

		switch(head->head.hdr_type) {
		case HAMMER_HEAD_TYPE_PAD:
			printf("PAD(%04x)", head->head.hdr_size);
			break;
		case HAMMER_HEAD_TYPE_DUMMY:
			printf("DUMMY(%04x) seq=%08x",
				head->head.hdr_size, head->head.hdr_seq);
			break;
		case HAMMER_HEAD_TYPE_UNDO:
			printf("UNDO(%04x) seq=%08x "
			       "dataoff=%016jx bytes=%d",
				head->head.hdr_size, head->head.hdr_seq,
				(intmax_t)head->undo.undo_offset,
				head->undo.undo_data_bytes);
			break;
		case HAMMER_HEAD_TYPE_REDO:
			printf("REDO(%04x) seq=%08x flags=%08x "
			       "objid=%016jx logoff=%016jx bytes=%d",
				head->head.hdr_size, head->head.hdr_seq,
				head->redo.redo_flags,
				(intmax_t)head->redo.redo_objid,
				(intmax_t)head->redo.redo_offset,
				head->redo.redo_data_bytes);
			break;
		default:
			printf("UNKNOWN(%04x,%04x) seq=%08x",
				head->head.hdr_type,
				head->head.hdr_size,
				head->head.hdr_seq);
			break;
		}

		if (scan_offset == rootmap->first_offset)
			printf(" >");
		if (scan_offset == rootmap->next_offset)
			printf(" <");
		printf("\n");

		if ((head->head.hdr_size & HAMMER_HEAD_ALIGN_MASK) ||
		    head->head.hdr_size == 0 ||
		    head->head.hdr_size > HAMMER_UNDO_ALIGN -
		    ((u_int)scan_offset & HAMMER_UNDO_MASK)) {
			printf("Illegal size field, skipping to "
			       "next boundary\n");
			scan_offset = (scan_offset + HAMMER_UNDO_MASK) &
					~HAMMER_UNDO_MASK64;
		} else {
			scan_offset += head->head.hdr_size;
		}
	}
	rel_buffer(data_buffer);
}