#!/usr/bin/env @PYTHON_SHEBANG@
# SPDX-License-Identifier: CDDL-1.0
#
# Print out statistics for all cached dmu buffers. This information
# is available through the dbufs kstat and may be post-processed as
# needed by the script.
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License, Version 1.0 only
# (the "License"). You may not use this file except in compliance
# with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or https://opensource.org/licenses/CDDL-1.0.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
# Copyright (C) 2013 Lawrence Livermore National Security, LLC.
# Produced at Lawrence Livermore National Laboratory (cf, DISCLAIMER).
#
# This script must remain compatible with Python 3.6+.
32# 33 34import sys 35import getopt 36import errno 37import re 38 39bhdr = ["pool", "objset", "object", "level", "blkid", "offset", "dbsize"] 40bxhdr = ["pool", "objset", "object", "level", "blkid", "offset", "dbsize", 41 "usize", "meta", "state", "dbholds", "dbc", "list", "atype", "flags", 42 "count", "asize", "access", "mru", "gmru", "mfu", "gmfu", "l2", 43 "l2_dattr", "l2_asize", "l2_comp", "aholds", "dtype", "btype", 44 "data_bs", "meta_bs", "bsize", "lvls", "dholds", "blocks", "dsize"] 45bincompat = ["cached", "direct", "indirect", "bonus", "spill"] 46 47dhdr = ["pool", "objset", "object", "dtype", "cached"] 48dxhdr = ["pool", "objset", "object", "dtype", "btype", "data_bs", "meta_bs", 49 "bsize", "lvls", "dholds", "blocks", "dsize", "cached", "direct", 50 "indirect", "bonus", "spill"] 51dincompat = ["level", "blkid", "offset", "dbsize", "usize", "meta", "state", 52 "dbholds", "dbc", "list", "atype", "flags", "count", "asize", 53 "access", "mru", "gmru", "mfu", "gmfu", "l2", "l2_dattr", 54 "l2_asize", "l2_comp", "aholds"] 55 56thdr = ["pool", "objset", "dtype", "cached"] 57txhdr = ["pool", "objset", "dtype", "cached", "direct", "indirect", 58 "bonus", "spill"] 59tincompat = ["object", "level", "blkid", "offset", "dbsize", "usize", "meta", 60 "state", "dbc", "dbholds", "list", "atype", "flags", "count", 61 "asize", "access", "mru", "gmru", "mfu", "gmfu", "l2", "l2_dattr", 62 "l2_asize", "l2_comp", "aholds", "btype", "data_bs", "meta_bs", 63 "bsize", "lvls", "dholds", "blocks", "dsize"] 64 65cols = { 66 # hdr: [size, scale, description] 67 "pool": [15, -1, "pool name"], 68 "objset": [6, -1, "dataset identification number"], 69 "object": [10, -1, "object number"], 70 "level": [5, -1, "indirection level of buffer"], 71 "blkid": [8, -1, "block number of buffer"], 72 "offset": [12, 1024, "offset in object of buffer"], 73 "dbsize": [7, 1024, "size of buffer"], 74 "usize": [7, 1024, "size of attached user data"], 75 "meta": [4, -1, "is this buffer metadata?"], 76 
"state": [5, -1, "state of buffer (read, cached, etc)"], 77 "dbholds": [7, 1000, "number of holds on buffer"], 78 "dbc": [3, -1, "in dbuf cache"], 79 "list": [4, -1, "which ARC list contains this buffer"], 80 "atype": [7, -1, "ARC header type (data or metadata)"], 81 "flags": [9, -1, "ARC read flags"], 82 "count": [5, -1, "ARC data count"], 83 "asize": [7, 1024, "size of this ARC buffer"], 84 "access": [10, -1, "time this ARC buffer was last accessed"], 85 "mru": [5, 1000, "hits while on the ARC's MRU list"], 86 "gmru": [5, 1000, "hits while on the ARC's MRU ghost list"], 87 "mfu": [5, 1000, "hits while on the ARC's MFU list"], 88 "gmfu": [5, 1000, "hits while on the ARC's MFU ghost list"], 89 "l2": [5, 1000, "hits while on the L2ARC"], 90 "l2_dattr": [8, -1, "L2ARC disk address/offset"], 91 "l2_asize": [8, 1024, "L2ARC alloc'd size (depending on compression)"], 92 "l2_comp": [21, -1, "L2ARC compression algorithm for buffer"], 93 "aholds": [6, 1000, "number of holds on this ARC buffer"], 94 "dtype": [27, -1, "dnode type"], 95 "btype": [27, -1, "bonus buffer type"], 96 "data_bs": [7, 1024, "data block size"], 97 "meta_bs": [7, 1024, "metadata block size"], 98 "bsize": [6, 1024, "bonus buffer size"], 99 "lvls": [6, -1, "number of indirection levels"], 100 "dholds": [6, 1000, "number of holds on dnode"], 101 "blocks": [8, 1000, "number of allocated blocks"], 102 "dsize": [12, 1024, "size of dnode"], 103 "cached": [6, 1024, "bytes cached for all blocks"], 104 "direct": [6, 1024, "bytes cached for direct blocks"], 105 "indirect": [8, 1024, "bytes cached for indirect blocks"], 106 "bonus": [5, 1024, "bytes cached for bonus buffer"], 107 "spill": [5, 1024, "bytes cached for spill block"], 108} 109 110hdr = None 111xhdr = None 112sep = " " # Default separator is 2 spaces 113cmd = ("Usage: dbufstat [-bdhnrtvx] [-i file] [-f fields] [-o file] " 114 "[-s string] [-F filter]\n") 115raw = 0 116 117 118if sys.platform.startswith("freebsd"): 119 import io 120 # Requires py-sysctl 
on FreeBSD 121 import sysctl 122 123 def default_ifile(): 124 dbufs = sysctl.filter("kstat.zfs.misc.dbufs")[0].value 125 sys.stdin = io.StringIO(dbufs) 126 return "-" 127 128elif sys.platform.startswith("linux"): 129 def default_ifile(): 130 return "/proc/spl/kstat/zfs/dbufs" 131 132 133def print_incompat_helper(incompat): 134 cnt = 0 135 for key in sorted(incompat): 136 if cnt == 0: 137 sys.stderr.write("\t") 138 elif cnt > 8: 139 sys.stderr.write(",\n\t") 140 cnt = 0 141 else: 142 sys.stderr.write(", ") 143 144 sys.stderr.write("%s" % key) 145 cnt += 1 146 147 sys.stderr.write("\n\n") 148 149 150def detailed_usage(): 151 sys.stderr.write("%s\n" % cmd) 152 153 sys.stderr.write("Field definitions incompatible with '-b' option:\n") 154 print_incompat_helper(bincompat) 155 156 sys.stderr.write("Field definitions incompatible with '-d' option:\n") 157 print_incompat_helper(dincompat) 158 159 sys.stderr.write("Field definitions incompatible with '-t' option:\n") 160 print_incompat_helper(tincompat) 161 162 sys.stderr.write("Field definitions are as follows:\n") 163 for key in sorted(cols.keys()): 164 sys.stderr.write("%11s : %s\n" % (key, cols[key][2])) 165 sys.stderr.write("\n") 166 167 sys.exit(0) 168 169 170def usage(): 171 sys.stderr.write("%s\n" % cmd) 172 sys.stderr.write("\t -b : Print table of information for each dbuf\n") 173 sys.stderr.write("\t -d : Print table of information for each dnode\n") 174 sys.stderr.write("\t -h : Print this help message\n") 175 sys.stderr.write("\t -n : Exclude header from output\n") 176 sys.stderr.write("\t -r : Print raw values\n") 177 sys.stderr.write("\t -t : Print table of information for each dnode type" 178 "\n") 179 sys.stderr.write("\t -v : List all possible field headers and definitions" 180 "\n") 181 sys.stderr.write("\t -x : Print extended stats\n") 182 sys.stderr.write("\t -i : Redirect input from the specified file\n") 183 sys.stderr.write("\t -f : Specify specific fields to print (see -v)\n") 184 
sys.stderr.write("\t -o : Redirect output to the specified file\n") 185 sys.stderr.write("\t -s : Override default field separator with custom " 186 "character or string\n") 187 sys.stderr.write("\t -F : Filter output by value or regex\n") 188 sys.stderr.write("\nExamples:\n") 189 sys.stderr.write("\tdbufstat -d -o /tmp/d.log\n") 190 sys.stderr.write("\tdbufstat -t -s \",\" -o /tmp/t.log\n") 191 sys.stderr.write("\tdbufstat -v\n") 192 sys.stderr.write("\tdbufstat -d -f pool,object,objset,dsize,cached\n") 193 sys.stderr.write("\tdbufstat -bx -F dbc=1,objset=54,pool=testpool\n") 194 sys.stderr.write("\n") 195 196 sys.exit(1) 197 198 199def prettynum(sz, scale, num=0): 200 global raw 201 202 suffix = [' ', 'K', 'M', 'G', 'T', 'P', 'E', 'Z'] 203 index = 0 204 save = 0 205 206 if raw or scale == -1: 207 return "%*s" % (sz, num) 208 209 # Rounding error, return 0 210 elif 0 < num < 1: 211 num = 0 212 213 while num > scale and index < 5: 214 save = num 215 num = num / scale 216 index += 1 217 218 if index == 0: 219 return "%*d" % (sz, num) 220 221 if (save / scale) < 10: 222 return "%*.1f%s" % (sz - 1, num, suffix[index]) 223 else: 224 return "%*d%s" % (sz - 1, num, suffix[index]) 225 226 227def print_values(v): 228 global hdr 229 global sep 230 231 try: 232 for col in hdr: 233 sys.stdout.write("%s%s" % ( 234 prettynum(cols[col][0], cols[col][1], v[col]), sep)) 235 sys.stdout.write("\n") 236 except IOError as e: 237 if e.errno == errno.EPIPE: 238 sys.exit(1) 239 240 241def print_header(): 242 global hdr 243 global sep 244 245 try: 246 for col in hdr: 247 sys.stdout.write("%*s%s" % (cols[col][0], col, sep)) 248 sys.stdout.write("\n") 249 except IOError as e: 250 if e.errno == errno.EPIPE: 251 sys.exit(1) 252 253 254def get_typestring(t): 255 ot_strings = [ 256 "DMU_OT_NONE", 257 # general: 258 "DMU_OT_OBJECT_DIRECTORY", 259 "DMU_OT_OBJECT_ARRAY", 260 "DMU_OT_PACKED_NVLIST", 261 "DMU_OT_PACKED_NVLIST_SIZE", 262 "DMU_OT_BPOBJ", 263 "DMU_OT_BPOBJ_HDR", 264 # spa: 265 
"DMU_OT_SPACE_MAP_HEADER", 266 "DMU_OT_SPACE_MAP", 267 # zil: 268 "DMU_OT_INTENT_LOG", 269 # dmu: 270 "DMU_OT_DNODE", 271 "DMU_OT_OBJSET", 272 # dsl: 273 "DMU_OT_DSL_DIR", 274 "DMU_OT_DSL_DIR_CHILD_MAP", 275 "DMU_OT_DSL_DS_SNAP_MAP", 276 "DMU_OT_DSL_PROPS", 277 "DMU_OT_DSL_DATASET", 278 # zpl: 279 "DMU_OT_ZNODE", 280 "DMU_OT_OLDACL", 281 "DMU_OT_PLAIN_FILE_CONTENTS", 282 "DMU_OT_DIRECTORY_CONTENTS", 283 "DMU_OT_MASTER_NODE", 284 "DMU_OT_UNLINKED_SET", 285 # zvol: 286 "DMU_OT_ZVOL", 287 "DMU_OT_ZVOL_PROP", 288 # other; for testing only! 289 "DMU_OT_PLAIN_OTHER", 290 "DMU_OT_UINT64_OTHER", 291 "DMU_OT_ZAP_OTHER", 292 # new object types: 293 "DMU_OT_ERROR_LOG", 294 "DMU_OT_SPA_HISTORY", 295 "DMU_OT_SPA_HISTORY_OFFSETS", 296 "DMU_OT_POOL_PROPS", 297 "DMU_OT_DSL_PERMS", 298 "DMU_OT_ACL", 299 "DMU_OT_SYSACL", 300 "DMU_OT_FUID", 301 "DMU_OT_FUID_SIZE", 302 "DMU_OT_NEXT_CLONES", 303 "DMU_OT_SCAN_QUEUE", 304 "DMU_OT_USERGROUP_USED", 305 "DMU_OT_USERGROUP_QUOTA", 306 "DMU_OT_USERREFS", 307 "DMU_OT_DDT_ZAP", 308 "DMU_OT_DDT_STATS", 309 "DMU_OT_SA", 310 "DMU_OT_SA_MASTER_NODE", 311 "DMU_OT_SA_ATTR_REGISTRATION", 312 "DMU_OT_SA_ATTR_LAYOUTS", 313 "DMU_OT_SCAN_XLATE", 314 "DMU_OT_DEDUP", 315 "DMU_OT_DEADLIST", 316 "DMU_OT_DEADLIST_HDR", 317 "DMU_OT_DSL_CLONES", 318 "DMU_OT_BPOBJ_SUBOBJ"] 319 otn_strings = { 320 0x80: "DMU_OTN_UINT8_DATA", 321 0xc0: "DMU_OTN_UINT8_METADATA", 322 0x81: "DMU_OTN_UINT16_DATA", 323 0xc1: "DMU_OTN_UINT16_METADATA", 324 0x82: "DMU_OTN_UINT32_DATA", 325 0xc2: "DMU_OTN_UINT32_METADATA", 326 0x83: "DMU_OTN_UINT64_DATA", 327 0xc3: "DMU_OTN_UINT64_METADATA", 328 0x84: "DMU_OTN_ZAP_DATA", 329 0xc4: "DMU_OTN_ZAP_METADATA", 330 0xa0: "DMU_OTN_UINT8_ENC_DATA", 331 0xe0: "DMU_OTN_UINT8_ENC_METADATA", 332 0xa1: "DMU_OTN_UINT16_ENC_DATA", 333 0xe1: "DMU_OTN_UINT16_ENC_METADATA", 334 0xa2: "DMU_OTN_UINT32_ENC_DATA", 335 0xe2: "DMU_OTN_UINT32_ENC_METADATA", 336 0xa3: "DMU_OTN_UINT64_ENC_DATA", 337 0xe3: "DMU_OTN_UINT64_ENC_METADATA", 338 0xa4: 
"DMU_OTN_ZAP_ENC_DATA", 339 0xe4: "DMU_OTN_ZAP_ENC_METADATA"} 340 341 # If "-rr" option is used, don't convert to string representation 342 if raw > 1: 343 return "%i" % t 344 345 try: 346 if t < len(ot_strings): 347 return ot_strings[t] 348 else: 349 return otn_strings[t] 350 except (IndexError, KeyError): 351 return "(UNKNOWN)" 352 353 354def get_compstring(c): 355 comp_strings = ["ZIO_COMPRESS_INHERIT", "ZIO_COMPRESS_ON", 356 "ZIO_COMPRESS_OFF", "ZIO_COMPRESS_LZJB", 357 "ZIO_COMPRESS_EMPTY", "ZIO_COMPRESS_GZIP_1", 358 "ZIO_COMPRESS_GZIP_2", "ZIO_COMPRESS_GZIP_3", 359 "ZIO_COMPRESS_GZIP_4", "ZIO_COMPRESS_GZIP_5", 360 "ZIO_COMPRESS_GZIP_6", "ZIO_COMPRESS_GZIP_7", 361 "ZIO_COMPRESS_GZIP_8", "ZIO_COMPRESS_GZIP_9", 362 "ZIO_COMPRESS_ZLE", "ZIO_COMPRESS_LZ4", 363 "ZIO_COMPRESS_ZSTD", "ZIO_COMPRESS_FUNCTION"] 364 365 # If "-rr" option is used, don't convert to string representation 366 if raw > 1: 367 return "%i" % c 368 369 try: 370 return comp_strings[c] 371 except IndexError: 372 return "%i" % c 373 374 375def parse_line(line, labels): 376 global hdr 377 378 new = dict() 379 val = None 380 for col in hdr: 381 # These are "special" fields computed in the update_dict 382 # function, prevent KeyError exception on labels[col] for these. 
383 if col not in ['bonus', 'cached', 'direct', 'indirect', 'spill']: 384 val = line[labels[col]] 385 386 if col in ['pool', 'flags']: 387 new[col] = str(val) 388 elif col in ['dtype', 'btype']: 389 new[col] = get_typestring(int(val)) 390 elif col in ['l2_comp']: 391 new[col] = get_compstring(int(val)) 392 else: 393 new[col] = int(val) 394 395 return new 396 397 398def update_dict(d, k, line, labels): 399 pool = line[labels['pool']] 400 objset = line[labels['objset']] 401 key = line[labels[k]] 402 403 dbsize = int(line[labels['dbsize']]) 404 usize = int(line[labels['usize']]) 405 blkid = int(line[labels['blkid']]) 406 level = int(line[labels['level']]) 407 408 if pool not in d: 409 d[pool] = dict() 410 411 if objset not in d[pool]: 412 d[pool][objset] = dict() 413 414 if key not in d[pool][objset]: 415 d[pool][objset][key] = parse_line(line, labels) 416 d[pool][objset][key]['bonus'] = 0 417 d[pool][objset][key]['cached'] = 0 418 d[pool][objset][key]['direct'] = 0 419 d[pool][objset][key]['indirect'] = 0 420 d[pool][objset][key]['spill'] = 0 421 422 d[pool][objset][key]['cached'] += dbsize + usize 423 424 if blkid == -1: 425 d[pool][objset][key]['bonus'] += dbsize 426 elif blkid == -2: 427 d[pool][objset][key]['spill'] += dbsize 428 else: 429 if level == 0: 430 d[pool][objset][key]['direct'] += dbsize 431 else: 432 d[pool][objset][key]['indirect'] += dbsize 433 434 return d 435 436 437def skip_line(vals, filters): 438 ''' 439 Determines if a line should be skipped during printing 440 based on a set of filters 441 ''' 442 if len(filters) == 0: 443 return False 444 445 for key in vals: 446 if key in filters: 447 val = prettynum(cols[key][0], cols[key][1], vals[key]).strip() 448 # we want a full match here 449 if re.match("(?:" + filters[key] + r")\Z", val) is None: 450 return True 451 452 return False 453 454 455def print_dict(d, filters, noheader): 456 if not noheader: 457 print_header() 458 for pool in list(d.keys()): 459 for objset in list(d[pool].keys()): 460 for 
v in list(d[pool][objset].values()): 461 if not skip_line(v, filters): 462 print_values(v) 463 464 465def dnodes_build_dict(filehandle): 466 labels = dict() 467 dnodes = dict() 468 469 # First 3 lines are header information, skip the first two 470 for i in range(2): 471 next(filehandle) 472 473 # The third line contains the labels and index locations 474 for i, v in enumerate(next(filehandle).split()): 475 labels[v] = i 476 477 # The rest of the file is buffer information 478 for line in filehandle: 479 update_dict(dnodes, 'object', line.split(), labels) 480 481 return dnodes 482 483 484def types_build_dict(filehandle): 485 labels = dict() 486 types = dict() 487 488 # First 3 lines are header information, skip the first two 489 for i in range(2): 490 next(filehandle) 491 492 # The third line contains the labels and index locations 493 for i, v in enumerate(next(filehandle).split()): 494 labels[v] = i 495 496 # The rest of the file is buffer information 497 for line in filehandle: 498 update_dict(types, 'dtype', line.split(), labels) 499 500 return types 501 502 503def buffers_print_all(filehandle, filters, noheader): 504 labels = dict() 505 506 # First 3 lines are header information, skip the first two 507 for i in range(2): 508 next(filehandle) 509 510 # The third line contains the labels and index locations 511 for i, v in enumerate(next(filehandle).split()): 512 labels[v] = i 513 514 if not noheader: 515 print_header() 516 517 # The rest of the file is buffer information 518 for line in filehandle: 519 vals = parse_line(line.split(), labels) 520 if not skip_line(vals, filters): 521 print_values(vals) 522 523 524def main(): 525 global hdr 526 global sep 527 global raw 528 529 desired_cols = None 530 bflag = False 531 dflag = False 532 hflag = False 533 ifile = None 534 ofile = None 535 tflag = False 536 vflag = False 537 xflag = False 538 nflag = False 539 filters = dict() 540 541 try: 542 opts, args = getopt.getopt( 543 sys.argv[1:], 544 "bdf:hi:o:rs:tvxF:n", 
545 [ 546 "buffers", 547 "dnodes", 548 "columns", 549 "help", 550 "infile", 551 "outfile", 552 "separator", 553 "types", 554 "verbose", 555 "extended", 556 "filter" 557 ] 558 ) 559 except getopt.error: 560 usage() 561 opts = None 562 563 for opt, arg in opts: 564 if opt in ('-b', '--buffers'): 565 bflag = True 566 if opt in ('-d', '--dnodes'): 567 dflag = True 568 if opt in ('-f', '--columns'): 569 desired_cols = arg 570 if opt in ('-h', '--help'): 571 hflag = True 572 if opt in ('-i', '--infile'): 573 ifile = arg 574 if opt in ('-o', '--outfile'): 575 ofile = arg 576 if opt in ('-r', '--raw'): 577 raw += 1 578 if opt in ('-s', '--separator'): 579 sep = arg 580 if opt in ('-t', '--types'): 581 tflag = True 582 if opt in ('-v', '--verbose'): 583 vflag = True 584 if opt in ('-x', '--extended'): 585 xflag = True 586 if opt in ('-n', '--noheader'): 587 nflag = True 588 if opt in ('-F', '--filter'): 589 fils = [x.strip() for x in arg.split(",")] 590 591 for fil in fils: 592 f = [x.strip() for x in fil.split("=")] 593 594 if len(f) != 2: 595 sys.stderr.write("Invalid filter '%s'.\n" % fil) 596 sys.exit(1) 597 598 if f[0] not in cols: 599 sys.stderr.write("Invalid field '%s' in filter.\n" % f[0]) 600 sys.exit(1) 601 602 if f[0] in filters: 603 sys.stderr.write("Field '%s' specified multiple times in " 604 "filter.\n" % f[0]) 605 sys.exit(1) 606 607 try: 608 re.compile("(?:" + f[1] + r")\Z") 609 except re.error: 610 sys.stderr.write("Invalid regex for field '%s' in " 611 "filter.\n" % f[0]) 612 sys.exit(1) 613 614 filters[f[0]] = f[1] 615 616 if hflag or (xflag and desired_cols): 617 usage() 618 619 if vflag: 620 detailed_usage() 621 622 # Ensure at most only one of b, d, or t flags are set 623 if (bflag and dflag) or (bflag and tflag) or (dflag and tflag): 624 usage() 625 626 if bflag: 627 hdr = bxhdr if xflag else bhdr 628 elif tflag: 629 hdr = txhdr if xflag else thdr 630 else: # Even if dflag is False, it's the default if none set 631 dflag = True 632 hdr = dxhdr if 
xflag else dhdr 633 634 if desired_cols: 635 hdr = desired_cols.split(",") 636 637 invalid = [] 638 incompat = [] 639 for ele in hdr: 640 if ele not in cols: 641 invalid.append(ele) 642 elif ((bflag and bincompat and ele in bincompat) or 643 (dflag and dincompat and ele in dincompat) or 644 (tflag and tincompat and ele in tincompat)): 645 incompat.append(ele) 646 647 if len(invalid) > 0: 648 sys.stderr.write("Invalid column definition! -- %s\n" % invalid) 649 usage() 650 651 if len(incompat) > 0: 652 sys.stderr.write("Incompatible field specified! -- %s\n" % 653 incompat) 654 usage() 655 656 if ofile: 657 try: 658 tmp = open(ofile, "w") 659 sys.stdout = tmp 660 661 except IOError: 662 sys.stderr.write("Cannot open %s for writing\n" % ofile) 663 sys.exit(1) 664 665 if not ifile: 666 ifile = default_ifile() 667 668 if ifile != "-": 669 try: 670 tmp = open(ifile, "r") 671 sys.stdin = tmp 672 except IOError: 673 sys.stderr.write("Cannot open %s for reading\n" % ifile) 674 sys.exit(1) 675 676 if bflag: 677 buffers_print_all(sys.stdin, filters, nflag) 678 679 if dflag: 680 print_dict(dnodes_build_dict(sys.stdin), filters, nflag) 681 682 if tflag: 683 print_dict(types_build_dict(sys.stdin), filters, nflag) 684 685 686if __name__ == '__main__': 687 main() 688