#!/usr/bin/env @PYTHON_SHEBANG@
#
# Print out statistics for all cached dmu buffers.  This information
# is available through the dbufs kstat and may be post-processed as
# needed by the script.
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License, Version 1.0 only
# (the "License").  You may not use this file except in compliance
# with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or https://opensource.org/licenses/CDDL-1.0.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
# Copyright (C) 2013 Lawrence Livermore National Security, LLC.
# Produced at Lawrence Livermore National Laboratory (cf, DISCLAIMER).
#
# This script must remain compatible with Python 3.6+.
#
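# A typical invocation (more examples are printed by usage() below):
#
#   dbufstat -d -f pool,object,objset,dsize,cached
#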

import sys
import getopt
import errno
import re

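# Default and extended (-x) column sets for the three display modes
# (-b per-dbuf, -d per-dnode, -t per-dnode-type), along with the fields
# that are rejected when explicitly requested via -f in each mode.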
bhdr = ["pool", "objset", "object", "level", "blkid", "offset", "dbsize"]
bxhdr = ["pool", "objset", "object", "level", "blkid", "offset", "dbsize",
         "meta", "state", "dbholds", "dbc", "list", "atype", "flags",
         "count", "asize", "access", "mru", "gmru", "mfu", "gmfu", "l2",
         "l2_dattr", "l2_asize", "l2_comp", "aholds", "dtype", "btype",
         "data_bs", "meta_bs", "bsize", "lvls", "dholds", "blocks", "dsize"]
bincompat = ["cached", "direct", "indirect", "bonus", "spill"]

dhdr = ["pool", "objset", "object", "dtype", "cached"]
dxhdr = ["pool", "objset", "object", "dtype", "btype", "data_bs", "meta_bs",
         "bsize", "lvls", "dholds", "blocks", "dsize", "cached", "direct",
         "indirect", "bonus", "spill"]
dincompat = ["level", "blkid", "offset", "dbsize", "meta", "state", "dbholds",
             "dbc", "list", "atype", "flags", "count", "asize", "access",
             "mru", "gmru", "mfu", "gmfu", "l2", "l2_dattr", "l2_asize",
             "l2_comp", "aholds"]

thdr = ["pool", "objset", "dtype", "cached"]
txhdr = ["pool", "objset", "dtype", "cached", "direct", "indirect",
         "bonus", "spill"]
tincompat = ["object", "level", "blkid", "offset", "dbsize", "meta", "state",
             "dbc", "dbholds", "list", "atype", "flags", "count", "asize",
             "access", "mru", "gmru", "mfu", "gmfu", "l2", "l2_dattr",
             "l2_asize", "l2_comp", "aholds", "btype", "data_bs", "meta_bs",
             "bsize", "lvls", "dholds", "blocks", "dsize"]

cols = {
    # hdr:        [size, scale, description]
    "pool":       [15,   -1, "pool name"],
    "objset":     [6,    -1, "dataset identification number"],
    "object":     [10,   -1, "object number"],
    "level":      [5,    -1, "indirection level of buffer"],
    "blkid":      [8,    -1, "block number of buffer"],
    "offset":     [12, 1024, "offset in object of buffer"],
    "dbsize":     [7,  1024, "size of buffer"],
    "meta":       [4,    -1, "is this buffer metadata?"],
    "state":      [5,    -1, "state of buffer (read, cached, etc)"],
    "dbholds":    [7,  1000, "number of holds on buffer"],
    "dbc":        [3,    -1, "in dbuf cache"],
    "list":       [4,    -1, "which ARC list contains this buffer"],
    "atype":      [7,    -1, "ARC header type (data or metadata)"],
    "flags":      [9,    -1, "ARC read flags"],
    "count":      [5,    -1, "ARC data count"],
    "asize":      [7,  1024, "size of this ARC buffer"],
    "access":     [10,   -1, "time this ARC buffer was last accessed"],
    "mru":        [5,  1000, "hits while on the ARC's MRU list"],
    "gmru":       [5,  1000, "hits while on the ARC's MRU ghost list"],
    "mfu":        [5,  1000, "hits while on the ARC's MFU list"],
    "gmfu":       [5,  1000, "hits while on the ARC's MFU ghost list"],
    "l2":         [5,  1000, "hits while on the L2ARC"],
    "l2_dattr":   [8,    -1, "L2ARC disk address/offset"],
    "l2_asize":   [8,  1024, "L2ARC alloc'd size (depending on compression)"],
    "l2_comp":    [21,   -1, "L2ARC compression algorithm for buffer"],
    "aholds":     [6,  1000, "number of holds on this ARC buffer"],
    "dtype":      [27,   -1, "dnode type"],
    "btype":      [27,   -1, "bonus buffer type"],
    "data_bs":    [7,  1024, "data block size"],
    "meta_bs":    [7,  1024, "metadata block size"],
    "bsize":      [6,  1024, "bonus buffer size"],
    "lvls":       [6,    -1, "number of indirection levels"],
    "dholds":     [6,  1000, "number of holds on dnode"],
    "blocks":     [8,  1000, "number of allocated blocks"],
    "dsize":      [12, 1024, "size of dnode"],
    "cached":     [6,  1024, "bytes cached for all blocks"],
    "direct":     [6,  1024, "bytes cached for direct blocks"],
    "indirect":   [8,  1024, "bytes cached for indirect blocks"],
    "bonus":      [5,  1024, "bytes cached for bonus buffer"],
    "spill":      [5,  1024, "bytes cached for spill block"],
}
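# The width and scale values above are consumed by print_header() and
# prettynum(); the descriptions are what -v prints.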

hdr = None
xhdr = None
sep = "  "  # Default separator is 2 spaces
cmd = ("Usage: dbufstat [-bdhnrtvx] [-i file] [-f fields] [-o file] "
       "[-s string] [-F filter]\n")
raw = 0


if sys.platform.startswith("freebsd"):
    import io
    # Requires py-sysctl on FreeBSD
    import sysctl

    def default_ifile():
        dbufs = sysctl.filter("kstat.zfs.misc.dbufs")[0].value
        sys.stdin = io.StringIO(dbufs)
        return "-"

elif sys.platform.startswith("linux"):
    def default_ifile():
        return "/proc/spl/kstat/zfs/dbufs"
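# On any other platform no default_ifile() is defined, so the dbufs
# kstat contents must be supplied explicitly with -i (or on stdin via
# "-i -").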


def print_incompat_helper(incompat):
    cnt = 0
    for key in sorted(incompat):
        if cnt == 0:
            sys.stderr.write("\t")
        elif cnt > 8:
            sys.stderr.write(",\n\t")
            cnt = 0
        else:
            sys.stderr.write(", ")

        sys.stderr.write("%s" % key)
        cnt += 1

    sys.stderr.write("\n\n")


def detailed_usage():
    sys.stderr.write("%s\n" % cmd)

    sys.stderr.write("Field definitions incompatible with '-b' option:\n")
    print_incompat_helper(bincompat)

    sys.stderr.write("Field definitions incompatible with '-d' option:\n")
    print_incompat_helper(dincompat)

    sys.stderr.write("Field definitions incompatible with '-t' option:\n")
    print_incompat_helper(tincompat)

    sys.stderr.write("Field definitions are as follows:\n")
    for key in sorted(cols.keys()):
        sys.stderr.write("%11s : %s\n" % (key, cols[key][2]))
    sys.stderr.write("\n")

    sys.exit(0)


def usage():
    sys.stderr.write("%s\n" % cmd)
    sys.stderr.write("\t -b : Print table of information for each dbuf\n")
    sys.stderr.write("\t -d : Print table of information for each dnode\n")
    sys.stderr.write("\t -h : Print this help message\n")
    sys.stderr.write("\t -n : Exclude header from output\n")
    sys.stderr.write("\t -r : Print raw values\n")
    sys.stderr.write("\t -t : Print table of information for each dnode type"
                     "\n")
    sys.stderr.write("\t -v : List all possible field headers and definitions"
                     "\n")
    sys.stderr.write("\t -x : Print extended stats\n")
    sys.stderr.write("\t -i : Redirect input from the specified file\n")
    sys.stderr.write("\t -f : Specify specific fields to print (see -v)\n")
    sys.stderr.write("\t -o : Redirect output to the specified file\n")
    sys.stderr.write("\t -s : Override default field separator with custom "
                     "character or string\n")
    sys.stderr.write("\t -F : Filter output by value or regex\n")
    sys.stderr.write("\nExamples:\n")
    sys.stderr.write("\tdbufstat -d -o /tmp/d.log\n")
    sys.stderr.write("\tdbufstat -t -s \",\" -o /tmp/t.log\n")
    sys.stderr.write("\tdbufstat -v\n")
    sys.stderr.write("\tdbufstat -d -f pool,object,objset,dsize,cached\n")
    sys.stderr.write("\tdbufstat -bx -F dbc=1,objset=54,pool=testpool\n")
    sys.stderr.write("\n")

    sys.exit(1)


def prettynum(sz, scale, num=0):
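    # Format "num" right-justified in a field "sz" characters wide,
    # dividing by "scale" (1024 or 1000, per the cols table) and
    # appending a K/M/G/... suffix as needed.  With scale == -1, or
    # when -r (raw) was given, the value is printed unmodified.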
    global raw

    suffix = [' ', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']
    index = 0
    save = 0

    if raw or scale == -1:
        return "%*s" % (sz, num)

    # Rounding error, return 0
    elif 0 < num < 1:
        num = 0

    while num > scale and index < 5:
        save = num
        num = num / scale
        index += 1

    if index == 0:
        return "%*d" % (sz, num)

    if (save / scale) < 10:
        return "%*.1f%s" % (sz - 1, num, suffix[index])
    else:
        return "%*d%s" % (sz - 1, num, suffix[index])


def print_values(v):
    global hdr
    global sep

    try:
        for col in hdr:
            sys.stdout.write("%s%s" % (
                prettynum(cols[col][0], cols[col][1], v[col]), sep))
        sys.stdout.write("\n")
    except IOError as e:
        if e.errno == errno.EPIPE:
            sys.exit(1)


def print_header():
    global hdr
    global sep

    try:
        for col in hdr:
            sys.stdout.write("%*s%s" % (cols[col][0], col, sep))
        sys.stdout.write("\n")
    except IOError as e:
        if e.errno == errno.EPIPE:
            sys.exit(1)


def get_typestring(t):
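    # Translate a numeric dnode/bonus type into its DMU_OT_* name.
    # Legacy types index ot_strings directly; DMU_OTN_* types are
    # looked up by value in otn_strings.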
    ot_strings = [
                    "DMU_OT_NONE",
                    # general:
                    "DMU_OT_OBJECT_DIRECTORY",
                    "DMU_OT_OBJECT_ARRAY",
                    "DMU_OT_PACKED_NVLIST",
                    "DMU_OT_PACKED_NVLIST_SIZE",
                    "DMU_OT_BPOBJ",
                    "DMU_OT_BPOBJ_HDR",
                    # spa:
                    "DMU_OT_SPACE_MAP_HEADER",
                    "DMU_OT_SPACE_MAP",
                    # zil:
                    "DMU_OT_INTENT_LOG",
                    # dmu:
                    "DMU_OT_DNODE",
                    "DMU_OT_OBJSET",
                    # dsl:
                    "DMU_OT_DSL_DIR",
                    "DMU_OT_DSL_DIR_CHILD_MAP",
                    "DMU_OT_DSL_DS_SNAP_MAP",
                    "DMU_OT_DSL_PROPS",
                    "DMU_OT_DSL_DATASET",
                    # zpl:
                    "DMU_OT_ZNODE",
                    "DMU_OT_OLDACL",
                    "DMU_OT_PLAIN_FILE_CONTENTS",
                    "DMU_OT_DIRECTORY_CONTENTS",
                    "DMU_OT_MASTER_NODE",
                    "DMU_OT_UNLINKED_SET",
                    # zvol:
                    "DMU_OT_ZVOL",
                    "DMU_OT_ZVOL_PROP",
                    # other; for testing only!
                    "DMU_OT_PLAIN_OTHER",
                    "DMU_OT_UINT64_OTHER",
                    "DMU_OT_ZAP_OTHER",
                    # new object types:
                    "DMU_OT_ERROR_LOG",
                    "DMU_OT_SPA_HISTORY",
                    "DMU_OT_SPA_HISTORY_OFFSETS",
                    "DMU_OT_POOL_PROPS",
                    "DMU_OT_DSL_PERMS",
                    "DMU_OT_ACL",
                    "DMU_OT_SYSACL",
                    "DMU_OT_FUID",
                    "DMU_OT_FUID_SIZE",
                    "DMU_OT_NEXT_CLONES",
                    "DMU_OT_SCAN_QUEUE",
                    "DMU_OT_USERGROUP_USED",
                    "DMU_OT_USERGROUP_QUOTA",
                    "DMU_OT_USERREFS",
                    "DMU_OT_DDT_ZAP",
                    "DMU_OT_DDT_STATS",
                    "DMU_OT_SA",
                    "DMU_OT_SA_MASTER_NODE",
                    "DMU_OT_SA_ATTR_REGISTRATION",
                    "DMU_OT_SA_ATTR_LAYOUTS",
                    "DMU_OT_SCAN_XLATE",
                    "DMU_OT_DEDUP",
                    "DMU_OT_DEADLIST",
                    "DMU_OT_DEADLIST_HDR",
                    "DMU_OT_DSL_CLONES",
                    "DMU_OT_BPOBJ_SUBOBJ"]
    otn_strings = {
                    0x80: "DMU_OTN_UINT8_DATA",
                    0xc0: "DMU_OTN_UINT8_METADATA",
                    0x81: "DMU_OTN_UINT16_DATA",
                    0xc1: "DMU_OTN_UINT16_METADATA",
                    0x82: "DMU_OTN_UINT32_DATA",
                    0xc2: "DMU_OTN_UINT32_METADATA",
                    0x83: "DMU_OTN_UINT64_DATA",
                    0xc3: "DMU_OTN_UINT64_METADATA",
                    0x84: "DMU_OTN_ZAP_DATA",
                    0xc4: "DMU_OTN_ZAP_METADATA",
                    0xa0: "DMU_OTN_UINT8_ENC_DATA",
                    0xe0: "DMU_OTN_UINT8_ENC_METADATA",
                    0xa1: "DMU_OTN_UINT16_ENC_DATA",
                    0xe1: "DMU_OTN_UINT16_ENC_METADATA",
                    0xa2: "DMU_OTN_UINT32_ENC_DATA",
                    0xe2: "DMU_OTN_UINT32_ENC_METADATA",
                    0xa3: "DMU_OTN_UINT64_ENC_DATA",
                    0xe3: "DMU_OTN_UINT64_ENC_METADATA",
                    0xa4: "DMU_OTN_ZAP_ENC_DATA",
                    0xe4: "DMU_OTN_ZAP_ENC_METADATA"}

    # If "-rr" option is used, don't convert to string representation
    if raw > 1:
        return "%i" % t

    try:
        if t < len(ot_strings):
            return ot_strings[t]
        else:
            return otn_strings[t]
    except (IndexError, KeyError):
        return "(UNKNOWN)"


def get_compstring(c):
    comp_strings = ["ZIO_COMPRESS_INHERIT", "ZIO_COMPRESS_ON",
                    "ZIO_COMPRESS_OFF",     "ZIO_COMPRESS_LZJB",
                    "ZIO_COMPRESS_EMPTY",   "ZIO_COMPRESS_GZIP_1",
                    "ZIO_COMPRESS_GZIP_2",  "ZIO_COMPRESS_GZIP_3",
                    "ZIO_COMPRESS_GZIP_4",  "ZIO_COMPRESS_GZIP_5",
                    "ZIO_COMPRESS_GZIP_6",  "ZIO_COMPRESS_GZIP_7",
                    "ZIO_COMPRESS_GZIP_8",  "ZIO_COMPRESS_GZIP_9",
                    "ZIO_COMPRESS_ZLE",     "ZIO_COMPRESS_LZ4",
                    "ZIO_COMPRESS_ZSTD",    "ZIO_COMPRESS_FUNCTION"]

    # If "-rr" option is used, don't convert to string representation
    if raw > 1:
        return "%i" % c

    try:
        return comp_strings[c]
    except IndexError:
        return "%i" % c


def parse_line(line, labels):
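    # Convert one whitespace-split dbufs line into a dict keyed by the
    # columns currently selected in hdr, using the label -> index map
    # built from the kstat header.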
    global hdr

    new = dict()
    val = None
    for col in hdr:
        # These are "special" fields computed in the update_dict
        # function; they are not present in the kstat, so do not index
        # labels[col] for them (it would raise a KeyError).
        if col not in ['bonus', 'cached', 'direct', 'indirect', 'spill']:
            val = line[labels[col]]

        if col in ['pool', 'flags']:
            new[col] = str(val)
        elif col in ['dtype', 'btype']:
            new[col] = get_typestring(int(val))
        elif col in ['l2_comp']:
            new[col] = get_compstring(int(val))
        else:
            new[col] = int(val)

    return new


def update_dict(d, k, line, labels):
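    # Accumulate per-(pool, objset, k) statistics: every dbuf adds its
    # dbsize to 'cached', and to 'bonus' (blkid == -1), 'spill'
    # (blkid == -2), 'direct' (level 0) or 'indirect' (level > 0).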
    pool = line[labels['pool']]
    objset = line[labels['objset']]
    key = line[labels[k]]

    dbsize = int(line[labels['dbsize']])
    blkid = int(line[labels['blkid']])
    level = int(line[labels['level']])

    if pool not in d:
        d[pool] = dict()

    if objset not in d[pool]:
        d[pool][objset] = dict()

    if key not in d[pool][objset]:
        d[pool][objset][key] = parse_line(line, labels)
        d[pool][objset][key]['bonus'] = 0
        d[pool][objset][key]['cached'] = 0
        d[pool][objset][key]['direct'] = 0
        d[pool][objset][key]['indirect'] = 0
        d[pool][objset][key]['spill'] = 0

    d[pool][objset][key]['cached'] += dbsize

    if blkid == -1:
        d[pool][objset][key]['bonus'] += dbsize
    elif blkid == -2:
        d[pool][objset][key]['spill'] += dbsize
    else:
        if level == 0:
            d[pool][objset][key]['direct'] += dbsize
        else:
            d[pool][objset][key]['indirect'] += dbsize

    return d


def skip_line(vals, filters):
    '''
    Determines if a line should be skipped during printing
    based on a set of filters
    '''
    if len(filters) == 0:
        return False

    for key in vals:
        if key in filters:
            val = prettynum(cols[key][0], cols[key][1], vals[key]).strip()
            # we want a full match here
            if re.match("(?:" + filters[key] + r")\Z", val) is None:
                return True

    return False


def print_dict(d, filters, noheader):
    if not noheader:
        print_header()
    for pool in list(d.keys()):
        for objset in list(d[pool].keys()):
            for v in list(d[pool][objset].values()):
                if not skip_line(v, filters):
                    print_values(v)


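# The dbufs kstat starts with three header lines, the third of which
# holds the column labels.  dnodes_build_dict() and types_build_dict()
# aggregate the remaining lines per object and per dnode type
# respectively, while buffers_print_all() prints each line as-is.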
def dnodes_build_dict(filehandle):
    labels = dict()
    dnodes = dict()

    # First 3 lines are header information, skip the first two
    for i in range(2):
        next(filehandle)

    # The third line contains the labels and index locations
    for i, v in enumerate(next(filehandle).split()):
        labels[v] = i

    # The rest of the file is buffer information
    for line in filehandle:
        update_dict(dnodes, 'object', line.split(), labels)

    return dnodes


def types_build_dict(filehandle):
    labels = dict()
    types = dict()

    # First 3 lines are header information, skip the first two
    for i in range(2):
        next(filehandle)

    # The third line contains the labels and index locations
    for i, v in enumerate(next(filehandle).split()):
        labels[v] = i

    # The rest of the file is buffer information
    for line in filehandle:
        update_dict(types, 'dtype', line.split(), labels)

    return types


def buffers_print_all(filehandle, filters, noheader):
    labels = dict()

    # First 3 lines are header information, skip the first two
    for i in range(2):
        next(filehandle)

    # The third line contains the labels and index locations
    for i, v in enumerate(next(filehandle).split()):
        labels[v] = i

    if not noheader:
        print_header()

    # The rest of the file is buffer information
    for line in filehandle:
        vals = parse_line(line.split(), labels)
        if not skip_line(vals, filters):
            print_values(vals)


def main():
    global hdr
    global sep
    global raw

    desired_cols = None
    bflag = False
    dflag = False
    hflag = False
    ifile = None
    ofile = None
    tflag = False
    vflag = False
    xflag = False
    nflag = False
    filters = dict()

    try:
        opts, args = getopt.getopt(
            sys.argv[1:],
            "bdf:hi:o:rs:tvxF:n",
            [
                "buffers",
                "dnodes",
                "columns",
                "help",
                "infile",
                "outfile",
                "separator",
                "types",
                "verbose",
                "extended",
                "filter"
            ]
        )
    except getopt.error:
        usage()
        opts = None

    for opt, arg in opts:
        if opt in ('-b', '--buffers'):
            bflag = True
        if opt in ('-d', '--dnodes'):
            dflag = True
        if opt in ('-f', '--columns'):
            desired_cols = arg
        if opt in ('-h', '--help'):
            hflag = True
        if opt in ('-i', '--infile'):
            ifile = arg
        if opt in ('-o', '--outfile'):
            ofile = arg
        if opt in ('-r', '--raw'):
            raw += 1
        if opt in ('-s', '--separator'):
            sep = arg
        if opt in ('-t', '--types'):
            tflag = True
        if opt in ('-v', '--verbose'):
            vflag = True
        if opt in ('-x', '--extended'):
            xflag = True
        if opt in ('-n', '--noheader'):
            nflag = True
        if opt in ('-F', '--filter'):
            fils = [x.strip() for x in arg.split(",")]

            for fil in fils:
                f = [x.strip() for x in fil.split("=")]

                if len(f) != 2:
                    sys.stderr.write("Invalid filter '%s'.\n" % fil)
                    sys.exit(1)

                if f[0] not in cols:
                    sys.stderr.write("Invalid field '%s' in filter.\n" % f[0])
                    sys.exit(1)

                if f[0] in filters:
                    sys.stderr.write("Field '%s' specified multiple times in "
                                     "filter.\n" % f[0])
                    sys.exit(1)

                try:
                    re.compile("(?:" + f[1] + r")\Z")
                except re.error:
                    sys.stderr.write("Invalid regex for field '%s' in "
                                     "filter.\n" % f[0])
                    sys.exit(1)

                filters[f[0]] = f[1]

    if hflag or (xflag and desired_cols):
        usage()

    if vflag:
        detailed_usage()

    # Ensure that at most one of the -b, -d, or -t flags is set
    if (bflag and dflag) or (bflag and tflag) or (dflag and tflag):
        usage()

    if bflag:
        hdr = bxhdr if xflag else bhdr
    elif tflag:
        hdr = txhdr if xflag else thdr
    else:  # Even if dflag is False, it's the default if none set
        dflag = True
        hdr = dxhdr if xflag else dhdr

    if desired_cols:
        hdr = desired_cols.split(",")

        invalid = []
        incompat = []
        for ele in hdr:
            if ele not in cols:
                invalid.append(ele)
            elif ((bflag and bincompat and ele in bincompat) or
                  (dflag and dincompat and ele in dincompat) or
                  (tflag and tincompat and ele in tincompat)):
                incompat.append(ele)

        if len(invalid) > 0:
            sys.stderr.write("Invalid column definition! -- %s\n" % invalid)
            usage()

        if len(incompat) > 0:
            sys.stderr.write("Incompatible field specified! -- %s\n" %
                             incompat)
            usage()

    if ofile:
        try:
            tmp = open(ofile, "w")
            sys.stdout = tmp

        except IOError:
            sys.stderr.write("Cannot open %s for writing\n" % ofile)
            sys.exit(1)

    if not ifile:
        ifile = default_ifile()

    if ifile != "-":
        try:
            tmp = open(ifile, "r")
            sys.stdin = tmp
        except IOError:
            sys.stderr.write("Cannot open %s for reading\n" % ifile)
            sys.exit(1)

    if bflag:
        buffers_print_all(sys.stdin, filters, nflag)

    if dflag:
        print_dict(dnodes_build_dict(sys.stdin), filters, nflag)

    if tflag:
        print_dict(types_build_dict(sys.stdin), filters, nflag)


if __name__ == '__main__':
    main()