#!/usr/bin/env @PYTHON_SHEBANG@
#
# Print out statistics for all cached dmu buffers.  This information
# is available through the dbufs kstat and may be post-processed as
# needed by the script.
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License, Version 1.0 only
# (the "License").  You may not use this file except in compliance
# with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or https://opensource.org/licenses/CDDL-1.0.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
# Copyright (C) 2013 Lawrence Livermore National Security, LLC.
# Produced at Lawrence Livermore National Laboratory (cf, DISCLAIMER).
#
# This script must remain compatible with Python 3.6+.
#

import sys
import getopt
import errno
import re

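# Default and extended (-x) column sets for each report mode (-b dbuf,
# -d dnode, -t dnode type), along with the fields that are incompatible
# with each mode.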
bhdr = ["pool", "objset", "object", "level", "blkid", "offset", "dbsize"]
bxhdr = ["pool", "objset", "object", "level", "blkid", "offset", "dbsize",
         "usize", "meta", "state", "dbholds", "dbc", "list", "atype", "flags",
         "count", "asize", "access", "mru", "gmru", "mfu", "gmfu", "l2",
         "l2_dattr", "l2_asize", "l2_comp", "aholds", "dtype", "btype",
         "data_bs", "meta_bs", "bsize", "lvls", "dholds", "blocks", "dsize"]
bincompat = ["cached", "direct", "indirect", "bonus", "spill"]

dhdr = ["pool", "objset", "object", "dtype", "cached"]
dxhdr = ["pool", "objset", "object", "dtype", "btype", "data_bs", "meta_bs",
         "bsize", "lvls", "dholds", "blocks", "dsize", "cached", "direct",
         "indirect", "bonus", "spill"]
dincompat = ["level", "blkid", "offset", "dbsize", "usize", "meta", "state",
             "dbholds", "dbc", "list", "atype", "flags", "count", "asize",
             "access", "mru", "gmru", "mfu", "gmfu", "l2", "l2_dattr",
             "l2_asize", "l2_comp", "aholds"]

thdr = ["pool", "objset", "dtype", "cached"]
txhdr = ["pool", "objset", "dtype", "cached", "direct", "indirect",
         "bonus", "spill"]
tincompat = ["object", "level", "blkid", "offset", "dbsize", "usize", "meta",
             "state", "dbc", "dbholds", "list", "atype", "flags", "count",
             "asize", "access", "mru", "gmru", "mfu", "gmfu", "l2", "l2_dattr",
             "l2_asize", "l2_comp", "aholds", "btype", "data_bs", "meta_bs",
             "bsize", "lvls", "dholds", "blocks", "dsize"]

cols = {
    # hdr:        [size, scale, description]
    "pool":       [15,   -1, "pool name"],
    "objset":     [6,    -1, "dataset identification number"],
    "object":     [10,   -1, "object number"],
    "level":      [5,    -1, "indirection level of buffer"],
    "blkid":      [8,    -1, "block number of buffer"],
    "offset":     [12, 1024, "offset in object of buffer"],
    "dbsize":     [7,  1024, "size of buffer"],
    "usize":      [7,  1024, "size of attached user data"],
    "meta":       [4,    -1, "is this buffer metadata?"],
    "state":      [5,    -1, "state of buffer (read, cached, etc)"],
    "dbholds":    [7,  1000, "number of holds on buffer"],
    "dbc":        [3,    -1, "in dbuf cache"],
    "list":       [4,    -1, "which ARC list contains this buffer"],
    "atype":      [7,    -1, "ARC header type (data or metadata)"],
    "flags":      [9,    -1, "ARC read flags"],
    "count":      [5,    -1, "ARC data count"],
    "asize":      [7,  1024, "size of this ARC buffer"],
    "access":     [10,   -1, "time this ARC buffer was last accessed"],
    "mru":        [5,  1000, "hits while on the ARC's MRU list"],
    "gmru":       [5,  1000, "hits while on the ARC's MRU ghost list"],
    "mfu":        [5,  1000, "hits while on the ARC's MFU list"],
    "gmfu":       [5,  1000, "hits while on the ARC's MFU ghost list"],
    "l2":         [5,  1000, "hits while on the L2ARC"],
    "l2_dattr":   [8,    -1, "L2ARC disk address/offset"],
    "l2_asize":   [8,  1024, "L2ARC alloc'd size (depending on compression)"],
    "l2_comp":    [21,   -1, "L2ARC compression algorithm for buffer"],
    "aholds":     [6,  1000, "number of holds on this ARC buffer"],
    "dtype":      [27,   -1, "dnode type"],
    "btype":      [27,   -1, "bonus buffer type"],
    "data_bs":    [7,  1024, "data block size"],
    "meta_bs":    [7,  1024, "metadata block size"],
    "bsize":      [6,  1024, "bonus buffer size"],
    "lvls":       [6,    -1, "number of indirection levels"],
    "dholds":     [6,  1000, "number of holds on dnode"],
    "blocks":     [8,  1000, "number of allocated blocks"],
    "dsize":      [12, 1024, "size of dnode"],
    "cached":     [6,  1024, "bytes cached for all blocks"],
    "direct":     [6,  1024, "bytes cached for direct blocks"],
    "indirect":   [8,  1024, "bytes cached for indirect blocks"],
    "bonus":      [5,  1024, "bytes cached for bonus buffer"],
    "spill":      [5,  1024, "bytes cached for spill block"],
}

hdr = None
xhdr = None
sep = "  "  # Default separator is 2 spaces
cmd = ("Usage: dbufstat [-bdhnrtvx] [-i file] [-f fields] [-o file] "
       "[-s string] [-F filter]\n")
raw = 0


if sys.platform.startswith("freebsd"):
    import io
    # Requires py-sysctl on FreeBSD
    import sysctl

    def default_ifile():
        dbufs = sysctl.filter("kstat.zfs.misc.dbufs")[0].value
        sys.stdin = io.StringIO(dbufs)
        return "-"

elif sys.platform.startswith("linux"):
    def default_ifile():
        return "/proc/spl/kstat/zfs/dbufs"


def print_incompat_helper(incompat):
    cnt = 0
    for key in sorted(incompat):
        if cnt == 0:
            sys.stderr.write("\t")
        elif cnt > 8:
            sys.stderr.write(",\n\t")
            cnt = 0
        else:
            sys.stderr.write(", ")

        sys.stderr.write("%s" % key)
        cnt += 1

    sys.stderr.write("\n\n")


def detailed_usage():
    sys.stderr.write("%s\n" % cmd)

    sys.stderr.write("Field definitions incompatible with '-b' option:\n")
    print_incompat_helper(bincompat)

    sys.stderr.write("Field definitions incompatible with '-d' option:\n")
    print_incompat_helper(dincompat)

    sys.stderr.write("Field definitions incompatible with '-t' option:\n")
    print_incompat_helper(tincompat)

    sys.stderr.write("Field definitions are as follows:\n")
    for key in sorted(cols.keys()):
        sys.stderr.write("%11s : %s\n" % (key, cols[key][2]))
    sys.stderr.write("\n")

    sys.exit(0)


def usage():
    sys.stderr.write("%s\n" % cmd)
    sys.stderr.write("\t -b : Print table of information for each dbuf\n")
    sys.stderr.write("\t -d : Print table of information for each dnode\n")
    sys.stderr.write("\t -h : Print this help message\n")
    sys.stderr.write("\t -n : Exclude header from output\n")
    sys.stderr.write("\t -r : Print raw values\n")
    sys.stderr.write("\t -t : Print table of information for each dnode type"
                     "\n")
    sys.stderr.write("\t -v : List all possible field headers and definitions"
                     "\n")
    sys.stderr.write("\t -x : Print extended stats\n")
    sys.stderr.write("\t -i : Redirect input from the specified file\n")
    sys.stderr.write("\t -f : Specify specific fields to print (see -v)\n")
    sys.stderr.write("\t -o : Redirect output to the specified file\n")
    sys.stderr.write("\t -s : Override default field separator with custom "
                     "character or string\n")
    sys.stderr.write("\t -F : Filter output by value or regex\n")
    sys.stderr.write("\nExamples:\n")
    sys.stderr.write("\tdbufstat -d -o /tmp/d.log\n")
    sys.stderr.write("\tdbufstat -t -s \",\" -o /tmp/t.log\n")
    sys.stderr.write("\tdbufstat -v\n")
    sys.stderr.write("\tdbufstat -d -f pool,object,objset,dsize,cached\n")
    sys.stderr.write("\tdbufstat -bx -F dbc=1,objset=54,pool=testpool\n")
    sys.stderr.write("\n")

    sys.exit(1)


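# Format a numeric value for a column "sz" characters wide, scaling it by
# powers of "scale" (1000 or 1024) and appending a unit suffix (K, M, G, ...).
# Raw mode (-r) and unscaled columns (scale == -1) are printed verbatim.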
def prettynum(sz, scale, num=0):
    global raw

    suffix = [' ', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']
    index = 0
    save = 0

    if raw or scale == -1:
        return "%*s" % (sz, num)

    # Rounding error, return 0
    elif 0 < num < 1:
        num = 0

    while num > scale and index < 5:
        save = num
        num = num / scale
        index += 1

    if index == 0:
        return "%*d" % (sz, num)

    if (save / scale) < 10:
        return "%*.1f%s" % (sz - 1, num, suffix[index])
    else:
        return "%*d%s" % (sz - 1, num, suffix[index])


def print_values(v):
    global hdr
    global sep

    try:
        for col in hdr:
            sys.stdout.write("%s%s" % (
                prettynum(cols[col][0], cols[col][1], v[col]), sep))
        sys.stdout.write("\n")
    except IOError as e:
        if e.errno == errno.EPIPE:
            sys.exit(1)


def print_header():
    global hdr
    global sep

    try:
        for col in hdr:
            sys.stdout.write("%*s%s" % (cols[col][0], col, sep))
        sys.stdout.write("\n")
    except IOError as e:
        if e.errno == errno.EPIPE:
            sys.exit(1)


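# Map a numeric dmu object type to its symbolic name: values below the table
# length use the static DMU_OT_* table, anything else is looked up in the
# DMU_OTN_* (dynamic object type) table.  With "-rr" the raw value is kept.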
def get_typestring(t):
    ot_strings = [
                    "DMU_OT_NONE",
                    # general:
                    "DMU_OT_OBJECT_DIRECTORY",
                    "DMU_OT_OBJECT_ARRAY",
                    "DMU_OT_PACKED_NVLIST",
                    "DMU_OT_PACKED_NVLIST_SIZE",
                    "DMU_OT_BPOBJ",
                    "DMU_OT_BPOBJ_HDR",
                    # spa:
                    "DMU_OT_SPACE_MAP_HEADER",
                    "DMU_OT_SPACE_MAP",
                    # zil:
                    "DMU_OT_INTENT_LOG",
                    # dmu:
                    "DMU_OT_DNODE",
                    "DMU_OT_OBJSET",
                    # dsl:
                    "DMU_OT_DSL_DIR",
                    "DMU_OT_DSL_DIR_CHILD_MAP",
                    "DMU_OT_DSL_DS_SNAP_MAP",
                    "DMU_OT_DSL_PROPS",
                    "DMU_OT_DSL_DATASET",
                    # zpl:
                    "DMU_OT_ZNODE",
                    "DMU_OT_OLDACL",
                    "DMU_OT_PLAIN_FILE_CONTENTS",
                    "DMU_OT_DIRECTORY_CONTENTS",
                    "DMU_OT_MASTER_NODE",
                    "DMU_OT_UNLINKED_SET",
                    # zvol:
                    "DMU_OT_ZVOL",
                    "DMU_OT_ZVOL_PROP",
                    # other; for testing only!
                    "DMU_OT_PLAIN_OTHER",
                    "DMU_OT_UINT64_OTHER",
                    "DMU_OT_ZAP_OTHER",
                    # new object types:
                    "DMU_OT_ERROR_LOG",
                    "DMU_OT_SPA_HISTORY",
                    "DMU_OT_SPA_HISTORY_OFFSETS",
                    "DMU_OT_POOL_PROPS",
                    "DMU_OT_DSL_PERMS",
                    "DMU_OT_ACL",
                    "DMU_OT_SYSACL",
                    "DMU_OT_FUID",
                    "DMU_OT_FUID_SIZE",
                    "DMU_OT_NEXT_CLONES",
                    "DMU_OT_SCAN_QUEUE",
                    "DMU_OT_USERGROUP_USED",
                    "DMU_OT_USERGROUP_QUOTA",
                    "DMU_OT_USERREFS",
                    "DMU_OT_DDT_ZAP",
                    "DMU_OT_DDT_STATS",
                    "DMU_OT_SA",
                    "DMU_OT_SA_MASTER_NODE",
                    "DMU_OT_SA_ATTR_REGISTRATION",
                    "DMU_OT_SA_ATTR_LAYOUTS",
                    "DMU_OT_SCAN_XLATE",
                    "DMU_OT_DEDUP",
                    "DMU_OT_DEADLIST",
                    "DMU_OT_DEADLIST_HDR",
                    "DMU_OT_DSL_CLONES",
                    "DMU_OT_BPOBJ_SUBOBJ"]
    otn_strings = {
                    0x80: "DMU_OTN_UINT8_DATA",
                    0xc0: "DMU_OTN_UINT8_METADATA",
                    0x81: "DMU_OTN_UINT16_DATA",
                    0xc1: "DMU_OTN_UINT16_METADATA",
                    0x82: "DMU_OTN_UINT32_DATA",
                    0xc2: "DMU_OTN_UINT32_METADATA",
                    0x83: "DMU_OTN_UINT64_DATA",
                    0xc3: "DMU_OTN_UINT64_METADATA",
                    0x84: "DMU_OTN_ZAP_DATA",
                    0xc4: "DMU_OTN_ZAP_METADATA",
                    0xa0: "DMU_OTN_UINT8_ENC_DATA",
                    0xe0: "DMU_OTN_UINT8_ENC_METADATA",
                    0xa1: "DMU_OTN_UINT16_ENC_DATA",
                    0xe1: "DMU_OTN_UINT16_ENC_METADATA",
                    0xa2: "DMU_OTN_UINT32_ENC_DATA",
                    0xe2: "DMU_OTN_UINT32_ENC_METADATA",
                    0xa3: "DMU_OTN_UINT64_ENC_DATA",
                    0xe3: "DMU_OTN_UINT64_ENC_METADATA",
                    0xa4: "DMU_OTN_ZAP_ENC_DATA",
                    0xe4: "DMU_OTN_ZAP_ENC_METADATA"}

    # If "-rr" option is used, don't convert to string representation
    if raw > 1:
        return "%i" % t

    try:
        if t < len(ot_strings):
            return ot_strings[t]
        else:
            return otn_strings[t]
    except (IndexError, KeyError):
        return "(UNKNOWN)"


def get_compstring(c):
    comp_strings = ["ZIO_COMPRESS_INHERIT", "ZIO_COMPRESS_ON",
                    "ZIO_COMPRESS_OFF",     "ZIO_COMPRESS_LZJB",
                    "ZIO_COMPRESS_EMPTY",   "ZIO_COMPRESS_GZIP_1",
                    "ZIO_COMPRESS_GZIP_2",  "ZIO_COMPRESS_GZIP_3",
                    "ZIO_COMPRESS_GZIP_4",  "ZIO_COMPRESS_GZIP_5",
                    "ZIO_COMPRESS_GZIP_6",  "ZIO_COMPRESS_GZIP_7",
                    "ZIO_COMPRESS_GZIP_8",  "ZIO_COMPRESS_GZIP_9",
                    "ZIO_COMPRESS_ZLE",     "ZIO_COMPRESS_LZ4",
                    "ZIO_COMPRESS_ZSTD",    "ZIO_COMPRESS_FUNCTION"]

    # If "-rr" option is used, don't convert to string representation
    if raw > 1:
        return "%i" % c

    try:
        return comp_strings[c]
    except IndexError:
        return "%i" % c


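# Convert one whitespace-split dbufs kstat row into a dict keyed by the
# currently selected columns, translating type and compression fields to
# their symbolic names.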
def parse_line(line, labels):
    global hdr

    new = dict()
    val = None
    for col in hdr:
        # These are "special" fields computed in the update_dict
        # function, prevent KeyError exception on labels[col] for these.
        if col not in ['bonus', 'cached', 'direct', 'indirect', 'spill']:
            val = line[labels[col]]

        if col in ['pool', 'flags']:
            new[col] = str(val)
        elif col in ['dtype', 'btype']:
            new[col] = get_typestring(int(val))
        elif col in ['l2_comp']:
            new[col] = get_compstring(int(val))
        else:
            new[col] = int(val)

    return new


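# Aggregate a single dbuf row into the per-pool/objset summary keyed by "k"
# ('object' or 'dtype'), accumulating the cached, bonus, spill, direct and
# indirect byte totals.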
def update_dict(d, k, line, labels):
    pool = line[labels['pool']]
    objset = line[labels['objset']]
    key = line[labels[k]]

    dbsize = int(line[labels['dbsize']])
    usize = int(line[labels['usize']])
    blkid = int(line[labels['blkid']])
    level = int(line[labels['level']])

    if pool not in d:
        d[pool] = dict()

    if objset not in d[pool]:
        d[pool][objset] = dict()

    if key not in d[pool][objset]:
        d[pool][objset][key] = parse_line(line, labels)
        d[pool][objset][key]['bonus'] = 0
        d[pool][objset][key]['cached'] = 0
        d[pool][objset][key]['direct'] = 0
        d[pool][objset][key]['indirect'] = 0
        d[pool][objset][key]['spill'] = 0

    d[pool][objset][key]['cached'] += dbsize + usize

    if blkid == -1:
        d[pool][objset][key]['bonus'] += dbsize
    elif blkid == -2:
        d[pool][objset][key]['spill'] += dbsize
    else:
        if level == 0:
            d[pool][objset][key]['direct'] += dbsize
        else:
            d[pool][objset][key]['indirect'] += dbsize

    return d


def skip_line(vals, filters):
    '''
    Determines if a line should be skipped during printing
    based on a set of filters
    '''
    if len(filters) == 0:
        return False

    for key in vals:
        if key in filters:
            val = prettynum(cols[key][0], cols[key][1], vals[key]).strip()
            # we want a full match here
            if re.match("(?:" + filters[key] + r")\Z", val) is None:
                return True

    return False


def print_dict(d, filters, noheader):
    if not noheader:
        print_header()
    for pool in list(d.keys()):
        for objset in list(d[pool].keys()):
            for v in list(d[pool][objset].values()):
                if not skip_line(v, filters):
                    print_values(v)


def dnodes_build_dict(filehandle):
    labels = dict()
    dnodes = dict()

    # First 3 lines are header information, skip the first two
    for i in range(2):
        next(filehandle)

    # The third line contains the labels and index locations
    for i, v in enumerate(next(filehandle).split()):
        labels[v] = i

    # The rest of the file is buffer information
    for line in filehandle:
        update_dict(dnodes, 'object', line.split(), labels)

    return dnodes


def types_build_dict(filehandle):
    labels = dict()
    types = dict()

    # First 3 lines are header information, skip the first two
    for i in range(2):
        next(filehandle)

    # The third line contains the labels and index locations
    for i, v in enumerate(next(filehandle).split()):
        labels[v] = i

    # The rest of the file is buffer information
    for line in filehandle:
        update_dict(types, 'dtype', line.split(), labels)

    return types


def buffers_print_all(filehandle, filters, noheader):
    labels = dict()

    # First 3 lines are header information, skip the first two
    for i in range(2):
        next(filehandle)

    # The third line contains the labels and index locations
    for i, v in enumerate(next(filehandle).split()):
        labels[v] = i

    if not noheader:
        print_header()

    # The rest of the file is buffer information
    for line in filehandle:
        vals = parse_line(line.split(), labels)
        if not skip_line(vals, filters):
            print_values(vals)


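# Parse the command line, pick the report mode and column set, then read the
# dbufs kstat (or the file given with -i) and print the requested table.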
def main():
    global hdr
    global sep
    global raw

    desired_cols = None
    bflag = False
    dflag = False
    hflag = False
    ifile = None
    ofile = None
    tflag = False
    vflag = False
    xflag = False
    nflag = False
    filters = dict()

    try:
        opts, args = getopt.getopt(
            sys.argv[1:],
            "bdf:hi:o:rs:tvxF:n",
            [
                "buffers",
                "dnodes",
                "columns=",
                "help",
                "infile=",
                "outfile=",
                "raw",
                "separator=",
                "types",
                "verbose",
                "extended",
                "noheader",
                "filter="
            ]
        )
    except getopt.error:
        usage()
        opts = None

    for opt, arg in opts:
        if opt in ('-b', '--buffers'):
            bflag = True
        if opt in ('-d', '--dnodes'):
            dflag = True
        if opt in ('-f', '--columns'):
            desired_cols = arg
        if opt in ('-h', '--help'):
            hflag = True
        if opt in ('-i', '--infile'):
            ifile = arg
        if opt in ('-o', '--outfile'):
            ofile = arg
        if opt in ('-r', '--raw'):
            raw += 1
        if opt in ('-s', '--separator'):
            sep = arg
        if opt in ('-t', '--types'):
            tflag = True
        if opt in ('-v', '--verbose'):
            vflag = True
        if opt in ('-x', '--extended'):
            xflag = True
        if opt in ('-n', '--noheader'):
            nflag = True
        if opt in ('-F', '--filter'):
            fils = [x.strip() for x in arg.split(",")]

            for fil in fils:
                f = [x.strip() for x in fil.split("=")]

                if len(f) != 2:
                    sys.stderr.write("Invalid filter '%s'.\n" % fil)
                    sys.exit(1)

                if f[0] not in cols:
                    sys.stderr.write("Invalid field '%s' in filter.\n" % f[0])
                    sys.exit(1)

                if f[0] in filters:
                    sys.stderr.write("Field '%s' specified multiple times in "
                                     "filter.\n" % f[0])
                    sys.exit(1)

                try:
                    re.compile("(?:" + f[1] + r")\Z")
                except re.error:
                    sys.stderr.write("Invalid regex for field '%s' in "
                                     "filter.\n" % f[0])
                    sys.exit(1)

                filters[f[0]] = f[1]

    if hflag or (xflag and desired_cols):
        usage()

    if vflag:
        detailed_usage()

    # Ensure at most one of the b, d, or t flags is set
    if (bflag and dflag) or (bflag and tflag) or (dflag and tflag):
        usage()

    if bflag:
        hdr = bxhdr if xflag else bhdr
    elif tflag:
        hdr = txhdr if xflag else thdr
    else:  # Even if dflag is False, it's the default if none set
        dflag = True
        hdr = dxhdr if xflag else dhdr

    if desired_cols:
        hdr = desired_cols.split(",")

        invalid = []
        incompat = []
        for ele in hdr:
            if ele not in cols:
                invalid.append(ele)
            elif ((bflag and bincompat and ele in bincompat) or
                  (dflag and dincompat and ele in dincompat) or
                  (tflag and tincompat and ele in tincompat)):
                incompat.append(ele)

        if len(invalid) > 0:
            sys.stderr.write("Invalid column definition! -- %s\n" % invalid)
            usage()

        if len(incompat) > 0:
            sys.stderr.write("Incompatible field specified! -- %s\n" %
                             incompat)
            usage()

    if ofile:
        try:
            tmp = open(ofile, "w")
            sys.stdout = tmp

        except IOError:
            sys.stderr.write("Cannot open %s for writing\n" % ofile)
            sys.exit(1)

    if not ifile:
        ifile = default_ifile()

    if ifile != "-":
        try:
            tmp = open(ifile, "r")
            sys.stdin = tmp
        except IOError:
            sys.stderr.write("Cannot open %s for reading\n" % ifile)
            sys.exit(1)

    if bflag:
        buffers_print_all(sys.stdin, filters, nflag)

    if dflag:
        print_dict(dnodes_build_dict(sys.stdin), filters, nflag)

    if tflag:
        print_dict(types_build_dict(sys.stdin), filters, nflag)


if __name__ == '__main__':
    main()