#!/bin/ksh
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License, Version 1.0 only
# (the "License").  You may not use this file except in compliance
# with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
#
# Copyright (c) 1998 by Sun Microsystems, Inc.
# All rights reserved.
#
#pragma ident	"%Z%%M%	%I%	%E% SMI"
#
# gsscred_db clean-up script
#
# This script removes duplicate entries from the
# gsscred_db file. It runs as a daily root cron job
# and performs the cleanup only when the gsscred_db
# file has changed since the last run.

# The credential table to be de-duplicated.
FILE_TO_CLEAN=/etc/gss/gsscred_db
# Timestamp file recording when the table was last cleaned.
CLEAN_TIME=/etc/gss/.gsscred_clean
# Scratch file for the sorted output; $$ (this shell's PID) makes
# the name unique to this run.
TMP_FILE=/etc/gss/gsscred_clean$$

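# Remove the scratch file on normal exit (0) and on HUP (1), INT (2),
# QUIT (3), PIPE (13) or TERM (15).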
trap "rm -f $TMP_FILE; exit" 0 1 2 3 13 15


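# Proceed only when the database is non-empty and is newer than the
# last-clean timestamp.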
if [ -s $FILE_TO_CLEAN ] && [ $FILE_TO_CLEAN -nt $CLEAN_TIME ]
then

#
#	The file being sorted has the following format:
#		name	uid	comment
#
#	We want to remove duplicate entries for a name, which may
#	have different uids. Entries lower in the file are newer,
#	since additions are appended. We use cat -n to number the
#	lines so the relative order of duplicates is preserved, then
#	sort on the name and, in reverse, on the line number, so the
#	newest entry for each name comes first. The line numbers are
#	then removed and the older duplicates are filtered out.
#
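#	For example, given these two (purely illustrative) entries
#	for the same name:
#
#		user@EXAMPLE.COM	1001	added first
#		user@EXAMPLE.COM	1003	added last
#
#	only the newer "1003" entry survives the cleanup.
#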
	cat -n $FILE_TO_CLEAN | sort -k 2,2 -k 1,1nr 2> /dev/null \
		| cut -f2- | \
		awk ' (NR > 1 && $1 != key) || NR == 1 {
				key = $1;
				print $0;
			}
		' > $TMP_FILE

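#
#	$? below is the exit status of the awk at the end of the
#	pipeline above; replace the database only if it succeeded,
#	and update the timestamp only if the mv also succeeded.
#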
	if [ $? -eq 0 ] && mv $TMP_FILE $FILE_TO_CLEAN; then
#
#		update time stamp for this sort
#
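#		touch -r copies gsscred_db's modification time onto
#		CLEAN_TIME, so the -nt test at the top remains false
#		until the database is modified again.
#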
		touch -r $FILE_TO_CLEAN $CLEAN_TIME
	else
		rm -f $TMP_FILE
	fi
fi