#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License version 2
#  as published by the Free Software Foundation.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software
#  Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#

#
# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
# Use is subject to license terms.
#
# Copyright 2008, 2011, Richard Lowe
#


'''
Workspace backup

Backup format is:
   backupdir/
      wsname/
         generation#/
            dirstate (handled by CdmUncommittedBackup)
                File containing dirstate nodeid (the changeset we need
                to update the workspace to after applying the bundle).
                This is the node to which the working copy changes
                (see 'diff', below) will be applied if applicable.

            bundle (handled by CdmCommittedBackup)
                An Hg bundle containing outgoing committed changes.

            nodes (handled by CdmCommittedBackup)
                A text file listing the full (hex) nodeid of every node
                in the bundle, used by need_backup.

            diff (handled by CdmUncommittedBackup)
                A Git-formatted diff containing uncommitted changes.

            renames (handled by CdmUncommittedBackup)
                A list of renames in the working copy that have to be
                applied manually, rather than by the diff.

            metadata.tar.gz (handled by CdmMetadataBackup)
                $CODEMGR_WS/.hg/hgrc
                $CODEMGR_WS/.hg/localtags
                $CODEMGR_WS/.hg/patches (MQ data)

            clear.tar.gz (handled by CdmClearBackup)
                <short node>/
                    copies of each modified or added file, as it is in
                    this head.

                ... for each outgoing head

                working/
                    copies of each modified or added file in the
                    working copy, if any.

         latest -> generation#
            Newest backup generation.

All files in a given backup generation, with the exception of
dirstate, are optional.
'''

import grp, os, pwd, shutil, tarfile, time, traceback
from cStringIO import StringIO

from mercurial import changegroup, cmdutil, error, node, patch, util
from onbld.Scm import Version


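#
# Typical use (a sketch; the cdm extension commands that drive these
# classes are expected to supply a ui, a WorkSpace and a backup name,
# and to hold the required locks -- see CdmBackup.backup() and
# CdmBackup.restore() below):
#
#     bu = CdmBackup(ui, ws, wsname)
#     if bu.need_backup():
#         bu.backup()
#     ...
#     bu.restore()        # or bu.restore(gen) for a specific generation
#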
class CdmNodeMissing(util.Abort):
    '''A required node is not present in the destination workspace.

    This may occur either because the bundle contains a changeset
    which is a child of a node not present in the destination
    workspace (because the destination workspace is not as up-to-date
    as the source), or because the source and destination workspaces
    are not related.

    It may also happen in cases where the uncommitted changes need to
    be applied onto a node that the workspace does not possess even
    after application of the bundle (on a branch not present in the
    bundle or destination workspace, for instance).'''

    def __init__(self, msg, name):
        #
        # If NAME is a string 20 characters long, it is assumed
        # to be a binary node id.  (Mercurial makes the same
        # assumption when creating a LookupError.)
        #
        if isinstance(name, str) and len(name) == 20:
            n = node.short(name)
        else:
            n = name

        util.Abort.__init__(self, "%s: changeset '%s' is missing\n"
                            "Your workspace is either not "
                            "sufficiently up to date,\n"
                            "or is unrelated to the workspace from "
                            "which the backup was taken.\n" % (msg, n))


class CdmTarFile(tarfile.TarFile):
    '''Tar file access + simple comparison to the filesystem, and the
    addition of members created from Mercurial filectx objects.'''

    def __init__(self, *args, **kwargs):
        tarfile.TarFile.__init__(self, *args, **kwargs)
        self.errorlevel = 2

    def members_match_fs(self, rootpath):
        '''Compare the contents of the tar archive to the directory
        specified by ROOTPATH.  Return False if they differ.

        Every file in the archive must match the equivalent file in
        the filesystem.

        The existence, modification time, and size of each file are
        compared; content is not.'''

        def _member_matches_fs(member, rootpath):
            '''Compare a single member to its filesystem counterpart'''
            fpath = os.path.join(rootpath, member.name)

            if not os.path.exists(fpath):
                return False
            elif ((os.path.isfile(fpath) != member.isfile()) or
                  (os.path.isdir(fpath) != member.isdir()) or
                  (os.path.islink(fpath) != member.issym())):
                return False

            #
            # The filesystem may return a modification time with a
            # fractional component (as a float), whereas the tar format
            # only stores it to the whole second, so perform the
            # comparison using integers (truncated, not rounded).
            #
            elif member.mtime != int(os.path.getmtime(fpath)):
                return False
            elif not member.isdir() and member.size != os.path.getsize(fpath):
                return False
            else:
                return True

        for elt in self:
            if not _member_matches_fs(elt, rootpath):
                return False

        return True

    def addfilectx(self, filectx, path=None):
        '''Add a filectx object to the archive.

        Use the path specified by the filectx object or, if specified,
        the PATH argument.

        The size, modification time, type and permissions of the tar
        member are taken from the filectx object.  The user and group
        id are those of the invoking user; the user and group name are
        those of the invoking user if that information is available,
        or "unknown" if it is not.
        '''

        t = tarfile.TarInfo(path or filectx.path())
        t.size = filectx.size()
        t.mtime = filectx.date()[0]
        t.uid = os.getuid()
        t.gid = os.getgid()

        try:
            t.uname = pwd.getpwuid(t.uid).pw_name
        except KeyError:
            t.uname = "unknown"

        try:
            t.gname = grp.getgrgid(t.gid).gr_name
        except KeyError:
            t.gname = "unknown"

        #
        # Mercurial versions symlinks by setting a flag and storing
        # the destination path in place of the file content.  The
        # actual contents (in the tar) should be empty.
        #
        if 'l' in filectx.flags():
            t.type = tarfile.SYMTYPE
            t.mode = 0777
            t.linkname = filectx.data()
            data = None
        else:
            t.type = tarfile.REGTYPE
            t.mode = 'x' in filectx.flags() and 0755 or 0644
            data = StringIO(filectx.data())

        self.addfile(t, data)


class CdmCommittedBackup(object):
    '''Backup of committed changes'''

    def __init__(self, backup, ws):
        self.ws = ws
        self.bu = backup
        self.files = ('bundle', 'nodes')

    def _outgoing_nodes(self, parent):
        '''Return a list of all outgoing nodes in hex format'''

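        #
        # findoutgoing() gives the roots of the changesets missing from
        # the parent workspace; nodesbetween() is expected to expand
        # that into the full set of outgoing changesets, which we
        # record in hex so need_backup() can compare them later.
        #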
        if parent:
            outgoing = self.ws.findoutgoing(parent)
            nodes = self.ws.repo.changelog.nodesbetween(outgoing)[0]
            return map(node.hex, nodes)
        else:
            return []

    def backup(self):
        '''Backup committed changes'''
        parent = self.ws.parent()

        if not parent:
            self.ws.ui.warn('Workspace has no parent, committed changes will '
                            'not be backed up\n')
            return

        out = self.ws.findoutgoing(parent)
        if not out:
            return

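        #
        # Bundle up every outgoing changeset.  'HG10BZ' selects the
        # bzip2-compressed variant of the HG10 bundle format.
        #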
        cg = self.ws.repo.changegroup(out, 'bundle')
        changegroup.writebundle(cg, self.bu.backupfile('bundle'), 'HG10BZ')

        outnodes = self._outgoing_nodes(parent)
        if not outnodes:
            return

        fp = None
        try:
            try:
                fp = self.bu.open('nodes', 'w')
                fp.write('%s\n' % '\n'.join(outnodes))
            except EnvironmentError, e:
                raise util.Abort("couldn't store outgoing nodes: %s" % e)
        finally:
            if fp and not fp.closed:
                fp.close()

    def restore(self):
        '''Restore committed changes from backup'''

        if not self.bu.exists('bundle'):
            return

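        #
        # addchangegroup() applies the bundled changesets to the
        # repository.  A LookupError while doing so is assumed to mean
        # the bundle's base changeset is not present here, which
        # CdmNodeMissing explains to the user.
        #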
        bpath = self.bu.backupfile('bundle')
        f = None
        try:
            try:
                f = self.bu.open('bundle')
                bundle = changegroup.readbundle(f, bpath)
                self.ws.repo.addchangegroup(bundle, 'strip',
                                            'bundle:%s' % bpath)
            except EnvironmentError, e:
                raise util.Abort("couldn't restore committed changes: %s\n"
                                 "   %s" % (bpath, e))
            except error.LookupError, e:
                raise CdmNodeMissing("couldn't restore committed changes",
                                     e.name)
        finally:
            if f and not f.closed:
                f.close()

    def need_backup(self):
        '''Compare backup of committed changes to workspace'''

        if self.bu.exists('nodes'):
            f = None
            try:
                try:
                    f = self.bu.open('nodes')
                    bnodes = set(line.rstrip('\r\n') for line in f.readlines())
                    f.close()
                except EnvironmentError, e:
                    raise util.Abort("couldn't open backup node list: %s" % e)
            finally:
                if f and not f.closed:
                    f.close()
        else:
            bnodes = set()

        outnodes = set(self._outgoing_nodes(self.ws.parent()))

        #
        # If there are outgoing nodes not in the prior backup we need
        # to take a new backup; it's fine if there are nodes in the
        # old backup which are no longer outgoing, however.
        #
        if not outnodes <= bnodes:
            return True

        return False

    def cleanup(self):
        '''Remove backed up committed changes'''

        for f in self.files:
            self.bu.unlink(f)


class CdmUncommittedBackup(object):
    '''Backup of uncommitted changes'''

    def __init__(self, backup, ws):
        self.ws = ws
        self.bu = backup
        self.wctx = self.ws.workingctx(worklist=True)

    def _clobbering_renames(self):
        '''Return a list of pairs of files representing renames/copies
        that clobber already versioned files.  [(old-name, new-name), ...]
        '''

        #
        # Note that this doesn't handle uncommitted merges
        # as CdmUncommittedBackup itself doesn't.
        #
        parent = self.wctx.parents()[0]

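        #
        # filectx.renamed() is expected to return the rename/copy
        # source (as a (path, filenode) pair) for a renamed or copied
        # file and a false value otherwise.  A destination that already
        # exists in the parent changeset is one the saved diff alone
        # cannot recreate, so record it separately.
        #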
        ret = []
        for fname in self.wctx.added() + self.wctx.modified():
            rn = self.wctx.filectx(fname).renamed()
            if rn and fname in parent:
                ret.append((rn[0], fname))
        return ret

    def backup(self):
        '''Backup uncommitted changes'''

        if self.ws.merged():
            raise util.Abort("Unable to backup an uncommitted merge.\n"
                             "Please complete your merge and commit")

        dirstate = node.hex(self.wctx.parents()[0].node())

        fp = None
        try:
            try:
                fp = self.bu.open('dirstate', 'w')
                fp.write(dirstate + '\n')
                fp.close()
            except EnvironmentError, e:
                raise util.Abort("couldn't save working copy parent: %s" % e)

            try:
                fp = self.bu.open('renames', 'w')
                for cons in self._clobbering_renames():
                    fp.write("%s %s\n" % cons)
                fp.close()
            except EnvironmentError, e:
                raise util.Abort("couldn't save clobbering copies: %s" % e)

            try:
                fp = self.bu.open('diff', 'w')
                match = self.ws.matcher(files=self.wctx.files())
                fp.write(self.ws.diff(opts={'git': True}, match=match))
            except EnvironmentError, e:
                raise util.Abort("couldn't save working copy diff: %s" % e)
        finally:
            if fp and not fp.closed:
                fp.close()

    def _dirstate(self):
        '''Return the desired working copy node from the backup'''
        fp = None
        try:
            try:
                fp = self.bu.open('dirstate')
                dirstate = fp.readline().strip()
            except EnvironmentError, e:
                raise util.Abort("couldn't read saved parent: %s" % e)
        finally:
            if fp and not fp.closed:
                fp.close()

        return dirstate

    def restore(self):
        '''Restore uncommitted changes'''
        dirstate = self._dirstate()

        #
        # Check that the patch's parent changeset exists.
        #
        try:
            n = node.bin(dirstate)
            self.ws.repo.changelog.lookup(n)
        except error.LookupError, e:
            raise CdmNodeMissing("couldn't restore uncommitted changes",
                                 e.name)

        try:
            self.ws.clean(rev=dirstate)
        except util.Abort, e:
            raise util.Abort("couldn't update to saved node: %s" % e)

        if not self.bu.exists('diff'):
            return

        #
        # There's a race here whereby if the patch (or part thereof)
        # is applied within the same second as the clean above (such
        # that modification time doesn't change) and if the size of
        # that file does not change, Hg may not see the change.
        #
        # We sleep a full second to avoid this, as sleeping merely
        # until the next second begins would require very close clock
        # synchronization on network filesystems.
        #
        time.sleep(1)

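        #
        # patch.patch() fills FILES in with the paths the patch
        # touched; updatedir() (moved from the patch module to cmdutil
        # in Mercurial 1.7) then records any adds, removes and copies
        # in the dirstate so the restored working copy matches what was
        # backed up.
        #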
        files = {}
        try:
            diff = self.bu.backupfile('diff')
            try:
                fuzz = patch.patch(diff, self.ws.ui, strip=1,
                                   cwd=self.ws.repo.root, files=files)
                if fuzz:
                    raise util.Abort('working copy diff applied with fuzz')
            except Exception, e:
                raise util.Abort("couldn't apply working copy diff: %s\n"
                                 "   %s" % (diff, e))
        finally:
            if Version.at_least("1.7"):
                cmdutil.updatedir(self.ws.ui, self.ws.repo, files)
            else:
                patch.updatedir(self.ws.ui, self.ws.repo, files)

        if not self.bu.exists('renames'):
            return

        #
        # We need to re-apply name changes where the new name
        # (rename/copy destination) is an already versioned file, as
        # Hg would otherwise ignore them.
        #
        try:
            fp = self.bu.open('renames')
            for line in fp:
                source, dest = line.strip().split()
                self.ws.copy(source, dest)
        except EnvironmentError, e:
            raise util.Abort('unable to open renames file: %s' % e)
        except ValueError:
            raise util.Abort('corrupt renames file: %s' %
                             self.bu.backupfile('renames'))

    def need_backup(self):
        '''Compare backup of uncommitted changes to workspace'''
        cnode = self.wctx.parents()[0].node()
        if self._dirstate() != node.hex(cnode):
            return True

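        #
        # The saved parent, the diff of uncommitted changes and the
        # recorded clobbering renames describe everything this backup
        # stores, so comparing each of them against the current
        # workspace is enough to decide whether a new backup is needed.
        #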
        fd = None
        match = self.ws.matcher(files=self.wctx.files())
        curdiff = self.ws.diff(opts={'git': True}, match=match)

        try:
            if self.bu.exists('diff'):
                try:
                    fd = self.bu.open('diff')
                    backdiff = fd.read()
                    fd.close()
                except EnvironmentError, e:
                    raise util.Abort("couldn't open backup diff %s\n"
                                     "   %s" % (self.bu.backupfile('diff'), e))
            else:
                backdiff = ''

            if backdiff != curdiff:
                return True

            currrenamed = self._clobbering_renames()
            bakrenamed = None

            if self.bu.exists('renames'):
                try:
                    fd = self.bu.open('renames')
                    bakrenamed = [tuple(line.strip().split(' ')) for line in fd]
                    fd.close()
                except EnvironmentError, e:
                    raise util.Abort("couldn't open renames file %s: %s\n" %
                                     (self.bu.backupfile('renames'), e))

            if currrenamed != bakrenamed:
                return True
        finally:
            if fd and not fd.closed:
                fd.close()

        return False

    def cleanup(self):
        '''Remove backed up uncommitted changes'''

        for f in ('dirstate', 'diff', 'renames'):
            self.bu.unlink(f)


class CdmMetadataBackup(object):
    '''Backup of workspace metadata'''

    def __init__(self, backup, ws):
        self.bu = backup
        self.ws = ws
        self.files = ('hgrc', 'localtags', 'patches', 'cdm')

    def backup(self):
        '''Backup workspace metadata'''

        tarpath = self.bu.backupfile('metadata.tar.gz')

        #
        # Files is a list of tuples (name, path), where name is as in
        # self.files, and path is the absolute path.
        #
        files = filter(lambda (name, path): os.path.exists(path),
                       zip(self.files, map(self.ws.repo.join, self.files)))

        if not files:
            return

        try:
            tar = CdmTarFile.gzopen(tarpath, 'w')
        except (EnvironmentError, tarfile.TarError), e:
            raise util.Abort("couldn't open %s for writing: %s" %
                             (tarpath, e))

        try:
            for name, path in files:
                try:
                    tar.add(path, name)
                except (EnvironmentError, tarfile.TarError), e:
                    #
                    # tarfile.TarError doesn't include the tar member or file
                    # in question, so we have to do so ourselves.
                    #
                    if isinstance(e, tarfile.TarError):
                        errstr = "%s: %s" % (name, e)
                    else:
                        errstr = str(e)

                    raise util.Abort("couldn't backup metadata to %s:\n"
                                     "  %s" % (tarpath, errstr))
        finally:
            tar.close()

    def old_restore(self):
        '''Restore workspace metadata from a pre-tar backup'''

        for fname in self.files:
            if self.bu.exists(fname):
                bfile = self.bu.backupfile(fname)
                wfile = self.ws.repo.join(fname)

                try:
                    shutil.copy2(bfile, wfile)
                except EnvironmentError, e:
                    raise util.Abort("couldn't restore metadata from %s:\n"
                                     "   %s" % (bfile, e))

    def tar_restore(self):
        '''Restore workspace metadata (from a tar-style backup)'''

        if not self.bu.exists('metadata.tar.gz'):
            return

        tarpath = self.bu.backupfile('metadata.tar.gz')

        try:
            tar = CdmTarFile.gzopen(tarpath)
        except (EnvironmentError, tarfile.TarError), e:
            raise util.Abort("couldn't open %s: %s" % (tarpath, e))

        try:
            for elt in tar:
                try:
                    tar.extract(elt, path=self.ws.repo.path)
                except (EnvironmentError, tarfile.TarError), e:
                    # Make sure the member name is in the exception message.
                    if isinstance(e, tarfile.TarError):
                        errstr = "%s: %s" % (elt.name, e)
                    else:
                        errstr = str(e)

                    raise util.Abort("couldn't restore metadata from %s:\n"
                                     "   %s" %
                                     (tarpath, errstr))
        finally:
            if tar and not tar.closed:
                tar.close()

    def restore(self):
        '''Restore workspace metadata'''

        if self.bu.exists('hgrc'):
            self.old_restore()
        else:
            self.tar_restore()

    def _walk(self):
        '''Yield the path of each file we operate on, including each
        file within any affected directory'''

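        #
        # Directories in self.files (e.g. .hg/patches) are walked so
        # that both the directory itself and every file beneath it can
        # be checked against the tar member names in need_backup().
        #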
        for elt in self.files:
            path = self.ws.repo.join(elt)

            if not os.path.exists(path):
                continue

            if os.path.isdir(path):
                for root, dirs, files in os.walk(path, topdown=True):
                    yield root

                    for f in files:
                        yield os.path.join(root, f)
            else:
                yield path

    def need_backup(self):
        '''Compare backed up workspace metadata to workspace'''

        def strip_trailing_pathsep(pathname):
            '''Remove a possible trailing path separator from PATHNAME'''
            return pathname.endswith('/') and pathname[:-1] or pathname

        if self.bu.exists('metadata.tar.gz'):
            tarpath = self.bu.backupfile('metadata.tar.gz')
            try:
                tar = CdmTarFile.gzopen(tarpath)
            except (EnvironmentError, tarfile.TarError), e:
                raise util.Abort("couldn't open metadata tarball: %s\n"
                                 "   %s" % (tarpath, e))

            if not tar.members_match_fs(self.ws.repo.path):
                tar.close()
                return True

            tarnames = map(strip_trailing_pathsep, tar.getnames())
            tar.close()
        else:
            tarnames = []

        repopath = self.ws.repo.path
        if not repopath.endswith('/'):
            repopath += '/'

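        #
        # _walk() yields paths rooted at the repository's .hg
        # directory; strip that prefix so they are comparable with the
        # member names stored in the tarball.  Anything on disk that
        # has no corresponding member means the backup is out of date.
        #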
        for path in self._walk():
            if path.replace(repopath, '', 1) not in tarnames:
                return True

        return False

    def cleanup(self):
        '''Remove backed up workspace metadata'''
        self.bu.unlink('metadata.tar.gz')


class CdmClearBackup(object):
    '''A backup (in tar format) of complete source files from every
    workspace head.

    Paths in the tarball are prefixed by the short node of the head,
    or "working" for the working directory.

    This is done purely for the benefit of the user, and as such takes
    no part in restore or need_backup checking; restore always
    succeeds and need_backup always returns False.
    '''

    def __init__(self, backup, ws):
        self.bu = backup
        self.ws = ws

    def _branch_pairs(self):
        '''Return a list of tuples (parenttip, localtip) for each
        outgoing head.  If the working copy contains modified files,
        it is treated as a head and its parents are not.
        '''

        parent = self.ws.parent()

        if parent:
            outgoing = self.ws.findoutgoing(parent)
            outnodes = set(self.ws.repo.changelog.nodesbetween(outgoing)[0])

            heads = [self.ws.repo.changectx(n) for n in self.ws.repo.heads()
                     if n in outnodes]
        else:
            heads = []
            outnodes = []

        wctx = self.ws.workingctx()
        if wctx.files():        # We only care about file changes.
            heads = filter(lambda x: x not in wctx.parents(), heads) + [wctx]

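        #
        # The working copy (rev() is None) has no changeset of its own
        # to compare against, so hand its parent(s) to parenttip() when
        # working out which changeset this backup should diff from.
        #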
        pairs = []
        for head in heads:
            if head.rev() is None:
                c = head.parents()
            else:
                c = [head]

            pairs.append((self.ws.parenttip(c, outnodes), head))
        return pairs

    def backup(self):
        '''Save a clear copy of each source file modified between each
        head and that head's parenttip (see WorkSpace.parenttip).
        '''

        tarpath = self.bu.backupfile('clear.tar.gz')
        branches = self._branch_pairs()

        if not branches:
            return

        try:
            tar = CdmTarFile.gzopen(tarpath, 'w')
        except (EnvironmentError, tarfile.TarError), e:
            raise util.Abort("Could not open %s for writing: %s" %
                             (tarpath, e))

        try:
            for parent, child in branches:
                tpath = child.node() and node.short(child.node()) or "working"

                for fname, change in self.ws.status(parent, child).iteritems():
                    if change not in ('added', 'modified'):
                        continue

                    try:
                        tar.addfilectx(child.filectx(fname),
                                       os.path.join(tpath, fname))
                    except ValueError, e:
                        crev = child.rev()
                        if crev is None:
                            crev = "working copy"
                        raise util.Abort("Could not backup clear file %s "
                                         "from %s: %s\n" % (fname, crev, e))
        finally:
            tar.close()

    def cleanup(self):
        '''Cleanup a failed Clear backup.

        Remove the clear tarball from the backup directory.
        '''

        self.bu.unlink('clear.tar.gz')

    def restore(self):
        '''Clear backups are never restored, do nothing'''
        pass

    def need_backup(self):
        '''Clear backups are never compared, return False (no backup needed).

        Should a backup actually be needed, one of the other
        implementation classes would notice in any situation we would.
        '''

        return False


class CdmBackup(object):
    '''A backup of a given workspace'''

    def __init__(self, ui, ws, name):
        self.ws = ws
        self.ui = ui
        self.backupdir = self._find_backup_dir(name)

        #
        # The order of instances here controls the order the various operations
        # are run.
        #
        # There's some inherent dependence, in that on restore we need
        # to restore committed changes prior to uncommitted changes
        # (as the parent revision of any uncommitted changes is quite
        # likely to not exist until committed changes are restored).
        # Metadata restore can happen at any point, but happens last
        # as a matter of convention.
        #
        self.modules = [x(self, ws) for x in [CdmCommittedBackup,
                                              CdmUncommittedBackup,
                                              CdmClearBackup,
                                              CdmMetadataBackup]]

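        #
        # The 'latest' symlink points at the most recent generation
        # directory; its basename is the generation number.  If the
        # link is absent, no backup has been taken yet and we start
        # from generation 0.
        #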
        if os.path.exists(os.path.join(self.backupdir, 'latest')):
            generation = os.readlink(os.path.join(self.backupdir, 'latest'))
            self.generation = int(os.path.split(generation)[1])
        else:
            self.generation = 0

    def _find_backup_dir(self, name):
        '''Find the path to an appropriate backup directory based on NAME'''

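        #
        # Precedence: an absolute NAME is used as-is, then any
        # 'backupdir' set in the [cdm] configuration section, and
        # finally ~/cdm.backup for the invoking user.
        #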
        if os.path.isabs(name):
            return name

        if self.ui.config('cdm', 'backupdir'):
            backupbase = os.path.expanduser(self.ui.config('cdm', 'backupdir'))
        else:
            home = None

            try:
                home = os.getenv('HOME') or pwd.getpwuid(os.getuid()).pw_dir
            except KeyError:
                pass                    # Handled anyway

            if not home:
                raise util.Abort('Could not determine your HOME directory to '
                                 'find backup path')

            backupbase = os.path.join(home, 'cdm.backup')

        backupdir = os.path.join(backupbase, name)

        # If backupdir exists, it must be a directory.
        if (os.path.exists(backupdir) and not os.path.isdir(backupdir)):
            raise util.Abort('%s exists but is not a directory' % backupdir)

        return backupdir

    def _update_latest(self, gen):
        '''Update latest symlink to point to the current generation'''
        linkpath = os.path.join(self.backupdir, 'latest')

        if os.path.lexists(linkpath):
            os.unlink(linkpath)

        os.symlink(str(gen), linkpath)

    def _create_gen(self, gen):
        '''Create a new backup generation'''
        try:
            os.makedirs(os.path.join(self.backupdir, str(gen)))
            self._update_latest(gen)
        except EnvironmentError, e:
            raise util.Abort("Couldn't create backup generation %s: %s" %
                             (os.path.join(self.backupdir, str(gen)), e))

    def backupfile(self, path):
        '''Return the full path of backup file PATH in the current
        backup generation'''
        return os.path.join(self.backupdir, str(self.generation), path)

    def unlink(self, name):
        '''Unlink the specified path from the backup directory.
        A no-op if the path does not exist.
        '''

        fpath = self.backupfile(name)
        if os.path.exists(fpath):
            os.unlink(fpath)

    def open(self, name, mode='r'):
        '''Open the specified file in the backup directory'''
        return open(self.backupfile(name), mode)

    def exists(self, name):
        '''Return a boolean indicating whether a given file exists in
        the backup directory.'''
        return os.path.exists(self.backupfile(name))

    def need_backup(self):
        '''Compare backed up changes to workspace'''
        #
        # If there's no current backup generation, or the last backup was
        # invalid (lacking the dirstate file), we need a backup regardless
        # of anything else.
        #
        if not self.generation or not self.exists('dirstate'):
            return True

        for x in self.modules:
            if x.need_backup():
                return True

        return False

    def backup(self):
        '''Take a backup of the current workspace

        Calling code is expected to hold both the working copy lock
        and repository lock.'''

        if not os.path.exists(self.backupdir):
            try:
                os.makedirs(self.backupdir)
            except EnvironmentError, e:
                raise util.Abort('Could not create backup directory %s: %s' %
                                 (self.backupdir, e))

        self.generation += 1
        self._create_gen(self.generation)

        try:
            for x in self.modules:
                x.backup()
        except Exception, e:
            if isinstance(e, KeyboardInterrupt):
                self.ws.ui.warn("Interrupted\n")
            else:
                self.ws.ui.warn("Error: %s\n" % e)
                show_traceback = self.ws.ui.configbool('ui', 'traceback',
                                                       False)

                #
                # If it's not a 'normal' error, we want to print a stack
                # trace now in case the attempt to remove the partial
                # backup also fails, and raises a second exception.
                #
                if (not isinstance(e, (EnvironmentError, util.Abort))
                    or show_traceback):
                    traceback.print_exc()

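            #
            # Roll back the failed generation: each module removes its
            # own files, the now-empty generation directory is removed,
            # and 'latest' is pointed back at the previous generation
            # (or removed entirely if this was the first).
            #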
            for x in self.modules:
                x.cleanup()

            os.rmdir(os.path.join(self.backupdir, str(self.generation)))
            self.generation -= 1

            if self.generation != 0:
                self._update_latest(self.generation)
            else:
                os.unlink(os.path.join(self.backupdir, 'latest'))

            raise util.Abort('Backup failed')

    def restore(self, gen=None):
        '''Restore workspace from backup

        Restores from backup generation GEN (defaulting to the latest)
        into the workspace.

        Calling code is expected to hold both the working copy lock
        and repository lock of the destination workspace.'''

        if not os.path.exists(self.backupdir):
            raise util.Abort('Backup directory does not exist: %s' %
                             (self.backupdir))

        if gen:
            if not os.path.exists(os.path.join(self.backupdir, str(gen))):
                raise util.Abort('Backup generation does not exist: %s' %
                                 (os.path.join(self.backupdir, str(gen))))
            self.generation = int(gen)

        if not self.generation: # This is OK, 0 is not a valid generation
            raise util.Abort('Backup has no generations: %s' % self.backupdir)

        if not self.exists('dirstate'):
            raise util.Abort('Backup %s/%s is incomplete (dirstate missing)' %
                             (self.backupdir, self.generation))

        try:
            for x in self.modules:
                x.restore()
        except util.Abort, e:
            raise util.Abort('Error restoring workspace:\n'
                             '%s\n'
                             'Workspace may be partially restored' % e)
981