summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.gitignore1
-rw-r--r--Documentation/diff-options.txt4
-rw-r--r--Documentation/git-clean.txt50
-rw-r--r--Documentation/git-commit.txt25
-rw-r--r--Documentation/git-imap-send.txt60
-rw-r--r--Documentation/technical/pack-format.txt111
-rw-r--r--Documentation/technical/pack-heuristics.txt466
-rw-r--r--Makefile2
-rw-r--r--blame.c1
-rw-r--r--date.c4
-rw-r--r--delta.h5
-rw-r--r--diff.c46
-rw-r--r--diff.h1
-rw-r--r--diffcore-pickaxe.c66
-rwxr-xr-xgit-clean.sh80
-rwxr-xr-xgit-clone.sh47
-rwxr-xr-xgit-cvsimport.perl6
-rwxr-xr-xgitk335
-rw-r--r--imap-send.c8
-rw-r--r--patch-delta.c26
-rw-r--r--sha1_file.c6
21 files changed, 1169 insertions, 181 deletions
diff --git a/.gitignore b/.gitignore
index 75891c393b..b5959d6311 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,6 +15,7 @@ git-checkout
git-checkout-index
git-cherry
git-cherry-pick
+git-clean
git-clone
git-clone-pack
git-commit
diff --git a/Documentation/diff-options.txt b/Documentation/diff-options.txt
index 2a0275eeda..ec6811c718 100644
--- a/Documentation/diff-options.txt
+++ b/Documentation/diff-options.txt
@@ -69,6 +69,10 @@
changeset, not just the files that contain the change
in <string>.
+--pickaxe-regex::
+ Make the <string> not a plain string but an extended POSIX
+ regex to match.
+
-O<orderfile>::
Output the patch in the order specified in the
<orderfile>, which has one shell glob pattern per line.
diff --git a/Documentation/git-clean.txt b/Documentation/git-clean.txt
new file mode 100644
index 0000000000..36890c543d
--- /dev/null
+++ b/Documentation/git-clean.txt
@@ -0,0 +1,50 @@
+git-clean(1)
+============
+
+NAME
+----
+git-clean - Remove untracked files from the working tree
+
+SYNOPSIS
+--------
+[verse]
+'git-clean' [-d] [-n] [-q] [-x | -X]
+
+DESCRIPTION
+-----------
+Removes files unknown to git. This allows cleaning the working tree
+of files that are not under version control. If the '-x' option is
+specified, ignored files are also removed, allowing removal of all
+build products.
+
+OPTIONS
+-------
+-d::
+ Remove untracked directories in addition to untracked files.
+
+-n::
+ Don't actually remove anything, just show what would be done.
+
+-q::
+ Be quiet, only report errors, but not the files that are
+ successfully removed.
+
+-x::
+ Don't use the ignore rules. This allows removing all untracked
+ files, including build products. This can be used (possibly in
+ conjunction with gitlink:git-reset[1]) to create a pristine
+ working directory to test a clean build.
+
+-X::
+ Remove only files ignored by git. This may be useful to rebuild
+ everything from scratch, but keep manually created files.
+
+
+Author
+------
+Written by Pavel Roskin <proski@gnu.org>
+
+
+GIT
+---
+Part of the gitlink:git[7] suite
diff --git a/Documentation/git-commit.txt b/Documentation/git-commit.txt
index d04b342a95..0a7365b9a8 100644
--- a/Documentation/git-commit.txt
+++ b/Documentation/git-commit.txt
@@ -9,7 +9,8 @@ SYNOPSIS
--------
[verse]
'git-commit' [-a] [-s] [-v] [(-c | -C) <commit> | -F <file> | -m <msg>]
- [-e] [--author <author>] [--] [[-i | -o ]<file>...]
+ [--no-verify] [--amend] [-e] [--author <author>]
+ [--] [[-i | -o ]<file>...]
DESCRIPTION
-----------
@@ -71,6 +72,28 @@ OPTIONS
commit log message unmodified. This option lets you
further edit the message taken from these sources.
+--amend::
+
+ Used to amend the tip of the current branch. Prepare the tree
+ object you would want to replace the latest commit as usual
+ (this includes the usual -i/-o and explicit paths), and the
+ commit log editor is seeded with the commit message from the
+ tip of the current branch. The commit you create replaces the
+ current tip -- if it was a merge, it will have the parents of
+ the current tip as parents -- so the current top commit is
+ discarded.
++
+--
+It is a rough equivalent for:
+------
+ $ git reset --soft HEAD^
+ $ ... do something else to come up with the right tree ...
+ $ git commit -c ORIG_HEAD
+
+------
+but can be used to amend a merge commit.
+--
+
-i|--include::
Instead of committing only the files specified on the
command line, update them in the index file and then
diff --git a/Documentation/git-imap-send.txt b/Documentation/git-imap-send.txt
new file mode 100644
index 0000000000..cfc0d88d02
--- /dev/null
+++ b/Documentation/git-imap-send.txt
@@ -0,0 +1,60 @@
+git-imap-send(1)
+================
+
+NAME
+----
+git-imap-send - Dump a mailbox from stdin into an imap folder
+
+
+SYNOPSIS
+--------
+'git-imap-send'
+
+
+DESCRIPTION
+-----------
+This command uploads a mailbox generated with git-format-patch
+into an imap drafts folder. This allows patches to be sent as
+other email is sent with mail clients that cannot read mailbox
+files directly.
+
+Typical usage is something like:
+
+git-format-patch --signoff --stdout --attach origin | git-imap-send
+
+
+CONFIGURATION
+-------------
+
+git-imap-send requires the following values in the repository
+configuration file (shown with examples):
+
+[imap]
+ Folder = "INBOX.Drafts"
+
+[imap]
+ Tunnel = "ssh -q user@server.com /usr/bin/imapd ./Maildir 2> /dev/null"
+
+[imap]
+ Host = imap.server.com
+ User = bob
+ Password = pwd
+ Port = 143
+
+
+BUGS
+----
+Doesn't handle lines starting with "From " in the message body.
+
+
+Author
+------
+Derived from isync 1.0.1 by Mike McCormack.
+
+Documentation
+-------------
+Documentation by Mike McCormack
+
+GIT
+---
+Part of the gitlink:git[7] suite
diff --git a/Documentation/technical/pack-format.txt b/Documentation/technical/pack-format.txt
new file mode 100644
index 0000000000..ed2decc107
--- /dev/null
+++ b/Documentation/technical/pack-format.txt
@@ -0,0 +1,111 @@
+GIT pack format
+===============
+
+= pack-*.pack file has the following format:
+
+ - The header appears at the beginning and consists of the following:
+
+ 4-byte signature
+ 4-byte version number (network byte order)
+ 4-byte number of objects contained in the pack (network byte order)
+
+ Observation: we cannot have more than 4G versions ;-) and
+ more than 4G objects in a pack.
+
+ - The header is followed by number of object entries, each of
+ which looks like this:
+
+ (undeltified representation)
+ n-byte type and length (4-bit type, (n-1)*7+4-bit length)
+ compressed data
+
+ (deltified representation)
+ n-byte type and length (4-bit type, (n-1)*7+4-bit length)
+ 20-byte base object name
+ compressed delta data
+
+ Observation: length of each object is encoded in a variable
+ length format and is not constrained to 32-bit or anything.
+
+ - The trailer records 20-byte SHA1 checksum of all of the above.
+
+= pack-*.idx file has the following format:
+
+ - The header consists of 256 4-byte network byte order
+ integers. N-th entry of this table records the number of
+ objects in the corresponding pack, the first byte of whose
+ object name are smaller than N. This is called the
+ 'first-level fan-out' table.
+
+ Observation: we would need to extend this to an array of
+ 8-byte integers to go beyond 4G objects per pack, but it is
+ not strictly necessary.
+
+ - The header is followed by sorted 28-byte entries, one entry
+ per object in the pack. Each entry is:
+
+ 4-byte network byte order integer, recording where the
+ object is stored in the packfile as the offset from the
+ beginning.
+
+ 20-byte object name.
+
+ Observation: we would definitely need to extend this to
+ 8-byte integer plus 20-byte object name to handle a packfile
+ that is larger than 4GB.
+
+ - The file is concluded with a trailer:
+
+ A copy of the 20-byte SHA1 checksum at the end of
+ corresponding packfile.
+
+ 20-byte SHA1-checksum of all of the above.
+
+Pack Idx file:
+
+ idx
+ +--------------------------------+
+ | fanout[0] = 2 |-.
+ +--------------------------------+ |
+ | fanout[1] | |
+ +--------------------------------+ |
+ | fanout[2] | |
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
+ | fanout[255] | |
+ +--------------------------------+ |
+main | offset | |
+index | object name 00XXXXXXXXXXXXXXXX | |
+table +--------------------------------+ |
+ | offset | |
+ | object name 00XXXXXXXXXXXXXXXX | |
+ +--------------------------------+ |
+ .-| offset |<+
+ | | object name 01XXXXXXXXXXXXXXXX |
+ | +--------------------------------+
+ | | offset |
+ | | object name 01XXXXXXXXXXXXXXXX |
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ | | offset |
+ | | object name FFXXXXXXXXXXXXXXXX |
+ | +--------------------------------+
+trailer | | packfile checksum |
+ | +--------------------------------+
+ | | idxfile checksum |
+ | +--------------------------------+
+ .-------.
+ |
+Pack file entry: <+
+
+ packed object header:
+ 1-byte type (upper 4-bit)
+ size0 (lower 4-bit)
+ n-byte sizeN (as long as MSB is set, each 7-bit)
+ size0..sizeN form 4+7+7+..+7 bit integer, size0
+ is the most significant part.
+ packed object data:
+ If it is not DELTA, then deflated bytes (the size above
+ is the size before compression).
+ If it is DELTA, then
+ 20-byte base object name SHA1 (the size above is the
+ size of the delta data that follows).
+ delta data, deflated.
diff --git a/Documentation/technical/pack-heuristics.txt b/Documentation/technical/pack-heuristics.txt
new file mode 100644
index 0000000000..eaab3eecd7
--- /dev/null
+++ b/Documentation/technical/pack-heuristics.txt
@@ -0,0 +1,466 @@
+ Concerning Git's Packing Heuristics
+ ===================================
+
+ Oh, here's a really stupid question:
+
+ Where do I go
+ to learn the details
+ of git's packing heuristics?
+
+Be careful what you ask!
+
+Followers of the git, please open the git IRC Log and turn to
+February 10, 2006.
+
+It's a rare occasion, and we are joined by the King Git Himself,
+Linus Torvalds (linus). Nathaniel Smith, (njs`), has the floor
+and seeks enlightenment. Others are present, but silent.
+
+Let's listen in!
+
+ <njs`> Oh, here's a really stupid question -- where do I go to
+ learn the details of git's packing heuristics? google avails
+ me not, reading the source didn't help a lot, and wading
+ through the whole mailing list seems less efficient than any
+ of that.
+
+It is a bold start! A plea for help combined with a simultaneous
+tri-part attack on some of the tried and true mainstays in the quest
+for enlightenment. Brash accusations of google being useless. Hubris!
+Maligning the source. Heresy! Disdain for the mailing list archives.
+Woe.
+
+ <pasky> yes, the packing-related delta stuff is somewhat
+ mysterious even for me ;)
+
+Ah! Modesty after all.
+
+ <linus> njs, I don't think the docs exist. That's something where
+ I don't think anybody else than me even really got involved.
+ Most of the rest of git others have been busy with (especially
+ Junio), but packing nobody touched after I did it.
+
+It's cryptic, yet vague. Linus in style for sure. Wise men
+interpret this as an apology. A few argue it is merely a
+statement of fact.
+
+ <njs`> I guess the next step is "read the source again", but I
+ have to build up a certain level of gumption first :-)
+
+Indeed! On both points.
+
+ <linus> The packing heuristic is actually really really simple.
+
+Bait...
+
+ <linus> But strange.
+
+And switch. That ought to do it!
+
+ <linus> Remember: git really doesn't follow files. So what it does is
+ - generate a list of all objects
+ - sort the list according to magic heuristics
+ - walk the list, using a sliding window, seeing if an object
+ can be diffed against another object in the window
+ - write out the list in recency order
+
+The traditional understatement:
+
+ <njs`> I suspect that what I'm missing is the precise definition of
+ the word "magic"
+
+The traditional insight:
+
+ <pasky> yes
+
+And Babel-like confusion flowed.
+
+ <njs`> oh, hmm, and I'm not sure what this sliding window means either
+
+ <pasky> iirc, it appeared to me to be just the sha1 of the object
+ when reading the code casually ...
+
+ ... which simply doesn't sound as a very good heuristics, though ;)
+
+ <njs`> .....and recency order. okay, I think it's clear I didn't
+ even realize how much I wasn't realizing :-)
+
+Ah, grasshopper! And thus the enlightenment begins anew.
+
+ <linus> The "magic" is actually in theory totally arbitrary.
+ ANY order will give you a working pack, but no, it's not
+ ordered by SHA1.
+
+ Before talking about the ordering for the sliding delta
+ window, let's talk about the recency order. That's more
+ important in one way.
+
+ <njs`> Right, but if all you want is a working way to pack things
+ together, you could just use cat and save yourself some
+ trouble...
+
+Waaait for it....
+
+ <linus> The recency ordering (which is basically: put objects
+ _physically_ into the pack in the order that they are
+ "reachable" from the head) is important.
+
+ <njs`> okay
+
+ <linus> It's important because that's the thing that gives packs
+ good locality. It keeps the objects close to the head (whether
+ they are old or new, but they are _reachable_ from the head)
+ at the head of the pack. So packs actually have absolutely
+ _wonderful_ IO patterns.
+
+Read that again, because it is important.
+
+ <linus> But recency ordering is totally useless for deciding how
+ to actually generate the deltas, so the delta ordering is
+ something else.
+
+ The delta ordering is (wait for it):
+ - first sort by the "basename" of the object, as defined by
+ the name the object was _first_ reached through when
+ generating the object list
+ - within the same basename, sort by size of the object
+ - but always sort different types separately (commits first).
+
+ That's not exactly it, but it's very close.
+
+ <njs`> The "_first_ reached" thing is not too important, just you
+ need some way to break ties since the same objects may be
+ reachable many ways, yes?
+
+And as if to clarify:
+
+ <linus> The point is that it's all really just any random
+ heuristic, and the ordering is totally unimportant for
+ correctness, but it helps a lot if the heuristic gives
+ "clumping" for things that are likely to delta well against
+ each other.
+
+It is an important point, so secretly, I did my own research and have
+included my results below. To be fair, it has changed some over time.
+And through the magic of Revisionistic History, I draw upon this entry
+from The Git IRC Logs on my father's birthday, March 1:
+
+ <gitster> The quote from the above linus should be rewritten a
+ bit (wait for it):
+ - first sort by type. Different objects never delta with
+ each other.
+ - then sort by filename/dirname. hash of the basename
+ occupies the top BITS_PER_INT-DIR_BITS bits, and bottom
+ DIR_BITS are for the hash of leading path elements.
+ - then if we are doing "thin" pack, the objects we are _not_
+ going to pack but we know about are sorted earlier than
+ other objects.
+ - and finally sort by size, larger to smaller.
+
+In one swell-foop, clarification and obscurification! Nonetheless,
+authoritative. Cryptic, yet concise. It even solicits notions of
+quotes from The Source Code. Clearly, more study is needed.
+
+ <gitster> That's the sort order. What this means is:
+ - we do not delta different object types.
+ - we prefer to delta the objects with the same full path, but
+ allow files with the same name from different directories.
+ - we always prefer to delta against objects we are not going
+ to send, if there are some.
+ - we prefer to delta against larger objects, so that we have
+ lots of removals.
+
+ The penultimate rule is for "thin" packs. It is used when
+ the other side is known to have such objects.
+
+There it is again. "Thin" packs. I'm thinking to myself, "What
+is a 'thin' pack?" So I ask:
+
+ <jdl> What is a "thin" pack?
+
+ <gitster> Use of --objects-edge to rev-list as the upstream of
+ pack-objects. The pack transfer protocol negotiates that.
+
+Woo hoo! Cleared that _right_ up!
+
+ <gitster> There are two directions - push and fetch.
+
+There! Did you see it? It is not '"push" and "pull"'! How often the
+confusion has started here. So casually mentioned, too!
+
+ <gitster> For push, git-send-pack invokes git-receive-pack on the
+ other end. The receive-pack says "I have up to these commits".
+ send-pack looks at them, and computes what are missing from
+ the other end. So "thin" could be the default there.
+
+ In the other direction, fetch, git-fetch-pack and
+ git-clone-pack invokes git-upload-pack on the other end
+ (via ssh or by talking to the daemon).
+
+ There are two cases: fetch-pack with -k and clone-pack is one,
+ fetch-pack without -k is the other. clone-pack and fetch-pack
+ with -k will keep the downloaded packfile without expanded, so
+ we do not use thin pack transfer. Otherwise, the generated
+ pack will have delta without base object in the same pack.
+
+ But fetch-pack without -k will explode the received pack into
+ individual objects, so we automatically ask upload-pack to
+ give us a thin pack if upload-pack supports it.
+
+OK then.
+
+Uh.
+
+Let's return to the previous conversation still in progress.
+
+ <njs`> and "basename" means something like "the tail of end of
+ path of file objects and dir objects, as per basename(3), and
+ we just declare all commit and tag objects to have the same
+ basename" or something?
+
+Luckily, that too is a point that gitster clarified for us!
+
+If I might add, the trick is to make files that _might_ be similar be
+located close to each other in the hash buckets based on their file
+names. It used to be that "foo/Makefile", "bar/baz/quux/Makefile" and
+"Makefile" all landed in the same bucket due to their common basename,
+"Makefile". However, now they land in "close" buckets.
+
+The algorithm allows not just for the _same_ bucket, but for _close_
+buckets to be considered delta candidates. The rationale is
+essentially that files, like Makefiles, often have very similar
+content no matter what directory they live in.
+
+ <linus> I played around with different delta algorithms, and with
+ making the "delta window" bigger, but having too big of a
+ sliding window makes it very expensive to generate the pack:
+ you need to compare every object with a _ton_ of other objects.
+
+ There are a number of other trivial heuristics too, which
+ basically boil down to "don't bother even trying to delta this
+ pair" if we can tell before-hand that the delta isn't worth it
+ (due to size differences, where we can take a previous delta
+ result into account to decide that "ok, no point in trying
+ that one, it will be worse").
+
+ End result: packing is actually very size efficient. It's
+ somewhat CPU-wasteful, but on the other hand, since you're
+ really only supposed to do it maybe once a month (and you can
+ do it during the night), nobody really seems to care.
+
+Nice Engineering Touch, there. Find when it doesn't matter, and
+proclaim it a non-issue. Good style too!
+
+ <njs`> So, just to repeat to see if I'm following, we start by
+ getting a list of the objects we want to pack, we sort it by
+ this heuristic (basically lexicographically on the tuple
+ (type, basename, size)).
+
+ Then we walk through this list, and calculate a delta of
+ each object against the last n (tunable paramater) objects,
+ and pick the smallest of these deltas.
+
+Vastly simplified, but the essence is there!
+
+ <linus> Correct.
+
+ <njs`> And then once we have picked a delta or fulltext to
+ represent each object, we re-sort by recency, and write them
+ out in that order.
+
+ <linus> Yup. Some other small details:
+
+And of course there is the "Other Shoe" Factor too.
+
+ <linus> - We limit the delta depth to another magic value (right
+ now both the window and delta depth magic values are just "10")
+
+ <njs`> Hrm, my intuition is that you'd end up with really _bad_ IO
+ patterns, because the things you want are near by, but to
+ actually reconstruct them you may have to jump all over in
+ random ways.
+
+ <linus> - When we write out a delta, and we haven't yet written
+ out the object it is a delta against, we write out the base
+ object first. And no, when we reconstruct them, we actually
+ get nice IO patterns, because:
+ - larger objects tend to be "more recent" (Linus' law: files grow)
+ - we actively try to generate deltas from a larger object to a
+ smaller one
+ - this means that the top-of-tree very seldom has deltas
+ (ie deltas in _practice_ are "backwards deltas")
+
+Again, we should reread that whole paragraph. Not just because
+Linus has slipped Linus's Law in there on us, but because it is
+important. Let's make sure we clarify some of the points here:
+
+ <njs`> So the point is just that in practice, delta order and
+ recency order match each other quite well.
+
+ <linus> Yes. There's another nice side to this (and yes, it was
+ designed that way ;):
+ - the reason we generate deltas against the larger object is
+ actually a big space saver too!
+
+ <njs`> Hmm, but your last comment (if "we haven't yet written out
+ the object it is a delta against, we write out the base object
+ first"), seems like it would make these facts mostly
+ irrelevant because even if in practice you would not have to
+ wander around much, in fact you just brute-force say that in
+ the cases where you might have to wander, don't do that :-)
+
+ <linus> Yes and no. Notice the rule: we only write out the base
+ object first if the delta against it was more recent. That
+ means that you can actually have deltas that refer to a base
+ object that is _not_ close to the delta object, but that only
+ happens when the delta is needed to generate an _old_ object.
+
+ <linus> See?
+
+Yeah, no. I missed that on the first two or three readings myself.
+
+ <linus> This keeps the front of the pack dense. The front of the
+ pack never contains data that isn't relevant to a "recent"
+ object. The size optimization comes from our use of xdelta
+ (but is true for many other delta algorithms): removing data
+ is cheaper (in size) than adding data.
+
+ When you remove data, you only need to say "copy bytes n--m".
+ In contrast, in a delta that _adds_ data, you have to say "add
+ these bytes: 'actual data goes here'"
+
+ *** njs` has quit: Read error: 104 (Connection reset by peer)
+
+ <linus> Uhhuh. I hope I didn't blow njs` mind.
+
+ *** njs` has joined channel #git
+
+ <pasky> :)
+
+The silent observers are amused. Of course.
+
+And as if njs` was expected to be omniscient:
+
+ <linus> njs - did you miss anything?
+
+OK, I'll spell it out. That's Geek Humor. If njs` was not actually
+connected for a little bit there, how would he know if he missed anything
+while he was disconnected? He's a benevolent dictator with a sense of
+humor! Well noted!
+
+ <njs`> Stupid router. Or gremlins, or whatever.
+
+It's a cheap shot at Cisco. Take 'em when you can.
+
+ <njs`> Yes and no. Notice the rule: we only write out the base
+ object first if the delta against it was more recent.
+
+ I'm getting lost in all these orders, let me re-read :-)
+ So the write-out order is from most recent to least recent?
+ (Conceivably it could be the opposite way too, I'm not sure if
+ we've said) though my connection back at home is logging, so I
+ can just read what you said there :-)
+
+And for those of you paying attention, the Omniscient Trick has just
+been detailed!
+
+ <linus> Yes, we always write out most recent first
+
+For the other record:
+
+ <pasky> njs`: http://pastebin.com/547965
+
+The 'net never forgets, so that should be good until the end of time.
+
+ <njs`> And, yeah, I got the part about deeper-in-history stuff
+ having worse IO characteristics, one sort of doesn't care.
+
+ <linus> With the caveat that if the "most recent" needs an older
+ object to delta against (hey, shrinking sometimes does
+ happen), we write out the old object with the delta.
+
+ <njs`> (if only it happened more...)
+
+ <linus> Anyway, the pack-file could easily be denser still, but
+ because it's used both for streaming (the git protocol) and
+ for on-disk, it has a few pessimizations.
+
+Actually, it is a made-up word. But it is a made-up word being
+used as setup for a later optimization, which is a real word:
+
+ <linus> In particular, while the pack-file is then compressed,
+ it's compressed just one object at a time, so the actual
+ compression factor is less than it could be in theory. But it
+ means that it's all nice random-access with a simple index to
+ do "object name->location in packfile" translation.
+
+ <njs`> I'm assuming the real win for delta-ing large->small is
+ more homogenous statistics for gzip to run over?
+
+ (You have to put the bytes in one place or another, but
+ putting them in a larger blob wins on compression)
+
+ Actually, what is the compression strategy -- each delta
+ individually gzipped, the whole file gzipped, somewhere in
+ between, no compression at all, ....?
+
+ Right.
+
+Reality IRC sets in. For example:
+
+ <pasky> I'll read the rest in the morning, I really have to go
+ sleep or there's no hope whatsoever for me at the today's
+ exam... g'nite all.
+
+Heh.
+
+ <linus> pasky: g'nite
+
+ <njs`> pasky: 'luck
+
+ <linus> Right: large->small matters exactly because of compression
+ behaviour. If it was non-compressed, it probably wouldn't make
+ any difference.
+
+ <njs`> yeah
+
+ <linus> Anyway: I'm not even trying to claim that the pack-files
+ are perfect, but they do tend to have a nice balance of
+ density vs ease-of use.
+
+Gasp! OK, saved. That's a fair Engineering trade off. Close call!
+In fact, Linus reflects on some Basic Engineering Fundamentals,
+design options, etc.
+
+ <linus> More importantly, they allow git to still _conceptually_
+ never deal with deltas at all, and be a "whole object" store.
+
+ Which has some problems (we discussed bad huge-file
+ behaviour on the git lists the other day), but it does mean
+ that the basic git concepts are really really simple and
+ straightforward.
+
+ It's all been quite stable.
+
+ Which I think is very much a result of having very simple
+ basic ideas, so that there's never any confusion about what's
+ going on.
+
+ Bugs happen, but they are "simple" bugs. And bugs that
+ actually get some object store detail wrong are almost always
+ so obious that they never go anywhere.
+
+ <njs`> Yeah.
+
+Nuff said.
+
+ <linus> Anyway. I'm off for bed. It's not 6AM here, but I've got
+ three kids, and have to get up early in the morning to send
+ them off. I need my beauty sleep.
+
+ <njs`> :-)
+
+ <njs`> appreciate the infodump, I really was failing to find the
+ details on git packs :-)
+
+And now you know the rest of the story.
diff --git a/Makefile b/Makefile
index 7030370c6f..fe40b652e6 100644
--- a/Makefile
+++ b/Makefile
@@ -114,7 +114,7 @@ SPARSE_FLAGS = -D__BIG_ENDIAN__ -D__powerpc__
SCRIPT_SH = \
git-add.sh git-bisect.sh git-branch.sh git-checkout.sh \
- git-cherry.sh git-clone.sh git-commit.sh \
+ git-cherry.sh git-clean.sh git-clone.sh git-commit.sh \
git-count-objects.sh git-diff.sh git-fetch.sh \
git-format-patch.sh git-log.sh git-ls-remote.sh \
git-merge-one-file.sh git-parse-remote.sh \
diff --git a/blame.c b/blame.c
index 75cffd8a3f..6730b10b11 100644
--- a/blame.c
+++ b/blame.c
@@ -819,6 +819,7 @@ int main(int argc, const char **argv)
rev.prune_fn = simplify_commit;
rev.topo_setter = topo_setter;
rev.topo_getter = topo_getter;
+ rev.parents = 1;
rev.limited = 1;
commit_list_insert(start_commit, &rev.commits);
diff --git a/date.c b/date.c
index 1c1917b4e8..376d25d241 100644
--- a/date.c
+++ b/date.c
@@ -255,10 +255,10 @@ static int match_multi_number(unsigned long num, char c, const char *date, char
break;
}
/* mm/dd/yy ? */
- if (is_date(num3, num2, num, tm))
+ if (is_date(num3, num, num2, tm))
break;
/* dd/mm/yy ? */
- if (is_date(num3, num, num2, tm))
+ if (is_date(num3, num2, num, tm))
break;
return 0;
}
diff --git a/delta.h b/delta.h
index a15350dabc..9464f3e9b0 100644
--- a/delta.h
+++ b/delta.h
@@ -16,7 +16,8 @@ extern void *patch_delta(void *src_buf, unsigned long src_size,
* This must be called twice on the delta data buffer, first to get the
* expected reference buffer size, and again to get the result buffer size.
*/
-static inline unsigned long get_delta_hdr_size(const unsigned char **datap)
+static inline unsigned long get_delta_hdr_size(const unsigned char **datap,
+ const unsigned char *top)
{
const unsigned char *data = *datap;
unsigned char cmd;
@@ -26,7 +27,7 @@ static inline unsigned long get_delta_hdr_size(const unsigned char **datap)
cmd = *data++;
size |= (cmd & ~0x80) << i;
i += 7;
- } while (cmd & 0x80);
+ } while (cmd & 0x80 && data < top);
*datap = data;
return size;
}
diff --git a/diff.c b/diff.c
index e496905bad..ce98a90805 100644
--- a/diff.c
+++ b/diff.c
@@ -883,6 +883,8 @@ int diff_opt_parse(struct diff_options *options, const char **av, int ac)
options->filter = arg + 14;
else if (!strcmp(arg, "--pickaxe-all"))
options->pickaxe_opts = DIFF_PICKAXE_ALL;
+ else if (!strcmp(arg, "--pickaxe-regex"))
+ options->pickaxe_opts = DIFF_PICKAXE_REGEX;
else if (!strncmp(arg, "-B", 2)) {
if ((options->break_opt =
diff_scoreopt_parse(arg)) == -1)
@@ -1280,28 +1282,34 @@ void diff_flush(struct diff_options *options)
for (i = 0; i < q->nr; i++) {
struct diff_filepair *p = q->queue[i];
- if ((diff_output_format == DIFF_FORMAT_NO_OUTPUT) ||
- (p->status == DIFF_STATUS_UNKNOWN))
- continue;
- if (p->status == 0)
- die("internal error in diff-resolve-rename-copy");
- switch (diff_output_format) {
- case DIFF_FORMAT_PATCH:
- diff_flush_patch(p, options);
- break;
- case DIFF_FORMAT_RAW:
- case DIFF_FORMAT_NAME_STATUS:
- diff_flush_raw(p, line_termination,
- inter_name_termination,
- options);
+
+ switch (p->status) {
+ case DIFF_STATUS_UNKNOWN:
break;
- case DIFF_FORMAT_NAME:
- diff_flush_name(p,
- inter_name_termination,
- line_termination);
+ case 0:
+ die("internal error in diff-resolve-rename-copy");
break;
+ default:
+ switch (diff_output_format) {
+ case DIFF_FORMAT_PATCH:
+ diff_flush_patch(p, options);
+ break;
+ case DIFF_FORMAT_RAW:
+ case DIFF_FORMAT_NAME_STATUS:
+ diff_flush_raw(p, line_termination,
+ inter_name_termination,
+ options);
+ break;
+ case DIFF_FORMAT_NAME:
+ diff_flush_name(p,
+ inter_name_termination,
+ line_termination);
+ break;
+ case DIFF_FORMAT_NO_OUTPUT:
+ break;
+ }
}
- diff_free_filepair(q->queue[i]);
+ diff_free_filepair(p);
}
free(q->queue);
q->queue = NULL;
diff --git a/diff.h b/diff.h
index a268d16ff7..0cebec113f 100644
--- a/diff.h
+++ b/diff.h
@@ -102,6 +102,7 @@ extern int diff_setup_done(struct diff_options *);
#define DIFF_DETECT_COPY 2
#define DIFF_PICKAXE_ALL 1
+#define DIFF_PICKAXE_REGEX 2
extern void diffcore_std(struct diff_options *);
diff --git a/diffcore-pickaxe.c b/diffcore-pickaxe.c
index 50e46ab863..cfcce315ba 100644
--- a/diffcore-pickaxe.c
+++ b/diffcore-pickaxe.c
@@ -5,8 +5,11 @@
#include "diff.h"
#include "diffcore.h"
+#include <regex.h>
+
static unsigned int contains(struct diff_filespec *one,
- const char *needle, unsigned long len)
+ const char *needle, unsigned long len,
+ regex_t *regexp)
{
unsigned int cnt;
unsigned long offset, sz;
@@ -18,15 +21,28 @@ static unsigned int contains(struct diff_filespec *one,
data = one->data;
cnt = 0;
- /* Yes, I've heard of strstr(), but the thing is *data may
- * not be NUL terminated. Sue me.
- */
- for (offset = 0; offset + len <= sz; offset++) {
- /* we count non-overlapping occurrences of needle */
- if (!memcmp(needle, data + offset, len)) {
- offset += len - 1;
+ if (regexp) {
+ regmatch_t regmatch;
+ int flags = 0;
+
+ while (*data && !regexec(regexp, data, 1, &regmatch, flags)) {
+ flags |= REG_NOTBOL;
+ data += regmatch.rm_so;
+ if (*data) data++;
cnt++;
}
+
+ } else { /* Classic exact string match */
+ /* Yes, I've heard of strstr(), but the thing is *data may
+ * not be NUL terminated. Sue me.
+ */
+ for (offset = 0; offset + len <= sz; offset++) {
+ /* we count non-overlapping occurrences of needle */
+ if (!memcmp(needle, data + offset, len)) {
+ offset += len - 1;
+ cnt++;
+ }
+ }
}
return cnt;
}
@@ -36,10 +52,24 @@ void diffcore_pickaxe(const char *needle, int opts)
struct diff_queue_struct *q = &diff_queued_diff;
unsigned long len = strlen(needle);
int i, has_changes;
+ regex_t regex, *regexp = NULL;
struct diff_queue_struct outq;
outq.queue = NULL;
outq.nr = outq.alloc = 0;
+ if (opts & DIFF_PICKAXE_REGEX) {
+ int err;
+ err = regcomp(&regex, needle, REG_EXTENDED | REG_NEWLINE);
+ if (err) {
+ /* The POSIX.2 people are surely sick */
+ char errbuf[1024];
+ regerror(err, &regex, errbuf, 1024);
+ regfree(&regex);
+ die("invalid pickaxe regex: %s", errbuf);
+ }
+ regexp = &regex;
+ }
+
if (opts & DIFF_PICKAXE_ALL) {
/* Showing the whole changeset if needle exists */
for (i = has_changes = 0; !has_changes && i < q->nr; i++) {
@@ -48,16 +78,16 @@ void diffcore_pickaxe(const char *needle, int opts)
if (!DIFF_FILE_VALID(p->two))
continue; /* ignore unmerged */
/* created */
- if (contains(p->two, needle, len))
+ if (contains(p->two, needle, len, regexp))
has_changes++;
}
else if (!DIFF_FILE_VALID(p->two)) {
- if (contains(p->one, needle, len))
+ if (contains(p->one, needle, len, regexp))
has_changes++;
}
else if (!diff_unmodified_pair(p) &&
- contains(p->one, needle, len) !=
- contains(p->two, needle, len))
+ contains(p->one, needle, len, regexp) !=
+ contains(p->two, needle, len, regexp))
has_changes++;
}
if (has_changes)
@@ -80,16 +110,16 @@ void diffcore_pickaxe(const char *needle, int opts)
if (!DIFF_FILE_VALID(p->two))
; /* ignore unmerged */
/* created */
- else if (contains(p->two, needle, len))
+ else if (contains(p->two, needle, len, regexp))
has_changes = 1;
}
else if (!DIFF_FILE_VALID(p->two)) {
- if (contains(p->one, needle, len))
+ if (contains(p->one, needle, len, regexp))
has_changes = 1;
}
else if (!diff_unmodified_pair(p) &&
- contains(p->one, needle, len) !=
- contains(p->two, needle, len))
+ contains(p->one, needle, len, regexp) !=
+ contains(p->two, needle, len, regexp))
has_changes = 1;
if (has_changes)
@@ -98,6 +128,10 @@ void diffcore_pickaxe(const char *needle, int opts)
diff_free_filepair(p);
}
+ if (opts & DIFF_PICKAXE_REGEX) {
+ regfree(&regex);
+ }
+
free(q->queue);
*q = outq;
return;
diff --git a/git-clean.sh b/git-clean.sh
new file mode 100755
index 0000000000..b200868e60
--- /dev/null
+++ b/git-clean.sh
@@ -0,0 +1,80 @@
+#!/bin/sh
+#
+# Copyright (c) 2005-2006 Pavel Roskin
+#
+
+USAGE="[-d] [-n] [-q] [-x | -X]"
+LONG_USAGE='Clean untracked files from the working directory
+ -d remove directories as well
+ -n don'\''t remove anything, just show what would be done
+ -q be quiet, only report errors
+ -x remove ignored files as well
+ -X remove only ignored files as well'
+SUBDIRECTORY_OK=Yes
+. git-sh-setup
+
+ignored=
+ignoredonly=
+cleandir=
+quiet=
+rmf="rm -f"
+rmrf="rm -rf"
+rm_refuse="echo Not removing"
+echo1="echo"
+
+while case "$#" in 0) break ;; esac
+do
+ case "$1" in
+ -d)
+ cleandir=1
+ ;;
+ -n)
+ quiet=1
+ rmf="echo Would remove"
+ rmrf="echo Would remove"
+ rm_refuse="echo Would not remove"
+ echo1=":"
+ ;;
+ -q)
+ quiet=1
+ ;;
+ -x)
+ ignored=1
+ ;;
+ -X)
+ ignoredonly=1
+ ;;
+ *)
+ usage
+ esac
+ shift
+done
+
+case "$ignored,$ignoredonly" in
+ 1,1) usage;;
+esac
+
+if [ -z "$ignored" ]; then
+ excl="--exclude-per-directory=.gitignore"
+ if [ -f "$GIT_DIR/info/exclude" ]; then
+ excl_info="--exclude-from=$GIT_DIR/info/exclude"
+ fi
+ if [ "$ignoredonly" ]; then
+ excl="$excl --ignored"
+ fi
+fi
+
+git-ls-files --others --directory $excl ${excl_info:+"$excl_info"} |
+while read -r file; do
+ if [ -d "$file" -a ! -L "$file" ]; then
+ if [ -z "$cleandir" ]; then
+ $rm_refuse "$file"
+ continue
+ fi
+ $echo1 "Removing $file"
+ $rmrf "$file"
+ else
+ $echo1 "Removing $file"
+ $rmf "$file"
+ fi
+done
diff --git a/git-clone.sh b/git-clone.sh
index 823c74b913..c013e481d0 100755
--- a/git-clone.sh
+++ b/git-clone.sh
@@ -52,7 +52,8 @@ Perhaps git-update-server-info needs to be run there?"
git-http-fetch -v -a -w "$tname" "$name" "$1/" || exit 1
done <"$clone_tmp/refs"
rm -fr "$clone_tmp"
- http_fetch "$1/HEAD" "$GIT_DIR/REMOTE_HEAD"
+ http_fetch "$1/HEAD" "$GIT_DIR/REMOTE_HEAD" ||
+ rm -f "$GIT_DIR/REMOTE_HEAD"
}
# Read git-fetch-pack -k output and store the remote branches.
@@ -324,7 +325,7 @@ test -d "$GIT_DIR/refs/reference-tmp" && rm -fr "$GIT_DIR/refs/reference-tmp"
if test -f "$GIT_DIR/CLONE_HEAD"
then
- # Figure out where the remote HEAD points at.
+ # Read git-fetch-pack -k output and store the remote branches.
perl -e "$copy_refs" "$GIT_DIR" "$use_separate_remote" "$origin"
fi
@@ -332,22 +333,25 @@ cd "$D" || exit
if test -z "$bare" && test -f "$GIT_DIR/REMOTE_HEAD"
then
- head_sha1=`cat "$GIT_DIR/REMOTE_HEAD"`
# Figure out which remote branch HEAD points at.
case "$use_separate_remote" in
'') remote_top=refs/heads ;;
*) remote_top="refs/remotes/$origin" ;;
esac
- # What to use to track the remote primary branch
- if test -n "$use_separate_remote"
- then
- origin_tracking="remotes/$origin/master"
- else
- origin_tracking="heads/$origin"
- fi
+ head_sha1=`cat "$GIT_DIR/REMOTE_HEAD"`
+ case "$head_sha1" in
+ 'ref: refs/'*)
+ # Uh-oh, the remote told us (http transport done against
+ # new style repository with a symref HEAD).
+ # Ideally we should skip the guesswork but for now
+ # opt for minimum change.
+ head_sha1=`expr "$head_sha1" : 'ref: refs/heads/\(.*\)'`
+ head_sha1=`cat "$GIT_DIR/$remote_top/$head_sha1"`
+ ;;
+ esac
- # The name under $remote_top the remote HEAD seems to point at
+ # The name under $remote_top the remote HEAD seems to point at.
head_points_at=$(
(
echo "master"
@@ -368,23 +372,28 @@ then
)
)
- # Write out remotes/$origin file.
+ # Write out remotes/$origin file, and update our "$head_points_at".
case "$head_points_at" in
?*)
mkdir -p "$GIT_DIR/remotes" &&
- echo >"$GIT_DIR/remotes/$origin" \
- "URL: $repo
-Pull: refs/heads/$head_points_at:refs/$origin_tracking" &&
+ git-symbolic-ref HEAD "refs/heads/$head_points_at" &&
case "$use_separate_remote" in
- t) git-update-ref HEAD "$head_sha1" ;;
- *) git-update-ref "refs/heads/$origin" $(git-rev-parse HEAD) ;;
+ t) origin_track="$remote_top/$head_points_at"
+ git-update-ref HEAD "$head_sha1" ;;
+ *) origin_track="$remote_top/$origin"
+ git-update-ref "refs/heads/$origin" "$head_sha1" ;;
esac &&
+ echo >"$GIT_DIR/remotes/$origin" \
+ "URL: $repo
+Pull: refs/heads/$head_points_at:$origin_track" &&
(cd "$GIT_DIR/$remote_top" && find . -type f -print) |
while read dotslref
do
name=`expr "$dotslref" : './\(.*\)'` &&
- test "$head_points_at" = "$name" ||
- test "$origin" = "$name" ||
+ test "$use_separate_remote" = '' && {
+ test "$head_points_at" = "$name" ||
+ test "$origin" = "$name"
+ } ||
echo "Pull: refs/heads/${name}:$remote_top/${name}"
done >>"$GIT_DIR/remotes/$origin" &&
case "$use_separate_remote" in
diff --git a/git-cvsimport.perl b/git-cvsimport.perl
index 3728294e74..c0ae00bda7 100755
--- a/git-cvsimport.perl
+++ b/git-cvsimport.perl
@@ -677,11 +677,7 @@ my $commit = sub {
waitpid($pid,0);
die "Error running git-commit-tree: $?\n" if $?;
- open(C,">$git_dir/refs/heads/$branch")
- or die "Cannot open branch $branch for update: $!\n";
- print C "$cid\n"
- or die "Cannot write branch $branch for update: $!\n";
- close(C)
+ system("git-update-ref refs/heads/$branch $cid") == 0
or die "Cannot write branch $branch for update: $!\n";
if($tag) {
diff --git a/gitk b/gitk
index fa1e83c494..f88c06e565 100755
--- a/gitk
+++ b/gitk
@@ -75,6 +75,7 @@ proc getcommitlines {commfd} {
global commitlisted nextupdate
global leftover
global displayorder commitidx commitrow commitdata
+ global parentlist childlist children
set stuff [read $commfd]
if {$stuff == {}} {
@@ -140,15 +141,26 @@ proc getcommitlines {commfd} {
set id [lindex $ids 0]
if {$listed} {
set olds [lrange $ids 1 end]
- set commitlisted($id) 1
+ if {[llength $olds] > 1} {
+ set olds [lsort -unique $olds]
+ }
+ foreach p $olds {
+ lappend children($p) $id
+ }
} else {
set olds {}
}
- updatechildren $id $olds
+ lappend parentlist $olds
+ if {[info exists children($id)]} {
+ lappend childlist $children($id)
+ } else {
+ lappend childlist {}
+ }
set commitdata($id) [string range $cmit [expr {$j + 1}] end]
set commitrow($id) $commitidx
incr commitidx
lappend displayorder $id
+ lappend commitlisted $listed
set gotsome 1
}
if {$gotsome} {
@@ -181,14 +193,12 @@ proc doupdate {reading} {
proc readcommit {id} {
if {[catch {set contents [exec git-cat-file commit $id]}]} return
- updatechildren $id {}
parsecommit $id $contents 0
}
proc updatecommits {rargs} {
stopfindproc
- foreach v {children nchildren parents nparents commitlisted
- colormap selectedline matchinglines treediffs
+ foreach v {colormap selectedline matchinglines treediffs
mergefilelist currentid rowtextx commitrow
rowidlist rowoffsets idrowranges idrangedrawn iddrawn
linesegends crossings cornercrossings} {
@@ -200,26 +210,6 @@ proc updatecommits {rargs} {
getcommits $rargs
}
-proc updatechildren {id olds} {
- global children nchildren parents nparents
-
- if {![info exists nchildren($id)]} {
- set children($id) {}
- set nchildren($id) 0
- }
- set parents($id) $olds
- set nparents($id) [llength $olds]
- foreach p $olds {
- if {![info exists nchildren($p)]} {
- set children($p) [list $id]
- set nchildren($p) 1
- } elseif {[lsearch -exact $children($p) $id] < 0} {
- lappend children($p) $id
- incr nchildren($p)
- }
- }
-}
-
proc parsecommit {id contents listed} {
global commitinfo cdate
@@ -274,7 +264,7 @@ proc parsecommit {id contents listed} {
}
proc getcommit {id} {
- global commitdata commitinfo nparents
+ global commitdata commitinfo
if {[info exists commitdata($id)]} {
parsecommit $id $commitdata($id) 1
@@ -282,7 +272,6 @@ proc getcommit {id} {
readcommit $id
if {![info exists commitinfo($id)]} {
set commitinfo($id) {"No commit information available"}
- set nparents($id) 0
}
}
return 1
@@ -295,7 +284,7 @@ proc readrefs {} {
foreach v {tagids idtags headids idheads otherrefids idotherrefs} {
catch {unset $v}
}
- set refd [open [list | git-ls-remote [gitdir]] r]
+ set refd [open [list | git ls-remote [gitdir]] r]
while {0 <= [set n [gets $refd line]]} {
if {![regexp {^([0-9a-f]{40}) refs/([^^]*)$} $line \
match id path]} {
@@ -346,7 +335,7 @@ proc error_popup msg {
}
proc makewindow {rargs} {
- global canv canv2 canv3 linespc charspc ctext cflist textfont
+ global canv canv2 canv3 linespc charspc ctext cflist textfont mainfont uifont
global findtype findtypemenu findloc findstring fstring geometry
global entries sha1entry sha1string sha1but
global maincursor textcursor curtextcursor
@@ -354,16 +343,21 @@ proc makewindow {rargs} {
menu .bar
.bar add cascade -label "File" -menu .bar.file
+ .bar configure -font $uifont
menu .bar.file
.bar.file add command -label "Update" -command [list updatecommits $rargs]
.bar.file add command -label "Reread references" -command rereadrefs
.bar.file add command -label "Quit" -command doquit
+ .bar.file configure -font $uifont
menu .bar.edit
.bar add cascade -label "Edit" -menu .bar.edit
.bar.edit add command -label "Preferences" -command doprefs
+ .bar.edit configure -font $uifont
menu .bar.help
.bar add cascade -label "Help" -menu .bar.help
.bar.help add command -label "About gitk" -command about
+ .bar.help add command -label "Key bindings" -command keys
+ .bar.help configure -font $uifont
. configure -menu .bar
if {![info exists geometry(canv1)]} {
@@ -410,7 +404,7 @@ proc makewindow {rargs} {
set entries $sha1entry
set sha1but .ctop.top.bar.sha1label
button $sha1but -text "SHA1 ID: " -state disabled -relief flat \
- -command gotocommit -width 8
+ -command gotocommit -width 8 -font $uifont
$sha1but conf -disabledforeground [$sha1but cget -foreground]
pack .ctop.top.bar.sha1label -side left
entry $sha1entry -width 40 -font $textfont -textvariable sha1string
@@ -440,19 +434,24 @@ proc makewindow {rargs} {
-state disabled -width 26
pack .ctop.top.bar.rightbut -side left -fill y
- button .ctop.top.bar.findbut -text "Find" -command dofind
+ button .ctop.top.bar.findbut -text "Find" -command dofind -font $uifont
pack .ctop.top.bar.findbut -side left
set findstring {}
set fstring .ctop.top.bar.findstring
lappend entries $fstring
- entry $fstring -width 30 -font $textfont -textvariable findstring
+ entry $fstring -width 30 -font $textfont -textvariable findstring -font $textfont
pack $fstring -side left -expand 1 -fill x
set findtype Exact
set findtypemenu [tk_optionMenu .ctop.top.bar.findtype \
findtype Exact IgnCase Regexp]
+ .ctop.top.bar.findtype configure -font $uifont
+ .ctop.top.bar.findtype.menu configure -font $uifont
set findloc "All fields"
tk_optionMenu .ctop.top.bar.findloc findloc "All fields" Headline \
Comments Author Committer Files Pickaxe
+ .ctop.top.bar.findloc configure -font $uifont
+ .ctop.top.bar.findloc.menu configure -font $uifont
+
pack .ctop.top.bar.findloc -side right
pack .ctop.top.bar.findtype -side right
# for making sure type==Exact whenever loc==Pickaxe
@@ -499,7 +498,7 @@ proc makewindow {rargs} {
frame .ctop.cdet.right
set cflist .ctop.cdet.right.cfiles
listbox $cflist -bg white -selectmode extended -width $geometry(cflistw) \
- -yscrollcommand ".ctop.cdet.right.sb set"
+ -yscrollcommand ".ctop.cdet.right.sb set" -font $mainfont
scrollbar .ctop.cdet.right.sb -command "$cflist yview"
pack .ctop.cdet.right.sb -side right -fill y
pack $cflist -side left -fill both -expand 1
@@ -514,12 +513,20 @@ proc makewindow {rargs} {
bindall <ButtonRelease-5> "allcanvs yview scroll 5 units"
bindall <2> "canvscan mark %W %x %y"
bindall <B2-Motion> "canvscan dragto %W %x %y"
+ bindkey <Home> selfirstline
+ bindkey <End> sellastline
bind . <Key-Up> "selnextline -1"
bind . <Key-Down> "selnextline 1"
- bind . <Key-Right> "goforw"
- bind . <Key-Left> "goback"
- bind . <Key-Prior> "allcanvs yview scroll -1 pages"
- bind . <Key-Next> "allcanvs yview scroll 1 pages"
+ bindkey <Key-Right> "goforw"
+ bindkey <Key-Left> "goback"
+ bind . <Key-Prior> "selnextpage -1"
+ bind . <Key-Next> "selnextpage 1"
+ bind . <Control-Home> "allcanvs yview moveto 0.0"
+ bind . <Control-End> "allcanvs yview moveto 1.0"
+ bind . <Control-Key-Up> "allcanvs yview scroll -1 units"
+ bind . <Control-Key-Down> "allcanvs yview scroll 1 units"
+ bind . <Control-Key-Prior> "allcanvs yview scroll -1 pages"
+ bind . <Control-Key-Next> "allcanvs yview scroll 1 pages"
bindkey <Key-Delete> "$ctext yview scroll -1 pages"
bindkey <Key-BackSpace> "$ctext yview scroll -1 pages"
bindkey <Key-space> "$ctext yview scroll 1 pages"
@@ -612,7 +619,7 @@ proc click {w} {
}
proc savestuff {w} {
- global canv canv2 canv3 ctext cflist mainfont textfont
+ global canv canv2 canv3 ctext cflist mainfont textfont uifont
global stuffsaved findmergefiles maxgraphpct
global maxwidth
@@ -622,6 +629,7 @@ proc savestuff {w} {
set f [open "~/.gitk-new" w]
puts $f [list set mainfont $mainfont]
puts $f [list set textfont $textfont]
+ puts $f [list set uifont $uifont]
puts $f [list set findmergefiles $findmergefiles]
puts $f [list set maxgraphpct $maxgraphpct]
puts $f [list set maxwidth $maxwidth]
@@ -729,6 +737,55 @@ Use and redistribute under the terms of the GNU General Public License} \
pack $w.ok -side bottom
}
+proc keys {} {
+ set w .keys
+ if {[winfo exists $w]} {
+ raise $w
+ return
+ }
+ toplevel $w
+ wm title $w "Gitk key bindings"
+ message $w.m -text {
+Gitk key bindings:
+
+<Ctrl-Q> Quit
+<Home> Move to first commit
+<End> Move to last commit
+<Up>, p, i Move up one commit
+<Down>, n, k Move down one commit
+<Left>, z, j Go back in history list
+<Right>, x, l Go forward in history list
+<PageUp> Move up one page in commit list
+<PageDown> Move down one page in commit list
+<Ctrl-Home> Scroll to top of commit list
+<Ctrl-End> Scroll to bottom of commit list
+<Ctrl-Up> Scroll commit list up one line
+<Ctrl-Down> Scroll commit list down one line
+<Ctrl-PageUp> Scroll commit list up one page
+<Ctrl-PageDown> Scroll commit list down one page
+<Delete>, b Scroll diff view up one page
+<Backspace> Scroll diff view up one page
+<Space> Scroll diff view down one page
+u Scroll diff view up 18 lines
+d Scroll diff view down 18 lines
+<Ctrl-F> Find
+<Ctrl-G> Move to next find hit
+<Ctrl-R> Move to previous find hit
+<Return> Move to next find hit
+/ Move to next find hit, or redo find
+? Move to previous find hit
+f Scroll diff view to next file
+<Ctrl-KP+> Increase font size
+<Ctrl-plus> Increase font size
+<Ctrl-KP-> Decrease font size
+<Ctrl-minus> Decrease font size
+} \
+ -justify left -bg white -border 2 -relief sunken
+ pack $w.m -side top -fill both
+ button $w.ok -text Close -command "destroy $w"
+ pack $w.ok -side bottom
+}
+
proc shortids {ids} {
set res {}
foreach id $ids {
@@ -843,15 +900,20 @@ proc makeuparrow {oid x y z} {
}
proc initlayout {} {
- global rowidlist rowoffsets displayorder
+ global rowidlist rowoffsets displayorder commitlisted
global rowlaidout rowoptim
global idinlist rowchk
global commitidx numcommits canvxmax canv
global nextcolor
+ global parentlist childlist children
set commitidx 0
set numcommits 0
set displayorder {}
+ set commitlisted {}
+ set parentlist {}
+ set childlist {}
+ catch {unset children}
set nextcolor 0
set rowidlist {{}}
set rowoffsets {{}}
@@ -950,7 +1012,7 @@ proc showstuff {canshow} {
proc layoutrows {row endrow last} {
global rowidlist rowoffsets displayorder
global uparrowlen downarrowlen maxwidth mingaplen
- global nchildren parents nparents
+ global childlist parentlist
global idrowranges linesegends
global commitidx
global idinlist rowchk
@@ -961,7 +1023,7 @@ proc layoutrows {row endrow last} {
set id [lindex $displayorder $row]
set oldolds {}
set newolds {}
- foreach p $parents($id) {
+ foreach p [lindex $parentlist $row] {
if {![info exists idinlist($p)]} {
lappend newolds $p
} elseif {!$idinlist($p)} {
@@ -1000,7 +1062,7 @@ proc layoutrows {row endrow last} {
lappend idlist $id
lset rowidlist $row $idlist
set z {}
- if {$nchildren($id) > 0} {
+ if {[lindex $childlist $row] ne {}} {
set z [expr {[llength [lindex $rowidlist [expr {$row-1}]]] - $col}]
unset idinlist($id)
}
@@ -1053,16 +1115,22 @@ proc layoutrows {row endrow last} {
}
proc addextraid {id row} {
- global displayorder commitrow commitinfo nparents
+ global displayorder commitrow commitinfo
global commitidx
+ global parentlist childlist children
incr commitidx
lappend displayorder $id
+ lappend parentlist {}
set commitrow($id) $row
readcommit $id
if {![info exists commitinfo($id)]} {
set commitinfo($id) {"No commit information available"}
- set nparents($id) 0
+ }
+ if {[info exists children($id)]} {
+ lappend childlist $children($id)
+ } else {
+ lappend childlist {}
}
}
@@ -1365,7 +1433,7 @@ proc drawparentlinks {id row col olds} {
proc drawlines {id} {
global colormap canv
global idrowranges idrangedrawn
- global children iddrawn commitrow rowidlist
+ global childlist iddrawn commitrow rowidlist
$canv delete lines.$id
set nr [expr {[llength $idrowranges($id)] / 2}]
@@ -1374,14 +1442,12 @@ proc drawlines {id} {
drawlineseg $id $i
}
}
- if {[info exists children($id)]} {
- foreach child $children($id) {
- if {[info exists iddrawn($child)]} {
- set row $commitrow($child)
- set col [lsearch -exact [lindex $rowidlist $row] $child]
- if {$col >= 0} {
- drawparentlinks $child $row $col [list $id]
- }
+ foreach child [lindex $childlist $commitrow($id)] {
+ if {[info exists iddrawn($child)]} {
+ set row $commitrow($child)
+ set col [lsearch -exact [lindex $rowidlist $row] $child]
+ if {$col >= 0} {
+ drawparentlinks $child $row $col [list $id]
}
}
}
@@ -1394,7 +1460,7 @@ proc drawcmittext {id row col rmx} {
global linehtag linentag linedtag
global mainfont namefont canvxmax
- set ofill [expr {[info exists commitlisted($id)]? "blue": "white"}]
+ set ofill [expr {[lindex $commitlisted $row]? "blue": "white"}]
set x [xc $row $col]
set y [yc $row]
set orad [expr {$linespc / 3}]
@@ -1434,7 +1500,7 @@ proc drawcmittext {id row col rmx} {
proc drawcmitrow {row} {
global displayorder rowidlist
global idrowranges idrangedrawn iddrawn
- global commitinfo commitlisted parents numcommits
+ global commitinfo commitlisted parentlist numcommits
if {$row >= $numcommits} return
foreach id [lindex $rowidlist $row] {
@@ -1465,9 +1531,9 @@ proc drawcmitrow {row} {
getcommit $id
}
assigncolor $id
- if {[info exists commitlisted($id)] && [info exists parents($id)]
- && $parents($id) ne {}} {
- set rmx [drawparentlinks $id $row $col $parents($id)]
+ set olds [lindex $parentlist $row]
+ if {$olds ne {}} {
+ set rmx [drawparentlinks $id $row $col $olds]
} else {
set rmx 0
}
@@ -1511,15 +1577,22 @@ proc clear_display {} {
proc assigncolor {id} {
global colormap colors nextcolor
- global parents nparents children nchildren
+ global commitrow parentlist children childlist
global cornercrossings crossings
if {[info exists colormap($id)]} return
set ncolors [llength $colors]
- if {$nchildren($id) == 1} {
- set child [lindex $children($id) 0]
+ if {[info exists commitrow($id)]} {
+ set kids [lindex $childlist $commitrow($id)]
+ } elseif {[info exists children($id)]} {
+ set kids $children($id)
+ } else {
+ set kids {}
+ }
+ if {[llength $kids] == 1} {
+ set child [lindex $kids 0]
if {[info exists colormap($child)]
- && $nparents($child) == 1} {
+ && [llength [lindex $parentlist $commitrow($child)]] == 1} {
set colormap($id) $colormap($child)
return
}
@@ -1552,17 +1625,15 @@ proc assigncolor {id} {
set origbad $badcolors
}
if {[llength $badcolors] < $ncolors - 1} {
- foreach child $children($id) {
+ foreach child $kids {
if {[info exists colormap($child)]
&& [lsearch -exact $badcolors $colormap($child)] < 0} {
lappend badcolors $colormap($child)
}
- if {[info exists parents($child)]} {
- foreach p $parents($child) {
- if {[info exists colormap($p)]
- && [lsearch -exact $badcolors $colormap($p)] < 0} {
- lappend badcolors $colormap($p)
- }
+ foreach p [lindex $parentlist $commitrow($child)] {
+ if {[info exists colormap($p)]
+ && [lsearch -exact $badcolors $colormap($p)] < 0} {
+ lappend badcolors $colormap($p)
}
}
}
@@ -1657,14 +1728,14 @@ proc drawtags {id x xt y1} {
}
proc checkcrossings {row endrow} {
- global displayorder parents rowidlist
+ global displayorder parentlist rowidlist
for {} {$row < $endrow} {incr row} {
set id [lindex $displayorder $row]
set i [lsearch -exact [lindex $rowidlist $row] $id]
if {$i < 0} continue
set idlist [lindex $rowidlist [expr {$row+1}]]
- foreach p $parents($id) {
+ foreach p [lindex $parentlist $row] {
set j [lsearch -exact $idlist $p]
if {$j > 0} {
if {$j < $i - 1} {
@@ -2046,7 +2117,7 @@ proc insertmatch {l id} {
proc findfiles {} {
global selectedline numcommits displayorder ctext
- global ffileline finddidsel parents nparents
+ global ffileline finddidsel parentlist
global findinprogress findstartline findinsertpos
global treediffs fdiffid fdiffsneeded fdiffpos
global findmergefiles
@@ -2064,7 +2135,7 @@ proc findfiles {} {
set fdiffsneeded {}
while 1 {
set id [lindex $displayorder $l]
- if {$findmergefiles || $nparents($id) == 1} {
+ if {$findmergefiles || [llength [lindex $parentlist $l]] == 1} {
if {![info exists treediffs($id)]} {
append diffsneeded "$id\n"
lappend fdiffsneeded $id
@@ -2096,7 +2167,7 @@ proc findfiles {} {
. config -cursor watch
settextcursor watch
set findinprogress 1
- findcont $id
+ findcont
update
}
@@ -2143,7 +2214,7 @@ proc donefilediff {} {
set treediffs($nullid) {}
if {[info exists findid] && $nullid eq $findid} {
unset findid
- findcont $nullid
+ findcont
}
incr fdiffpos
}
@@ -2154,20 +2225,21 @@ proc donefilediff {} {
}
if {[info exists findid] && $fdiffid eq $findid} {
unset findid
- findcont $fdiffid
+ findcont
}
}
}
-proc findcont {id} {
- global findid treediffs parents nparents
+proc findcont {} {
+ global findid treediffs parentlist
global ffileline findstartline finddidsel
global displayorder numcommits matchinglines findinprogress
global findmergefiles
set l $ffileline
- while 1 {
- if {$findmergefiles || $nparents($id) == 1} {
+ while {1} {
+ set id [lindex $displayorder $l]
+ if {$findmergefiles || [llength [lindex $parentlist $l]] == 1} {
if {![info exists treediffs($id)]} {
set findid $id
set ffileline $l
@@ -2189,7 +2261,6 @@ proc findcont {id} {
set l 0
}
if {$l == $findstartline} break
- set id [lindex $displayorder $l]
}
stopfindproc
if {!$finddidsel} {
@@ -2286,10 +2357,26 @@ proc appendwithlinks {text} {
$ctext tag bind link <Leave> { %W configure -cursor $curtextcursor }
}
+proc viewnextline {dir} {
+ global canv linespc
+
+ $canv delete hover
+ set ymax [lindex [$canv cget -scrollregion] 3]
+ set wnow [$canv yview]
+ set wtop [expr {[lindex $wnow 0] * $ymax}]
+ set newtop [expr {$wtop + $dir * $linespc}]
+ if {$newtop < 0} {
+ set newtop 0
+ } elseif {$newtop > $ymax} {
+ set newtop $ymax
+ }
+ allcanvs yview moveto [expr {$newtop * 1.0 / $ymax}]
+}
+
proc selectline {l isnew} {
global canv canv2 canv3 ctext commitinfo selectedline
global displayorder linehtag linentag linedtag
- global canvy0 linespc parents nparents children
+ global canvy0 linespc parentlist childlist
global cflist currentid sha1entry
global commentend idtags linknum
global mergemax numcommits
@@ -2379,9 +2466,10 @@ proc selectline {l isnew} {
}
set comment {}
- if {$nparents($id) > 1} {
+ set olds [lindex $parentlist $l]
+ if {[llength $olds] > 1} {
set np 0
- foreach p $parents($id) {
+ foreach p $olds {
if {$np >= $mergemax} {
set tag mmax
} else {
@@ -2392,17 +2480,13 @@ proc selectline {l isnew} {
incr np
}
} else {
- if {[info exists parents($id)]} {
- foreach p $parents($id) {
- append comment "Parent: [commit_descriptor $p]\n"
- }
+ foreach p $olds {
+ append comment "Parent: [commit_descriptor $p]\n"
}
}
- if {[info exists children($id)]} {
- foreach c $children($id) {
- append comment "Child: [commit_descriptor $c]\n"
- }
+ foreach c [lindex $childlist $l] {
+ append comment "Child: [commit_descriptor $c]\n"
}
append comment "\n"
append comment [lindex $info 5]
@@ -2417,13 +2501,25 @@ proc selectline {l isnew} {
$cflist delete 0 end
$cflist insert end "Comments"
- if {$nparents($id) <= 1} {
+ if {[llength $olds] <= 1} {
startdiff $id
} else {
- mergediff $id
+ mergediff $id $l
}
}
+proc selfirstline {} {
+ unmarkmatches
+ selectline 0 1
+}
+
+proc sellastline {} {
+ global numcommits
+ unmarkmatches
+ set l [expr {$numcommits - 1}]
+ selectline $l 1
+}
+
proc selnextline {dir} {
global selectedline
if {![info exists selectedline]} return
@@ -2432,6 +2528,25 @@ proc selnextline {dir} {
selectline $l 1
}
+proc selnextpage {dir} {
+ global canv linespc selectedline numcommits
+
+ set lpp [expr {([winfo height $canv] - 2) / $linespc}]
+ if {$lpp < 1} {
+ set lpp 1
+ }
+ allcanvs yview scroll [expr {$dir * $lpp}] units
+ if {![info exists selectedline]} return
+ set l [expr {$selectedline + $dir * $lpp}]
+ if {$l < 0} {
+ set l 0
+ } elseif {$l >= $numcommits} {
+ set l [expr $numcommits - 1]
+ }
+ unmarkmatches
+ selectline $l 1
+}
+
proc unselectline {} {
global selectedline
@@ -2489,9 +2604,10 @@ proc goforw {} {
}
}
-proc mergediff {id} {
- global parents diffmergeid diffopts mdifffd
+proc mergediff {id l} {
+ global diffmergeid diffopts mdifffd
global difffilestart diffids
+ global parentlist
set diffmergeid $id
set diffids $id
@@ -2505,12 +2621,13 @@ proc mergediff {id} {
}
fconfigure $mdf -blocking 0
set mdifffd($id) $mdf
- fileevent $mdf readable [list getmergediffline $mdf $id]
+ set np [llength [lindex $parentlist $l]]
+ fileevent $mdf readable [list getmergediffline $mdf $id $np]
set nextupdate [expr {[clock clicks -milliseconds] + 100}]
}
-proc getmergediffline {mdf id} {
- global diffmergeid ctext cflist nextupdate nparents mergemax
+proc getmergediffline {mdf id np} {
+ global diffmergeid ctext cflist nextupdate mergemax
global difffilestart mdifffd
set n [gets $mdf line]
@@ -2543,7 +2660,6 @@ proc getmergediffline {mdf id} {
# do nothing
} else {
# parse the prefix - one ' ', '-' or '+' for each parent
- set np $nparents($id)
set spaces {}
set minuses {}
set pluses {}
@@ -2584,7 +2700,7 @@ proc getmergediffline {mdf id} {
incr nextupdate 100
fileevent $mdf readable {}
update
- fileevent $mdf readable [list getmergediffline $mdf $id]
+ fileevent $mdf readable [list getmergediffline $mdf $id $np]
}
}
@@ -2611,7 +2727,7 @@ proc addtocflist {ids} {
}
proc gettreediffs {ids} {
- global treediff parents treepending
+ global treediff treepending
set treepending $ids
set treediff {}
if {[catch \
@@ -2846,13 +2962,15 @@ proc sha1change {n1 n2 op} {
}
proc gotocommit {} {
- global sha1string currentid commitrow tagids
+ global sha1string currentid commitrow tagids headids
global displayorder numcommits
if {$sha1string == {}
|| ([info exists currentid] && $sha1string == $currentid)} return
if {[info exists tagids($sha1string)]} {
set id $tagids($sha1string)
+ } elseif {[info exists headids($sha1string)]} {
+ set id $headids($sha1string)
} else {
set id [string tolower $sha1string]
if {[regexp {^[0-9a-f]{4,39}$} $id]} {
@@ -2878,7 +2996,7 @@ proc gotocommit {} {
if {[regexp {^[0-9a-fA-F]{4,}$} $sha1string]} {
set type "SHA1 id"
} else {
- set type "Tag"
+ set type "Tag/Head"
}
error_popup "$type $sha1string is not known"
}
@@ -2979,7 +3097,7 @@ proc arrowjump {id n y} {
}
proc lineclick {x y id isnew} {
- global ctext commitinfo children cflist canv thickerline
+ global ctext commitinfo childlist commitrow cflist canv thickerline
if {![info exists commitinfo($id)] && ![getcommit $id]} return
unmarkmatches
@@ -3018,10 +3136,11 @@ proc lineclick {x y id isnew} {
$ctext insert end "\tAuthor:\t[lindex $info 1]\n"
set date [formatdate [lindex $info 2]]
$ctext insert end "\tDate:\t$date\n"
- if {[info exists children($id)]} {
+ set kids [lindex $childlist $commitrow($id)]
+ if {$kids ne {}} {
$ctext insert end "\nChildren:"
set i 0
- foreach child $children($id) {
+ foreach child $kids {
incr i
if {![info exists commitinfo($child)] && ![getcommit $child]} continue
set info $commitinfo($child)
@@ -3368,7 +3487,6 @@ proc listrefs {id} {
proc rereadrefs {} {
global idtags idheads idotherrefs
- global tagids headids otherrefids
set refids [concat [array names idtags] \
[array names idheads] [array names idotherrefs]]
@@ -3777,6 +3895,7 @@ if {$tclencoding == {}} {
set mainfont {Helvetica 9}
set textfont {Courier 9}
+set uifont {Helvetica 9 bold}
set findmergefiles 0
set maxgraphpct 50
set maxwidth 16
diff --git a/imap-send.c b/imap-send.c
index f3cb79b1f8..52e2400b57 100644
--- a/imap-send.c
+++ b/imap-send.c
@@ -1202,6 +1202,7 @@ read_message( FILE *f, msg_data_t *msg )
p = xrealloc(msg->data, len+1);
if (!p)
break;
+ msg->data = p;
}
r = fread( &msg->data[msg->len], 1, len - msg->len, f );
if (r <= 0)
@@ -1332,6 +1333,12 @@ main(int argc, char **argv)
return 1;
}
+ total = count_messages( &all_msgs );
+ if (!total) {
+ fprintf(stderr,"no messages to send\n");
+ return 1;
+ }
+
/* write it to the imap server */
ctx = imap_open_store( &server );
if (!ctx) {
@@ -1339,7 +1346,6 @@ main(int argc, char **argv)
return 1;
}
- total = count_messages( &all_msgs );
fprintf( stderr, "sending %d message%s\n", total, (total!=1)?"s":"" );
ctx->name = imap_folder;
while (1) {
diff --git a/patch-delta.c b/patch-delta.c
index c0e1311435..d95f0d9721 100644
--- a/patch-delta.c
+++ b/patch-delta.c
@@ -28,12 +28,12 @@ void *patch_delta(void *src_buf, unsigned long src_size,
top = delta_buf + delta_size;
/* make sure the orig file size matches what we expect */
- size = get_delta_hdr_size(&data);
+ size = get_delta_hdr_size(&data, top);
if (size != src_size)
return NULL;
/* now the result size */
- size = get_delta_hdr_size(&data);
+ size = get_delta_hdr_size(&data, top);
dst_buf = malloc(size + 1);
if (!dst_buf)
return NULL;
@@ -52,21 +52,37 @@ void *patch_delta(void *src_buf, unsigned long src_size,
if (cmd & 0x20) cp_size |= (*data++ << 8);
if (cmd & 0x40) cp_size |= (*data++ << 16);
if (cp_size == 0) cp_size = 0x10000;
+ if (cp_off + cp_size < cp_size ||
+ cp_off + cp_size > src_size ||
+ cp_size > size)
+ goto bad;
memcpy(out, src_buf + cp_off, cp_size);
out += cp_size;
- } else {
+ size -= cp_size;
+ } else if (cmd) {
+ if (cmd > size)
+ goto bad;
memcpy(out, data, cmd);
out += cmd;
data += cmd;
+ size -= cmd;
+ } else {
+ /*
+ * cmd == 0 is reserved for future encoding
+ * extensions. In the mean time we must fail when
+ * encountering them (might be data corruption).
+ */
+ goto bad;
}
}
/* sanity check */
- if (data != top || out - dst_buf != size) {
+ if (data != top || size != 0) {
+ bad:
free(dst_buf);
return NULL;
}
- *dst_size = size;
+ *dst_size = out - dst_buf;
return dst_buf;
}
diff --git a/sha1_file.c b/sha1_file.c
index ba8c4f7601..e3d011309a 100644
--- a/sha1_file.c
+++ b/sha1_file.c
@@ -808,10 +808,12 @@ static int packed_delta_info(unsigned char *base_sha1,
* the result size.
*/
data = delta_head;
- get_delta_hdr_size(&data); /* ignore base size */
+
+ /* ignore base size */
+ get_delta_hdr_size(&data, delta_head+sizeof(delta_head));
/* Read the result size */
- result_size = get_delta_hdr_size(&data);
+ result_size = get_delta_hdr_size(&data, delta_head+sizeof(delta_head));
*sizep = result_size;
}
return 0;