74 files changed, 3192 insertions(+), 3736 deletions(-)
diff --git a/.gitignore b/.gitignore index 4c8c8e4115..7f2cd55088 100644 --- a/.gitignore +++ b/.gitignore @@ -66,7 +66,6 @@ git-merge-one-file git-merge-ours git-merge-recur git-merge-recursive -git-merge-recursive-old git-merge-resolve git-merge-stupid git-mktag diff --git a/Documentation/config.txt b/Documentation/config.txt index 9d3c71c3b8..21ec55797b 100644 --- a/Documentation/config.txt +++ b/Documentation/config.txt @@ -125,10 +125,17 @@ apply.whitespace:: branch.<name>.remote:: When in branch <name>, it tells `git fetch` which remote to fetch. + If this option is not given, `git fetch` defaults to remote "origin". branch.<name>.merge:: - When in branch <name>, it tells `git fetch` the default remote branch - to be merged. + When in branch <name>, it tells `git fetch` the default refspec to + be marked for merging in FETCH_HEAD. The value has exactly to match + a remote part of one of the refspecs which are fetched from the remote + given by "branch.<name>.remote". + The merge information is used by `git pull` (which at first calls + `git fetch`) to lookup the default branch for merging. Without + this option, `git pull` defaults to merge the first refspec fetched. + Specify multiple values to get an octopus merge. pager.color:: A boolean to enable/disable colored output when the pager is in @@ -219,6 +226,12 @@ i18n.commitEncoding:: browser (and possibly at other places in the future or in other porcelains). See e.g. gitlink:git-mailinfo[1]. Defaults to 'utf-8'. +log.showroot:: + If true, the initial commit will be shown as a big creation event. + This is equivalent to a diff against an empty tree. + Tools like gitlink:git-log[1] or gitlink:git-whatchanged[1], which + normally hide the root commit will now show it. True by default. + merge.summary:: Whether to include summaries of merged commits in newly created merge commit messages. False by default. diff --git a/Documentation/cvs-migration.txt b/Documentation/cvs-migration.txt index 6812683a16..b657f4589f 100644 --- a/Documentation/cvs-migration.txt +++ b/Documentation/cvs-migration.txt @@ -1,113 +1,21 @@ git for CVS users ================= -So you're a CVS user. That's OK, it's a treatable condition. The job of -this document is to put you on the road to recovery, by helping you -convert an existing cvs repository to git, and by showing you how to use a -git repository in a cvs-like fashion. +Git differs from CVS in that every working tree contains a repository with +a full copy of the project history, and no repository is inherently more +important than any other. However, you can emulate the CVS model by +designating a single shared repository which people can synchronize with; +this document explains how to do that. Some basic familiarity with git is required. This link:tutorial.html[tutorial introduction to git] should be sufficient. -First, note some ways that git differs from CVS: +Developing against a shared repository +-------------------------------------- - * Commits are atomic and project-wide, not per-file as in CVS. - - * Offline work is supported: you can make multiple commits locally, - then submit them when you're ready. - - * Branching is fast and easy. - - * Every working tree contains a repository with a full copy of the - project history, and no repository is inherently more important than - any other. However, you can emulate the CVS model by designating a - single shared repository which people can synchronize with; see below - for details. 
- -Importing a CVS archive ------------------------ - -First, install version 2.1 or higher of cvsps from -link:http://www.cobite.com/cvsps/[http://www.cobite.com/cvsps/] and make -sure it is in your path. The magic command line is then - -------------------------------------------- -$ git cvsimport -v -d <cvsroot> -C <destination> <module> -------------------------------------------- - -This puts a git archive of the named CVS module in the directory -<destination>, which will be created if necessary. The -v option makes -the conversion script very chatty. - -The import checks out from CVS every revision of every file. Reportedly -cvsimport can average some twenty revisions per second, so for a -medium-sized project this should not take more than a couple of minutes. -Larger projects or remote repositories may take longer. - -The main trunk is stored in the git branch named `origin`, and additional -CVS branches are stored in git branches with the same names. The most -recent version of the main trunk is also left checked out on the `master` -branch, so you can start adding your own changes right away. - -The import is incremental, so if you call it again next month it will -fetch any CVS updates that have been made in the meantime. For this to -work, you must not modify the imported branches; instead, create new -branches for your own changes, and merge in the imported branches as -necessary. - -Development Models ------------------- - -CVS users are accustomed to giving a group of developers commit access to -a common repository. In the next section we'll explain how to do this -with git. However, the distributed nature of git allows other development -models, and you may want to first consider whether one of them might be a -better fit for your project. - -For example, you can choose a single person to maintain the project's -primary public repository. Other developers then clone this repository -and each work in their own clone. When they have a series of changes that -they're happy with, they ask the maintainer to pull from the branch -containing the changes. The maintainer reviews their changes and pulls -them into the primary repository, which other developers pull from as -necessary to stay coordinated. The Linux kernel and other projects use -variants of this model. - -With a small group, developers may just pull changes from each other's -repositories without the need for a central maintainer. - -Emulating the CVS Development Model ------------------------------------ - -Start with an ordinary git working directory containing the project, and -remove the checked-out files, keeping just the bare .git directory: - ------------------------------------------------- -$ mv project/.git /pub/repo.git -$ rm -r project/ ------------------------------------------------- - -Next, give every team member read/write access to this repository. One -easy way to do this is to give all the team members ssh access to the -machine where the repository is hosted. If you don't want to give them a -full shell on the machine, there is a restricted shell which only allows -users to do git pushes and pulls; see gitlink:git-shell[1]. 
- -Put all the committers in the same group, and make the repository -writable by that group: - ------------------------------------------------- -$ chgrp -R $group repo.git -$ find repo.git -mindepth 1 -type d |xargs chmod ug+rwx,g+s -$ GIT_DIR=repo.git git repo-config core.sharedrepository true ------------------------------------------------- - -Make sure committers have a umask of at most 027, so that the directories -they create are writable and searchable by other group members. - -Suppose this repository is now set up in /pub/repo.git on the host +Suppose a shared repository is set up in /pub/repo.git on the host foo.com. Then as an individual committer you can clone the shared -repository: +repository over ssh with: ------------------------------------------------ $ git clone foo.com:/pub/repo.git/ my-project @@ -121,7 +29,8 @@ $ git pull origin ------------------------------------------------ which merges in any work that others might have done since the clone -operation. +operation. If there are uncommitted changes in your working tree, commit +them first before running git pull. [NOTE] ================================ @@ -129,20 +38,22 @@ The first `git clone` places the following in the `my-project/.git/remotes/origin` file, and that's why the previous step and the next step both work. ------------ -URL: foo.com:/pub/project.git/ my-project -Pull: master:origin +URL: foo.com:/pub/project.git/ +Pull: refs/heads/master:refs/remotes/origin/master ------------ ================================ -You can update the shared repository with your changes using: +You can update the shared repository with your changes by first committing +your changes, and then using the gitlink:git-push[1] command: ------------------------------------------------ $ git push origin master ------------------------------------------------ -If someone else has updated the repository more recently, `git push`, like -`cvs commit`, will complain, in which case you must pull any changes -before attempting the push again. +to "push" those commits to the shared repository. If someone else has +updated the repository more recently, `git push`, like `cvs commit`, will +complain, in which case you must pull any changes before attempting the +push again. In the `git push` command above we specify the name of the remote branch to update (`master`). If we leave that out, `git push` tries to update @@ -151,21 +62,77 @@ in the local repository. So the last `push` can be done with either of: ------------ $ git push origin -$ git push repo.shared.xz:/pub/scm/project.git/ +$ git push foo.com:/pub/project.git/ ------------ as long as the shared repository does not have any branches other than `master`. -[NOTE] -============ -Because of this behavior, if the shared repository and the developer's -repository both have branches named `origin`, then a push like the above -attempts to update the `origin` branch in the shared repository from the -developer's `origin` branch. The results may be unexpected, so it's -usually best to remove any branch named `origin` from the shared -repository. -============ +Setting Up a Shared Repository +------------------------------ + +We assume you have already created a git repository for your project, +possibly created from scratch or from a tarball (see the +link:tutorial.html[tutorial]), or imported from an already existing CVS +repository (see the next section). + +Assume your existing repo is at /home/alice/myproject. 
Create a new "bare" +repository (a repository without a working tree) and fetch your project into +it: + +------------------------------------------------ +$ mkdir /pub/my-repo.git +$ cd /pub/my-repo.git +$ git --bare init-db --shared +$ git --bare fetch /home/alice/myproject master:master +------------------------------------------------ + +Next, give every team member read/write access to this repository. One +easy way to do this is to give all the team members ssh access to the +machine where the repository is hosted. If you don't want to give them a +full shell on the machine, there is a restricted shell which only allows +users to do git pushes and pulls; see gitlink:git-shell[1]. + +Put all the committers in the same group, and make the repository +writable by that group: + +------------------------------------------------ +$ chgrp -R $group /pub/my-repo.git +------------------------------------------------ + +Make sure committers have a umask of at most 027, so that the directories +they create are writable and searchable by other group members. + +Importing a CVS archive +----------------------- + +First, install version 2.1 or higher of cvsps from +link:http://www.cobite.com/cvsps/[http://www.cobite.com/cvsps/] and make +sure it is in your path. Then cd to a checked out CVS working directory +of the project you are interested in and run gitlink:git-cvsimport[1]: + +------------------------------------------- +$ git cvsimport -C <destination> +------------------------------------------- + +This puts a git archive of the named CVS module in the directory +<destination>, which will be created if necessary. + +The import checks out from CVS every revision of every file. Reportedly +cvsimport can average some twenty revisions per second, so for a +medium-sized project this should not take more than a couple of minutes. +Larger projects or remote repositories may take longer. + +The main trunk is stored in the git branch named `origin`, and additional +CVS branches are stored in git branches with the same names. The most +recent version of the main trunk is also left checked out on the `master` +branch, so you can start adding your own changes right away. + +The import is incremental, so if you call it again next month it will +fetch any CVS updates that have been made in the meantime. For this to +work, you must not modify the imported branches; instead, create new +branches for your own changes, and merge in the imported branches as +necessary. Advanced Shared Repository Management ------------------------------------- @@ -178,127 +145,30 @@ You can enforce finer grained permissions using update hooks. See link:howto/update-hook-example.txt[Controlling access to branches using update hooks]. -CVS annotate ------------- +Providing CVS Access to a git Repository +---------------------------------------- + +It is also possible to provide true CVS access to a git repository, so +that developers can still use CVS; see gitlink:git-cvsserver[1] for +details. + +Alternative Development Models +------------------------------ + +CVS users are accustomed to giving a group of developers commit access to +a common repository. As we've seen, this is also possible with git. +However, the distributed nature of git allows other development models, +and you may want to first consider whether one of them might be a better +fit for your project. + +For example, you can choose a single person to maintain the project's +primary public repository. 
Other developers then clone this repository +and each work in their own clone. When they have a series of changes that +they're happy with, they ask the maintainer to pull from the branch +containing the changes. The maintainer reviews their changes and pulls +them into the primary repository, which other developers pull from as +necessary to stay coordinated. The Linux kernel and other projects use +variants of this model. -So, something has gone wrong, and you don't know whom to blame, and -you're an ex-CVS user and used to do "cvs annotate" to see who caused -the breakage. You're looking for the "git annotate", and it's just -claiming not to find such a script. You're annoyed. - -Yes, that's right. Core git doesn't do "annotate", although it's -technically possible, and there are at least two specialized scripts out -there that can be used to get equivalent information (see the git -mailing list archives for details). - -git has a couple of alternatives, though, that you may find sufficient -or even superior depending on your use. One is called "git-whatchanged" -(for obvious reasons) and the other one is called "pickaxe" ("a tool for -the software archaeologist"). - -The "git-whatchanged" script is a truly trivial script that can give you -a good overview of what has changed in a file or a directory (or an -arbitrary list of files or directories). The "pickaxe" support is an -additional layer that can be used to further specify exactly what you're -looking for, if you already know the specific area that changed. - -Let's step back a bit and think about the reason why you would -want to do "cvs annotate a-file.c" to begin with. - -You would use "cvs annotate" on a file when you have trouble -with a function (or even a single "if" statement in a function) -that happens to be defined in the file, which does not do what -you want it to do. And you would want to find out why it was -written that way, because you are about to modify it to suit -your needs, and at the same time you do not want to break its -current callers. For that, you are trying to find out why the -original author did things that way in the original context. - -Many times, it may be enough to see the commit log messages of -commits that touch the file in question, possibly along with the -patches themselves, like this: - - $ git-whatchanged -p a-file.c - -This will show log messages and patches for each commit that -touches a-file. - -This, however, may not be very useful when this file has many -modifications that are not related to the piece of code you are -interested in. You would see many log messages and patches that -do not have anything to do with the piece of code you are -interested in. As an example, assuming that you have this piece -of code that you are interested in in the HEAD version: - - if (frotz) { - nitfol(); - } - -you would use git-rev-list and git-diff-tree like this: - - $ git-rev-list HEAD | - git-diff-tree --stdin -v -p -S'if (frotz) { - nitfol(); - }' - -We have already talked about the "\--stdin" form of git-diff-tree -command that reads the list of commits and compares each commit -with its parents (otherwise you should go back and read the tutorial). 
-The git-whatchanged command internally runs -the equivalent of the above command, and can be used like this: - - $ git-whatchanged -p -S'if (frotz) { - nitfol(); - }' - -When the -S option is used, git-diff-tree command outputs -differences between two commits only if one tree has the -specified string in a file and the corresponding file in the -other tree does not. The above example looks for a commit that -has the "if" statement in it in a file, but its parent commit -does not have it in the same shape in the corresponding file (or -the other way around, where the parent has it and the commit -does not), and the differences between them are shown, along -with the commit message (thanks to the -v flag). It does not -show anything for commits that do not touch this "if" statement. - -Also, in the original context, the same statement might have -appeared at first in a different file and later the file was -renamed to "a-file.c". CVS annotate would not help you to go -back across such a rename, but git would still help you in such -a situation. For that, you can give the -C flag to -git-diff-tree, like this: - - $ git-whatchanged -p -C -S'if (frotz) { - nitfol(); - }' - -When the -C flag is used, file renames and copies are followed. -So if the "if" statement in question happens to be in "a-file.c" -in the current HEAD commit, even if the file was originally -called "o-file.c" and then renamed in an earlier commit, or if -the file was created by copying an existing "o-file.c" in an -earlier commit, you will not lose track. If the "if" statement -did not change across such a rename or copy, then the commit that -does rename or copy would not show in the output, and if the -"if" statement was modified while the file was still called -"o-file.c", it would find the commit that changed the statement -when it was in "o-file.c". - -NOTE: The current version of "git-diff-tree -C" is not eager - enough to find copies, and it will miss the fact that a-file.c - was created by copying o-file.c unless o-file.c was somehow - changed in the same commit. - -You can use the --pickaxe-all flag in addition to the -S flag. -This causes the differences from all the files contained in -those two commits, not just the differences between the files -that contain this changed "if" statement: - - $ git-whatchanged -p -C -S'if (frotz) { - nitfol(); - }' --pickaxe-all - -NOTE: This option is called "--pickaxe-all" because -S - option is internally called "pickaxe", a tool for software - archaeologists. +With a small group, developers may just pull changes from each other's +repositories without the need for a central maintainer. diff --git a/Documentation/diff-format.txt b/Documentation/diff-format.txt index e4520e28e5..883c1bb0a6 100644 --- a/Documentation/diff-format.txt +++ b/Documentation/diff-format.txt @@ -65,62 +65,17 @@ Generating patches with -p When "git-diff-index", "git-diff-tree", or "git-diff-files" are run with a '-p' option, they do not produce the output described above; -instead they produce a patch file. +instead they produce a patch file. You can customize the creation +of such patches via the GIT_EXTERNAL_DIFF and the GIT_DIFF_OPTS +environment variables. -The patch generation can be customized at two levels. - -1. When the environment variable 'GIT_EXTERNAL_DIFF' is not set, - these commands internally invoke "diff" like this: - - diff -L a/<path> -L b/<path> -pu <old> <new> -+ -For added files, `/dev/null` is used for <old>. 
For removed -files, `/dev/null` is used for <new> -+ -The "diff" formatting options can be customized via the -environment variable 'GIT_DIFF_OPTS'. For example, if you -prefer context diff: - - GIT_DIFF_OPTS=-c git-diff-index -p HEAD - - -2. When the environment variable 'GIT_EXTERNAL_DIFF' is set, the - program named by it is called, instead of the diff invocation - described above. -+ -For a path that is added, removed, or modified, -'GIT_EXTERNAL_DIFF' is called with 7 parameters: - - path old-file old-hex old-mode new-file new-hex new-mode -+ -where: - - <old|new>-file:: are files GIT_EXTERNAL_DIFF can use to read the - contents of <old|new>, - <old|new>-hex:: are the 40-hexdigit SHA1 hashes, - <old|new>-mode:: are the octal representation of the file modes. - -+ -The file parameters can point at the user's working file -(e.g. `new-file` in "git-diff-files"), `/dev/null` (e.g. `old-file` -when a new file is added), or a temporary file (e.g. `old-file` in the -index). 'GIT_EXTERNAL_DIFF' should not worry about unlinking the -temporary file --- it is removed when 'GIT_EXTERNAL_DIFF' exits. - -For a path that is unmerged, 'GIT_EXTERNAL_DIFF' is called with 1 -parameter, <path>. - - -git specific extension to diff format -------------------------------------- - -What -p option produces is slightly different from the -traditional diff format. +What the -p option produces is slightly different from the traditional +diff format. 1. It is preceded with a "git diff" header, that looks like this: - diff --git a/file1 b/file2 + diff --git a/file1 b/file2 + The `a/` and `b/` filenames are the same unless rename/copy is involved. Especially, even for a creation or a deletion, diff --git a/Documentation/diff-options.txt b/Documentation/diff-options.txt index e112172ca5..9cdd171af7 100644 --- a/Documentation/diff-options.txt +++ b/Documentation/diff-options.txt @@ -129,5 +129,21 @@ -a:: Shorthand for "--text". +--ignore-space-change:: + Ignore changes in amount of white space. This ignores white + space at line end, and consider all other sequences of one or + more white space characters to be equivalent. + +-b:: + Shorthand for "--ignore-space-change". + +--ignore-all-space:: + Ignore white space when comparing lines. This ignores + difference even if one line has white space where the other + line has none. + +-w:: + Shorthand for "--ignore-all-space". + For more detailed explanation on these common options, see also link:diffcore.html[diffcore documentation]. diff --git a/Documentation/git-branch.txt b/Documentation/git-branch.txt index d43ef1dec4..4f5b5d5028 100644 --- a/Documentation/git-branch.txt +++ b/Documentation/git-branch.txt @@ -8,14 +8,16 @@ git-branch - List, create, or delete branches. SYNOPSIS -------- [verse] -'git-branch' [-r] +'git-branch' [-r] [-a] [-v] [--abbrev=<length>] 'git-branch' [-l] [-f] <branchname> [<start-point>] 'git-branch' (-d | -D) <branchname>... DESCRIPTION ----------- -With no arguments given (or just `-r`) a list of available branches +With no arguments given a list of existing branches will be shown, the current branch will be highlighted with an asterisk. +Option `-r` causes the remote-tracking branches to be listed, +and option `-a` shows both. In its second form, a new branch named <branchname> will be created. It will start out with a head equal to the one given as <start-point>. @@ -45,7 +47,17 @@ OPTIONS a branch that already exists with the same name. -r:: - List only the "remote" branches. + List the remote-tracking branches. 
+ +-a:: + List both remote-tracking branches and local branches. + +-v:: + Show sha1 and subject message for each head. + +--abbrev=<length>:: + Alter minimum display length for sha1 in output listing, + default value is 7. <branchname>:: The name of the branch to create or delete. diff --git a/Documentation/git-clone.txt b/Documentation/git-clone.txt index 86060472ad..985043faca 100644 --- a/Documentation/git-clone.txt +++ b/Documentation/git-clone.txt @@ -11,26 +11,26 @@ SYNOPSIS [verse] 'git-clone' [--template=<template_directory>] [-l [-s]] [-q] [-n] [--bare] [-o <name>] [-u <upload-pack>] [--reference <repository>] - [--use-separate-remote] <repository> [<directory>] + [--use-separate-remote | --no-separate-remote] <repository> + [<directory>] DESCRIPTION ----------- -Clones a repository into a newly created directory. All remote -branch heads are copied under `$GIT_DIR/refs/heads/`, except -that the remote `master` is also copied to `origin` branch. -In addition, `$GIT_DIR/remotes/origin` file is set up to have -this line: +Clones a repository into a newly created directory, creates +remote-tracking branches for each branch in the cloned repository +(visible using `git branch -r`), and creates and checks out a master +branch equal to the cloned repository's master branch. - Pull: master:origin - -This is to help the typical workflow of working off of the -remote `master` branch. Every time `git pull` without argument -is run, the progress on the remote `master` branch is tracked by -copying it into the local `origin` branch, and merged into the -branch you are currently working on. Remote branches other than -`master` are also added there to be tracked. +After the clone, a plain `git fetch` without arguments will update +all the remote-tracking branches, and a `git pull` without +arguments will in addition merge the remote master branch into the +current branch. +This default configuration is achieved by creating references to +the remote branch heads under `$GIT_DIR/refs/remotes/origin` and +by initializing `remote.origin.url` and `remote.origin.fetch` +configuration variables. OPTIONS ------- @@ -71,9 +71,13 @@ OPTIONS Make a 'bare' GIT repository. That is, instead of creating `<directory>` and placing the administrative files in `<directory>/.git`, make the `<directory>` - itself the `$GIT_DIR`. This implies `-n` option. When - this option is used, neither the `origin` branch nor the - default `remotes/origin` file is created. + itself the `$GIT_DIR`. This obviously implies the `-n` + because there is nowhere to check out the working tree. + Also the branch heads at the remote are copied directly + to corresponding local branch heads, without mapping + them to `refs/remotes/origin/`. When this option is + used, neither the `origin` branch nor the default + `remotes/origin` file is created. --origin <name>:: -o <name>:: @@ -97,8 +101,15 @@ OPTIONS --use-separate-remote:: Save remotes heads under `$GIT_DIR/remotes/origin/` instead - of `$GIT_DIR/refs/heads/`. Only the master branch is saved - in the latter. + of `$GIT_DIR/refs/heads/`. Only the local master branch is + saved in the latter. This is the default. + +--no-separate-remote:: + Save remotes heads in the same namespace as the local + heads, `$GIT_DIR/refs/heads/'. In regular repositories, + this is a legacy setup git-clone created by default in + older Git versions, and will be removed before the next + major release. <repository>:: The (possibly remote) repository to clone from. 
It can diff --git a/Documentation/git-diff.txt b/Documentation/git-diff.txt index 228c4d95bd..3144864d85 100644 --- a/Documentation/git-diff.txt +++ b/Documentation/git-diff.txt @@ -22,8 +22,10 @@ the number of trees given to the command. * When one <tree-ish> is given, the working tree and the named tree are compared, using `git-diff-index`. The option - `--cached` can be given to compare the index file and + `--index` can be given to compare the index file and the named tree. + `--cached` is a deprecated alias for `--index`. It's use is + discouraged. * When two <tree-ish>s are given, these two trees are compared using `git-diff-tree`. @@ -47,7 +49,7 @@ Various ways to check your working tree:: + ------------ $ git diff <1> -$ git diff --cached <2> +$ git diff --index <2> $ git diff HEAD <3> ------------ + diff --git a/Documentation/git-merge.txt b/Documentation/git-merge.txt index bebf30ad3d..e2954aa76e 100644 --- a/Documentation/git-merge.txt +++ b/Documentation/git-merge.txt @@ -8,12 +8,14 @@ git-merge - Grand Unified Merge Driver SYNOPSIS -------- -'git-merge' [-n] [--no-commit] [-s <strategy>]... <msg> <head> <remote> <remote>... - +[verse] +'git-merge' [-n] [--no-commit] [--squash] [-s <strategy>]... + [--reflog-action=<action>] + -m=<msg> <remote> <remote>... DESCRIPTION ----------- -This is the top-level user interface to the merge machinery +This is the top-level interface to the merge machinery which drives multiple merge strategy scripts. @@ -27,13 +29,19 @@ include::merge-options.txt[] to give a good default for automated `git-merge` invocations. <head>:: - our branch head commit. + Our branch head commit. This has to be `HEAD`, so new + syntax does not require it <remote>:: - other branch head merged into our branch. You need at + Other branch head merged into our branch. You need at least one <remote>. Specifying more than one <remote> obviously means you are trying an Octopus. +--reflog-action=<action>:: + This is used internally when `git-pull` calls this command + to record that the merge was created by `pull` command + in the `ref-log` entry that results from the merge. + include::merge-strategies.txt[] diff --git a/Documentation/git-shortlog.txt b/Documentation/git-shortlog.txt index d54fc3e5c6..95fa9010c1 100644 --- a/Documentation/git-shortlog.txt +++ b/Documentation/git-shortlog.txt @@ -8,6 +8,7 @@ git-shortlog - Summarize 'git log' output SYNOPSIS -------- git-log --pretty=short | 'git-shortlog' [-h] [-n] [-s] +git-shortlog [-n|--number] [-s|--summary] [<committish>...] DESCRIPTION ----------- diff --git a/Documentation/git-svn.txt b/Documentation/git-svn.txt index a764d1f8ee..a45067e164 100644 --- a/Documentation/git-svn.txt +++ b/Documentation/git-svn.txt @@ -49,7 +49,7 @@ latest revision. Note: You should never attempt to modify the remotes/git-svn branch outside of git-svn. Instead, create a branch from -remotes/git-svn and work on that branch. Use the 'commit' +remotes/git-svn and work on that branch. Use the 'dcommit' command (see below) to write git commits back to remotes/git-svn. @@ -274,7 +274,7 @@ ADVANCED OPTIONS -b<refname>:: --branch <refname>:: -Used with 'fetch' or 'commit'. +Used with 'fetch', 'dcommit' or 'commit'. This can be used to join arbitrary git branches to remotes/git-svn on new commits where the tree object is equivalent. @@ -368,7 +368,7 @@ SVN was very wrong. 
Basic Examples ~~~~~~~~~~~~~~ -Tracking and contributing to an Subversion managed-project: +Tracking and contributing to a Subversion-managed project: ------------------------------------------------------------------------ # Initialize a repo (like git init-db): @@ -377,10 +377,9 @@ Tracking and contributing to an Subversion managed-project: git-svn fetch # Create your own branch to hack on: git checkout -b my-branch remotes/git-svn -# Commit only the git commits you want to SVN: - git-svn commit <tree-ish> [<tree-ish_2> ...] -# Commit all the git commits from my-branch that don't exist in SVN: - git-svn commit remotes/git-svn..my-branch +# Do some work, and then commit your new changes to SVN, as well as +# automatically updating your working HEAD: + git-svn dcommit # Something is committed to SVN, rebase the latest into your branch: git-svn fetch && git rebase remotes/git-svn # Append svn:ignore settings to the default git exclude file: @@ -404,26 +403,24 @@ which can lead to merge commits reversing previous commits in SVN. DESIGN PHILOSOPHY ----------------- Merge tracking in Subversion is lacking and doing branched development -with Subversion is cumbersome as a result. git-svn completely forgoes -any automated merge/branch tracking on the Subversion side and leaves it -entirely up to the user on the git side. It's simply not worth it to do -a useful translation when the original signal is weak. +with Subversion is cumbersome as a result. git-svn does not do +automated merge/branch tracking by default and leaves it entirely up to +the user on the git side. [[tracking-multiple-repos]] TRACKING MULTIPLE REPOSITORIES OR BRANCHES ------------------------------------------ -This is for advanced users, most users should ignore this section. - Because git-svn does not care about relationships between different branches or directories in a Subversion repository, git-svn has a simple hack to allow it to track an arbitrary number of related _or_ unrelated -SVN repositories via one git repository. Simply set the GIT_SVN_ID -environment variable to a name other other than "git-svn" (the default) -and git-svn will ignore the contents of the $GIT_DIR/svn/git-svn directory -and instead do all of its work in $GIT_DIR/svn/$GIT_SVN_ID for that -invocation. The interface branch will be remotes/$GIT_SVN_ID, instead of -remotes/git-svn. Any remotes/$GIT_SVN_ID branch should never be modified -by the user outside of git-svn commands. +SVN repositories via one git repository. Simply use the --id/-i flag or +set the GIT_SVN_ID environment variable to a name other other than +"git-svn" (the default) and git-svn will ignore the contents of the +$GIT_DIR/svn/git-svn directory and instead do all of its work in +$GIT_DIR/svn/$GIT_SVN_ID for that invocation. The interface branch will +be remotes/$GIT_SVN_ID, instead of remotes/git-svn. Any +remotes/$GIT_SVN_ID branch should never be modified by the user outside +of git-svn commands. [[fetch-args]] ADDITIONAL FETCH ARGUMENTS @@ -486,7 +483,8 @@ If you are not using the SVN::* Perl libraries and somebody commits a conflicting changeset to SVN at a bad moment (right before you commit) causing a conflict and your commit to fail, your svn working tree ($GIT_DIR/git-svn/tree) may be dirtied. The easiest thing to do is -probably just to rm -rf $GIT_DIR/git-svn/tree and run 'rebuild'. +probably just to rm -rf $GIT_DIR/git-svn/tree and run 'rebuild'. You +can avoid this problem entirely by using 'dcommit'. We ignore all SVN properties except svn:executable. 
Too difficult to map them since we rely heavily on git write-tree being _exactly_ the diff --git a/Documentation/git-symbolic-ref.txt b/Documentation/git-symbolic-ref.txt index 68ac6a65df..4bc35a1d4b 100644 --- a/Documentation/git-symbolic-ref.txt +++ b/Documentation/git-symbolic-ref.txt @@ -19,29 +19,22 @@ argument to see on which branch your working tree is on. Give two arguments, create or update a symbolic ref <name> to point at the given branch <ref>. -Traditionally, `.git/HEAD` is a symlink pointing at -`refs/heads/master`. When we want to switch to another branch, -we did `ln -sf refs/heads/newbranch .git/HEAD`, and when we want +A symbolic ref is a regular file that stores a string that +begins with `ref: refs/`. For example, your `.git/HEAD` is +a regular file whose contents is `ref: refs/heads/master`. + +NOTES +----- +In the past, `.git/HEAD` was a symbolic link pointing at +`refs/heads/master`. When we wanted to switch to another branch, +we did `ln -sf refs/heads/newbranch .git/HEAD`, and when we wanted to find out which branch we are on, we did `readlink .git/HEAD`. This was fine, and internally that is what still happens by default, but on platforms that do not have working symlinks, or that do not have the `readlink(1)` command, this was a bit cumbersome. On some platforms, `ln -sf` does not even work as -advertised (horrors). - -A symbolic ref can be a regular file that stores a string that -begins with `ref: refs/`. For example, your `.git/HEAD` *can* -be a regular file whose contents is `ref: refs/heads/master`. -This can be used on a filesystem that does not support symbolic -links. Instead of doing `readlink .git/HEAD`, `git-symbolic-ref -HEAD` can be used to find out which branch we are on. To point -the HEAD to `newbranch`, instead of `ln -sf refs/heads/newbranch -.git/HEAD`, `git-symbolic-ref HEAD refs/heads/newbranch` can be -used. - -Currently, .git/HEAD uses a regular file symbolic ref on Cygwin, -and everywhere else it is implemented as a symlink. This can be -changed at compilation time. +advertised (horrors). Therefore symbolic links are now deprecated +and symbolic refs are used by default. Author ------ diff --git a/Documentation/git.txt b/Documentation/git.txt index 619d65685e..6382ef0a02 100644 --- a/Documentation/git.txt +++ b/Documentation/git.txt @@ -639,11 +639,35 @@ git Commits git Diffs ~~~~~~~~~ 'GIT_DIFF_OPTS':: + Only valid setting is "--unified=??" or "-u??" to set the + number of context lines shown when a unified diff is created. + This takes precedence over any "-U" or "--unified" option + value passed on the git diff command line. + 'GIT_EXTERNAL_DIFF':: - see the "generating patches" section in : - gitlink:git-diff-index[1]; - gitlink:git-diff-files[1]; - gitlink:git-diff-tree[1] + When the environment variable 'GIT_EXTERNAL_DIFF' is set, the + program named by it is called, instead of the diff invocation + described above. For a path that is added, removed, or modified, + 'GIT_EXTERNAL_DIFF' is called with 7 parameters: + + path old-file old-hex old-mode new-file new-hex new-mode ++ +where: + + <old|new>-file:: are files GIT_EXTERNAL_DIFF can use to read the + contents of <old|new>, + <old|new>-hex:: are the 40-hexdigit SHA1 hashes, + <old|new>-mode:: are the octal representation of the file modes. + ++ +The file parameters can point at the user's working file +(e.g. `new-file` in "git-diff-files"), `/dev/null` (e.g. `old-file` +when a new file is added), or a temporary file (e.g. `old-file` in the +index). 
'GIT_EXTERNAL_DIFF' should not worry about unlinking the +temporary file --- it is removed when 'GIT_EXTERNAL_DIFF' exits. ++ +For a path that is unmerged, 'GIT_EXTERNAL_DIFF' is called with 1 +parameter, <path>. other ~~~~~ diff --git a/Documentation/tutorial.txt b/Documentation/tutorial.txt index 6555e58882..fe4491de41 100644 --- a/Documentation/tutorial.txt +++ b/Documentation/tutorial.txt @@ -228,29 +228,28 @@ at /home/bob/myrepo. She does this with: ------------------------------------------------ $ cd /home/alice/project -$ git pull /home/bob/myrepo +$ git pull /home/bob/myrepo master ------------------------------------------------ -This actually pulls changes from the branch in Bob's repository named -"master". Alice could request a different branch by adding the name -of the branch to the end of the git pull command line. +This merges the changes from Bob's "master" branch into Alice's +current branch. If Alice has made her own changes in the meantime, +then she may need to manually fix any conflicts. (Note that the +"master" argument in the above command is actually unnecessary, as it +is the default.) -This merges Bob's changes into her repository; "git log" will -now show the new commits. If Alice has made her own changes in the -meantime, then Bob's changes will be merged in, and she will need to -manually fix any conflicts. +The "pull" command thus performs two operations: it fetches changes +from a remote branch, then merges them into the current branch. -A more cautious Alice might wish to examine Bob's changes before -pulling them. She can do this by creating a temporary branch just -for the purpose of studying Bob's changes: +You can perform the first operation alone using the "git fetch" +command. For example, Alice could create a temporary branch just to +track Bob's changes, without merging them with her own, using: ------------------------------------- $ git fetch /home/bob/myrepo master:bob-incoming ------------------------------------- which fetches the changes from Bob's master branch into a new branch -named bob-incoming. (Unlike git pull, git fetch just fetches a copy -of Bob's line of development without doing any merging). Then +named bob-incoming. Then ------------------------------------- $ git log -p master..bob-incoming @@ -259,8 +258,8 @@ $ git log -p master..bob-incoming shows a list of all the changes that Bob made since he branched from Alice's master branch. -After examining those changes, and possibly fixing things, Alice can -pull the changes into her master branch: +After examining those changes, and possibly fixing things, Alice +could pull the changes into her master branch: ------------------------------------- $ git checkout master @@ -270,6 +269,18 @@ $ git pull . bob-incoming The last command is a pull from the "bob-incoming" branch in Alice's own repository. +Alice could also perform both steps at once with: + +------------------------------------- +$ git pull /home/bob/myrepo master:bob-incoming +------------------------------------- + +This is just like the "git pull /home/bob/myrepo master" that we saw +before, except that it also stores the unmerged changes from bob's +master branch in bob-incoming before merging them into Alice's +current branch. Note that git pull always merges into the current +branch, regardless of what else is given on the commandline. 
+ Later, Bob can update his repo with Alice's latest changes using ------------------------------------- @@ -99,9 +99,6 @@ Issues of note: - "perl" and POSIX-compliant shells are needed to use most of the barebone Porcelainish scripts. - - "python" 2.3 or more recent; if you have 2.3, you may need - to build with "make WITH_OWN_SUBPROCESS_PY=YesPlease". - - Some platform specific issues are dealt with Makefile rules, but depending on your specific installation, you may not have all the libraries/tools needed, or you may have @@ -69,8 +69,6 @@ all: # # Define NO_MMAP if you want to avoid mmap. # -# Define WITH_OWN_SUBPROCESS_PY if you want to use with python 2.3. -# # Define NO_IPV6 if you lack IPv6 support and getaddrinfo(). # # Define NO_SOCKADDR_STORAGE if your platform does not have struct @@ -93,6 +91,10 @@ all: # # Define USE_STDEV below if you want git to care about the underlying device # change being considered an inode change from the update-cache perspective. +# +# Define NO_PERL_MAKEMAKER if you cannot use Makefiles generated by perl's +# MakeMaker (e.g. using ActiveState under Cygwin). +# GIT-VERSION-FILE: .FORCE-GIT-VERSION-FILE @$(SHELL_PATH) ./GIT-VERSION-GEN @@ -116,7 +118,6 @@ prefix = $(HOME) bindir = $(prefix)/bin gitexecdir = $(bindir) template_dir = $(prefix)/share/git-core/templates/ -GIT_PYTHON_DIR = $(prefix)/share/git-core/python # DESTDIR= # default configuration for gitweb @@ -135,7 +136,7 @@ GITWEB_FAVICON = git-favicon.png GITWEB_SITE_HEADER = GITWEB_SITE_FOOTER = -export prefix bindir gitexecdir template_dir GIT_PYTHON_DIR +export prefix bindir gitexecdir template_dir CC = gcc AR = ar @@ -174,17 +175,13 @@ SCRIPT_SH = \ SCRIPT_PERL = \ git-archimport.perl git-cvsimport.perl git-relink.perl \ - git-shortlog.perl git-rerere.perl \ + git-rerere.perl \ git-cvsserver.perl \ git-svnimport.perl git-cvsexportcommit.perl \ git-send-email.perl git-svn.perl -SCRIPT_PYTHON = \ - git-merge-recursive-old.py - SCRIPTS = $(patsubst %.sh,%,$(SCRIPT_SH)) \ $(patsubst %.perl,%,$(SCRIPT_PERL)) \ - $(patsubst %.py,%,$(SCRIPT_PYTHON)) \ git-cherry-pick git-status git-instaweb # ... and all the rest that could be moved out of bindir to gitexecdir @@ -227,12 +224,6 @@ endif ifndef PERL_PATH PERL_PATH = /usr/bin/perl endif -ifndef PYTHON_PATH - PYTHON_PATH = /usr/bin/python -endif - -PYMODULES = \ - gitMergeCommon.py LIB_FILE=libgit.a XDIFF_LIB=xdiff/lib.a @@ -300,6 +291,7 @@ BUILTIN_OBJS = \ builtin-rev-parse.o \ builtin-rm.o \ builtin-runstatus.o \ + builtin-shortlog.o \ builtin-show-branch.o \ builtin-stripspace.o \ builtin-symbolic-ref.o \ @@ -423,16 +415,6 @@ endif -include config.mak.autogen -include config.mak -ifdef WITH_OWN_SUBPROCESS_PY - PYMODULES += compat/subprocess.py -else - ifeq ($(NO_PYTHON),) - ifneq ($(shell $(PYTHON_PATH) -c 'import subprocess;print"OK"' 2>/dev/null),OK) - PYMODULES += compat/subprocess.py - endif - endif -endif - ifndef NO_CURL ifdef CURLDIR # This is still problematic -- gcc does not always want -R. 
@@ -561,6 +543,9 @@ endif ifdef NO_ACCURATE_DIFF BASIC_CFLAGS += -DNO_ACCURATE_DIFF endif +ifdef NO_PERL_MAKEMAKER + export NO_PERL_MAKEMAKER +endif # Shell quote (do not use $(call) to accommodate ancient setups); @@ -574,8 +559,6 @@ prefix_SQ = $(subst ','\'',$(prefix)) SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH)) PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH)) -PYTHON_PATH_SQ = $(subst ','\'',$(PYTHON_PATH)) -GIT_PYTHON_DIR_SQ = $(subst ','\'',$(GIT_PYTHON_DIR)) LIBS = $(GITLIBS) $(EXTLIBS) @@ -592,8 +575,8 @@ export prefix TAR INSTALL DESTDIR SHELL_PATH template_dir all: $(ALL_PROGRAMS) $(BUILT_INS) git$X gitk gitweb/gitweb.cgi -all: perl/Makefile - $(MAKE) -C perl +all: + $(MAKE) -C perl PERL_PATH='$(PERL_PATH_SQ)' prefix='$(prefix_SQ)' all $(MAKE) -C templates strip: $(PROGRAMS) git$X @@ -622,12 +605,15 @@ $(patsubst %.sh,%,$(SCRIPT_SH)) : % : %.sh -e 's|@@PERL@@|$(PERL_PATH_SQ)|g' \ -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \ -e 's/@@NO_CURL@@/$(NO_CURL)/g' \ - -e 's/@@NO_PYTHON@@/$(NO_PYTHON)/g' \ $@.sh >$@+ chmod +x $@+ mv $@+ $@ -$(patsubst %.perl,%,$(SCRIPT_PERL)): perl/Makefile +$(patsubst %.perl,%,$(SCRIPT_PERL)): perl/perl.mak + +perl/perl.mak: GIT-CFLAGS + $(MAKE) -C perl PERL_PATH='$(PERL_PATH_SQ)' prefix='$(prefix_SQ)' $(@F) + $(patsubst %.perl,%,$(SCRIPT_PERL)): % : %.perl rm -f $@ $@+ INSTLIBDIR=`$(MAKE) -C perl -s --no-print-directory instlibdir` && \ @@ -644,15 +630,6 @@ $(patsubst %.perl,%,$(SCRIPT_PERL)): % : %.perl chmod +x $@+ mv $@+ $@ -$(patsubst %.py,%,$(SCRIPT_PYTHON)) : % : %.py GIT-CFLAGS - rm -f $@ $@+ - sed -e '1s|#!.*python|#!$(PYTHON_PATH_SQ)|' \ - -e 's|@@GIT_PYTHON_PATH@@|$(GIT_PYTHON_DIR_SQ)|g' \ - -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \ - $@.py >$@+ - chmod +x $@+ - mv $@+ $@ - git-cherry-pick: git-revert cp $< $@+ mv $@+ $@ @@ -689,7 +666,6 @@ git-instaweb: git-instaweb.sh gitweb/gitweb.cgi gitweb/gitweb.css sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \ -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \ -e 's/@@NO_CURL@@/$(NO_CURL)/g' \ - -e 's/@@NO_PYTHON@@/$(NO_PYTHON)/g' \ -e '/@@GITWEB_CGI@@/r gitweb/gitweb.cgi' \ -e '/@@GITWEB_CGI@@/d' \ -e '/@@GITWEB_CSS@@/r gitweb/gitweb.css' \ @@ -709,7 +685,6 @@ configure: configure.ac git$X git.spec \ $(patsubst %.sh,%,$(SCRIPT_SH)) \ $(patsubst %.perl,%,$(SCRIPT_PERL)) \ - $(patsubst %.py,%,$(SCRIPT_PYTHON)) \ : GIT-VERSION-FILE %.o: %.c GIT-CFLAGS @@ -783,7 +758,7 @@ tags: find . -name '*.[hcS]' -print | xargs ctags -a ### Detect prefix changes -TRACK_CFLAGS = $(subst ','\'',$(ALL_CFLAGS)):$(GIT_PYTHON_DIR_SQ):\ +TRACK_CFLAGS = $(subst ','\'',$(ALL_CFLAGS)):\ $(bindir_SQ):$(gitexecdir_SQ):$(template_dir_SQ):$(prefix_SQ) GIT-CFLAGS: .FORCE-GIT-CFLAGS @@ -799,7 +774,6 @@ GIT-CFLAGS: .FORCE-GIT-CFLAGS # However, the environment gets quite big, and some programs have problems # with that. -export NO_PYTHON export NO_SVN_TESTS test: all @@ -833,9 +807,7 @@ install: all $(INSTALL) $(ALL_PROGRAMS) '$(DESTDIR_SQ)$(gitexecdir_SQ)' $(INSTALL) git$X gitk '$(DESTDIR_SQ)$(bindir_SQ)' $(MAKE) -C templates DESTDIR='$(DESTDIR_SQ)' install - $(MAKE) -C perl install - $(INSTALL) -d -m755 '$(DESTDIR_SQ)$(GIT_PYTHON_DIR_SQ)' - $(INSTALL) $(PYMODULES) '$(DESTDIR_SQ)$(GIT_PYTHON_DIR_SQ)' + $(MAKE) -C perl prefix='$(prefix_SQ)' install if test 'z$(bindir_SQ)' != 'z$(gitexecdir_SQ)'; \ then \ ln -f '$(DESTDIR_SQ)$(bindir_SQ)/git$X' \ @@ -905,8 +877,7 @@ clean: rm -f $(htmldocs).tar.gz $(manpages).tar.gz rm -f gitweb/gitweb.cgi $(MAKE) -C Documentation/ clean - [ ! 
-f perl/Makefile ] || $(MAKE) -C perl/ clean || $(MAKE) -C perl/ clean - rm -f perl/ppport.h perl/Makefile.old + $(MAKE) -C perl clean $(MAKE) -C templates/ clean $(MAKE) -C t/ clean rm -f GIT-VERSION-FILE GIT-CFLAGS @@ -922,7 +893,6 @@ check-docs:: case "$$v" in \ git-merge-octopus | git-merge-ours | git-merge-recursive | \ git-merge-resolve | git-merge-stupid | git-merge-recur | \ - git-merge-recursive-old | \ git-ssh-pull | git-ssh-push ) continue ;; \ esac ; \ test -f "Documentation/$$v.txt" || \ diff --git a/builtin-apply.c b/builtin-apply.c index 61f047fd45..436d9e1880 100644 --- a/builtin-apply.c +++ b/builtin-apply.c @@ -2119,7 +2119,11 @@ static void numstat_patch_list(struct patch *patch) for ( ; patch; patch = patch->next) { const char *name; name = patch->new_name ? patch->new_name : patch->old_name; - printf("%d\t%d\t", patch->lines_added, patch->lines_deleted); + if (patch->is_binary) + printf("-\t-\t"); + else + printf("%d\t%d\t", + patch->lines_added, patch->lines_deleted); if (line_termination && quote_c_style(name, NULL, NULL, 0)) quote_c_style(name, NULL, stdout, 0); else diff --git a/builtin-branch.c b/builtin-branch.c index 368b68ec91..3d5cb0e4b2 100644 --- a/builtin-branch.c +++ b/builtin-branch.c @@ -11,7 +11,7 @@ #include "builtin.h" static const char builtin_branch_usage[] = -"git-branch (-d | -D) <branchname> | [-l] [-f] <branchname> [<start-point>] | [-r]"; +"git-branch (-d | -D) <branchname> | [-l] [-f] <branchname> [<start-point>] | [-r | -a] [-v] [--abbrev=<length>] "; static const char *head; @@ -38,12 +38,16 @@ static int in_merge_bases(const unsigned char *sha1, static void delete_branches(int argc, const char **argv, int force) { - struct commit *rev, *head_rev; + struct commit *rev, *head_rev = head_rev; unsigned char sha1[20]; char *name; int i; - head_rev = lookup_commit_reference(head_sha1); + if (!force) { + head_rev = lookup_commit_reference(head_sha1); + if (!head_rev) + die("Couldn't look up commit object for HEAD"); + } for (i = 0; i < argc; i++) { if (!strcmp(head, argv[i])) die("Cannot delete the branch you are currently on."); @@ -53,8 +57,8 @@ static void delete_branches(int argc, const char **argv, int force) die("Branch '%s' not found.", argv[i]); rev = lookup_commit_reference(sha1); - if (!rev || !head_rev) - die("Couldn't look up commit objects."); + if (!rev) + die("Couldn't look up commit object for '%s'", name); /* This checks whether the merge bases of branch and * HEAD contains branch -- which means that the HEAD @@ -79,46 +83,129 @@ static void delete_branches(int argc, const char **argv, int force) } } -static int ref_index, ref_alloc; -static char **ref_list; +#define REF_UNKNOWN_TYPE 0x00 +#define REF_LOCAL_BRANCH 0x01 +#define REF_REMOTE_BRANCH 0x02 +#define REF_TAG 0x04 + +struct ref_item { + char *name; + unsigned int kind; + unsigned char sha1[20]; +}; + +struct ref_list { + int index, alloc, maxwidth; + struct ref_item *list; + int kinds; +}; -static int append_ref(const char *refname, const unsigned char *sha1, int flags, - void *cb_data) +static int append_ref(const char *refname, const unsigned char *sha1, int flags, void *cb_data) { - if (ref_index >= ref_alloc) { - ref_alloc = alloc_nr(ref_alloc); - ref_list = xrealloc(ref_list, ref_alloc * sizeof(char *)); + struct ref_list *ref_list = (struct ref_list*)(cb_data); + struct ref_item *newitem; + int kind = REF_UNKNOWN_TYPE; + int len; + + /* Detect kind */ + if (!strncmp(refname, "refs/heads/", 11)) { + kind = REF_LOCAL_BRANCH; + refname += 11; + } else if 
(!strncmp(refname, "refs/remotes/", 13)) { + kind = REF_REMOTE_BRANCH; + refname += 13; + } else if (!strncmp(refname, "refs/tags/", 10)) { + kind = REF_TAG; + refname += 10; + } + + /* Don't add types the caller doesn't want */ + if ((kind & ref_list->kinds) == 0) + return 0; + + /* Resize buffer */ + if (ref_list->index >= ref_list->alloc) { + ref_list->alloc = alloc_nr(ref_list->alloc); + ref_list->list = xrealloc(ref_list->list, + ref_list->alloc * sizeof(struct ref_item)); } - ref_list[ref_index++] = xstrdup(refname); + /* Record the new item */ + newitem = &(ref_list->list[ref_list->index++]); + newitem->name = xstrdup(refname); + newitem->kind = kind; + hashcpy(newitem->sha1, sha1); + len = strlen(newitem->name); + if (len > ref_list->maxwidth) + ref_list->maxwidth = len; return 0; } +static void free_ref_list(struct ref_list *ref_list) +{ + int i; + + for (i = 0; i < ref_list->index; i++) + free(ref_list->list[i].name); + free(ref_list->list); +} + static int ref_cmp(const void *r1, const void *r2) { - return strcmp(*(char **)r1, *(char **)r2); + struct ref_item *c1 = (struct ref_item *)(r1); + struct ref_item *c2 = (struct ref_item *)(r2); + + if (c1->kind != c2->kind) + return c1->kind - c2->kind; + return strcmp(c1->name, c2->name); +} + +static void print_ref_info(const unsigned char *sha1, int abbrev) +{ + struct commit *commit; + char subject[256]; + + + commit = lookup_commit(sha1); + if (commit && !parse_commit(commit)) + pretty_print_commit(CMIT_FMT_ONELINE, commit, ~0, + subject, sizeof(subject), 0, + NULL, NULL, 0); + else + strcpy(subject, " **** invalid ref ****"); + + printf(" %s %s\n", find_unique_abbrev(sha1, abbrev), subject); } -static void print_ref_list(int remote_only) +static void print_ref_list(int kinds, int verbose, int abbrev) { int i; char c; + struct ref_list ref_list; - if (remote_only) - for_each_remote_ref(append_ref, NULL); - else - for_each_branch_ref(append_ref, NULL); + memset(&ref_list, 0, sizeof(ref_list)); + ref_list.kinds = kinds; + for_each_ref(append_ref, &ref_list); - qsort(ref_list, ref_index, sizeof(char *), ref_cmp); + qsort(ref_list.list, ref_list.index, sizeof(struct ref_item), ref_cmp); - for (i = 0; i < ref_index; i++) { + for (i = 0; i < ref_list.index; i++) { c = ' '; - if (!strcmp(ref_list[i], head)) + if (ref_list.list[i].kind == REF_LOCAL_BRANCH && + !strcmp(ref_list.list[i].name, head)) c = '*'; - printf("%c %s\n", c, ref_list[i]); + if (verbose) { + printf("%c %-*s", c, ref_list.maxwidth, + ref_list.list[i].name); + print_ref_info(ref_list.list[i].sha1, abbrev); + } + else + printf("%c %s\n", c, ref_list.list[i].name); } + + free_ref_list(&ref_list); } static void create_branch(const char *name, const char *start, @@ -160,8 +247,10 @@ static void create_branch(const char *name, const char *start, int cmd_branch(int argc, const char **argv, const char *prefix) { - int delete = 0, force_delete = 0, force_create = 0, remote_only = 0; + int delete = 0, force_delete = 0, force_create = 0; + int verbose = 0, abbrev = DEFAULT_ABBREV; int reflog = 0; + int kinds = REF_LOCAL_BRANCH; int i; git_config(git_default_config); @@ -189,13 +278,25 @@ int cmd_branch(int argc, const char **argv, const char *prefix) continue; } if (!strcmp(arg, "-r")) { - remote_only = 1; + kinds = REF_REMOTE_BRANCH; + continue; + } + if (!strcmp(arg, "-a")) { + kinds = REF_REMOTE_BRANCH | REF_LOCAL_BRANCH; continue; } if (!strcmp(arg, "-l")) { reflog = 1; continue; } + if (!strncmp(arg, "--abbrev=", 9)) { + abbrev = atoi(arg+9); + continue; + } + if 
(!strcmp(arg, "-v")) { + verbose = 1; + continue; + } usage(builtin_branch_usage); } @@ -209,7 +310,7 @@ int cmd_branch(int argc, const char **argv, const char *prefix) if (delete) delete_branches(argc - i, argv + i, force_delete); else if (i == argc) - print_ref_list(remote_only); + print_ref_list(kinds, verbose, abbrev); else if (i == argc - 1) create_branch(argv[i], head, force_create, reflog); else if (i == argc - 2) diff --git a/builtin-diff.c b/builtin-diff.c index a6590205e8..1c535b1dd6 100644 --- a/builtin-diff.c +++ b/builtin-diff.c @@ -137,7 +137,7 @@ static int builtin_diff_index(struct rev_info *revs, int cached = 0; while (1 < argc) { const char *arg = argv[1]; - if (!strcmp(arg, "--cached")) + if (!strcmp(arg, "--index") || !strcmp(arg, "--cached")) cached = 1; else usage(builtin_diff_usage); diff --git a/builtin-grep.c b/builtin-grep.c index ad7dc00cde..9873e3d1db 100644 --- a/builtin-grep.c +++ b/builtin-grep.c @@ -268,7 +268,7 @@ static int external_grep(struct grep_opt *opt, const char **paths, int cached) for (i = 0; i < active_nr; i++) { struct cache_entry *ce = active_cache[i]; char *name; - if (ce_stage(ce) || !S_ISREG(ntohl(ce->ce_mode))) + if (!S_ISREG(ntohl(ce->ce_mode))) continue; if (!pathspec_matches(paths, ce->name)) continue; @@ -280,12 +280,19 @@ static int external_grep(struct grep_opt *opt, const char **paths, int cached) memcpy(name + 2, ce->name, len + 1); } argv[argc++] = name; - if (argc < MAXARGS) + if (argc < MAXARGS && !ce_stage(ce)) continue; status = exec_grep(argc, argv); if (0 < status) hit = 1; argc = nr; + if (ce_stage(ce)) { + do { + i++; + } while (i < active_nr && + !strcmp(ce->name, active_cache[i]->name)); + i--; /* compensate for loop control */ + } } if (argc > nr) { status = exec_grep(argc, argv); @@ -316,14 +323,24 @@ static int grep_cache(struct grep_opt *opt, const char **paths, int cached) for (nr = 0; nr < active_nr; nr++) { struct cache_entry *ce = active_cache[nr]; - if (ce_stage(ce) || !S_ISREG(ntohl(ce->ce_mode))) + if (!S_ISREG(ntohl(ce->ce_mode))) continue; if (!pathspec_matches(paths, ce->name)) continue; - if (cached) + if (cached) { + if (ce_stage(ce)) + continue; hit |= grep_sha1(opt, ce->sha1, ce->name, 0); + } else hit |= grep_file(opt, ce->name); + if (ce_stage(ce)) { + do { + nr++; + } while (nr < active_nr && + !strcmp(ce->name, active_cache[nr]->name)); + nr--; /* compensate for loop control */ + } } free_grep_patterns(opt); return hit; diff --git a/builtin-log.c b/builtin-log.c index fedb0137bc..7acf5d3b0c 100644 --- a/builtin-log.c +++ b/builtin-log.c @@ -13,6 +13,8 @@ #include <time.h> #include <sys/time.h> +static int default_show_root = 1; + /* this is in builtin-diff.c */ void add_head(struct rev_info *revs); @@ -22,6 +24,7 @@ static void cmd_log_init(int argc, const char **argv, const char *prefix, rev->abbrev = DEFAULT_ABBREV; rev->commit_format = CMIT_FMT_DEFAULT; rev->verbose_header = 1; + rev->show_root_diff = default_show_root; argc = setup_revisions(argc, argv, rev, "HEAD"); if (rev->diffopt.pickaxe || rev->diffopt.filter) rev->always_show_header = 0; @@ -44,11 +47,20 @@ static int cmd_log_walk(struct rev_info *rev) return 0; } +static int git_log_config(const char *var, const char *value) +{ + if (!strcmp(var, "log.showroot")) { + default_show_root = git_config_bool(var, value); + return 0; + } + return git_diff_ui_config(var, value); +} + int cmd_whatchanged(int argc, const char **argv, const char *prefix) { struct rev_info rev; - git_config(git_diff_ui_config); + git_config(git_log_config); 
init_revisions(&rev, prefix); rev.diff = 1; rev.diffopt.recursive = 1; @@ -63,7 +75,7 @@ int cmd_show(int argc, const char **argv, const char *prefix) { struct rev_info rev; - git_config(git_diff_ui_config); + git_config(git_log_config); init_revisions(&rev, prefix); rev.diff = 1; rev.diffopt.recursive = 1; @@ -80,7 +92,7 @@ int cmd_log(int argc, const char **argv, const char *prefix) { struct rev_info rev; - git_config(git_diff_ui_config); + git_config(git_log_config); init_revisions(&rev, prefix); rev.always_show_header = 1; cmd_log_init(argc, argv, prefix, &rev); @@ -109,7 +121,7 @@ static int git_format_config(const char *var, const char *value) if (!strcmp(var, "diff.color")) { return 0; } - return git_diff_ui_config(var, value); + return git_log_config(var, value); } diff --git a/builtin-ls-files.c b/builtin-ls-files.c index ad8c41e731..bc79ce40fc 100644 --- a/builtin-ls-files.c +++ b/builtin-ls-files.c @@ -487,10 +487,14 @@ int cmd_ls_files(int argc, const char **argv, const char *prefix) for (num = 0; pathspec[num]; num++) { if (ps_matched[num]) continue; - error("pathspec '%s' did not match any.", + error("pathspec '%s' did not match any file(s) known to git.", pathspec[num] + prefix_offset); errors++; } + + if (errors) + fprintf(stderr, "Did you forget to 'git add'?\n"); + return errors ? 1 : 0; } diff --git a/builtin-pack-objects.c b/builtin-pack-objects.c index 69e5dd39ca..a2dc7d1d9d 100644 --- a/builtin-pack-objects.c +++ b/builtin-pack-objects.c @@ -514,6 +514,8 @@ static void write_pack_file(void) if (do_progress) fputc('\n', stderr); done: + if (written != nr_result) + die("wrote %d objects while expecting %d", written, nr_result); sha1close(f, pack_file_sha1, 1); } @@ -1176,7 +1178,9 @@ static int try_delta(struct unpacked *trg, struct unpacked *src, * on an earlier try, but only when reusing delta data. 
*/ if (!no_reuse_delta && trg_entry->in_pack && - trg_entry->in_pack == src_entry->in_pack) + trg_entry->in_pack == src_entry->in_pack && + trg_entry->in_pack_type != OBJ_REF_DELTA && + trg_entry->in_pack_type != OBJ_OFS_DELTA) return 0; /* @@ -1660,7 +1664,7 @@ int cmd_pack_objects(int argc, const char **argv, const char *prefix) } } if (progress) - fprintf(stderr, "Total %d, written %d (delta %d), reused %d (delta %d)\n", - nr_result, written, written_delta, reused, reused_delta); + fprintf(stderr, "Total %d (delta %d), reused %d (delta %d)\n", + written, written_delta, reused, reused_delta); return 0; } diff --git a/builtin-pack-refs.c b/builtin-pack-refs.c index 042d2718f9..8dc5b9efff 100644 --- a/builtin-pack-refs.c +++ b/builtin-pack-refs.c @@ -1,5 +1,7 @@ #include "cache.h" #include "refs.h" +#include "object.h" +#include "tag.h" static const char builtin_pack_refs_usage[] = "git-pack-refs [--all] [--prune]"; @@ -29,12 +31,26 @@ static int handle_one_ref(const char *path, const unsigned char *sha1, int flags, void *cb_data) { struct pack_refs_cb_data *cb = cb_data; + int is_tag_ref; - if (!cb->all && strncmp(path, "refs/tags/", 10)) - return 0; /* Do not pack the symbolic refs */ - if (!(flags & REF_ISSYMREF)) - fprintf(cb->refs_file, "%s %s\n", sha1_to_hex(sha1), path); + if ((flags & REF_ISSYMREF)) + return 0; + is_tag_ref = !strncmp(path, "refs/tags/", 10); + if (!cb->all && !is_tag_ref) + return 0; + + fprintf(cb->refs_file, "%s %s\n", sha1_to_hex(sha1), path); + if (is_tag_ref) { + struct object *o = parse_object(sha1); + if (o->type == OBJ_TAG) { + o = deref_tag(o, path, 0); + if (o) + fprintf(cb->refs_file, "^%s\n", + sha1_to_hex(o->sha1)); + } + } + if (cb->prune && !do_not_prune(flags)) { int namelen = strlen(path) + 1; struct ref_to_prune *n = xcalloc(1, sizeof(*n) + namelen); @@ -95,6 +111,10 @@ int cmd_pack_refs(int argc, const char **argv, const char *prefix) if (!cbdata.refs_file) die("unable to create ref-pack file structure (%s)", strerror(errno)); + + /* perhaps other traits later as well */ + fprintf(cbdata.refs_file, "# pack-refs with: peeled \n"); + for_each_ref(handle_one_ref, &cbdata); fflush(cbdata.refs_file); fsync(fd); diff --git a/builtin-prune.c b/builtin-prune.c index d853902c51..8591d28b8e 100644 --- a/builtin-prune.c +++ b/builtin-prune.c @@ -16,8 +16,15 @@ static struct rev_info revs; static int prune_object(char *path, const char *filename, const unsigned char *sha1) { + char buf[20]; + const char *type; + if (show_only) { - printf("would prune %s/%s\n", path, filename); + if (sha1_object_info(sha1, buf, NULL)) + type = "unknown"; + else + type = buf; + printf("%s %s\n", sha1_to_hex(sha1), type); return 0; } unlink(mkpath("%s/%s", path, filename)); diff --git a/builtin-shortlog.c b/builtin-shortlog.c new file mode 100644 index 0000000000..7a2ddfe797 --- /dev/null +++ b/builtin-shortlog.c @@ -0,0 +1,335 @@ +#include "builtin.h" +#include "cache.h" +#include "commit.h" +#include "diff.h" +#include "path-list.h" +#include "revision.h" +#include <string.h> + +static const char shortlog_usage[] = +"git-shortlog [-n] [-s] [<commit-id>... 
]"; + +static char *common_repo_prefix; + +static int compare_by_number(const void *a1, const void *a2) +{ + const struct path_list_item *i1 = a1, *i2 = a2; + const struct path_list *l1 = i1->util, *l2 = i2->util; + + if (l1->nr < l2->nr) + return 1; + else if (l1->nr == l2->nr) + return 0; + else + return -1; +} + +static struct path_list mailmap = {NULL, 0, 0, 0}; + +static int read_mailmap(const char *filename) +{ + char buffer[1024]; + FILE *f = fopen(filename, "r"); + + if (f == NULL) + return 1; + while (fgets(buffer, sizeof(buffer), f) != NULL) { + char *end_of_name, *left_bracket, *right_bracket; + char *name, *email; + int i; + if (buffer[0] == '#') { + static const char abbrev[] = "# repo-abbrev:"; + int abblen = sizeof(abbrev) - 1; + int len = strlen(buffer); + + if (len && buffer[len - 1] == '\n') + buffer[--len] = 0; + if (!strncmp(buffer, abbrev, abblen)) { + char *cp; + + if (common_repo_prefix) + free(common_repo_prefix); + common_repo_prefix = xmalloc(len); + + for (cp = buffer + abblen; isspace(*cp); cp++) + ; /* nothing */ + strcpy(common_repo_prefix, cp); + } + continue; + } + if ((left_bracket = strchr(buffer, '<')) == NULL) + continue; + if ((right_bracket = strchr(left_bracket + 1, '>')) == NULL) + continue; + if (right_bracket == left_bracket + 1) + continue; + for (end_of_name = left_bracket; end_of_name != buffer + && isspace(end_of_name[-1]); end_of_name--) + /* keep on looking */ + if (end_of_name == buffer) + continue; + name = xmalloc(end_of_name - buffer + 1); + strlcpy(name, buffer, end_of_name - buffer + 1); + email = xmalloc(right_bracket - left_bracket); + for (i = 0; i < right_bracket - left_bracket - 1; i++) + email[i] = tolower(left_bracket[i + 1]); + email[right_bracket - left_bracket - 1] = '\0'; + path_list_insert(email, &mailmap)->util = name; + } + fclose(f); + return 0; +} + +static int map_email(char *email, char *name, int maxlen) +{ + char *p; + struct path_list_item *item; + + /* autocomplete common developers */ + p = strchr(email, '>'); + if (!p) + return 0; + + *p = '\0'; + /* downcase the email address */ + for (p = email; *p; p++) + *p = tolower(*p); + item = path_list_lookup(email, &mailmap); + if (item != NULL) { + const char *realname = (const char *)item->util; + strncpy(name, realname, maxlen); + return 1; + } + return 0; +} + +static void insert_author_oneline(struct path_list *list, + const char *author, int authorlen, + const char *oneline, int onelinelen) +{ + const char *dot3 = common_repo_prefix; + char *buffer, *p; + struct path_list_item *item; + struct path_list *onelines; + + while (authorlen > 0 && isspace(author[authorlen - 1])) + authorlen--; + + buffer = xmalloc(authorlen + 1); + memcpy(buffer, author, authorlen); + buffer[authorlen] = '\0'; + + item = path_list_insert(buffer, list); + if (item->util == NULL) + item->util = xcalloc(1, sizeof(struct path_list)); + else + free(buffer); + + if (!strncmp(oneline, "[PATCH", 6)) { + char *eob = strchr(oneline, ']'); + + if (eob) { + while (isspace(eob[1]) && eob[1] != '\n') + eob++; + if (eob - oneline < onelinelen) { + onelinelen -= eob - oneline; + oneline = eob; + } + } + } + + while (onelinelen > 0 && isspace(oneline[0])) { + oneline++; + onelinelen--; + } + + while (onelinelen > 0 && isspace(oneline[onelinelen - 1])) + onelinelen--; + + buffer = xmalloc(onelinelen + 1); + memcpy(buffer, oneline, onelinelen); + buffer[onelinelen] = '\0'; + + if (dot3) { + int dot3len = strlen(dot3); + if (dot3len > 5) { + while ((p = strstr(buffer, dot3)) != NULL) { + int taillen = 
strlen(p) - dot3len; + memcpy(p, "/.../", 5); + memmove(p + 5, p + dot3len, taillen + 1); + } + } + } + + onelines = item->util; + if (onelines->nr >= onelines->alloc) { + onelines->alloc = alloc_nr(onelines->nr); + onelines->items = xrealloc(onelines->items, + onelines->alloc + * sizeof(struct path_list_item)); + } + + onelines->items[onelines->nr].util = NULL; + onelines->items[onelines->nr++].path = buffer; +} + +static void read_from_stdin(struct path_list *list) +{ + char buffer[1024]; + + while (fgets(buffer, sizeof(buffer), stdin) != NULL) { + char *bob; + if ((buffer[0] == 'A' || buffer[0] == 'a') && + !strncmp(buffer + 1, "uthor: ", 7) && + (bob = strchr(buffer + 7, '<')) != NULL) { + char buffer2[1024], offset = 0; + + if (map_email(bob + 1, buffer, sizeof(buffer))) + bob = buffer + strlen(buffer); + else { + offset = 8; + if (isspace(bob[-1])) + bob--; + } + + while (fgets(buffer2, sizeof(buffer2), stdin) && + buffer2[0] != '\n') + ; /* chomp input */ + if (fgets(buffer2, sizeof(buffer2), stdin)) + insert_author_oneline(list, + buffer + offset, + bob - buffer - offset, + buffer2, strlen(buffer2)); + } + } +} + +static void get_from_rev(struct rev_info *rev, struct path_list *list) +{ + char scratch[1024]; + struct commit *commit; + + prepare_revision_walk(rev); + while ((commit = get_revision(rev)) != NULL) { + char *author = NULL, *oneline, *buffer; + int authorlen = authorlen, onelinelen; + + /* get author and oneline */ + for (buffer = commit->buffer; buffer && *buffer != '\0' && + *buffer != '\n'; ) { + char *eol = strchr(buffer, '\n'); + + if (eol == NULL) + eol = buffer + strlen(buffer); + else + eol++; + + if (!strncmp(buffer, "author ", 7)) { + char *bracket = strchr(buffer, '<'); + + if (bracket == NULL || bracket > eol) + die("Invalid commit buffer: %s", + sha1_to_hex(commit->object.sha1)); + + if (map_email(bracket + 1, scratch, + sizeof(scratch))) { + author = scratch; + authorlen = strlen(scratch); + } else { + if (bracket[-1] == ' ') + bracket--; + + author = buffer + 7; + authorlen = bracket - buffer - 7; + } + } + buffer = eol; + } + + if (author == NULL) + die ("Missing author: %s", + sha1_to_hex(commit->object.sha1)); + + if (buffer == NULL || *buffer == '\0') { + oneline = "<none>"; + onelinelen = sizeof(oneline) + 1; + } else { + char *eol; + + oneline = buffer + 1; + eol = strchr(oneline, '\n'); + if (eol == NULL) + onelinelen = strlen(oneline); + else + onelinelen = eol - oneline; + } + + insert_author_oneline(list, + author, authorlen, oneline, onelinelen); + } + +} + +int cmd_shortlog(int argc, const char **argv, const char *prefix) +{ + struct rev_info rev; + struct path_list list = { NULL, 0, 0, 1 }; + int i, j, sort_by_number = 0, summary = 0; + + /* since -n is a shadowed rev argument, parse our args first */ + while (argc > 1) { + if (!strcmp(argv[1], "-n") || !strcmp(argv[1], "--numbered")) + sort_by_number = 1; + else if (!strcmp(argv[1], "-s") || + !strcmp(argv[1], "--summary")) + summary = 1; + else if (!strcmp(argv[1], "-h") || !strcmp(argv[1], "--help")) + usage(shortlog_usage); + else + break; + argv++; + argc--; + } + init_revisions(&rev, prefix); + argc = setup_revisions(argc, argv, &rev, NULL); + if (argc > 1) + die ("unrecognized argument: %s", argv[1]); + + if (!access(".mailmap", R_OK)) + read_mailmap(".mailmap"); + + if (rev.pending.nr == 0) + read_from_stdin(&list); + else + get_from_rev(&rev, &list); + + if (sort_by_number) + qsort(list.items, list.nr, sizeof(struct path_list_item), + compare_by_number); + + for (i = 0; i < list.nr; 
i++) { + struct path_list *onelines = list.items[i].util; + + if (summary) { + printf("%s: %d\n", list.items[i].path, onelines->nr); + } else { + printf("%s (%d):\n", list.items[i].path, onelines->nr); + for (j = onelines->nr - 1; j >= 0; j--) + printf(" %s\n", onelines->items[j].path); + printf("\n"); + } + + onelines->strdup_paths = 1; + path_list_clear(onelines, 1); + free(onelines); + list.items[i].util = NULL; + } + + list.strdup_paths = 1; + path_list_clear(&list, 1); + mailmap.strdup_paths = 1; + path_list_clear(&mailmap, 1); + + return 0; +} + diff --git a/builtin-show-ref.c b/builtin-show-ref.c index 06ec400d7f..073979855b 100644 --- a/builtin-show-ref.c +++ b/builtin-show-ref.c @@ -13,6 +13,7 @@ static int show_ref(const char *refname, const unsigned char *sha1, int flag, vo { struct object *obj; const char *hex; + unsigned char peeled[20]; if (tags_only || heads_only) { int match; @@ -44,12 +45,15 @@ static int show_ref(const char *refname, const unsigned char *sha1, int flag, vo match: found_match++; - obj = parse_object(sha1); - if (!obj) { - if (quiet) - return 0; - die("git-show-ref: bad ref %s (%s)", refname, sha1_to_hex(sha1)); - } + + /* This changes the semantics slightly that even under quiet we + * detect and return error if the repository is corrupt and + * ref points at a nonexistent object. + */ + if (!has_sha1_file(sha1)) + die("git-show-ref: bad ref %s (%s)", refname, + sha1_to_hex(sha1)); + if (quiet) return 0; @@ -58,10 +62,26 @@ match: printf("%s\n", hex); else printf("%s %s\n", hex, refname); - if (deref_tags && obj->type == OBJ_TAG) { - obj = deref_tag(obj, refname, 0); - hex = find_unique_abbrev(obj->sha1, abbrev); - printf("%s %s^{}\n", hex, refname); + + if (!deref_tags) + return 0; + + if ((flag & REF_ISPACKED) && !peel_ref(refname, peeled)) { + if (!is_null_sha1(peeled)) { + hex = find_unique_abbrev(peeled, abbrev); + printf("%s %s^{}\n", hex, refname); + } + } + else { + obj = parse_object(sha1); + if (!obj) + die("git-show-ref: bad ref %s (%s)", refname, + sha1_to_hex(sha1)); + if (obj->type == OBJ_TAG) { + obj = deref_tag(obj, refname, 0); + hex = find_unique_abbrev(obj->sha1, abbrev); + printf("%s %s^{}\n", hex, refname); + } } return 0; } @@ -55,6 +55,7 @@ extern int cmd_rev_list(int argc, const char **argv, const char *prefix); extern int cmd_rev_parse(int argc, const char **argv, const char *prefix); extern int cmd_rm(int argc, const char **argv, const char *prefix); extern int cmd_runstatus(int argc, const char **argv, const char *prefix); +extern int cmd_shortlog(int argc, const char **argv, const char *prefix); extern int cmd_show(int argc, const char **argv, const char *prefix); extern int cmd_show_branch(int argc, const char **argv, const char *prefix); extern int cmd_stripspace(int argc, const char **argv, const char *prefix); diff --git a/compat/subprocess.py b/compat/subprocess.py deleted file mode 100644 index 6474eab119..0000000000 --- a/compat/subprocess.py +++ /dev/null @@ -1,1149 +0,0 @@ -# subprocess - Subprocesses with accessible I/O streams -# -# For more information about this module, see PEP 324. -# -# This module should remain compatible with Python 2.2, see PEP 291. -# -# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se> -# -# Licensed to PSF under a Contributor Agreement. -# See http://www.python.org/2.4/license for licensing details. 
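The new builtin-shortlog.c and the log.showroot handling in builtin-log.c above can be exercised from the command line roughly as sketched below; the revision range and the option value are placeholders for illustration, not part of the patch:

------------------------------------------------
$ git shortlog -n -s v1.4.0..master        # commit counts per author, most active first
$ git log v1.4.0..master | git shortlog    # same data, parsed from stdin
$ git repo-config log.showroot false       # make git-log hide the root commit again
------------------------------------------------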
- -r"""subprocess - Subprocesses with accessible I/O streams - -This module allows you to spawn processes, connect to their -input/output/error pipes, and obtain their return codes. This module -intends to replace several other, older modules and functions, like: - -os.system -os.spawn* -os.popen* -popen2.* -commands.* - -Information about how the subprocess module can be used to replace these -modules and functions can be found below. - - - -Using the subprocess module -=========================== -This module defines one class called Popen: - -class Popen(args, bufsize=0, executable=None, - stdin=None, stdout=None, stderr=None, - preexec_fn=None, close_fds=False, shell=False, - cwd=None, env=None, universal_newlines=False, - startupinfo=None, creationflags=0): - - -Arguments are: - -args should be a string, or a sequence of program arguments. The -program to execute is normally the first item in the args sequence or -string, but can be explicitly set by using the executable argument. - -On UNIX, with shell=False (default): In this case, the Popen class -uses os.execvp() to execute the child program. args should normally -be a sequence. A string will be treated as a sequence with the string -as the only item (the program to execute). - -On UNIX, with shell=True: If args is a string, it specifies the -command string to execute through the shell. If args is a sequence, -the first item specifies the command string, and any additional items -will be treated as additional shell arguments. - -On Windows: the Popen class uses CreateProcess() to execute the child -program, which operates on strings. If args is a sequence, it will be -converted to a string using the list2cmdline method. Please note that -not all MS Windows applications interpret the command line the same -way: The list2cmdline is designed for applications using the same -rules as the MS C runtime. - -bufsize, if given, has the same meaning as the corresponding argument -to the built-in open() function: 0 means unbuffered, 1 means line -buffered, any other positive value means use a buffer of -(approximately) that size. A negative bufsize means to use the system -default, which usually means fully buffered. The default value for -bufsize is 0 (unbuffered). - -stdin, stdout and stderr specify the executed programs' standard -input, standard output and standard error file handles, respectively. -Valid values are PIPE, an existing file descriptor (a positive -integer), an existing file object, and None. PIPE indicates that a -new pipe to the child should be created. With None, no redirection -will occur; the child's file handles will be inherited from the -parent. Additionally, stderr can be STDOUT, which indicates that the -stderr data from the applications should be captured into the same -file handle as for stdout. - -If preexec_fn is set to a callable object, this object will be called -in the child process just before the child is executed. - -If close_fds is true, all file descriptors except 0, 1 and 2 will be -closed before the child process is executed. - -if shell is true, the specified command will be executed through the -shell. - -If cwd is not None, the current directory will be changed to cwd -before the child is executed. - -If env is not None, it defines the environment variables for the new -process. 
- -If universal_newlines is true, the file objects stdout and stderr are -opened as a text files, but lines may be terminated by any of '\n', -the Unix end-of-line convention, '\r', the Macintosh convention or -'\r\n', the Windows convention. All of these external representations -are seen as '\n' by the Python program. Note: This feature is only -available if Python is built with universal newline support (the -default). Also, the newlines attribute of the file objects stdout, -stdin and stderr are not updated by the communicate() method. - -The startupinfo and creationflags, if given, will be passed to the -underlying CreateProcess() function. They can specify things such as -appearance of the main window and priority for the new process. -(Windows only) - - -This module also defines two shortcut functions: - -call(*args, **kwargs): - Run command with arguments. Wait for command to complete, then - return the returncode attribute. - - The arguments are the same as for the Popen constructor. Example: - - retcode = call(["ls", "-l"]) - - -Exceptions ----------- -Exceptions raised in the child process, before the new program has -started to execute, will be re-raised in the parent. Additionally, -the exception object will have one extra attribute called -'child_traceback', which is a string containing traceback information -from the childs point of view. - -The most common exception raised is OSError. This occurs, for -example, when trying to execute a non-existent file. Applications -should prepare for OSErrors. - -A ValueError will be raised if Popen is called with invalid arguments. - - -Security --------- -Unlike some other popen functions, this implementation will never call -/bin/sh implicitly. This means that all characters, including shell -metacharacters, can safely be passed to child processes. - - -Popen objects -============= -Instances of the Popen class have the following methods: - -poll() - Check if child process has terminated. Returns returncode - attribute. - -wait() - Wait for child process to terminate. Returns returncode attribute. - -communicate(input=None) - Interact with process: Send data to stdin. Read data from stdout - and stderr, until end-of-file is reached. Wait for process to - terminate. The optional stdin argument should be a string to be - sent to the child process, or None, if no data should be sent to - the child. - - communicate() returns a tuple (stdout, stderr). - - Note: The data read is buffered in memory, so do not use this - method if the data size is large or unlimited. - -The following attributes are also available: - -stdin - If the stdin argument is PIPE, this attribute is a file object - that provides input to the child process. Otherwise, it is None. - -stdout - If the stdout argument is PIPE, this attribute is a file object - that provides output from the child process. Otherwise, it is - None. - -stderr - If the stderr argument is PIPE, this attribute is file object that - provides error output from the child process. Otherwise, it is - None. - -pid - The process ID of the child process. - -returncode - The child return code. A None value indicates that the process - hasn't terminated yet. A negative value -N indicates that the - child was terminated by signal N (UNIX only). - - -Replacing older functions with the subprocess module -==================================================== -In this section, "a ==> b" means that b can be used as a replacement -for a. 
- -Note: All functions in this section fail (more or less) silently if -the executed program cannot be found; this module raises an OSError -exception. - -In the following examples, we assume that the subprocess module is -imported with "from subprocess import *". - - -Replacing /bin/sh shell backquote ---------------------------------- -output=`mycmd myarg` -==> -output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0] - - -Replacing shell pipe line -------------------------- -output=`dmesg | grep hda` -==> -p1 = Popen(["dmesg"], stdout=PIPE) -p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) -output = p2.communicate()[0] - - -Replacing os.system() ---------------------- -sts = os.system("mycmd" + " myarg") -==> -p = Popen("mycmd" + " myarg", shell=True) -sts = os.waitpid(p.pid, 0) - -Note: - -* Calling the program through the shell is usually not required. - -* It's easier to look at the returncode attribute than the - exitstatus. - -A more real-world example would look like this: - -try: - retcode = call("mycmd" + " myarg", shell=True) - if retcode < 0: - print >>sys.stderr, "Child was terminated by signal", -retcode - else: - print >>sys.stderr, "Child returned", retcode -except OSError, e: - print >>sys.stderr, "Execution failed:", e - - -Replacing os.spawn* -------------------- -P_NOWAIT example: - -pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg") -==> -pid = Popen(["/bin/mycmd", "myarg"]).pid - - -P_WAIT example: - -retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg") -==> -retcode = call(["/bin/mycmd", "myarg"]) - - -Vector example: - -os.spawnvp(os.P_NOWAIT, path, args) -==> -Popen([path] + args[1:]) - - -Environment example: - -os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env) -==> -Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"}) - - -Replacing os.popen* -------------------- -pipe = os.popen(cmd, mode='r', bufsize) -==> -pipe = Popen(cmd, shell=True, bufsize=bufsize, stdout=PIPE).stdout - -pipe = os.popen(cmd, mode='w', bufsize) -==> -pipe = Popen(cmd, shell=True, bufsize=bufsize, stdin=PIPE).stdin - - -(child_stdin, child_stdout) = os.popen2(cmd, mode, bufsize) -==> -p = Popen(cmd, shell=True, bufsize=bufsize, - stdin=PIPE, stdout=PIPE, close_fds=True) -(child_stdin, child_stdout) = (p.stdin, p.stdout) - - -(child_stdin, - child_stdout, - child_stderr) = os.popen3(cmd, mode, bufsize) -==> -p = Popen(cmd, shell=True, bufsize=bufsize, - stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True) -(child_stdin, - child_stdout, - child_stderr) = (p.stdin, p.stdout, p.stderr) - - -(child_stdin, child_stdout_and_stderr) = os.popen4(cmd, mode, bufsize) -==> -p = Popen(cmd, shell=True, bufsize=bufsize, - stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True) -(child_stdin, child_stdout_and_stderr) = (p.stdin, p.stdout) - - -Replacing popen2.* ------------------- -Note: If the cmd argument to popen2 functions is a string, the command -is executed through /bin/sh. If it is a list, the command is directly -executed. 
- -(child_stdout, child_stdin) = popen2.popen2("somestring", bufsize, mode) -==> -p = Popen(["somestring"], shell=True, bufsize=bufsize - stdin=PIPE, stdout=PIPE, close_fds=True) -(child_stdout, child_stdin) = (p.stdout, p.stdin) - - -(child_stdout, child_stdin) = popen2.popen2(["mycmd", "myarg"], bufsize, mode) -==> -p = Popen(["mycmd", "myarg"], bufsize=bufsize, - stdin=PIPE, stdout=PIPE, close_fds=True) -(child_stdout, child_stdin) = (p.stdout, p.stdin) - -The popen2.Popen3 and popen3.Popen4 basically works as subprocess.Popen, -except that: - -* subprocess.Popen raises an exception if the execution fails -* the capturestderr argument is replaced with the stderr argument. -* stdin=PIPE and stdout=PIPE must be specified. -* popen2 closes all filedescriptors by default, but you have to specify - close_fds=True with subprocess.Popen. - - -""" - -import sys -mswindows = (sys.platform == "win32") - -import os -import types -import traceback - -if mswindows: - import threading - import msvcrt - if 0: # <-- change this to use pywin32 instead of the _subprocess driver - import pywintypes - from win32api import GetStdHandle, STD_INPUT_HANDLE, \ - STD_OUTPUT_HANDLE, STD_ERROR_HANDLE - from win32api import GetCurrentProcess, DuplicateHandle, \ - GetModuleFileName, GetVersion - from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE - from win32pipe import CreatePipe - from win32process import CreateProcess, STARTUPINFO, \ - GetExitCodeProcess, STARTF_USESTDHANDLES, \ - STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE - from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0 - else: - from _subprocess import * - class STARTUPINFO: - dwFlags = 0 - hStdInput = None - hStdOutput = None - hStdError = None - class pywintypes: - error = IOError -else: - import select - import errno - import fcntl - import pickle - -__all__ = ["Popen", "PIPE", "STDOUT", "call"] - -try: - MAXFD = os.sysconf("SC_OPEN_MAX") -except: - MAXFD = 256 - -# True/False does not exist on 2.2.0 -try: - False -except NameError: - False = 0 - True = 1 - -_active = [] - -def _cleanup(): - for inst in _active[:]: - inst.poll() - -PIPE = -1 -STDOUT = -2 - - -def call(*args, **kwargs): - """Run command with arguments. Wait for command to complete, then - return the returncode attribute. - - The arguments are the same as for the Popen constructor. Example: - - retcode = call(["ls", "-l"]) - """ - return Popen(*args, **kwargs).wait() - - -def list2cmdline(seq): - """ - Translate a sequence of arguments into a command line - string, using the same rules as the MS C runtime: - - 1) Arguments are delimited by white space, which is either a - space or a tab. - - 2) A string surrounded by double quotation marks is - interpreted as a single argument, regardless of white space - contained within. A quoted string can be embedded in an - argument. - - 3) A double quotation mark preceded by a backslash is - interpreted as a literal double quotation mark. - - 4) Backslashes are interpreted literally, unless they - immediately precede a double quotation mark. - - 5) If backslashes immediately precede a double quotation mark, - every pair of backslashes is interpreted as a literal - backslash. If the number of backslashes is odd, the last - backslash escapes the next double quotation mark as - described in rule 3. 
- """ - - # See - # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp - result = [] - needquote = False - for arg in seq: - bs_buf = [] - - # Add a space to separate this argument from the others - if result: - result.append(' ') - - needquote = (" " in arg) or ("\t" in arg) - if needquote: - result.append('"') - - for c in arg: - if c == '\\': - # Don't know if we need to double yet. - bs_buf.append(c) - elif c == '"': - # Double backspaces. - result.append('\\' * len(bs_buf)*2) - bs_buf = [] - result.append('\\"') - else: - # Normal char - if bs_buf: - result.extend(bs_buf) - bs_buf = [] - result.append(c) - - # Add remaining backspaces, if any. - if bs_buf: - result.extend(bs_buf) - - if needquote: - result.extend(bs_buf) - result.append('"') - - return ''.join(result) - - -class Popen(object): - def __init__(self, args, bufsize=0, executable=None, - stdin=None, stdout=None, stderr=None, - preexec_fn=None, close_fds=False, shell=False, - cwd=None, env=None, universal_newlines=False, - startupinfo=None, creationflags=0): - """Create new Popen instance.""" - _cleanup() - - if not isinstance(bufsize, (int, long)): - raise TypeError("bufsize must be an integer") - - if mswindows: - if preexec_fn is not None: - raise ValueError("preexec_fn is not supported on Windows " - "platforms") - if close_fds: - raise ValueError("close_fds is not supported on Windows " - "platforms") - else: - # POSIX - if startupinfo is not None: - raise ValueError("startupinfo is only supported on Windows " - "platforms") - if creationflags != 0: - raise ValueError("creationflags is only supported on Windows " - "platforms") - - self.stdin = None - self.stdout = None - self.stderr = None - self.pid = None - self.returncode = None - self.universal_newlines = universal_newlines - - # Input and output objects. The general principle is like - # this: - # - # Parent Child - # ------ ----- - # p2cwrite ---stdin---> p2cread - # c2pread <--stdout--- c2pwrite - # errread <--stderr--- errwrite - # - # On POSIX, the child objects are file descriptors. On - # Windows, these are Windows file handles. The parent objects - # are file descriptors on both platforms. The parent objects - # are None when not using PIPEs. The child objects are None - # when not redirecting. 
- - (p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) = self._get_handles(stdin, stdout, stderr) - - self._execute_child(args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) - - if p2cwrite: - self.stdin = os.fdopen(p2cwrite, 'wb', bufsize) - if c2pread: - if universal_newlines: - self.stdout = os.fdopen(c2pread, 'rU', bufsize) - else: - self.stdout = os.fdopen(c2pread, 'rb', bufsize) - if errread: - if universal_newlines: - self.stderr = os.fdopen(errread, 'rU', bufsize) - else: - self.stderr = os.fdopen(errread, 'rb', bufsize) - - _active.append(self) - - - def _translate_newlines(self, data): - data = data.replace("\r\n", "\n") - data = data.replace("\r", "\n") - return data - - - if mswindows: - # - # Windows methods - # - def _get_handles(self, stdin, stdout, stderr): - """Construct and return tuple with IO objects: - p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite - """ - if stdin == None and stdout == None and stderr == None: - return (None, None, None, None, None, None) - - p2cread, p2cwrite = None, None - c2pread, c2pwrite = None, None - errread, errwrite = None, None - - if stdin == None: - p2cread = GetStdHandle(STD_INPUT_HANDLE) - elif stdin == PIPE: - p2cread, p2cwrite = CreatePipe(None, 0) - # Detach and turn into fd - p2cwrite = p2cwrite.Detach() - p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0) - elif type(stdin) == types.IntType: - p2cread = msvcrt.get_osfhandle(stdin) - else: - # Assuming file-like object - p2cread = msvcrt.get_osfhandle(stdin.fileno()) - p2cread = self._make_inheritable(p2cread) - - if stdout == None: - c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE) - elif stdout == PIPE: - c2pread, c2pwrite = CreatePipe(None, 0) - # Detach and turn into fd - c2pread = c2pread.Detach() - c2pread = msvcrt.open_osfhandle(c2pread, 0) - elif type(stdout) == types.IntType: - c2pwrite = msvcrt.get_osfhandle(stdout) - else: - # Assuming file-like object - c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) - c2pwrite = self._make_inheritable(c2pwrite) - - if stderr == None: - errwrite = GetStdHandle(STD_ERROR_HANDLE) - elif stderr == PIPE: - errread, errwrite = CreatePipe(None, 0) - # Detach and turn into fd - errread = errread.Detach() - errread = msvcrt.open_osfhandle(errread, 0) - elif stderr == STDOUT: - errwrite = c2pwrite - elif type(stderr) == types.IntType: - errwrite = msvcrt.get_osfhandle(stderr) - else: - # Assuming file-like object - errwrite = msvcrt.get_osfhandle(stderr.fileno()) - errwrite = self._make_inheritable(errwrite) - - return (p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) - - - def _make_inheritable(self, handle): - """Return a duplicate of handle, which is inheritable""" - return DuplicateHandle(GetCurrentProcess(), handle, - GetCurrentProcess(), 0, 1, - DUPLICATE_SAME_ACCESS) - - - def _find_w9xpopen(self): - """Find and return absolute path to w9xpopen.exe""" - w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), - "w9xpopen.exe") - if not os.path.exists(w9xpopen): - # Eeek - file-not-found - possibly an embedding - # situation - see if we can locate it in sys.exec_prefix - w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), - "w9xpopen.exe") - if not os.path.exists(w9xpopen): - raise RuntimeError("Cannot locate w9xpopen.exe, which is " - "needed for Popen to work with your " - "shell or platform.") - return w9xpopen - - - def _execute_child(self, args, executable, preexec_fn, close_fds, - 
cwd, env, universal_newlines, - startupinfo, creationflags, shell, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite): - """Execute program (MS Windows version)""" - - if not isinstance(args, types.StringTypes): - args = list2cmdline(args) - - # Process startup details - default_startupinfo = STARTUPINFO() - if startupinfo == None: - startupinfo = default_startupinfo - if not None in (p2cread, c2pwrite, errwrite): - startupinfo.dwFlags |= STARTF_USESTDHANDLES - startupinfo.hStdInput = p2cread - startupinfo.hStdOutput = c2pwrite - startupinfo.hStdError = errwrite - - if shell: - default_startupinfo.dwFlags |= STARTF_USESHOWWINDOW - default_startupinfo.wShowWindow = SW_HIDE - comspec = os.environ.get("COMSPEC", "cmd.exe") - args = comspec + " /c " + args - if (GetVersion() >= 0x80000000L or - os.path.basename(comspec).lower() == "command.com"): - # Win9x, or using command.com on NT. We need to - # use the w9xpopen intermediate program. For more - # information, see KB Q150956 - # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp) - w9xpopen = self._find_w9xpopen() - args = '"%s" %s' % (w9xpopen, args) - # Not passing CREATE_NEW_CONSOLE has been known to - # cause random failures on win9x. Specifically a - # dialog: "Your program accessed mem currently in - # use at xxx" and a hopeful warning about the - # stability of your system. Cost is Ctrl+C wont - # kill children. - creationflags |= CREATE_NEW_CONSOLE - - # Start the process - try: - hp, ht, pid, tid = CreateProcess(executable, args, - # no special security - None, None, - # must inherit handles to pass std - # handles - 1, - creationflags, - env, - cwd, - startupinfo) - except pywintypes.error, e: - # Translate pywintypes.error to WindowsError, which is - # a subclass of OSError. FIXME: We should really - # translate errno using _sys_errlist (or simliar), but - # how can this be done from Python? - raise WindowsError(*e.args) - - # Retain the process handle, but close the thread handle - self._handle = hp - self.pid = pid - ht.Close() - - # Child is launched. Close the parent's copy of those pipe - # handles that only the child should have open. You need - # to make sure that no handles to the write end of the - # output pipe are maintained in this process or else the - # pipe will not close when the child process exits and the - # ReadFile will hang. - if p2cread != None: - p2cread.Close() - if c2pwrite != None: - c2pwrite.Close() - if errwrite != None: - errwrite.Close() - - - def poll(self): - """Check if child process has terminated. Returns returncode - attribute.""" - if self.returncode == None: - if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0: - self.returncode = GetExitCodeProcess(self._handle) - _active.remove(self) - return self.returncode - - - def wait(self): - """Wait for child process to terminate. Returns returncode - attribute.""" - if self.returncode == None: - obj = WaitForSingleObject(self._handle, INFINITE) - self.returncode = GetExitCodeProcess(self._handle) - _active.remove(self) - return self.returncode - - - def _readerthread(self, fh, buffer): - buffer.append(fh.read()) - - - def communicate(self, input=None): - """Interact with process: Send data to stdin. Read data from - stdout and stderr, until end-of-file is reached. Wait for - process to terminate. The optional input argument should be a - string to be sent to the child process, or None, if no data - should be sent to the child. 
- - communicate() returns a tuple (stdout, stderr).""" - stdout = None # Return - stderr = None # Return - - if self.stdout: - stdout = [] - stdout_thread = threading.Thread(target=self._readerthread, - args=(self.stdout, stdout)) - stdout_thread.setDaemon(True) - stdout_thread.start() - if self.stderr: - stderr = [] - stderr_thread = threading.Thread(target=self._readerthread, - args=(self.stderr, stderr)) - stderr_thread.setDaemon(True) - stderr_thread.start() - - if self.stdin: - if input != None: - self.stdin.write(input) - self.stdin.close() - - if self.stdout: - stdout_thread.join() - if self.stderr: - stderr_thread.join() - - # All data exchanged. Translate lists into strings. - if stdout != None: - stdout = stdout[0] - if stderr != None: - stderr = stderr[0] - - # Translate newlines, if requested. We cannot let the file - # object do the translation: It is based on stdio, which is - # impossible to combine with select (unless forcing no - # buffering). - if self.universal_newlines and hasattr(open, 'newlines'): - if stdout: - stdout = self._translate_newlines(stdout) - if stderr: - stderr = self._translate_newlines(stderr) - - self.wait() - return (stdout, stderr) - - else: - # - # POSIX methods - # - def _get_handles(self, stdin, stdout, stderr): - """Construct and return tuple with IO objects: - p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite - """ - p2cread, p2cwrite = None, None - c2pread, c2pwrite = None, None - errread, errwrite = None, None - - if stdin == None: - pass - elif stdin == PIPE: - p2cread, p2cwrite = os.pipe() - elif type(stdin) == types.IntType: - p2cread = stdin - else: - # Assuming file-like object - p2cread = stdin.fileno() - - if stdout == None: - pass - elif stdout == PIPE: - c2pread, c2pwrite = os.pipe() - elif type(stdout) == types.IntType: - c2pwrite = stdout - else: - # Assuming file-like object - c2pwrite = stdout.fileno() - - if stderr == None: - pass - elif stderr == PIPE: - errread, errwrite = os.pipe() - elif stderr == STDOUT: - errwrite = c2pwrite - elif type(stderr) == types.IntType: - errwrite = stderr - else: - # Assuming file-like object - errwrite = stderr.fileno() - - return (p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) - - - def _set_cloexec_flag(self, fd): - try: - cloexec_flag = fcntl.FD_CLOEXEC - except AttributeError: - cloexec_flag = 1 - - old = fcntl.fcntl(fd, fcntl.F_GETFD) - fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag) - - - def _close_fds(self, but): - for i in range(3, MAXFD): - if i == but: - continue - try: - os.close(i) - except: - pass - - - def _execute_child(self, args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite): - """Execute program (POSIX version)""" - - if isinstance(args, types.StringTypes): - args = [args] - - if shell: - args = ["/bin/sh", "-c"] + args - - if executable == None: - executable = args[0] - - # For transferring possible exec failure from child to parent - # The first char specifies the exception type: 0 means - # OSError, 1 means some other error. 
- errpipe_read, errpipe_write = os.pipe() - self._set_cloexec_flag(errpipe_write) - - self.pid = os.fork() - if self.pid == 0: - # Child - try: - # Close parent's pipe ends - if p2cwrite: - os.close(p2cwrite) - if c2pread: - os.close(c2pread) - if errread: - os.close(errread) - os.close(errpipe_read) - - # Dup fds for child - if p2cread: - os.dup2(p2cread, 0) - if c2pwrite: - os.dup2(c2pwrite, 1) - if errwrite: - os.dup2(errwrite, 2) - - # Close pipe fds. Make sure we doesn't close the same - # fd more than once. - if p2cread: - os.close(p2cread) - if c2pwrite and c2pwrite not in (p2cread,): - os.close(c2pwrite) - if errwrite and errwrite not in (p2cread, c2pwrite): - os.close(errwrite) - - # Close all other fds, if asked for - if close_fds: - self._close_fds(but=errpipe_write) - - if cwd != None: - os.chdir(cwd) - - if preexec_fn: - apply(preexec_fn) - - if env == None: - os.execvp(executable, args) - else: - os.execvpe(executable, args, env) - - except: - exc_type, exc_value, tb = sys.exc_info() - # Save the traceback and attach it to the exception object - exc_lines = traceback.format_exception(exc_type, - exc_value, - tb) - exc_value.child_traceback = ''.join(exc_lines) - os.write(errpipe_write, pickle.dumps(exc_value)) - - # This exitcode won't be reported to applications, so it - # really doesn't matter what we return. - os._exit(255) - - # Parent - os.close(errpipe_write) - if p2cread and p2cwrite: - os.close(p2cread) - if c2pwrite and c2pread: - os.close(c2pwrite) - if errwrite and errread: - os.close(errwrite) - - # Wait for exec to fail or succeed; possibly raising exception - data = os.read(errpipe_read, 1048576) # Exceptions limited to 1 MB - os.close(errpipe_read) - if data != "": - os.waitpid(self.pid, 0) - child_exception = pickle.loads(data) - raise child_exception - - - def _handle_exitstatus(self, sts): - if os.WIFSIGNALED(sts): - self.returncode = -os.WTERMSIG(sts) - elif os.WIFEXITED(sts): - self.returncode = os.WEXITSTATUS(sts) - else: - # Should never happen - raise RuntimeError("Unknown child exit status!") - - _active.remove(self) - - - def poll(self): - """Check if child process has terminated. Returns returncode - attribute.""" - if self.returncode == None: - try: - pid, sts = os.waitpid(self.pid, os.WNOHANG) - if pid == self.pid: - self._handle_exitstatus(sts) - except os.error: - pass - return self.returncode - - - def wait(self): - """Wait for child process to terminate. Returns returncode - attribute.""" - if self.returncode == None: - pid, sts = os.waitpid(self.pid, 0) - self._handle_exitstatus(sts) - return self.returncode - - - def communicate(self, input=None): - """Interact with process: Send data to stdin. Read data from - stdout and stderr, until end-of-file is reached. Wait for - process to terminate. The optional input argument should be a - string to be sent to the child process, or None, if no data - should be sent to the child. - - communicate() returns a tuple (stdout, stderr).""" - read_set = [] - write_set = [] - stdout = None # Return - stderr = None # Return - - if self.stdin: - # Flush stdio buffer. This might block, if the user has - # been writing to .stdin in an uncontrolled fashion. 
- self.stdin.flush() - if input: - write_set.append(self.stdin) - else: - self.stdin.close() - if self.stdout: - read_set.append(self.stdout) - stdout = [] - if self.stderr: - read_set.append(self.stderr) - stderr = [] - - while read_set or write_set: - rlist, wlist, xlist = select.select(read_set, write_set, []) - - if self.stdin in wlist: - # When select has indicated that the file is writable, - # we can write up to PIPE_BUF bytes without risk - # blocking. POSIX defines PIPE_BUF >= 512 - bytes_written = os.write(self.stdin.fileno(), input[:512]) - input = input[bytes_written:] - if not input: - self.stdin.close() - write_set.remove(self.stdin) - - if self.stdout in rlist: - data = os.read(self.stdout.fileno(), 1024) - if data == "": - self.stdout.close() - read_set.remove(self.stdout) - stdout.append(data) - - if self.stderr in rlist: - data = os.read(self.stderr.fileno(), 1024) - if data == "": - self.stderr.close() - read_set.remove(self.stderr) - stderr.append(data) - - # All data exchanged. Translate lists into strings. - if stdout != None: - stdout = ''.join(stdout) - if stderr != None: - stderr = ''.join(stderr) - - # Translate newlines, if requested. We cannot let the file - # object do the translation: It is based on stdio, which is - # impossible to combine with select (unless forcing no - # buffering). - if self.universal_newlines and hasattr(open, 'newlines'): - if stdout: - stdout = self._translate_newlines(stdout) - if stderr: - stderr = self._translate_newlines(stderr) - - self.wait() - return (stdout, stderr) - - -def _demo_posix(): - # - # Example 1: Simple redirection: Get process list - # - plist = Popen(["ps"], stdout=PIPE).communicate()[0] - print "Process list:" - print plist - - # - # Example 2: Change uid before executing child - # - if os.getuid() == 0: - p = Popen(["id"], preexec_fn=lambda: os.setuid(100)) - p.wait() - - # - # Example 3: Connecting several subprocesses - # - print "Looking for 'hda'..." - p1 = Popen(["dmesg"], stdout=PIPE) - p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) - print repr(p2.communicate()[0]) - - # - # Example 4: Catch execution error - # - print - print "Trying a weird file..." - try: - print Popen(["/this/path/does/not/exist"]).communicate() - except OSError, e: - if e.errno == errno.ENOENT: - print "The file didn't exist. I thought so..." - print "Child traceback:" - print e.child_traceback - else: - print "Error", e.errno - else: - print >>sys.stderr, "Gosh. No error." - - -def _demo_windows(): - # - # Example 1: Connecting several subprocesses - # - print "Looking for 'PROMPT' in set output..." - p1 = Popen("set", stdout=PIPE, shell=True) - p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE) - print repr(p2.communicate()[0]) - - # - # Example 2: Simple execution of program - # - print "Executing calc..." 
- p = Popen("calc") - p.wait() - - -if __name__ == "__main__": - if mswindows: - _demo_windows() - else: - _demo_posix() diff --git a/config.mak.in b/config.mak.in index 1cafa19ed4..9a578405d8 100644 --- a/config.mak.in +++ b/config.mak.in @@ -13,7 +13,6 @@ bindir = @bindir@ #gitexecdir = @libexecdir@/git-core/ datarootdir = @datarootdir@ template_dir = @datadir@/git-core/templates/ -GIT_PYTHON_DIR = @datadir@/git-core/python mandir=@mandir@ @@ -23,7 +22,6 @@ VPATH = @srcdir@ export exec_prefix mandir export srcdir VPATH -NO_PYTHON=@NO_PYTHON@ NEEDS_SSL_WITH_CRYPTO=@NEEDS_SSL_WITH_CRYPTO@ NO_OPENSSL=@NO_OPENSSL@ NO_CURL=@NO_CURL@ diff --git a/configure.ac b/configure.ac index cff5722eb9..34e34789bd 100644 --- a/configure.ac +++ b/configure.ac @@ -75,20 +75,6 @@ GIT_ARG_SET_PATH(shell) # Define PERL_PATH to provide path to Perl. GIT_ARG_SET_PATH(perl) # -# Define PYTHON_PATH to provide path to Python. -AC_ARG_WITH(python,[AS_HELP_STRING([--with-python=PATH], [provide PATH to python]) -AS_HELP_STRING([--without-python], [don't use python scripts])], - [if test "$withval" = "no"; then \ - NO_PYTHON=YesPlease; \ - elif test "$withval" = "yes"; then \ - NO_PYTHON=; \ - else \ - NO_PYTHON=; \ - PYTHON_PATH=$withval; \ - fi; \ - ]) -AC_SUBST(NO_PYTHON) -AC_SUBST(PYTHON_PATH) ## Checks for programs. @@ -98,18 +84,6 @@ AC_PROG_CC([cc gcc]) #AC_PROG_INSTALL # needs install-sh or install.sh in sources AC_CHECK_TOOL(AR, ar, :) AC_CHECK_PROGS(TAR, [gtar tar]) -# -# Define PYTHON_PATH to provide path to Python. -if test -z "$NO_PYTHON"; then - if test -z "$PYTHON_PATH"; then - AC_PATH_PROGS(PYTHON_PATH, [python python2.4 python2.3 python2]) - fi - if test -n "$PYTHON_PATH"; then - GIT_CONF_APPEND_LINE([PYTHON_PATH=@PYTHON_PATH@]) - NO_PYTHON="" - fi -fi - ## Checks for libraries. AC_MSG_NOTICE([CHECKS for libraries]) @@ -262,22 +236,9 @@ AC_SUBST(NO_SETENV) # Define NO_SYMLINK_HEAD if you never want .git/HEAD to be a symbolic link. # Enable it on Windows. By default, symrefs are still used. # -# Define WITH_OWN_SUBPROCESS_PY if you want to use with python 2.3. -AC_CACHE_CHECK([for subprocess.py], - [ac_cv_python_has_subprocess_py], -[if $PYTHON_PATH -c 'import subprocess' 2>/dev/null; then - ac_cv_python_has_subprocess_py=yes -else - ac_cv_python_has_subprocess_py=no -fi]) -if test $ac_cv_python_has_subprocess_py != yes; then - GIT_CONF_APPEND_LINE([WITH_OWN_SUBPROCESS_PY=YesPlease]) -fi -# # Define NO_ACCURATE_DIFF if your diff program at least sometimes misses # a missing newline at the end of the file. - ## Site configuration (override autodetection) ## --with-PACKAGE[=ARG] and --without-PACKAGE AC_MSG_NOTICE([CHECKS for site configuration]) @@ -144,6 +144,7 @@ struct refspec { * +A:B means overwrite remote B with local A. * +A is a shorthand for +A:A. * A is a shorthand for A:A. + * :B means delete remote B. 
*/ static struct refspec *parse_ref_spec(int nr_refspec, char **refspec) { @@ -174,21 +175,58 @@ static int count_refspec_match(const char *pattern, struct ref *refs, struct ref **matched_ref) { - int match; int patlen = strlen(pattern); + struct ref *matched_weak = NULL; + struct ref *matched = NULL; + int weak_match = 0; + int match = 0; - for (match = 0; refs; refs = refs->next) { + for (weak_match = match = 0; refs; refs = refs->next) { char *name = refs->name; int namelen = strlen(name); + int weak_match; + if (namelen < patlen || memcmp(name + namelen - patlen, pattern, patlen)) continue; if (namelen != patlen && name[namelen - patlen - 1] != '/') continue; - match++; - *matched_ref = refs; + + /* A match is "weak" if it is with refs outside + * heads or tags, and did not specify the pattern + * in full (e.g. "refs/remotes/origin/master") or at + * least from the toplevel (e.g. "remotes/origin/master"); + * otherwise "git push $URL master" would result in + * ambiguity between remotes/origin/master and heads/master + * at the remote site. + */ + if (namelen != patlen && + patlen != namelen - 5 && + strncmp(name, "refs/heads/", 11) && + strncmp(name, "refs/tags/", 10)) { + /* We want to catch the case where only weak + * matches are found and there are multiple + * matches, and where more than one strong + * matches are found, as ambiguous. One + * strong match with zero or more weak matches + * are acceptable as a unique match. + */ + matched_weak = refs; + weak_match++; + } + else { + matched = refs; + match++; + } + } + if (!matched) { + *matched_ref = matched_weak; + return weak_match; + } + else { + *matched_ref = matched; + return match; } - return match; } static void link_dst_tail(struct ref *ref, struct ref ***tail) @@ -203,6 +241,13 @@ static struct ref *try_explicit_object_name(const char *name) unsigned char sha1[20]; struct ref *ref; int len; + + if (!*name) { + ref = xcalloc(1, sizeof(*ref) + 20); + strcpy(ref->name, "(delete)"); + hashclr(ref->new_sha1); + return ref; + } if (get_sha1(name, sha1)) return NULL; len = strlen(name) + 1; @@ -225,7 +270,8 @@ static int match_explicit_refs(struct ref *src, struct ref *dst, break; case 0: /* The source could be in the get_sha1() format - * not a reference name. + * not a reference name. :refs/other is a + * way to delete 'other' ref at the remote end. */ matched_src = try_explicit_object_name(rs[i].src); if (matched_src) diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash index a43a177160..447ec20467 100755 --- a/contrib/completion/git-completion.bash +++ b/contrib/completion/git-completion.bash @@ -18,26 +18,94 @@ # 2) Added the following line to your .bashrc: # source ~/.git-completion.sh # +# 3) You may want to make sure the git executable is available +# in your PATH before this script is sourced, as some caching +# is performed while the script loads. If git isn't found +# at source time then all lookups will be done on demand, +# which may be slightly slower. +# +# 4) Consider changing your PS1 to also show the current branch: +# PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ ' +# +# The argument to __git_ps1 will be displayed only if you +# are currently in a git repository. The %s token will be +# the name of the current branch. 
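The remote.c changes above give ":<dst>" refspecs delete semantics and demote matches outside refs/heads and refs/tags to "weak" matches. A hedged sketch of what that enables (remote and branch names are illustrative):

------------------------------------------------
$ git push origin :experimental       # empty source: delete 'experimental' at the remote
$ git push origin refs/heads/master   # spelling the ref in full avoids a weak match
                                      # against remotes/origin/master on the far end
------------------------------------------------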
+# __gitdir () { - echo "${__git_dir:-$(git rev-parse --git-dir 2>/dev/null)}" + if [ -z "$1" ]; then + if [ -n "$__git_dir" ]; then + echo "$__git_dir" + elif [ -d .git ]; then + echo .git + else + git rev-parse --git-dir 2>/dev/null + fi + elif [ -d "$1/.git" ]; then + echo "$1/.git" + else + echo "$1" + fi +} + +__git_ps1 () +{ + local b="$(git symbolic-ref HEAD 2>/dev/null)" + if [ -n "$b" ]; then + if [ -n "$1" ]; then + printf "$1" "${b##refs/heads/}" + else + printf " (%s)" "${b##refs/heads/}" + fi + fi +} + +__git_heads () +{ + local cmd i is_hash=y dir="$(__gitdir "$1")" + if [ -d "$dir" ]; then + for i in $(git --git-dir="$dir" \ + for-each-ref --format='%(refname)' \ + refs/heads ); do + echo "${i#refs/heads/}" + done + return + fi + for i in $(git-ls-remote "$1" 2>/dev/null); do + case "$is_hash,$i" in + y,*) is_hash=n ;; + n,*^{}) is_hash=y ;; + n,refs/heads/*) is_hash=y; echo "${i#refs/heads/}" ;; + n,*) is_hash=y; echo "$i" ;; + esac + done } __git_refs () { - local cmd i is_hash=y dir="${1:-$(__gitdir)}" + local cmd i is_hash=y dir="$(__gitdir "$1")" if [ -d "$dir" ]; then - cmd=git-peek-remote - else - cmd=git-ls-remote + if [ -e "$dir/HEAD" ]; then echo HEAD; fi + for i in $(git --git-dir="$dir" \ + for-each-ref --format='%(refname)' \ + refs/tags refs/heads refs/remotes); do + case "$i" in + refs/tags/*) echo "${i#refs/tags/}" ;; + refs/heads/*) echo "${i#refs/heads/}" ;; + refs/remotes/*) echo "${i#refs/remotes/}" ;; + *) echo "$i" ;; + esac + done + return fi - for i in $($cmd "$dir" 2>/dev/null); do + for i in $(git-ls-remote "$dir" 2>/dev/null); do case "$is_hash,$i" in y,*) is_hash=n ;; n,*^{}) is_hash=y ;; n,refs/tags/*) is_hash=y; echo "${i#refs/tags/}" ;; n,refs/heads/*) is_hash=y; echo "${i#refs/heads/}" ;; + n,refs/remotes/*) is_hash=y; echo "${i#refs/remotes/}" ;; n,*) is_hash=y; echo "$i" ;; esac done @@ -45,19 +113,25 @@ __git_refs () __git_refs2 () { - local cmd i is_hash=y dir="${1:-$(__gitdir)}" - if [ -d "$dir" ]; then - cmd=git-peek-remote - else - cmd=git-ls-remote - fi - for i in $($cmd "$dir" 2>/dev/null); do + local i + for i in $(__git_refs "$1"); do + echo "$i:$i" + done +} + +__git_refs_remotes () +{ + local cmd i is_hash=y + for i in $(git-ls-remote "$1" 2>/dev/null); do case "$is_hash,$i" in + n,refs/heads/*) + is_hash=y + echo "$i:refs/remotes/$1/${i#refs/heads/}" + ;; y,*) is_hash=n ;; n,*^{}) is_hash=y ;; - n,refs/tags/*) is_hash=y; echo "${i#refs/tags/}:${i#refs/tags/}" ;; - n,refs/heads/*) is_hash=y; echo "${i#refs/heads/}:${i#refs/heads/}" ;; - n,*) is_hash=y; echo "$i:$i" ;; + n,refs/tags/*) is_hash=y;; + n,*) is_hash=y; ;; esac done } @@ -81,6 +155,22 @@ __git_remotes () done } +__git_merge_strategies () +{ + if [ -n "$__git_merge_strategylist" ]; then + echo "$__git_merge_strategylist" + return + fi + sed -n "/^all_strategies='/{ + s/^all_strategies='// + s/'// + p + q + }" "$(git --exec-path)/git-merge" +} +__git_merge_strategylist= +__git_merge_strategylist="$(__git_merge_strategies 2>/dev/null)" + __git_complete_file () { local pfx ls ref cur="${COMP_WORDS[COMP_CWORD]}" @@ -115,6 +205,84 @@ __git_complete_file () esac } +__git_complete_revlist () +{ + local pfx cur="${COMP_WORDS[COMP_CWORD]}" + case "$cur" in + *...*) + pfx="${cur%...*}..." + cur="${cur#*...}" + COMPREPLY=($(compgen -P "$pfx" -W "$(__git_refs)" -- "$cur")) + ;; + *..*) + pfx="${cur%..*}.." 
+ cur="${cur#*..}" + COMPREPLY=($(compgen -P "$pfx" -W "$(__git_refs)" -- "$cur")) + ;; + *) + COMPREPLY=($(compgen -W "$(__git_refs)" -- "$cur")) + ;; + esac +} + +__git_commands () +{ + if [ -n "$__git_commandlist" ]; then + echo "$__git_commandlist" + return + fi + local i IFS=" "$'\n' + for i in $(git help -a|egrep '^ ') + do + case $i in + check-ref-format) : plumbing;; + commit-tree) : plumbing;; + convert-objects) : plumbing;; + cvsserver) : daemon;; + daemon) : daemon;; + fetch-pack) : plumbing;; + hash-object) : plumbing;; + http-*) : transport;; + index-pack) : plumbing;; + local-fetch) : plumbing;; + mailinfo) : plumbing;; + mailsplit) : plumbing;; + merge-*) : plumbing;; + mktree) : plumbing;; + mktag) : plumbing;; + pack-objects) : plumbing;; + pack-redundant) : plumbing;; + pack-refs) : plumbing;; + parse-remote) : plumbing;; + patch-id) : plumbing;; + peek-remote) : plumbing;; + read-tree) : plumbing;; + receive-pack) : plumbing;; + rerere) : plumbing;; + rev-list) : plumbing;; + rev-parse) : plumbing;; + runstatus) : plumbing;; + sh-setup) : internal;; + shell) : daemon;; + send-pack) : plumbing;; + show-index) : plumbing;; + ssh-*) : transport;; + stripspace) : plumbing;; + symbolic-ref) : plumbing;; + unpack-file) : plumbing;; + unpack-objects) : plumbing;; + update-ref) : plumbing;; + update-server-info) : daemon;; + upload-archive) : plumbing;; + upload-pack) : plumbing;; + write-tree) : plumbing;; + *) echo $i;; + esac + done +} +__git_commandlist= +__git_commandlist="$(__git_commands 2>/dev/null)" + __git_aliases () { local i IFS=$'\n' @@ -140,6 +308,54 @@ __git_aliased_command () done } +__git_whitespacelist="nowarn warn error error-all strip" + +_git_am () +{ + local cur="${COMP_WORDS[COMP_CWORD]}" + if [ -d .dotest ]; then + COMPREPLY=($(compgen -W " + --skip --resolved + " -- "$cur")) + return + fi + case "$cur" in + --whitespace=*) + COMPREPLY=($(compgen -W "$__git_whitespacelist" \ + -- "${cur##--whitespace=}")) + return + ;; + --*) + COMPREPLY=($(compgen -W " + --signoff --utf8 --binary --3way --interactive + --whitespace= + " -- "$cur")) + return + esac + COMPREPLY=() +} + +_git_apply () +{ + local cur="${COMP_WORDS[COMP_CWORD]}" + case "$cur" in + --whitespace=*) + COMPREPLY=($(compgen -W "$__git_whitespacelist" \ + -- "${cur##--whitespace=}")) + return + ;; + --*) + COMPREPLY=($(compgen -W " + --stat --numstat --summary --check --index + --cached --index-info --reverse --reject --unidiff-zero + --apply --no-add --exclude= + --whitespace= --inaccurate-eof --verbose + " -- "$cur")) + return + esac + COMPREPLY=() +} + _git_branch () { local cur="${COMP_WORDS[COMP_CWORD]}" @@ -168,6 +384,35 @@ _git_checkout () COMPREPLY=($(compgen -W "-l -b $(__git_refs)" -- "$cur")) } +_git_cherry_pick () +{ + local cur="${COMP_WORDS[COMP_CWORD]}" + case "$cur" in + --*) + COMPREPLY=($(compgen -W " + --edit --no-commit + " -- "$cur")) + ;; + *) + COMPREPLY=($(compgen -W "$(__git_refs)" -- "$cur")) + ;; + esac +} + +_git_commit () +{ + local cur="${COMP_WORDS[COMP_CWORD]}" + case "$cur" in + --*) + COMPREPLY=($(compgen -W " + --all --author= --signoff --verify --no-verify + --edit --amend --include --only + " -- "$cur")) + return + esac + COMPREPLY=() +} + _git_diff () { __git_complete_file @@ -209,6 +454,26 @@ _git_fetch () esac } +_git_format_patch () +{ + local cur="${COMP_WORDS[COMP_CWORD]}" + case "$cur" in + --*) + COMPREPLY=($(compgen -W " + --stdout --attach --thread + --output-directory + --numbered --start-number + --keep-subject + --signoff + --in-reply-to= + 
--full-index --binary + " -- "$cur")) + return + ;; + esac + __git_complete_revlist +} + _git_ls_remote () { local cur="${COMP_WORDS[COMP_CWORD]}" @@ -222,22 +487,53 @@ _git_ls_tree () _git_log () { - local pfx cur="${COMP_WORDS[COMP_CWORD]}" + local cur="${COMP_WORDS[COMP_CWORD]}" case "$cur" in - *...*) - pfx="${cur%...*}..." - cur="${cur#*...}" - COMPREPLY=($(compgen -P "$pfx" -W "$(__git_refs)" -- "$cur")) + --pretty=*) + COMPREPLY=($(compgen -W " + oneline short medium full fuller email raw + " -- "${cur##--pretty=}")) + return ;; - *..*) - pfx="${cur%..*}.." - cur="${cur#*..}" - COMPREPLY=($(compgen -P "$pfx" -W "$(__git_refs)" -- "$cur")) + --*) + COMPREPLY=($(compgen -W " + --max-count= --max-age= --since= --after= + --min-age= --before= --until= + --root --not --topo-order --date-order + --no-merges + --abbrev-commit --abbrev= + --relative-date + --author= --committer= --grep= + --all-match + --pretty= --name-status --name-only + " -- "$cur")) + return ;; - *) - COMPREPLY=($(compgen -W "$(__git_refs)" -- "$cur")) + esac + __git_complete_revlist +} + +_git_merge () +{ + local cur="${COMP_WORDS[COMP_CWORD]}" + case "${COMP_WORDS[COMP_CWORD-1]}" in + -s|--strategy) + COMPREPLY=($(compgen -W "$(__git_merge_strategies)" -- "$cur")) + return + esac + case "$cur" in + --strategy=*) + COMPREPLY=($(compgen -W "$(__git_merge_strategies)" \ + -- "${cur##--strategy=}")) + return ;; + --*) + COMPREPLY=($(compgen -W " + --no-commit --no-summary --squash --strategy + " -- "$cur")) + return esac + COMPREPLY=($(compgen -W "$(__git_refs)" -- "$cur")) } _git_merge_base () @@ -246,6 +542,12 @@ _git_merge_base () COMPREPLY=($(compgen -W "$(__git_refs)" -- "$cur")) } +_git_name_rev () +{ + local cur="${COMP_WORDS[COMP_CWORD]}" + COMPREPLY=($(compgen -W "--tags --all --stdin" -- "$cur")) +} + _git_pull () { local cur="${COMP_WORDS[COMP_CWORD]}" @@ -298,17 +600,153 @@ _git_push () esac } -_git_reset () +_git_rebase () { local cur="${COMP_WORDS[COMP_CWORD]}" - local opt="--mixed --hard --soft" - COMPREPLY=($(compgen -W "$opt $(__git_refs)" -- "$cur")) + if [ -d .dotest ]; then + COMPREPLY=($(compgen -W " + --continue --skip --abort + " -- "$cur")) + return + fi + case "${COMP_WORDS[COMP_CWORD-1]}" in + -s|--strategy) + COMPREPLY=($(compgen -W "$(__git_merge_strategies)" -- "$cur")) + return + esac + case "$cur" in + --strategy=*) + COMPREPLY=($(compgen -W "$(__git_merge_strategies)" \ + -- "${cur##--strategy=}")) + return + ;; + --*) + COMPREPLY=($(compgen -W " + --onto --merge --strategy + " -- "$cur")) + return + esac + COMPREPLY=($(compgen -W "$(__git_refs)" -- "$cur")) } -_git_show () +_git_repo_config () { local cur="${COMP_WORDS[COMP_CWORD]}" - COMPREPLY=($(compgen -W "$(__git_refs)" -- "$cur")) + local prv="${COMP_WORDS[COMP_CWORD-1]}" + case "$prv" in + branch.*.remote) + COMPREPLY=($(compgen -W "$(__git_remotes)" -- "$cur")) + return + ;; + branch.*.merge) + COMPREPLY=($(compgen -W "$(__git_refs)" -- "$cur")) + return + ;; + remote.*.fetch) + local remote="${prv#remote.}" + remote="${remote%.fetch}" + COMPREPLY=($(compgen -W "$(__git_refs_remotes "$remote")" \ + -- "$cur")) + return + ;; + remote.*.push) + local remote="${prv#remote.}" + remote="${remote%.push}" + COMPREPLY=($(compgen -W "$(git --git-dir="$(__gitdir)" \ + for-each-ref --format='%(refname):%(refname)' \ + refs/heads)" -- "$cur")) + return + ;; + *.*) + COMPREPLY=() + return + ;; + esac + case "$cur" in + --*) + COMPREPLY=($(compgen -W " + --global --list --replace-all + --get --get-all --get-regexp + --unset --unset-all + " -- 
"$cur")) + return + ;; + branch.*.*) + local pfx="${cur%.*}." + cur="${cur##*.}" + COMPREPLY=($(compgen -P "$pfx" -W "remote merge" -- "$cur")) + return + ;; + branch.*) + local pfx="${cur%.*}." + cur="${cur#*.}" + COMPREPLY=($(compgen -P "$pfx" -S . \ + -W "$(__git_heads)" -- "$cur")) + return + ;; + remote.*.*) + local pfx="${cur%.*}." + cur="${cur##*.}" + COMPREPLY=($(compgen -P "$pfx" -W "url fetch push" -- "$cur")) + return + ;; + remote.*) + local pfx="${cur%.*}." + cur="${cur#*.}" + COMPREPLY=($(compgen -P "$pfx" -S . \ + -W "$(__git_remotes)" -- "$cur")) + return + ;; + esac + COMPREPLY=($(compgen -W " + apply.whitespace + core.fileMode + core.gitProxy + core.ignoreStat + core.preferSymlinkRefs + core.logAllRefUpdates + core.repositoryFormatVersion + core.sharedRepository + core.warnAmbiguousRefs + core.compression + core.legacyHeaders + i18n.commitEncoding + diff.color + diff.renameLimit + diff.renames + pager.color + status.color + log.showroot + show.difftree + showbranch.default + whatchanged.difftree + http.sslVerify + http.sslCert + http.sslKey + http.sslCAInfo + http.sslCAPath + http.maxRequests + http.lowSpeedLimit http.lowSpeedTime + http.noEPSV + pack.window + repack.useDeltaBaseOffset + pull.octopus pull.twohead + merge.summary + receive.unpackLimit + receive.denyNonFastForwards + user.name user.email + tar.umask + gitcvs.enabled + gitcvs.logfile + branch. remote. + " -- "$cur")) +} + +_git_reset () +{ + local cur="${COMP_WORDS[COMP_CWORD]}" + local opt="--mixed --hard --soft" + COMPREPLY=($(compgen -W "$opt $(__git_refs)" -- "$cur")) } _git () @@ -327,11 +765,11 @@ _git () done if [ $c -eq $COMP_CWORD -a -z "$command" ]; then - COMPREPLY=($(compgen \ - -W "--git-dir= --version \ - $(git help -a|egrep '^ ') \ - $(__git_aliases)" \ - -- "${COMP_WORDS[COMP_CWORD]}")) + COMPREPLY=($(compgen -W " + --git-dir= --version --exec-path + $(__git_commands) + $(__git_aliases) + " -- "${COMP_WORDS[COMP_CWORD]}")) return; fi @@ -339,20 +777,29 @@ _git () [ "$expansion" ] && command="$expansion" case "$command" in + am) _git_am ;; + apply) _git_apply ;; branch) _git_branch ;; cat-file) _git_cat_file ;; checkout) _git_checkout ;; + cherry-pick) _git_cherry_pick ;; + commit) _git_commit ;; diff) _git_diff ;; diff-tree) _git_diff_tree ;; fetch) _git_fetch ;; + format-patch) _git_format_patch ;; log) _git_log ;; ls-remote) _git_ls_remote ;; ls-tree) _git_ls_tree ;; + merge) _git_merge;; merge-base) _git_merge_base ;; + name-rev) _git_name_rev ;; pull) _git_pull ;; push) _git_push ;; + rebase) _git_rebase ;; + repo-config) _git_repo_config ;; reset) _git_reset ;; - show) _git_show ;; + show) _git_log ;; show-branch) _git_log ;; whatchanged) _git_log ;; *) COMPREPLY=() ;; @@ -367,20 +814,29 @@ _gitk () complete -o default -o nospace -F _git git complete -o default -F _gitk gitk +complete -o default -F _git_am git-am +complete -o default -F _git_apply git-apply complete -o default -F _git_branch git-branch complete -o default -o nospace -F _git_cat_file git-cat-file complete -o default -F _git_checkout git-checkout +complete -o default -F _git_cherry_pick git-cherry-pick +complete -o default -F _git_commit git-commit complete -o default -o nospace -F _git_diff git-diff complete -o default -F _git_diff_tree git-diff-tree complete -o default -o nospace -F _git_fetch git-fetch +complete -o default -o nospace -F _git_format_patch git-format-patch complete -o default -o nospace -F _git_log git-log complete -o default -F _git_ls_remote git-ls-remote complete -o default -o nospace -F _git_ls_tree 
git-ls-tree +complete -o default -F _git_merge git-merge complete -o default -F _git_merge_base git-merge-base +complete -o default -F _git_name_rev git-name-rev complete -o default -o nospace -F _git_pull git-pull complete -o default -o nospace -F _git_push git-push +complete -o default -F _git_rebase git-rebase +complete -o default -F _git_repo_config git-repo-config complete -o default -F _git_reset git-reset -complete -o default -F _git_show git-show +complete -o default -F _git_log git-show complete -o default -o nospace -F _git_log git-show-branch complete -o default -o nospace -F _git_log git-whatchanged @@ -389,15 +845,20 @@ complete -o default -o nospace -F _git_log git-whatchanged # included the '.exe' suffix. # if [ Cygwin = "$(uname -o 2>/dev/null)" ]; then +complete -o default -F _git_apply git-apply.exe complete -o default -o nospace -F _git git.exe complete -o default -F _git_branch git-branch.exe complete -o default -o nospace -F _git_cat_file git-cat-file.exe complete -o default -o nospace -F _git_diff git-diff.exe complete -o default -o nospace -F _git_diff_tree git-diff-tree.exe +complete -o default -o nospace -F _git_format_patch git-format-patch.exe complete -o default -o nospace -F _git_log git-log.exe complete -o default -o nospace -F _git_ls_tree git-ls-tree.exe complete -o default -F _git_merge_base git-merge-base.exe +complete -o default -F _git_name_rev git-name-rev.exe complete -o default -o nospace -F _git_push git-push.exe +complete -o default -F _git_repo_config git-repo-config +complete -o default -o nospace -F _git_log git-show.exe complete -o default -o nospace -F _git_log git-show-branch.exe complete -o default -o nospace -F _git_log git-whatchanged.exe fi diff --git a/contrib/mailmap.linux b/contrib/mailmap.linux new file mode 100644 index 0000000000..e4907f80f1 --- /dev/null +++ b/contrib/mailmap.linux @@ -0,0 +1,42 @@ +# +# Even with git, we don't always have name translations. 
+# So have an email->real name table to translate the +# (hopefully few) missing names +# +# repo-abbrev: /pub/scm/linux/kernel/git/ +# +Adrian Bunk <bunk@stusta.de> +Andreas Herrmann <aherrman@de.ibm.com> +Andrew Morton <akpm@osdl.org> +Andrew Vasquez <andrew.vasquez@qlogic.com> +Christoph Hellwig <hch@lst.de> +Corey Minyard <minyard@acm.org> +David Woodhouse <dwmw2@shinybook.infradead.org> +Domen Puncer <domen@coderock.org> +Douglas Gilbert <dougg@torque.net> +Ed L Cashin <ecashin@coraid.com> +Evgeniy Polyakov <johnpol@2ka.mipt.ru> +Felix Moeller <felix@derklecks.de> +Frank Zago <fzago@systemfabricworks.com> +Greg Kroah-Hartman <gregkh@suse.de> +James Bottomley <jejb@mulgrave.(none)> +James Bottomley <jejb@titanic.il.steeleye.com> +Jeff Garzik <jgarzik@pretzel.yyz.us> +Jens Axboe <axboe@suse.de> +Kay Sievers <kay.sievers@vrfy.org> +Mitesh shah <mshah@teja.com> +Morten Welinder <terra@gnome.org> +Morten Welinder <welinder@anemone.rentec.com> +Morten Welinder <welinder@darter.rentec.com> +Morten Welinder <welinder@troll.com> +Nguyen Anh Quynh <aquynh@gmail.com> +Paolo 'Blaisorblade' Giarrusso <blaisorblade@yahoo.it> +Peter A Jonsson <pj@ludd.ltu.se> +Ralf Wildenhues <Ralf.Wildenhues@gmx.de> +Rudolf Marek <R.Marek@sh.cvut.cz> +Rui Saraiva <rmps@joel.ist.utl.pt> +Sachin P Sant <ssant@in.ibm.com> +Santtu Hyrkk,Av(B <santtu.hyrkko@gmail.com> +Simon Kelley <simon@thekelleys.org.uk> +Tejun Heo <htejun@gmail.com> +Tony Luck <tony.luck@intel.com> diff --git a/fetch-pack.c b/fetch-pack.c index 0a169dce85..743eab7efa 100644 --- a/fetch-pack.c +++ b/fetch-pack.c @@ -566,6 +566,29 @@ static int fetch_pack(int fd[2], int nr_match, char **match) return 0; } +static int remove_duplicates(int nr_heads, char **heads) +{ + int src, dst; + + for (src = dst = 0; src < nr_heads; src++) { + /* If heads[src] is different from any of + * heads[0..dst], push it in. + */ + int i; + for (i = 0; i < dst; i++) { + if (!strcmp(heads[i], heads[src])) + break; + } + if (i < dst) + continue; + if (src != dst) + heads[dst] = heads[src]; + dst++; + } + heads[dst] = 0; + return dst; +} + int main(int argc, char **argv) { int i, ret, nr_heads; @@ -617,6 +640,8 @@ int main(int argc, char **argv) pid = git_connect(fd, dest, exec); if (pid < 0) return 1; + if (heads && nr_heads) + nr_heads = remove_duplicates(nr_heads, heads); ret = fetch_pack(fd, nr_heads, heads); close(fd[0]); close(fd[1]); diff --git a/git-clone.sh b/git-clone.sh index 3f006d1a77..0ace989fde 100755 --- a/git-clone.sh +++ b/git-clone.sh @@ -14,7 +14,7 @@ die() { } usage() { - die "Usage: $0 [--template=<template_directory>] [--use-separate-remote] [--reference <reference-repo>] [--bare] [-l [-s]] [-q] [-u <upload-pack>] [--origin <name>] [-n] <repo> [<dir>]" + die "Usage: $0 [--template=<template_directory>] [--no-separate-remote] [--reference <reference-repo>] [--bare] [-l [-s]] [-q] [-u <upload-pack>] [--origin <name>] [-n] <repo> [<dir>]" } get_repo_base() { @@ -48,6 +48,10 @@ Perhaps git-update-server-info needs to be run there?" 
case "$name" in *^*) continue;; esac + case "$bare,$name" in + yes,* | ,heads/* | ,tags/*) ;; + *) continue ;; + esac if test -n "$use_separate_remote" && branch_name=`expr "z$name" : 'zheads/\(.*\)'` then @@ -115,7 +119,7 @@ bare= reference= origin= origin_override= -use_separate_remote= +use_separate_remote=t while case "$#,$1" in 0,*) break ;; @@ -134,7 +138,10 @@ while template="$1" ;; *,-q|*,--quiet) quiet=-q ;; *,--use-separate-remote) + # default use_separate_remote=t ;; + *,--no-separate-remote) + use_separate_remote= ;; 1,--reference) usage ;; *,--reference) shift; reference="$1" ;; @@ -169,18 +176,15 @@ repo="$1" test -n "$repo" || die 'you must specify a repository to clone.' -# --bare implies --no-checkout +# --bare implies --no-checkout and --no-separate-remote if test yes = "$bare" then if test yes = "$origin_override" then die '--bare and --origin $origin options are incompatible.' fi - if test t = "$use_separate_remote" - then - die '--bare and --use-separate-remote options are incompatible.' - fi no_checkout=yes + use_separate_remote= fi if test -z "$origin" @@ -373,9 +377,9 @@ then *) origin_track="$remote_top/$origin" git-update-ref "refs/heads/$origin" "$head_sha1" ;; esac && - echo >"$GIT_DIR/remotes/$origin" \ - "URL: $repo -Pull: refs/heads/$head_points_at:$origin_track" && + git-repo-config remote."$origin".url "$repo" && + git-repo-config remote."$origin".fetch \ + "refs/heads/$head_points_at:$origin_track" && (cd "$GIT_DIR/$remote_top" && find . -type f -print) | while read dotslref do @@ -389,8 +393,8 @@ Pull: refs/heads/$head_points_at:$origin_track" && then continue fi - echo "Pull: refs/heads/${name}:$remote_top/${name}" - done >>"$GIT_DIR/remotes/$origin" && + git-repo-config remote."$origin".fetch "refs/heads/${name}:$remote_top/${name}" '^$' + done && case "$use_separate_remote" in t) rm -f "refs/remotes/$origin/HEAD" diff --git a/git-cvsexportcommit.perl b/git-cvsexportcommit.perl index 7bac16e946..c9d1d88f2e 100755 --- a/git-cvsexportcommit.perl +++ b/git-cvsexportcommit.perl @@ -116,6 +116,7 @@ if ($opt_a) { close MSG; my (@afiles, @dfiles, @mfiles, @dirs); +my %amodes; my @files = safe_pipe_capture('git-diff-tree', '-r', $parent, $commit); #print @files; $? && die "Error in git-diff-tree"; @@ -124,6 +125,7 @@ foreach my $f (@files) { my @fields = split(m!\s+!, $f); if ($fields[4] eq 'A') { my $path = $fields[5]; + $amodes{$path} = $fields[1]; push @afiles, $path; # add any needed parent directories $path = dirname $path; @@ -268,6 +270,7 @@ if (($? >> 8) == 2) { } foreach my $f (@afiles) { + set_new_file_permissions($f, $amodes{$f}); if (grep { $_ eq $f } @bfiles) { system('cvs', 'add','-kb',$f); } else { @@ -342,3 +345,13 @@ sub safe_pipe_capture { } return wantarray ? @output : join('',@output); } + +# For any file we want to add to cvs, we must first set its permissions +# properly, *before* the "cvs add ..." command. Otherwise, it is impossible +# to change the permission of the file in the CVS repository using only cvs +# commands. This should be fixed in cvs-1.12.14. 
+sub set_new_file_permissions { + my ($file, $perm) = @_; + chmod oct($perm), $file + or die "failed to set permissions of \"$file\": $!\n"; +} diff --git a/git-cvsimport.perl b/git-cvsimport.perl index b54a9486d2..c5bf2d19cd 100755 --- a/git-cvsimport.perl +++ b/git-cvsimport.perl @@ -29,7 +29,7 @@ use IPC::Open2; $SIG{'PIPE'}="IGNORE"; $ENV{'TZ'}="UTC"; -our($opt_h,$opt_o,$opt_v,$opt_k,$opt_u,$opt_d,$opt_p,$opt_C,$opt_z,$opt_i,$opt_P, $opt_s,$opt_m,$opt_M,$opt_A,$opt_S,$opt_L); +our ($opt_h,$opt_o,$opt_v,$opt_k,$opt_u,$opt_d,$opt_p,$opt_C,$opt_z,$opt_i,$opt_P, $opt_s,$opt_m,$opt_M,$opt_A,$opt_S,$opt_L); my (%conv_author_name, %conv_author_email); sub usage() { @@ -90,15 +90,15 @@ usage if $opt_h; @ARGV <= 1 or usage(); -if($opt_d) { +if ($opt_d) { $ENV{"CVSROOT"} = $opt_d; -} elsif(-f 'CVS/Root') { +} elsif (-f 'CVS/Root') { open my $f, '<', 'CVS/Root' or die 'Failed to open CVS/Root'; $opt_d = <$f>; chomp $opt_d; close $f; $ENV{"CVSROOT"} = $opt_d; -} elsif($ENV{"CVSROOT"}) { +} elsif ($ENV{"CVSROOT"}) { $opt_d = $ENV{"CVSROOT"}; } else { die "CVSROOT needs to be set"; @@ -141,7 +141,7 @@ use File::Temp qw(tempfile); use POSIX qw(strftime dup2); sub new { - my($what,$repo,$subdir) = @_; + my ($what,$repo,$subdir) = @_; $what=ref($what) if ref($what); my $self = {}; @@ -161,24 +161,38 @@ sub new { sub conn { my $self = shift; my $repo = $self->{'fullrep'}; - if($repo =~ s/^:pserver:(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?//) { - my($user,$pass,$serv,$port) = ($1,$2,$3,$4); + if ($repo =~ s/^:pserver(?:([^:]*)):(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?//) { + my ($param,$user,$pass,$serv,$port) = ($1,$2,$3,$4,$5); + + my ($proxyhost,$proxyport); + if ($param && ($param =~ m/proxy=([^;]+)/)) { + $proxyhost = $1; + # Default proxyport, if not specified, is 8080. + $proxyport = 8080; + if ($ENV{"CVS_PROXY_PORT"}) { + $proxyport = $ENV{"CVS_PROXY_PORT"}; + } + if ($param =~ m/proxyport=([^;]+)/) { + $proxyport = $1; + } + } + $user="anonymous" unless defined $user; my $rr2 = "-"; - unless($port) { + unless ($port) { $rr2 = ":pserver:$user\@$serv:$repo"; $port=2401; } my $rr = ":pserver:$user\@$serv:$port$repo"; - unless($pass) { + unless ($pass) { open(H,$ENV{'HOME'}."/.cvspass") and do { # :pserver:cvs@mea.tmt.tele.fi:/cvsroot/zmailer Ah<Z - while(<H>) { + while (<H>) { chomp; s/^\/\d+\s+//; my ($w,$p) = split(/\s/,$_,2); - if($w eq $rr or $w eq $rr2) { + if ($w eq $rr or $w eq $rr2) { $pass = $p; last; } @@ -187,15 +201,45 @@ sub conn { } $pass="A" unless $pass; - my $s = IO::Socket::INET->new(PeerHost => $serv, PeerPort => $port); - die "Socket to $serv: $!\n" unless defined $s; + my ($s, $rep); + if ($proxyhost) { + + # Use a HTTP Proxy. Only works for HTTP proxies that + # don't require user authentication + # + # See: http://www.ietf.org/rfc/rfc2817.txt + + $s = IO::Socket::INET->new(PeerHost => $proxyhost, PeerPort => $proxyport); + die "Socket to $proxyhost: $!\n" unless defined $s; + $s->write("CONNECT $serv:$port HTTP/1.1\r\nHost: $serv:$port\r\n\r\n") + or die "Write to $proxyhost: $!\n"; + $s->flush(); + + $rep = <$s>; + + # The answer should look like 'HTTP/1.x 2yy ....' + if (!($rep =~ m#^HTTP/1\.. 2[0-9][0-9]#)) { + die "Proxy connect: $rep\n"; + } + # Skip up to the empty line of the proxy server output + # including the response headers. 
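The proxy handling added above is driven entirely by the CVSROOT string: anything between ':pserver' and the next ':' is treated as a parameter list, 'proxy=' and 'proxyport=' are picked out of it, and the port falls back to $CVS_PROXY_PORT and then to 8080. A hypothetical invocation exercising this code path (host names and module name invented) might look like:

------------------------------------------------
# proxyport defaults to $CVS_PROXY_PORT, or 8080 if that is unset;
# it can also be given explicitly as ';proxyport=<port>' in the CVSROOT.
export CVS_PROXY_PORT=3128

git cvsimport -v \
	-d ':pserver;proxy=proxy.example.com:anonymous@cvs.example.org:/cvsroot/project' \
	-C project project
------------------------------------------------

As the comment in the code notes, the proxy only needs to support the HTTP CONNECT method; no proxy authentication is attempted.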
+ while ($rep = <$s>) { + last if (!defined $rep || + $rep eq "\n" || + $rep eq "\r\n"); + } + } else { + $s = IO::Socket::INET->new(PeerHost => $serv, PeerPort => $port); + die "Socket to $serv: $!\n" unless defined $s; + } + $s->write("BEGIN AUTH REQUEST\n$repo\n$user\n$pass\nEND AUTH REQUEST\n") or die "Write to $serv: $!\n"; $s->flush(); - my $rep = <$s>; + $rep = <$s>; - if($rep ne "I LOVE YOU\n") { + if ($rep ne "I LOVE YOU\n") { $rep="<unknown>" unless $rep; die "AuthReply: $rep\n"; } @@ -227,7 +271,7 @@ sub conn { } } - unless($pid) { + unless ($pid) { $pr->writer(); $pw->reader(); dup2($pw->fileno(),0); @@ -250,7 +294,7 @@ sub conn { $self->{'socketo'}->flush(); chomp(my $rep=$self->readline()); - if($rep !~ s/^Valid-requests\s*//) { + if ($rep !~ s/^Valid-requests\s*//) { $rep="<unknown>" unless $rep; die "Expected Valid-requests from server, but got: $rep\n"; } @@ -262,14 +306,14 @@ sub conn { } sub readline { - my($self) = @_; + my ($self) = @_; return $self->{'socketi'}->getline(); } sub _file { # Request a file with a given revision. # Trial and error says this is a good way to do it. :-/ - my($self,$fn,$rev) = @_; + my ($self,$fn,$rev) = @_; $self->{'socketo'}->write("Argument -N\n") or return undef; $self->{'socketo'}->write("Argument -P\n") or return undef; # -kk: Linus' version doesn't use it - defaults to off @@ -291,12 +335,12 @@ sub _file { sub _line { # Read a line from the server. # ... except that 'line' may be an entire file. ;-) - my($self, $fh) = @_; + my ($self, $fh) = @_; die "Not in lines" unless defined $self->{'lines'}; my $line; my $res=0; - while(defined($line = $self->readline())) { + while (defined($line = $self->readline())) { # M U gnupg-cvs-rep/AUTHORS # Updated gnupg-cvs-rep/ # /daten/src/rsync/gnupg-cvs-rep/AUTHORS @@ -305,7 +349,7 @@ sub _line { # 0 # ok - if($line =~ s/^(?:Created|Updated) //) { + if ($line =~ s/^(?:Created|Updated) //) { $line = $self->readline(); # path $line = $self->readline(); # Entries line my $mode = $self->readline(); chomp $mode; @@ -316,12 +360,12 @@ sub _line { die "Duh: Filesize $cnt" if $cnt !~ /^\d+$/; $line=""; $res = $self->_fetchfile($fh, $cnt); - } elsif($line =~ s/^ //) { + } elsif ($line =~ s/^ //) { print $fh $line; $res += length($line); - } elsif($line =~ /^M\b/) { + } elsif ($line =~ /^M\b/) { # output, do nothing - } elsif($line =~ /^Mbinary\b/) { + } elsif ($line =~ /^Mbinary\b/) { my $cnt; die "EOF from server after 'Mbinary'" unless defined ($cnt = $self->readline()); chomp $cnt; @@ -330,12 +374,12 @@ sub _line { $res += $self->_fetchfile($fh, $cnt); } else { chomp $line; - if($line eq "ok") { + if ($line eq "ok") { # print STDERR "S: ok (".length($res).")\n"; return $res; - } elsif($line =~ s/^E //) { + } elsif ($line =~ s/^E //) { # print STDERR "S: $line\n"; - } elsif($line =~ /^(Remove-entry|Removed) /i) { + } elsif ($line =~ /^(Remove-entry|Removed) /i) { $line = $self->readline(); # filename $line = $self->readline(); # OK chomp $line; @@ -349,7 +393,7 @@ sub _line { return undef; } sub file { - my($self,$fn,$rev) = @_; + my ($self,$fn,$rev) = @_; my $res; my ($fh, $name) = tempfile('gitcvs.XXXXXX', @@ -373,7 +417,7 @@ sub _fetchfile { my ($self, $fh, $cnt) = @_; my $res = 0; my $bufsize = 1024 * 1024; - while($cnt) { + while ($cnt) { if ($bufsize > $cnt) { $bufsize = $cnt; } @@ -394,7 +438,7 @@ my $cvs = CVSconn->new($opt_d, $cvs_tree); sub pdate($) { - my($d) = @_; + my ($d) = @_; m#(\d{2,4})/(\d\d)/(\d\d)\s(\d\d):(\d\d)(?::(\d\d))?# or die "Unparseable date: $d\n"; my $y=$1; $y-=1900 if 
$y>1900; @@ -402,22 +446,22 @@ sub pdate($) { } sub pmode($) { - my($mode) = @_; + my ($mode) = @_; my $m = 0; my $mm = 0; my $um = 0; for my $x(split(//,$mode)) { - if($x eq ",") { + if ($x eq ",") { $m |= $mm&$um; $mm = 0; $um = 0; - } elsif($x eq "u") { $um |= 0700; - } elsif($x eq "g") { $um |= 0070; - } elsif($x eq "o") { $um |= 0007; - } elsif($x eq "r") { $mm |= 0444; - } elsif($x eq "w") { $mm |= 0222; - } elsif($x eq "x") { $mm |= 0111; - } elsif($x eq "=") { # do nothing + } elsif ($x eq "u") { $um |= 0700; + } elsif ($x eq "g") { $um |= 0070; + } elsif ($x eq "o") { $um |= 0007; + } elsif ($x eq "r") { $mm |= 0444; + } elsif ($x eq "w") { $mm |= 0222; + } elsif ($x eq "x") { $mm |= 0111; + } elsif ($x eq "=") { # do nothing } else { die "Unknown mode: $mode\n"; } } @@ -441,7 +485,7 @@ sub get_headref ($$) { my $git_dir = shift; my $f = "$git_dir/refs/heads/$name"; - if(open(my $fh, $f)) { + if (open(my $fh, $f)) { chomp(my $r = <$fh>); is_sha1($r) or die "Cannot get head id for $name ($r): $!"; return $r; @@ -468,7 +512,7 @@ $orig_git_index = $ENV{GIT_INDEX_FILE} if exists $ENV{GIT_INDEX_FILE}; my %index; # holds filenames of one index per branch -unless(-d $git_dir) { +unless (-d $git_dir) { system("git-init-db"); die "Cannot init the GIT db at $git_tree: $?\n" if $?; system("git-read-tree"); @@ -487,7 +531,7 @@ unless(-d $git_dir) { chomp ($last_branch = <F>); $last_branch = basename($last_branch); close(F); - unless($last_branch) { + unless ($last_branch) { warn "Cannot read the last branch name: $! -- assuming 'master'\n"; $last_branch = "master"; } @@ -498,7 +542,7 @@ unless(-d $git_dir) { my $fmt = '($ref, $author) = (%(refname), %(author));'; open(H, "git-for-each-ref --perl --format='$fmt' refs/heads |") or die "Cannot run git-for-each-ref: $!\n"; - while(defined(my $entry = <H>)) { + while (defined(my $entry = <H>)) { my ($ref, $author); eval($entry) || die "cannot eval refs list: $@"; my ($head) = ($ref =~ m|^refs/heads/(.*)|); @@ -528,7 +572,7 @@ unless ($opt_P) { print "Running cvsps...\n" if $opt_v; my $pid = open(CVSPS,"-|"); die "Cannot fork: $!\n" unless defined $pid; - unless($pid) { + unless ($pid) { my @opt; @opt = split(/,/,$opt_p) if defined $opt_p; unshift @opt, '-z', $opt_z if defined $opt_z; @@ -598,8 +642,8 @@ sub write_tree () { return $tree; } -my($patchset,$date,$author_name,$author_email,$branch,$ancestor,$tag,$logmsg); -my(@old,@new,@skipped,%ignorebranch); +my ($patchset,$date,$author_name,$author_email,$branch,$ancestor,$tag,$logmsg); +my (@old,@new,@skipped,%ignorebranch); # commits that cvsps cannot place anywhere... $ignorebranch{'#CVSPS_NO_BRANCH'} = 1; @@ -640,7 +684,7 @@ sub commit { foreach my $rx (@mergerx) { next unless $logmsg =~ $rx && $1; my $mparent = $1 eq 'HEAD' ? 
$opt_o : $1; - if(my $sha1 = get_headref($mparent, $git_dir)) { + if (my $sha1 = get_headref($mparent, $git_dir)) { push @commit_args, '-p', $mparent; print "Merge parent branch: $mparent\n" if $opt_v; } @@ -681,9 +725,9 @@ sub commit { system("git-update-ref refs/heads/$branch $cid") == 0 or die "Cannot write branch $branch for update: $!\n"; - if($tag) { - my($in, $out) = ('',''); - my($xtag) = $tag; + if ($tag) { + my ($in, $out) = ('',''); + my ($xtag) = $tag; $xtag =~ s/\s+\*\*.*$//; # Remove stuff like ** INVALID ** and ** FUNKY ** $xtag =~ tr/_/\./ if ( $opt_u ); $xtag =~ s/[\/]/$opt_s/g; @@ -718,25 +762,25 @@ sub commit { }; my $commitcount = 1; -while(<CVS>) { +while (<CVS>) { chomp; - if($state == 0 and /^-+$/) { + if ($state == 0 and /^-+$/) { $state = 1; - } elsif($state == 0) { + } elsif ($state == 0) { $state = 1; redo; - } elsif(($state==0 or $state==1) and s/^PatchSet\s+//) { + } elsif (($state==0 or $state==1) and s/^PatchSet\s+//) { $patchset = 0+$_; $state=2; - } elsif($state == 2 and s/^Date:\s+//) { + } elsif ($state == 2 and s/^Date:\s+//) { $date = pdate($_); - unless($date) { + unless ($date) { print STDERR "Could not parse date: $_\n"; $state=0; next; } $state=3; - } elsif($state == 3 and s/^Author:\s+//) { + } elsif ($state == 3 and s/^Author:\s+//) { s/\s+$//; if (/^(.*?)\s+<(.*)>/) { ($author_name, $author_email) = ($1, $2); @@ -747,34 +791,34 @@ while(<CVS>) { $author_name = $author_email = $_; } $state = 4; - } elsif($state == 4 and s/^Branch:\s+//) { + } elsif ($state == 4 and s/^Branch:\s+//) { s/\s+$//; s/[\/]/$opt_s/g; $branch = $_; $state = 5; - } elsif($state == 5 and s/^Ancestor branch:\s+//) { + } elsif ($state == 5 and s/^Ancestor branch:\s+//) { s/\s+$//; $ancestor = $_; $ancestor = $opt_o if $ancestor eq "HEAD"; $state = 6; - } elsif($state == 5) { + } elsif ($state == 5) { $ancestor = undef; $state = 6; redo; - } elsif($state == 6 and s/^Tag:\s+//) { + } elsif ($state == 6 and s/^Tag:\s+//) { s/\s+$//; - if($_ eq "(none)") { + if ($_ eq "(none)") { $tag = undef; } else { $tag = $_; } $state = 7; - } elsif($state == 7 and /^Log:/) { + } elsif ($state == 7 and /^Log:/) { $logmsg = ""; $state = 8; - } elsif($state == 8 and /^Members:/) { + } elsif ($state == 8 and /^Members:/) { $branch = $opt_o if $branch eq "HEAD"; - if(defined $branch_date{$branch} and $branch_date{$branch} >= $date) { + if (defined $branch_date{$branch} and $branch_date{$branch} >= $date) { # skip print "skip patchset $patchset: $date before $branch_date{$branch}\n" if $opt_v; $state = 11; @@ -785,17 +829,17 @@ while(<CVS>) { $state = 11; next; } - if($ancestor) { - if($ancestor eq $branch) { + if ($ancestor) { + if ($ancestor eq $branch) { print STDERR "Branch $branch erroneously stems from itself -- changed ancestor to $opt_o\n"; $ancestor = $opt_o; } - if(-f "$git_dir/refs/heads/$branch") { + if (-f "$git_dir/refs/heads/$branch") { print STDERR "Branch $branch already exists!\n"; $state=11; next; } - unless(open(H,"$git_dir/refs/heads/$ancestor")) { + unless (open(H,"$git_dir/refs/heads/$ancestor")) { print STDERR "Branch $ancestor does not exist!\n"; $ignorebranch{$branch} = 1; $state=11; @@ -803,7 +847,7 @@ while(<CVS>) { } chomp(my $id = <H>); close(H); - unless(open(H,"> $git_dir/refs/heads/$branch")) { + unless (open(H,"> $git_dir/refs/heads/$branch")) { print STDERR "Could not create branch $branch: $!\n"; $ignorebranch{$branch} = 1; $state=11; @@ -816,9 +860,9 @@ while(<CVS>) { } $last_branch = $branch if $branch ne $last_branch; $state = 9; - } elsif($state == 8) { + } 
elsif ($state == 8) { $logmsg .= "$_\n"; - } elsif($state == 9 and /^\s+(.+?):(INITIAL|\d+(?:\.\d+)+)->(\d+(?:\.\d+)+)\s*$/) { + } elsif ($state == 9 and /^\s+(.+?):(INITIAL|\d+(?:\.\d+)+)->(\d+(?:\.\d+)+)\s*$/) { # VERSION:1.96->1.96.2.1 my $init = ($2 eq "INITIAL"); my $fn = $1; @@ -831,7 +875,7 @@ while(<CVS>) { } print "Fetching $fn v $rev\n" if $opt_v; my ($tmpname, $size) = $cvs->file($fn,$rev); - if($size == -1) { + if ($size == -1) { push(@old,$fn); print "Drop $fn\n" if $opt_v; } else { @@ -849,14 +893,14 @@ while(<CVS>) { push(@new,[$mode, $sha, $fn]); # may be resurrected! } unlink($tmpname); - } elsif($state == 9 and /^\s+(.+?):\d+(?:\.\d+)+->(\d+(?:\.\d+)+)\(DEAD\)\s*$/) { + } elsif ($state == 9 and /^\s+(.+?):\d+(?:\.\d+)+->(\d+(?:\.\d+)+)\(DEAD\)\s*$/) { my $fn = $1; $fn =~ s#^/+##; push(@old,$fn); print "Delete $fn\n" if $opt_v; - } elsif($state == 9 and /^\s*$/) { + } elsif ($state == 9 and /^\s*$/) { $state = 10; - } elsif(($state == 9 or $state == 10) and /^-+$/) { + } elsif (($state == 9 or $state == 10) and /^-+$/) { $commitcount++; if ($opt_L && $commitcount > $opt_L) { last; @@ -866,11 +910,11 @@ while(<CVS>) { system("git repack -a -d"); } $state = 1; - } elsif($state == 11 and /^-+$/) { + } elsif ($state == 11 and /^-+$/) { $state = 1; - } elsif(/^-+$/) { # end of unknown-line processing + } elsif (/^-+$/) { # end of unknown-line processing $state = 1; - } elsif($state != 11) { # ignore stuff when skipping + } elsif ($state != 11) { # ignore stuff when skipping print "* UNKNOWN LINE * $_\n"; } } @@ -899,7 +943,7 @@ if (defined $orig_git_index) { } # Now switch back to the branch we were in before all of this happened -if($orig_branch) { +if ($orig_branch) { print "DONE.\n" if $opt_v; if ($opt_i) { exit 0; diff --git a/git-fetch.sh b/git-fetch.sh index eb32476bbd..4eecf148ea 100755 --- a/git-fetch.sh +++ b/git-fetch.sh @@ -88,6 +88,10 @@ then : >"$GIT_DIR/FETCH_HEAD" fi +# Global that is reused later +ls_remote_result=$(git ls-remote $upload_pack "$remote") || + die "Cannot find the reflist at $remote" + append_fetch_head () { head_="$1" remote_="$2" @@ -233,10 +237,7 @@ reflist=$(get_remote_refs_for_fetch "$@") if test "$tags" then taglist=`IFS=" " && - ( - git-ls-remote $upload_pack --tags "$remote" || - echo fail ouch - ) | + echo "$ls_remote_result" | while read sha1 name do case "$sha1" in @@ -245,6 +246,8 @@ then esac case "$name" in *^*) continue ;; + refs/tags/*) ;; + *) continue ;; esac if git-check-ref-format "$name" then @@ -304,22 +307,20 @@ fetch_main () { "`git-repo-config --bool http.noEPSV`" = true ]; then noepsv_opt="--disable-epsv" fi - max_depth=5 - depth=0 - head="ref: $remote_name" - while (expr "z$head" : "zref:" && expr $depth \< $max_depth) >/dev/null - do - remote_name_quoted=$(@@PERL@@ -e ' - my $u = $ARGV[0]; - $u =~ s/^ref:\s*//; - $u =~ s{([^-a-zA-Z0-9/.])}{sprintf"%%%02x",ord($1)}eg; - print "$u"; - ' "$head") - head=$(curl -nsfL $curl_extra_args $noepsv_opt "$remote/$remote_name_quoted") - depth=$( expr \( $depth + 1 \) ) - done + + # Find $remote_name from ls-remote output. 
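The lookup that follows works against the single `git ls-remote` call captured in $ls_remote_result near the top of the script, instead of chasing "ref:" indirections over dumb HTTP with curl. As a reminder of the shape of that data, ls-remote prints one object name and one ref per line, tab separated, which is what the `while read sha1 name` loop below consumes (repository URL and object names here are made up):

------------------------------------------------
$ git ls-remote http://git.example.org/project.git
d6602ec5194c87b0fc87103ca4d67251c76f233a	HEAD
d6602ec5194c87b0fc87103ca4d67251c76f233a	refs/heads/master
f25a265a342aed6041ab0cc484224d9ca54b6f41	refs/tags/v0.99
c5db5456ae3b0873fc659c19fafdde22313cc441	refs/tags/v0.99^{}
------------------------------------------------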
+ head=$( + IFS=' ' + echo "$ls_remote_result" | + while read sha1 name + do + test "z$name" = "z$remote_name" || continue + echo "$sha1" + break + done + ) expr "z$head" : "z$_x40\$" >/dev/null || - die "Failed to fetch $remote_name from $remote" + die "No such ref $remote_name at $remote" echo >&2 "Fetching $remote_name from $remote using $proto" git-http-fetch -v -a "$head" "$remote/" || exit ;; @@ -359,7 +360,7 @@ fetch_main () { esac append_fetch_head "$head" "$remote" \ - "$remote_name" "$remote_nick" "$local_name" "$not_for_merge" + "$remote_name" "$remote_nick" "$local_name" "$not_for_merge" || exit done @@ -413,15 +414,16 @@ fetch_main () { done local_name=$(expr "z$found" : 'z[^:]*:\(.*\)') append_fetch_head "$sha1" "$remote" \ - "$remote_name" "$remote_nick" "$local_name" "$not_for_merge" - done + "$remote_name" "$remote_nick" "$local_name" \ + "$not_for_merge" || exit + done && if [ "$pack_lockfile" ]; then rm -f "$pack_lockfile"; fi ) || exit ;; esac } -fetch_main "$reflist" +fetch_main "$reflist" || exit # automated tag following case "$no_tags$tags" in @@ -431,7 +433,7 @@ case "$no_tags$tags" in # effective only when we are following remote branch # using local tracking branch. taglist=$(IFS=" " && - git-ls-remote $upload_pack --tags "$remote" | + echo "$ls_remote_result" | sed -n -e 's|^\('"$_x40"'\) \(refs/tags/.*\)^{}$|\1 \2|p' \ -e 's|^\('"$_x40"'\) \(refs/tags/.*\)$|\1 \2|p' | while read sha1 name @@ -449,7 +451,7 @@ case "$no_tags$tags" in case "$taglist" in '') ;; ?*) - fetch_main "$taglist" ;; + fetch_main "$taglist" || exit ;; esac esac diff --git a/git-merge-recursive-old.py b/git-merge-recursive-old.py deleted file mode 100755 index 4039435ce4..0000000000 --- a/git-merge-recursive-old.py +++ /dev/null @@ -1,944 +0,0 @@ -#!/usr/bin/python -# -# Copyright (C) 2005 Fredrik Kuivinen -# - -import sys -sys.path.append('''@@GIT_PYTHON_PATH@@''') - -import math, random, os, re, signal, tempfile, stat, errno, traceback -from heapq import heappush, heappop -from sets import Set - -from gitMergeCommon import * - -outputIndent = 0 -def output(*args): - sys.stdout.write(' '*outputIndent) - printList(args) - -originalIndexFile = os.environ.get('GIT_INDEX_FILE', - os.environ.get('GIT_DIR', '.git') + '/index') -temporaryIndexFile = os.environ.get('GIT_DIR', '.git') + \ - '/merge-recursive-tmp-index' -def setupIndex(temporary): - try: - os.unlink(temporaryIndexFile) - except OSError: - pass - if temporary: - newIndex = temporaryIndexFile - else: - newIndex = originalIndexFile - os.environ['GIT_INDEX_FILE'] = newIndex - -# This is a global variable which is used in a number of places but -# only written to in the 'merge' function. - -# cacheOnly == True => Don't leave any non-stage 0 entries in the cache and -# don't update the working directory. -# False => Leave unmerged entries in the cache and update -# the working directory. 
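The cacheOnly flag described in the comment above ultimately decides which flavour of git-read-tree the (now removed) script runs: inner merges of multiple common ancestors work in a throwaway index only, while the outermost merge also touches the working directory. A rough shell sketch of the two cases, assuming $common, $ours and $theirs hold the three trees being merged:

------------------------------------------------
# cacheOnly == True: temporary index, no work tree update
GIT_INDEX_FILE="$GIT_DIR/merge-recursive-tmp-index" \
	git-read-tree -i -m "$common" "$ours" "$theirs"

# cacheOnly == False: real index, unmerged entries left in place,
# working directory updated
git-read-tree -u -m "$common" "$ours" "$theirs"
------------------------------------------------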
- -cacheOnly = False - -# The entry point to the merge code -# --------------------------------- - -def merge(h1, h2, branch1Name, branch2Name, graph, callDepth=0, ancestor=None): - '''Merge the commits h1 and h2, return the resulting virtual - commit object and a flag indicating the cleanness of the merge.''' - assert(isinstance(h1, Commit) and isinstance(h2, Commit)) - - global outputIndent - - output('Merging:') - output(h1) - output(h2) - sys.stdout.flush() - - if ancestor: - ca = [ancestor] - else: - assert(isinstance(graph, Graph)) - ca = getCommonAncestors(graph, h1, h2) - output('found', len(ca), 'common ancestor(s):') - for x in ca: - output(x) - sys.stdout.flush() - - mergedCA = ca[0] - for h in ca[1:]: - outputIndent = callDepth+1 - [mergedCA, dummy] = merge(mergedCA, h, - 'Temporary merge branch 1', - 'Temporary merge branch 2', - graph, callDepth+1) - outputIndent = callDepth - assert(isinstance(mergedCA, Commit)) - - global cacheOnly - if callDepth == 0: - setupIndex(False) - cacheOnly = False - else: - setupIndex(True) - runProgram(['git-read-tree', h1.tree()]) - cacheOnly = True - - [shaRes, clean] = mergeTrees(h1.tree(), h2.tree(), mergedCA.tree(), - branch1Name, branch2Name) - - if graph and (clean or cacheOnly): - res = Commit(None, [h1, h2], tree=shaRes) - graph.addNode(res) - else: - res = None - - return [res, clean] - -getFilesRE = re.compile(r'^([0-7]+) (\S+) ([0-9a-f]{40})\t(.*)$', re.S) -def getFilesAndDirs(tree): - files = Set() - dirs = Set() - out = runProgram(['git-ls-tree', '-r', '-z', '-t', tree]) - for l in out.split('\0'): - m = getFilesRE.match(l) - if m: - if m.group(2) == 'tree': - dirs.add(m.group(4)) - elif m.group(2) == 'blob': - files.add(m.group(4)) - - return [files, dirs] - -# Those two global variables are used in a number of places but only -# written to in 'mergeTrees' and 'uniquePath'. They keep track of -# every file and directory in the two branches that are about to be -# merged. -currentFileSet = None -currentDirectorySet = None - -def mergeTrees(head, merge, common, branch1Name, branch2Name): - '''Merge the trees 'head' and 'merge' with the common ancestor - 'common'. The name of the head branch is 'branch1Name' and the name of - the merge branch is 'branch2Name'. 
Return a tuple (tree, cleanMerge) - where tree is the resulting tree and cleanMerge is True iff the - merge was clean.''' - - assert(isSha(head) and isSha(merge) and isSha(common)) - - if common == merge: - output('Already uptodate!') - return [head, True] - - if cacheOnly: - updateArg = '-i' - else: - updateArg = '-u' - - [out, code] = runProgram(['git-read-tree', updateArg, '-m', - common, head, merge], returnCode = True) - if code != 0: - die('git-read-tree:', out) - - [tree, code] = runProgram('git-write-tree', returnCode=True) - tree = tree.rstrip() - if code != 0: - global currentFileSet, currentDirectorySet - [currentFileSet, currentDirectorySet] = getFilesAndDirs(head) - [filesM, dirsM] = getFilesAndDirs(merge) - currentFileSet.union_update(filesM) - currentDirectorySet.union_update(dirsM) - - entries = unmergedCacheEntries() - renamesHead = getRenames(head, common, head, merge, entries) - renamesMerge = getRenames(merge, common, head, merge, entries) - - cleanMerge = processRenames(renamesHead, renamesMerge, - branch1Name, branch2Name) - for entry in entries: - if entry.processed: - continue - if not processEntry(entry, branch1Name, branch2Name): - cleanMerge = False - - if cleanMerge or cacheOnly: - tree = runProgram('git-write-tree').rstrip() - else: - tree = None - else: - cleanMerge = True - - return [tree, cleanMerge] - -# Low level file merging, update and removal -# ------------------------------------------ - -def mergeFile(oPath, oSha, oMode, aPath, aSha, aMode, bPath, bSha, bMode, - branch1Name, branch2Name): - - merge = False - clean = True - - if stat.S_IFMT(aMode) != stat.S_IFMT(bMode): - clean = False - if stat.S_ISREG(aMode): - mode = aMode - sha = aSha - else: - mode = bMode - sha = bSha - else: - if aSha != oSha and bSha != oSha: - merge = True - - if aMode == oMode: - mode = bMode - else: - mode = aMode - - if aSha == oSha: - sha = bSha - elif bSha == oSha: - sha = aSha - elif stat.S_ISREG(aMode): - assert(stat.S_ISREG(bMode)) - - orig = runProgram(['git-unpack-file', oSha]).rstrip() - src1 = runProgram(['git-unpack-file', aSha]).rstrip() - src2 = runProgram(['git-unpack-file', bSha]).rstrip() - try: - [out, code] = runProgram(['merge', - '-L', branch1Name + '/' + aPath, - '-L', 'orig/' + oPath, - '-L', branch2Name + '/' + bPath, - src1, orig, src2], returnCode=True) - except ProgramError, e: - print >>sys.stderr, e - die("Failed to execute 'merge'. merge(1) is used as the " - "file-level merge tool. 
Is 'merge' in your path?") - - sha = runProgram(['git-hash-object', '-t', 'blob', '-w', - src1]).rstrip() - - os.unlink(orig) - os.unlink(src1) - os.unlink(src2) - - clean = (code == 0) - else: - assert(stat.S_ISLNK(aMode) and stat.S_ISLNK(bMode)) - sha = aSha - - if aSha != bSha: - clean = False - - return [sha, mode, clean, merge] - -def updateFile(clean, sha, mode, path): - updateCache = cacheOnly or clean - updateWd = not cacheOnly - - return updateFileExt(sha, mode, path, updateCache, updateWd) - -def updateFileExt(sha, mode, path, updateCache, updateWd): - if cacheOnly: - updateWd = False - - if updateWd: - pathComponents = path.split('/') - for x in xrange(1, len(pathComponents)): - p = '/'.join(pathComponents[0:x]) - - try: - createDir = not stat.S_ISDIR(os.lstat(p).st_mode) - except OSError: - createDir = True - - if createDir: - try: - os.mkdir(p) - except OSError, e: - die("Couldn't create directory", p, e.strerror) - - prog = ['git-cat-file', 'blob', sha] - if stat.S_ISREG(mode): - try: - os.unlink(path) - except OSError: - pass - if mode & 0100: - mode = 0777 - else: - mode = 0666 - fd = os.open(path, os.O_WRONLY | os.O_TRUNC | os.O_CREAT, mode) - proc = subprocess.Popen(prog, stdout=fd) - proc.wait() - os.close(fd) - elif stat.S_ISLNK(mode): - linkTarget = runProgram(prog) - os.symlink(linkTarget, path) - else: - assert(False) - - if updateWd and updateCache: - runProgram(['git-update-index', '--add', '--', path]) - elif updateCache: - runProgram(['git-update-index', '--add', '--cacheinfo', - '0%o' % mode, sha, path]) - -def setIndexStages(path, - oSHA1, oMode, - aSHA1, aMode, - bSHA1, bMode, - clear=True): - istring = [] - if clear: - istring.append("0 " + ("0" * 40) + "\t" + path + "\0") - if oMode: - istring.append("%o %s %d\t%s\0" % (oMode, oSHA1, 1, path)) - if aMode: - istring.append("%o %s %d\t%s\0" % (aMode, aSHA1, 2, path)) - if bMode: - istring.append("%o %s %d\t%s\0" % (bMode, bSHA1, 3, path)) - - runProgram(['git-update-index', '-z', '--index-info'], - input="".join(istring)) - -def removeFile(clean, path): - updateCache = cacheOnly or clean - updateWd = not cacheOnly - - if updateCache: - runProgram(['git-update-index', '--force-remove', '--', path]) - - if updateWd: - try: - os.unlink(path) - except OSError, e: - if e.errno != errno.ENOENT and e.errno != errno.EISDIR: - raise - try: - os.removedirs(os.path.dirname(path)) - except OSError: - pass - -def uniquePath(path, branch): - def fileExists(path): - try: - os.lstat(path) - return True - except OSError, e: - if e.errno == errno.ENOENT: - return False - else: - raise - - branch = branch.replace('/', '_') - newPath = path + '~' + branch - suffix = 0 - while newPath in currentFileSet or \ - newPath in currentDirectorySet or \ - fileExists(newPath): - suffix += 1 - newPath = path + '~' + branch + '_' + str(suffix) - currentFileSet.add(newPath) - return newPath - -# Cache entry management -# ---------------------- - -class CacheEntry: - def __init__(self, path): - class Stage: - def __init__(self): - self.sha1 = None - self.mode = None - - # Used for debugging only - def __str__(self): - if self.mode != None: - m = '0%o' % self.mode - else: - m = 'None' - - if self.sha1: - sha1 = self.sha1 - else: - sha1 = 'None' - return 'sha1: ' + sha1 + ' mode: ' + m - - self.stages = [Stage(), Stage(), Stage(), Stage()] - self.path = path - self.processed = False - - def __str__(self): - return 'path: ' + self.path + ' stages: ' + repr([str(x) for x in self.stages]) - -class CacheEntryContainer: - def __init__(self): - 
self.entries = {} - - def add(self, entry): - self.entries[entry.path] = entry - - def get(self, path): - return self.entries.get(path) - - def __iter__(self): - return self.entries.itervalues() - -unmergedRE = re.compile(r'^([0-7]+) ([0-9a-f]{40}) ([1-3])\t(.*)$', re.S) -def unmergedCacheEntries(): - '''Create a dictionary mapping file names to CacheEntry - objects. The dictionary contains one entry for every path with a - non-zero stage entry.''' - - lines = runProgram(['git-ls-files', '-z', '--unmerged']).split('\0') - lines.pop() - - res = CacheEntryContainer() - for l in lines: - m = unmergedRE.match(l) - if m: - mode = int(m.group(1), 8) - sha1 = m.group(2) - stage = int(m.group(3)) - path = m.group(4) - - e = res.get(path) - if not e: - e = CacheEntry(path) - res.add(e) - - e.stages[stage].mode = mode - e.stages[stage].sha1 = sha1 - else: - die('Error: Merge program failed: Unexpected output from', - 'git-ls-files:', l) - return res - -lsTreeRE = re.compile(r'^([0-7]+) (\S+) ([0-9a-f]{40})\t(.*)\n$', re.S) -def getCacheEntry(path, origTree, aTree, bTree): - '''Returns a CacheEntry object which doesn't have to correspond to - a real cache entry in Git's index.''' - - def parse(out): - if out == '': - return [None, None] - else: - m = lsTreeRE.match(out) - if not m: - die('Unexpected output from git-ls-tree:', out) - elif m.group(2) == 'blob': - return [m.group(3), int(m.group(1), 8)] - else: - return [None, None] - - res = CacheEntry(path) - - [oSha, oMode] = parse(runProgram(['git-ls-tree', origTree, '--', path])) - [aSha, aMode] = parse(runProgram(['git-ls-tree', aTree, '--', path])) - [bSha, bMode] = parse(runProgram(['git-ls-tree', bTree, '--', path])) - - res.stages[1].sha1 = oSha - res.stages[1].mode = oMode - res.stages[2].sha1 = aSha - res.stages[2].mode = aMode - res.stages[3].sha1 = bSha - res.stages[3].mode = bMode - - return res - -# Rename detection and handling -# ----------------------------- - -class RenameEntry: - def __init__(self, - src, srcSha, srcMode, srcCacheEntry, - dst, dstSha, dstMode, dstCacheEntry, - score): - self.srcName = src - self.srcSha = srcSha - self.srcMode = srcMode - self.srcCacheEntry = srcCacheEntry - self.dstName = dst - self.dstSha = dstSha - self.dstMode = dstMode - self.dstCacheEntry = dstCacheEntry - self.score = score - - self.processed = False - -class RenameEntryContainer: - def __init__(self): - self.entriesSrc = {} - self.entriesDst = {} - - def add(self, entry): - self.entriesSrc[entry.srcName] = entry - self.entriesDst[entry.dstName] = entry - - def getSrc(self, path): - return self.entriesSrc.get(path) - - def getDst(self, path): - return self.entriesDst.get(path) - - def __iter__(self): - return self.entriesSrc.itervalues() - -parseDiffRenamesRE = re.compile('^:([0-7]+) ([0-7]+) ([0-9a-f]{40}) ([0-9a-f]{40}) R([0-9]*)$') -def getRenames(tree, oTree, aTree, bTree, cacheEntries): - '''Get information of all renames which occured between 'oTree' and - 'tree'. We need the three trees in the merge ('oTree', 'aTree' and - 'bTree') to be able to associate the correct cache entries with - the rename information. 
'tree' is always equal to either aTree or bTree.''' - - assert(tree == aTree or tree == bTree) - inp = runProgram(['git-diff-tree', '-M', '--diff-filter=R', '-r', - '-z', oTree, tree]) - - ret = RenameEntryContainer() - try: - recs = inp.split("\0") - recs.pop() # remove last entry (which is '') - it = recs.__iter__() - while True: - rec = it.next() - m = parseDiffRenamesRE.match(rec) - - if not m: - die('Unexpected output from git-diff-tree:', rec) - - srcMode = int(m.group(1), 8) - dstMode = int(m.group(2), 8) - srcSha = m.group(3) - dstSha = m.group(4) - score = m.group(5) - src = it.next() - dst = it.next() - - srcCacheEntry = cacheEntries.get(src) - if not srcCacheEntry: - srcCacheEntry = getCacheEntry(src, oTree, aTree, bTree) - cacheEntries.add(srcCacheEntry) - - dstCacheEntry = cacheEntries.get(dst) - if not dstCacheEntry: - dstCacheEntry = getCacheEntry(dst, oTree, aTree, bTree) - cacheEntries.add(dstCacheEntry) - - ret.add(RenameEntry(src, srcSha, srcMode, srcCacheEntry, - dst, dstSha, dstMode, dstCacheEntry, - score)) - except StopIteration: - pass - return ret - -def fmtRename(src, dst): - srcPath = src.split('/') - dstPath = dst.split('/') - path = [] - endIndex = min(len(srcPath), len(dstPath)) - 1 - for x in range(0, endIndex): - if srcPath[x] == dstPath[x]: - path.append(srcPath[x]) - else: - endIndex = x - break - - if len(path) > 0: - return '/'.join(path) + \ - '/{' + '/'.join(srcPath[endIndex:]) + ' => ' + \ - '/'.join(dstPath[endIndex:]) + '}' - else: - return src + ' => ' + dst - -def processRenames(renamesA, renamesB, branchNameA, branchNameB): - srcNames = Set() - for x in renamesA: - srcNames.add(x.srcName) - for x in renamesB: - srcNames.add(x.srcName) - - cleanMerge = True - for path in srcNames: - if renamesA.getSrc(path): - renames1 = renamesA - renames2 = renamesB - branchName1 = branchNameA - branchName2 = branchNameB - else: - renames1 = renamesB - renames2 = renamesA - branchName1 = branchNameB - branchName2 = branchNameA - - ren1 = renames1.getSrc(path) - ren2 = renames2.getSrc(path) - - ren1.dstCacheEntry.processed = True - ren1.srcCacheEntry.processed = True - - if ren1.processed: - continue - - ren1.processed = True - - if ren2: - # Renamed in 1 and renamed in 2 - assert(ren1.srcName == ren2.srcName) - ren2.dstCacheEntry.processed = True - ren2.processed = True - - if ren1.dstName != ren2.dstName: - output('CONFLICT (rename/rename): Rename', - fmtRename(path, ren1.dstName), 'in branch', branchName1, - 'rename', fmtRename(path, ren2.dstName), 'in', - branchName2) - cleanMerge = False - - if ren1.dstName in currentDirectorySet: - dstName1 = uniquePath(ren1.dstName, branchName1) - output(ren1.dstName, 'is a directory in', branchName2, - 'adding as', dstName1, 'instead.') - removeFile(False, ren1.dstName) - else: - dstName1 = ren1.dstName - - if ren2.dstName in currentDirectorySet: - dstName2 = uniquePath(ren2.dstName, branchName2) - output(ren2.dstName, 'is a directory in', branchName1, - 'adding as', dstName2, 'instead.') - removeFile(False, ren2.dstName) - else: - dstName2 = ren2.dstName - setIndexStages(dstName1, - None, None, - ren1.dstSha, ren1.dstMode, - None, None) - setIndexStages(dstName2, - None, None, - None, None, - ren2.dstSha, ren2.dstMode) - - else: - removeFile(True, ren1.srcName) - - [resSha, resMode, clean, merge] = \ - mergeFile(ren1.srcName, ren1.srcSha, ren1.srcMode, - ren1.dstName, ren1.dstSha, ren1.dstMode, - ren2.dstName, ren2.dstSha, ren2.dstMode, - branchName1, branchName2) - - if merge or not clean: - output('Renaming', 
fmtRename(path, ren1.dstName)) - - if merge: - output('Auto-merging', ren1.dstName) - - if not clean: - output('CONFLICT (content): merge conflict in', - ren1.dstName) - cleanMerge = False - - if not cacheOnly: - setIndexStages(ren1.dstName, - ren1.srcSha, ren1.srcMode, - ren1.dstSha, ren1.dstMode, - ren2.dstSha, ren2.dstMode) - - updateFile(clean, resSha, resMode, ren1.dstName) - else: - removeFile(True, ren1.srcName) - - # Renamed in 1, maybe changed in 2 - if renamesA == renames1: - stage = 3 - else: - stage = 2 - - srcShaOtherBranch = ren1.srcCacheEntry.stages[stage].sha1 - srcModeOtherBranch = ren1.srcCacheEntry.stages[stage].mode - - dstShaOtherBranch = ren1.dstCacheEntry.stages[stage].sha1 - dstModeOtherBranch = ren1.dstCacheEntry.stages[stage].mode - - tryMerge = False - - if ren1.dstName in currentDirectorySet: - newPath = uniquePath(ren1.dstName, branchName1) - output('CONFLICT (rename/directory): Rename', - fmtRename(ren1.srcName, ren1.dstName), 'in', branchName1, - 'directory', ren1.dstName, 'added in', branchName2) - output('Renaming', ren1.srcName, 'to', newPath, 'instead') - cleanMerge = False - removeFile(False, ren1.dstName) - updateFile(False, ren1.dstSha, ren1.dstMode, newPath) - elif srcShaOtherBranch == None: - output('CONFLICT (rename/delete): Rename', - fmtRename(ren1.srcName, ren1.dstName), 'in', - branchName1, 'and deleted in', branchName2) - cleanMerge = False - updateFile(False, ren1.dstSha, ren1.dstMode, ren1.dstName) - elif dstShaOtherBranch: - newPath = uniquePath(ren1.dstName, branchName2) - output('CONFLICT (rename/add): Rename', - fmtRename(ren1.srcName, ren1.dstName), 'in', - branchName1 + '.', ren1.dstName, 'added in', branchName2) - output('Adding as', newPath, 'instead') - updateFile(False, dstShaOtherBranch, dstModeOtherBranch, newPath) - cleanMerge = False - tryMerge = True - elif renames2.getDst(ren1.dstName): - dst2 = renames2.getDst(ren1.dstName) - newPath1 = uniquePath(ren1.dstName, branchName1) - newPath2 = uniquePath(dst2.dstName, branchName2) - output('CONFLICT (rename/rename): Rename', - fmtRename(ren1.srcName, ren1.dstName), 'in', - branchName1+'. 
Rename', - fmtRename(dst2.srcName, dst2.dstName), 'in', branchName2) - output('Renaming', ren1.srcName, 'to', newPath1, 'and', - dst2.srcName, 'to', newPath2, 'instead') - removeFile(False, ren1.dstName) - updateFile(False, ren1.dstSha, ren1.dstMode, newPath1) - updateFile(False, dst2.dstSha, dst2.dstMode, newPath2) - dst2.processed = True - cleanMerge = False - else: - tryMerge = True - - if tryMerge: - - oName, oSHA1, oMode = ren1.srcName, ren1.srcSha, ren1.srcMode - aName, bName = ren1.dstName, ren1.srcName - aSHA1, bSHA1 = ren1.dstSha, srcShaOtherBranch - aMode, bMode = ren1.dstMode, srcModeOtherBranch - aBranch, bBranch = branchName1, branchName2 - - if renamesA != renames1: - aName, bName = bName, aName - aSHA1, bSHA1 = bSHA1, aSHA1 - aMode, bMode = bMode, aMode - aBranch, bBranch = bBranch, aBranch - - [resSha, resMode, clean, merge] = \ - mergeFile(oName, oSHA1, oMode, - aName, aSHA1, aMode, - bName, bSHA1, bMode, - aBranch, bBranch); - - if merge or not clean: - output('Renaming', fmtRename(ren1.srcName, ren1.dstName)) - - if merge: - output('Auto-merging', ren1.dstName) - - if not clean: - output('CONFLICT (rename/modify): Merge conflict in', - ren1.dstName) - cleanMerge = False - - if not cacheOnly: - setIndexStages(ren1.dstName, - oSHA1, oMode, - aSHA1, aMode, - bSHA1, bMode) - - updateFile(clean, resSha, resMode, ren1.dstName) - - return cleanMerge - -# Per entry merge function -# ------------------------ - -def processEntry(entry, branch1Name, branch2Name): - '''Merge one cache entry.''' - - debug('processing', entry.path, 'clean cache:', cacheOnly) - - cleanMerge = True - - path = entry.path - oSha = entry.stages[1].sha1 - oMode = entry.stages[1].mode - aSha = entry.stages[2].sha1 - aMode = entry.stages[2].mode - bSha = entry.stages[3].sha1 - bMode = entry.stages[3].mode - - assert(oSha == None or isSha(oSha)) - assert(aSha == None or isSha(aSha)) - assert(bSha == None or isSha(bSha)) - - assert(oMode == None or type(oMode) is int) - assert(aMode == None or type(aMode) is int) - assert(bMode == None or type(bMode) is int) - - if (oSha and (not aSha or not bSha)): - # - # Case A: Deleted in one - # - if (not aSha and not bSha) or \ - (aSha == oSha and not bSha) or \ - (not aSha and bSha == oSha): - # Deleted in both or deleted in one and unchanged in the other - if aSha: - output('Removing', path) - removeFile(True, path) - else: - # Deleted in one and changed in the other - cleanMerge = False - if not aSha: - output('CONFLICT (delete/modify):', path, 'deleted in', - branch1Name, 'and modified in', branch2Name + '.', - 'Version', branch2Name, 'of', path, 'left in tree.') - mode = bMode - sha = bSha - else: - output('CONFLICT (modify/delete):', path, 'deleted in', - branch2Name, 'and modified in', branch1Name + '.', - 'Version', branch1Name, 'of', path, 'left in tree.') - mode = aMode - sha = aSha - - updateFile(False, sha, mode, path) - - elif (not oSha and aSha and not bSha) or \ - (not oSha and not aSha and bSha): - # - # Case B: Added in one. - # - if aSha: - addBranch = branch1Name - otherBranch = branch2Name - mode = aMode - sha = aSha - conf = 'file/directory' - else: - addBranch = branch2Name - otherBranch = branch1Name - mode = bMode - sha = bSha - conf = 'directory/file' - - if path in currentDirectorySet: - cleanMerge = False - newPath = uniquePath(path, addBranch) - output('CONFLICT (' + conf + '):', - 'There is a directory with name', path, 'in', - otherBranch + '. 
Adding', path, 'as', newPath) - - removeFile(False, path) - updateFile(False, sha, mode, newPath) - else: - output('Adding', path) - updateFile(True, sha, mode, path) - - elif not oSha and aSha and bSha: - # - # Case C: Added in both (check for same permissions). - # - if aSha == bSha: - if aMode != bMode: - cleanMerge = False - output('CONFLICT: File', path, - 'added identically in both branches, but permissions', - 'conflict', '0%o' % aMode, '->', '0%o' % bMode) - output('CONFLICT: adding with permission:', '0%o' % aMode) - - updateFile(False, aSha, aMode, path) - else: - # This case is handled by git-read-tree - assert(False) - else: - cleanMerge = False - newPath1 = uniquePath(path, branch1Name) - newPath2 = uniquePath(path, branch2Name) - output('CONFLICT (add/add): File', path, - 'added non-identically in both branches. Adding as', - newPath1, 'and', newPath2, 'instead.') - removeFile(False, path) - updateFile(False, aSha, aMode, newPath1) - updateFile(False, bSha, bMode, newPath2) - - elif oSha and aSha and bSha: - # - # case D: Modified in both, but differently. - # - output('Auto-merging', path) - [sha, mode, clean, dummy] = \ - mergeFile(path, oSha, oMode, - path, aSha, aMode, - path, bSha, bMode, - branch1Name, branch2Name) - if clean: - updateFile(True, sha, mode, path) - else: - cleanMerge = False - output('CONFLICT (content): Merge conflict in', path) - - if cacheOnly: - updateFile(False, sha, mode, path) - else: - updateFileExt(sha, mode, path, updateCache=False, updateWd=True) - else: - die("ERROR: Fatal merge failure, shouldn't happen.") - - return cleanMerge - -def usage(): - die('Usage:', sys.argv[0], ' <base>... -- <head> <remote>..') - -# main entry point as merge strategy module -# The first parameters up to -- are merge bases, and the rest are heads. - -if len(sys.argv) < 4: - usage() - -bases = [] -for nextArg in xrange(1, len(sys.argv)): - if sys.argv[nextArg] == '--': - if len(sys.argv) != nextArg + 3: - die('Not handling anything other than two heads merge.') - try: - h1 = firstBranch = sys.argv[nextArg + 1] - h2 = secondBranch = sys.argv[nextArg + 2] - except IndexError: - usage() - break - else: - bases.append(sys.argv[nextArg]) - -print 'Merging', h1, 'with', h2 - -try: - h1 = runProgram(['git-rev-parse', '--verify', h1 + '^0']).rstrip() - h2 = runProgram(['git-rev-parse', '--verify', h2 + '^0']).rstrip() - - if len(bases) == 1: - base = runProgram(['git-rev-parse', '--verify', - bases[0] + '^0']).rstrip() - ancestor = Commit(base, None) - [dummy, clean] = merge(Commit(h1, None), Commit(h2, None), - firstBranch, secondBranch, None, 0, - ancestor) - else: - graph = buildGraph([h1, h2]) - [dummy, clean] = merge(graph.shaMap[h1], graph.shaMap[h2], - firstBranch, secondBranch, graph) - - print '' -except: - if isinstance(sys.exc_info()[1], SystemExit): - raise - else: - traceback.print_exc(None, sys.stderr) - sys.exit(2) - -if clean: - sys.exit(0) -else: - sys.exit(1) diff --git a/git-merge.sh b/git-merge.sh index cb094388bb..a948878b91 100755 --- a/git-merge.sh +++ b/git-merge.sh @@ -3,22 +3,20 @@ # Copyright (c) 2005 Junio C Hamano # -USAGE='[-n] [--no-commit] [--squash] [-s <strategy>]... <merge-message> <head> <remote>+' +USAGE='[-n] [--no-commit] [--squash] [-s <strategy>] [--reflog-action=<action>] [-m=<merge-message>] <commit>+' + . 
git-sh-setup LF=' ' -all_strategies='recur recursive recursive-old octopus resolve stupid ours' +all_strategies='recur recursive octopus resolve stupid ours' default_twohead_strategies='recursive' default_octopus_strategies='octopus' no_trivial_merge_strategies='ours' use_strategies= index_merge=t -if test "@@NO_PYTHON@@"; then - all_strategies='recur recursive resolve octopus stupid ours' -fi dropsave() { rm -f -- "$GIT_DIR/MERGE_HEAD" "$GIT_DIR/MERGE_MSG" \ @@ -95,7 +93,7 @@ finish () { case "$#" in 0) usage ;; esac -rloga= +rloga= have_message= while case "$#" in 0) break ;; esac do case "$1" in @@ -128,17 +126,82 @@ do --reflog-action=*) rloga=`expr "z$1" : 'z-[^=]*=\(.*\)'` ;; + -m=*|--m=*|--me=*|--mes=*|--mess=*|--messa=*|--messag=*|--message=*) + merge_msg=`expr "z$1" : 'z-[^=]*=\(.*\)'` + have_message=t + ;; + -m|--m|--me|--mes|--mess|--messa|--messag|--message) + shift + case "$#" in + 1) usage ;; + esac + merge_msg="$1" + have_message=t + ;; -*) usage ;; *) break ;; esac shift done -merge_msg="$1" -shift -head_arg="$1" -head=$(git-rev-parse --verify "$1"^0) || usage -shift +# This could be traditional "merge <msg> HEAD <commit>..." and the +# way we can tell it is to see if the second token is HEAD, but some +# people might have misused the interface and used a committish that +# is the same as HEAD there instead. Traditional format never would +# have "-m" so it is an additional safety measure to check for it. + +if test -z "$have_message" && + second_token=$(git-rev-parse --verify "$2^0" 2>/dev/null) && + head_commit=$(git-rev-parse --verify "HEAD" 2>/dev/null) && + test "$second_token" = "$head_commit" +then + merge_msg="$1" + shift + head_arg="$1" + shift +elif ! git-rev-parse --verify HEAD >/dev/null 2>&1 +then + # If the merged head is a valid one there is no reason to + # forbid "git merge" into a branch yet to be born. We do + # the same for "git pull". + if test 1 -ne $# + then + echo >&2 "Can merge only exactly one commit into empty head" + exit 1 + fi + + rh=$(git rev-parse --verify "$1^0") || + die "$1 - not something we can merge" + + git-update-ref -m "initial pull" HEAD "$rh" "" && + git-read-tree --reset -u HEAD + exit + +else + # We are invoked directly as the first-class UI. + head_arg=HEAD + + # All the rest are the commits being merged; prepare + # the standard merge summary message to be appended to + # the given message. If remote is invalid we will die + # later in the common codepath so we discard the error + # in this loop. + merge_name=$(for remote + do + rh=$(git-rev-parse --verify "$remote"^0 2>/dev/null) || + continue ;# not something we can merge + bh=$(git show-ref -s --verify "refs/heads/$remote" 2>/dev/null) + if test "$rh" = "$bh" + then + echo "$rh branch '$remote' of ." + else + echo "$rh commit '$remote'" + fi + done | git-fmt-merge-msg + ) + merge_msg="${merge_msg:+$merge_msg$LF$LF}$merge_name" +fi +head=$(git-rev-parse --verify "$head_arg"^0) || usage # All the rest are remote heads test "$#" = 0 && usage ;# we need at least one remote head. 
@@ -147,7 +210,7 @@ test "$rloga" = '' && rloga="merge: $@" remoteheads= for remote do - remotehead=$(git-rev-parse --verify "$remote"^0) || + remotehead=$(git-rev-parse --verify "$remote"^0 2>/dev/null) || die "$remote - not something we can merge" remoteheads="${remoteheads}$remotehead " done diff --git a/git-parse-remote.sh b/git-parse-remote.sh index c325ef761e..6ae534bf89 100755 --- a/git-parse-remote.sh +++ b/git-parse-remote.sh @@ -90,6 +90,43 @@ get_remote_default_refs_for_push () { esac } +# Called from canon_refs_list_for_fetch -d "$remote", which +# is called from get_remote_default_refs_for_fetch to grok +# refspecs that are retrieved from the configuration, but not +# from get_remote_refs_for_fetch when it deals with refspecs +# supplied on the command line. $ls_remote_result has the list +# of refs available at remote. +expand_refs_wildcard () { + for ref + do + lref=${ref#'+'} + # a non glob pattern is given back as-is. + expr "z$lref" : 'zrefs/.*/\*:refs/.*/\*$' >/dev/null || { + echo "$ref" + continue + } + + from=`expr "z$lref" : 'z\(refs/.*/\)\*:refs/.*/\*$'` + to=`expr "z$lref" : 'zrefs/.*/\*:\(refs/.*/\)\*$'` + local_force= + test "z$lref" = "z$ref" || local_force='+' + echo "$ls_remote_result" | + ( + IFS=' ' + while read sha1 name + do + mapped=${name#"$from"} + if test "z$name" != "z${name%'^{}'}" || + test "z$name" = "z$mapped" + then + continue + fi + echo "${local_force}${name}:${to}${mapped}" + done + ) + done +} + # Subroutine to canonicalize remote:local notation. canon_refs_list_for_fetch () { # If called from get_remote_default_refs_for_fetch @@ -97,6 +134,8 @@ canon_refs_list_for_fetch () { # or the first one otherwise; add prefix . to the rest # to prevent the secondary branches to be merged by default. merge_branches= + found_mergeref= + curr_branch= if test "$1" = "-d" then shift ; remote="$1" ; shift @@ -107,6 +146,8 @@ canon_refs_list_for_fetch () { merge_branches=$(git-repo-config \ --get-all "branch.${curr_branch}.merge") fi + set x $(expand_refs_wildcard "$@") + shift fi for ref do @@ -132,6 +173,10 @@ canon_refs_list_for_fetch () { dot_prefix= && break done fi + if test -z $dot_prefix + then + found_mergeref=true + fi case "$remote" in '') remote=HEAD ;; refs/heads/* | refs/tags/* | refs/remotes/*) ;; @@ -152,6 +197,11 @@ canon_refs_list_for_fetch () { fi echo "${dot_prefix}${force}${remote}:${local}" done + if test -z "$found_mergeref" -a "$curr_branch" + then + echo >&2 "Warning: No merge candidate found because value of config option + \"branch.${curr_branch}.merge\" does not match any remote branch fetched." + fi } # Returns list of src: (no store), or src:dst (store) diff --git a/git-rebase.sh b/git-rebase.sh index 546fa446fc..25530dfdc5 100755 --- a/git-rebase.sh +++ b/git-rebase.sh @@ -302,15 +302,6 @@ then exit $? fi -if test "@@NO_PYTHON@@" && test "$strategy" = "recursive-old" -then - die 'The recursive-old merge strategy is written in Python, -which this installation of git was not configured with. Please consider -a different merge strategy (e.g. recursive, resolve, or stupid) -or install Python and git with Python support.' 
- -fi - # start doing a rebase with git-merge # this is rename-aware if the recursive (default) strategy is used diff --git a/git-request-pull.sh b/git-request-pull.sh index 4319e35c62..4eacc3a059 100755 --- a/git-request-pull.sh +++ b/git-request-pull.sh @@ -30,4 +30,4 @@ echo " $url" echo git log $baserev..$headrev | git-shortlog ; -git diff --stat --summary $baserev..$headrev +git diff -M --stat --summary $baserev..$headrev diff --git a/git-shortlog.perl b/git-shortlog.perl deleted file mode 100755 index 334fec7477..0000000000 --- a/git-shortlog.perl +++ /dev/null @@ -1,234 +0,0 @@ -#!/usr/bin/perl -w - -use strict; -use Getopt::Std; -use File::Basename qw(basename dirname); - -our ($opt_h, $opt_n, $opt_s); -getopts('hns'); - -$opt_h && usage(); - -sub usage { - print STDERR "Usage: ${\basename $0} [-h] [-n] [-s] < <log_data>\n"; - exit(1); -} - -my (%mailmap); -my (%email); -my (%map); -my $pstate = 1; -my $n_records = 0; -my $n_output = 0; - -sub shortlog_entry($$) { - my ($name, $desc) = @_; - my $key = $name; - - $desc =~ s#/pub/scm/linux/kernel/git/#/.../#g; - $desc =~ s#\[PATCH\] ##g; - - # store description in array, in email->{desc list} map - if (exists $map{$key}) { - # grab ref - my $obj = $map{$key}; - - # add desc to array - push(@$obj, $desc); - } else { - # create new array, containing 1 item - my @arr = ($desc); - - # store ref to array - $map{$key} = \@arr; - } -} - -# sort comparison function -sub by_name($$) { - my ($a, $b) = @_; - - uc($a) cmp uc($b); -} -sub by_nbentries($$) { - my ($a, $b) = @_; - my $a_entries = $map{$a}; - my $b_entries = $map{$b}; - - @$b_entries - @$a_entries || by_name $a, $b; -} - -my $sort_method = $opt_n ? \&by_nbentries : \&by_name; - -sub summary_output { - my ($obj, $num, $key); - - foreach $key (sort $sort_method keys %map) { - $obj = $map{$key}; - $num = @$obj; - printf "%s: %u\n", $key, $num; - $n_output += $num; - } -} - -sub shortlog_output { - my ($obj, $num, $key, $desc); - - foreach $key (sort $sort_method keys %map) { - $obj = $map{$key}; - $num = @$obj; - - # output author - printf "%s (%u):\n", $key, $num; - - # output author's 1-line summaries - foreach $desc (reverse @$obj) { - print " $desc\n"; - $n_output++; - } - - # blank line separating author from next author - print "\n"; - } -} - -sub changelog_input { - my ($author, $desc); - - while (<>) { - # get author and email - if ($pstate == 1) { - my ($email); - - next unless /^[Aa]uthor:?\s*(.*?)\s*<(.*)>/; - - $n_records++; - - $author = $1; - $email = $2; - $desc = undef; - - # cset author fixups - if (exists $mailmap{$email}) { - $author = $mailmap{$email}; - } elsif (exists $mailmap{$author}) { - $author = $mailmap{$author}; - } elsif (!$author) { - $author = $email; - } - $email{$author}{$email}++; - $pstate++; - } - - # skip to blank line - elsif ($pstate == 2) { - next unless /^\s*$/; - $pstate++; - } - - # skip to non-blank line - elsif ($pstate == 3) { - next unless /^\s*?(.*)/; - - # skip lines that are obviously not - # a 1-line cset description - next if /^\s*From: /; - - chomp; - $desc = $1; - - &shortlog_entry($author, $desc); - - $pstate = 1; - } - - else { - die "invalid parse state $pstate"; - } - } -} - -sub read_mailmap { - my ($fh, $mailmap) = @_; - while (<$fh>) { - chomp; - if (/^([^#].*?)\s*<(.*)>/) { - $mailmap->{$2} = $1; - } - } -} - -sub setup_mailmap { - read_mailmap(\*DATA, \%mailmap); - if (-f '.mailmap') { - my $fh = undef; - open $fh, '<', '.mailmap'; - read_mailmap($fh, \%mailmap); - close $fh; - } -} - -sub finalize { - #print "\n$n_records 
records parsed.\n"; - - if ($n_records != $n_output) { - die "parse error: input records != output records\n"; - } - if (0) { - for my $author (sort keys %email) { - my $e = $email{$author}; - for my $email (sort keys %$e) { - print STDERR "$author <$email>\n"; - } - } - } -} - -&setup_mailmap; -&changelog_input; -$opt_s ? &summary_output : &shortlog_output; -&finalize; -exit(0); - - -__DATA__ -# -# Even with git, we don't always have name translations. -# So have an email->real name table to translate the -# (hopefully few) missing names -# -Adrian Bunk <bunk@stusta.de> -Andreas Herrmann <aherrman@de.ibm.com> -Andrew Morton <akpm@osdl.org> -Andrew Vasquez <andrew.vasquez@qlogic.com> -Christoph Hellwig <hch@lst.de> -Corey Minyard <minyard@acm.org> -David Woodhouse <dwmw2@shinybook.infradead.org> -Domen Puncer <domen@coderock.org> -Douglas Gilbert <dougg@torque.net> -Ed L Cashin <ecashin@coraid.com> -Evgeniy Polyakov <johnpol@2ka.mipt.ru> -Felix Moeller <felix@derklecks.de> -Frank Zago <fzago@systemfabricworks.com> -Greg Kroah-Hartman <gregkh@suse.de> -James Bottomley <jejb@mulgrave.(none)> -James Bottomley <jejb@titanic.il.steeleye.com> -Jeff Garzik <jgarzik@pretzel.yyz.us> -Jens Axboe <axboe@suse.de> -Kay Sievers <kay.sievers@vrfy.org> -Mitesh shah <mshah@teja.com> -Morten Welinder <terra@gnome.org> -Morten Welinder <welinder@anemone.rentec.com> -Morten Welinder <welinder@darter.rentec.com> -Morten Welinder <welinder@troll.com> -Nguyen Anh Quynh <aquynh@gmail.com> -Paolo 'Blaisorblade' Giarrusso <blaisorblade@yahoo.it> -Peter A Jonsson <pj@ludd.ltu.se> -Ralf Wildenhues <Ralf.Wildenhues@gmx.de> -Rudolf Marek <R.Marek@sh.cvut.cz> -Rui Saraiva <rmps@joel.ist.utl.pt> -Sachin P Sant <ssant@in.ibm.com> -Santtu Hyrkk,Av(B <santtu.hyrkko@gmail.com> -Simon Kelley <simon@thekelleys.org.uk> -Tejun Heo <htejun@gmail.com> -Tony Luck <tony.luck@intel.com> diff --git a/git-svn.perl b/git-svn.perl index b53273eaea..1f8a3b0e07 100755 --- a/git-svn.perl +++ b/git-svn.perl @@ -21,6 +21,7 @@ $ENV{TZ} = 'UTC'; $ENV{LC_ALL} = 'C'; $| = 1; # unbuffer STDOUT +sub fatal (@) { print STDERR $@; exit 1 } # If SVN:: library support is added, please make the dependencies # optional and preserve the capability to use the command-line client. # use eval { require SVN::... 
} to make it lazy load @@ -39,7 +40,7 @@ memoize('revisions_eq'); memoize('cmt_metadata'); memoize('get_commit_time'); -my ($SVN_PATH, $SVN, $SVN_LOG, $_use_lib); +my ($SVN, $_use_lib); sub nag_lib { print STDERR <<EOF; @@ -59,6 +60,7 @@ nag_lib() unless $_use_lib; my $_optimize_commits = 1 unless $ENV{GIT_SVN_NO_OPTIMIZE_COMMITS}; my $sha1 = qr/[a-f\d]{40}/; my $sha1_short = qr/[a-f\d]{4,40}/; +my $_esc_color = qr/(?:\033\[(?:(?:\d+;)*\d*)?m)*/; my ($_revision,$_stdin,$_no_ignore_ext,$_no_stop_copy,$_help,$_rmdir,$_edit, $_find_copies_harder, $_l, $_cp_similarity, $_cp_remote, $_repack, $_repack_nr, $_repack_flags, $_q, @@ -66,9 +68,11 @@ my ($_revision,$_stdin,$_no_ignore_ext,$_no_stop_copy,$_help,$_rmdir,$_edit, $_template, $_shared, $_no_default_regex, $_no_graft_copy, $_limit, $_verbose, $_incremental, $_oneline, $_l_fmt, $_show_commit, $_version, $_upgrade, $_authors, $_branch_all_refs, @_opt_m, - $_merge, $_strategy, $_dry_run, $_ignore_nodate, $_non_recursive); + $_merge, $_strategy, $_dry_run, $_ignore_nodate, $_non_recursive, + $_username, $_config_dir, $_no_auth_cache, $_xfer_delta, + $_pager, $_color); my (@_branch_from, %tree_map, %users, %rusers, %equiv); -my ($_svn_co_url_revs, $_svn_pg_peg_revs); +my ($_svn_co_url_revs, $_svn_pg_peg_revs, $_svn_can_do_switch); my @repo_path_split_cache; my %fc_opts = ( 'no-ignore-externals' => \$_no_ignore_ext, @@ -79,6 +83,9 @@ my %fc_opts = ( 'no-ignore-externals' => \$_no_ignore_ext, 'repack:i' => \$_repack, 'no-metadata' => \$_no_metadata, 'quiet|q' => \$_q, + 'username=s' => \$_username, + 'config-dir=s' => \$_config_dir, + 'no-auth-cache' => \$_no_auth_cache, 'ignore-nodate' => \$_ignore_nodate, 'repack-flags|repack-args|repack-opts=s' => \$_repack_flags); @@ -117,7 +124,12 @@ my %cmd = ( 'no-graft-copy' => \$_no_graft_copy } ], 'multi-init' => [ \&multi_init, 'Initialize multiple trees (like git-svnimport)', - { %multi_opts, %fc_opts } ], + { %multi_opts, %init_opts, + 'revision|r=i' => \$_revision, + 'username=s' => \$_username, + 'config-dir=s' => \$_config_dir, + 'no-auth-cache' => \$_no_auth_cache, + } ], 'multi-fetch' => [ \&multi_fetch, 'Fetch multiple trees (like git-svnimport)', \%fc_opts ], @@ -130,6 +142,8 @@ my %cmd = ( 'show-commit' => \$_show_commit, 'non-recursive' => \$_non_recursive, 'authors-file|A=s' => \$_authors, + 'color' => \$_color, + 'pager=s' => \$_pager, } ], 'commit-diff' => [ \&commit_diff, 'Commit a diff between two trees', { 'message|m=s' => \$_message, @@ -377,10 +391,7 @@ sub fetch_cmd { sub fetch_lib { my (@parents) = @_; $SVN_URL ||= file_to_s("$GIT_SVN_DIR/info/url"); - my $repo; - ($repo, $SVN_PATH) = repo_path_split($SVN_URL); - $SVN_LOG ||= libsvn_connect($repo); - $SVN ||= libsvn_connect($repo); + $SVN ||= libsvn_connect($SVN_URL); my ($last_rev, $last_commit) = svn_grab_base_rev(); my ($base, $head) = libsvn_parse_revision($last_rev); if ($base > $head) { @@ -422,7 +433,7 @@ sub fetch_lib { # performance sucks with it enabled, so it's much # faster to fetch revision ranges instead of relying # on the limiter. 
- libsvn_get_log($SVN_LOG, '/'.$SVN_PATH, + libsvn_get_log(libsvn_dup_ra($SVN), [''], $min, $max, 0, 1, 1, sub { my $log_msg; @@ -448,6 +459,7 @@ sub fetch_lib { $min = $max + 1; $max += $inc; $max = $head if ($max > $head); + $SVN = libsvn_connect($SVN_URL); } restore_index($index); return { revision => $last_rev, commit => $last_commit }; @@ -524,7 +536,6 @@ sub commit_lib { my $commit_msg = "$GIT_SVN_DIR/.svn-commit.tmp.$$"; my $repo; - ($repo, $SVN_PATH) = repo_path_split($SVN_URL); set_svn_commit_env(); foreach my $c (@revs) { my $log_msg = get_commit_message($c, $commit_msg); @@ -533,13 +544,11 @@ sub commit_lib { # can't track down... (it's probably in the SVN code) defined(my $pid = open my $fh, '-|') or croak $!; if (!$pid) { - $SVN_LOG = libsvn_connect($repo); - $SVN = libsvn_connect($repo); my $ed = SVN::Git::Editor->new( { r => $r_last, - ra => $SVN_LOG, + ra => libsvn_dup_ra($SVN), c => $c, - svn_path => $SVN_PATH + svn_path => $SVN->{svn_path}, }, $SVN->get_commit_editor( $log_msg->{msg}, @@ -571,7 +580,7 @@ sub commit_lib { $no = 1; } } - close $fh or croak $?; + close $fh or exit 1; if (! defined $r_new && ! defined $cmt_new) { unless ($no) { die "Failed to parse revision information\n"; @@ -657,10 +666,9 @@ sub show_ignore_cmd { sub show_ignore_lib { my $repo; - ($repo, $SVN_PATH) = repo_path_split($SVN_URL); - $SVN ||= libsvn_connect($repo); + $SVN ||= libsvn_connect($SVN_URL); my $r = defined $_revision ? $_revision : $SVN->get_latest_revnum; - libsvn_traverse_ignore(\*STDOUT, $SVN_PATH, $r); + libsvn_traverse_ignore(\*STDOUT, $SVN->{svn_path}, $r); } sub graft_branches { @@ -761,16 +769,17 @@ sub show_log { } } + config_pager(); my $pid = open(my $log,'-|'); defined $pid or croak $!; if (!$pid) { exec(git_svn_log_cmd($r_min,$r_max), @args) or croak $!; } - setup_pager(); + run_pager(); my (@k, $c, $d); while (<$log>) { - if (/^commit ($sha1_short)/o) { + if (/^${_esc_color}commit ($sha1_short)/o) { my $cmt = $1; if ($c && cmt_showable($c) && $c->{r} != $r_last) { $r_last = $c->{r}; @@ -779,25 +788,25 @@ sub show_log { } $d = undef; $c = { c => $cmt }; - } elsif (/^author (.+) (\d+) ([\-\+]?\d+)$/) { + } elsif (/^${_esc_color}author (.+) (\d+) ([\-\+]?\d+)$/) { get_author_info($c, $1, $2, $3); - } elsif (/^(?:tree|parent|committer) /) { + } elsif (/^${_esc_color}(?:tree|parent|committer) /) { # ignore - } elsif (/^:\d{6} \d{6} $sha1_short/o) { + } elsif (/^${_esc_color}:\d{6} \d{6} $sha1_short/o) { push @{$c->{raw}}, $_; - } elsif (/^[ACRMDT]\t/) { - # we could add $SVN_PATH here, but that requires + } elsif (/^${_esc_color}[ACRMDT]\t/) { + # we could add $SVN->{svn_path} here, but that requires # remote access at the moment (repo_path_split)... 
- s#^([ACRMDT])\t# $1 #; + s#^(${_esc_color})([ACRMDT])\t#$1 $2 #; push @{$c->{changed}}, $_; - } elsif (/^diff /) { + } elsif (/^${_esc_color}diff /) { $d = 1; push @{$c->{diff}}, $_; } elsif ($d) { push @{$c->{diff}}, $_; - } elsif (/^ (git-svn-id:.+)$/) { + } elsif (/^${_esc_color} (git-svn-id:.+)$/) { ($c->{url}, $c->{r}, undef) = extract_metadata($1); - } elsif (s/^ //) { + } elsif (s/^${_esc_color} //) { push @{$c->{l}}, $_; } } @@ -852,10 +861,7 @@ sub commit_diff { $_message ||= get_commit_message($tb, "$GIT_DIR/.svn-commit.tmp.$$")->{msg}; } - my $repo; - ($repo, $SVN_PATH) = repo_path_split($SVN_URL); - $SVN_LOG ||= libsvn_connect($repo); - $SVN ||= libsvn_connect($repo); + $SVN ||= libsvn_connect($SVN_URL); if ($r eq 'HEAD') { $r = $SVN->get_latest_revnum; } elsif ($r !~ /^\d+$/) { @@ -864,8 +870,9 @@ sub commit_diff { my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef, 0) : (); my $rev_committed; my $ed = SVN::Git::Editor->new({ r => $r, - ra => $SVN_LOG, c => $tb, - svn_path => $SVN_PATH + ra => libsvn_dup_ra($SVN), + c => $tb, + svn_path => $SVN->{svn_path} }, $SVN->get_commit_editor($_message, sub { @@ -873,13 +880,16 @@ sub commit_diff { print "Committed $_[0]\n"; }, @lock) ); - my $mods = libsvn_checkout_tree($ta, $tb, $ed); - if (@$mods == 0) { - print "No changes\n$ta == $tb\n"; - $ed->abort_edit; - } else { - $ed->close_edit; - } + eval { + my $mods = libsvn_checkout_tree($ta, $tb, $ed); + if (@$mods == 0) { + print "No changes\n$ta == $tb\n"; + $ed->abort_edit; + } else { + $ed->close_edit; + } + }; + fatal "$@\n" if $@; $_message = $_file = undef; return $rev_committed; } @@ -902,12 +912,30 @@ sub cmt_showable { return defined $c->{r}; } +sub log_use_color { + return 1 if $_color; + my $dc; + chomp($dc = `git-repo-config --get diff.color`); + if ($dc eq 'auto') { + if (-t *STDOUT || (defined $_pager && + `git-repo-config --bool --get pager.color` !~ /^false/)) { + return ($ENV{TERM} && $ENV{TERM} ne 'dumb'); + } + return 0; + } + return 0 if $dc eq 'never'; + return 1 if $dc eq 'always'; + chomp($dc = `git-repo-config --bool --get diff.color`); + $dc eq 'true'; +} + sub git_svn_log_cmd { my ($r_min, $r_max) = @_; my @cmd = (qw/git-log --abbrev-commit --pretty=raw --default/, "refs/remotes/$GIT_SVN"); push @cmd, '-r' unless $_non_recursive; push @cmd, qw/--raw --name-status/ if $_verbose; + push @cmd, '--color' if log_use_color(); return @cmd unless defined $r_max; if ($r_max == $r_min) { push @cmd, '--max-count=1'; @@ -1143,8 +1171,7 @@ sub graft_file_copy_lib { my $tree_paths = $l_map->{$u}; my $pfx = common_prefix([keys %$tree_paths]); my ($repo, $path) = repo_path_split($u.$pfx); - $SVN_LOG ||= libsvn_connect($repo); - $SVN ||= libsvn_connect($repo); + $SVN = libsvn_connect($repo); my ($base, $head) = libsvn_parse_revision(); my $inc = 1000; @@ -1153,7 +1180,8 @@ sub graft_file_copy_lib { $SVN::Error::handler = \&libsvn_skip_unknown_revs; while (1) { my $pool = SVN::Pool->new; - libsvn_get_log($SVN_LOG, "/$path", $min, $max, 0, 1, 1, + libsvn_get_log(libsvn_dup_ra($SVN), [$path], + $min, $max, 0, 2, 1, sub { libsvn_graft_file_copies($grafts, $tree_paths, $path, @_); @@ -1263,13 +1291,9 @@ sub repo_path_split { return ($u, $full_url); } } - if ($_use_lib) { my $tmp = libsvn_connect($full_url); - my $url = $tmp->get_repos_root; - $full_url =~ s#^\Q$url\E/*##; - push @repo_path_split_cache, qr/^(\Q$url\E)/; - return ($url, $full_url); + return ($tmp->{repos_root}, $tmp->{svn_path}); } else { my ($url, $path) = ($full_url =~ m!^([a-z\+]+://[^/]*)(.*)$!i); $path =~ 
s#^/+##; @@ -2538,14 +2562,18 @@ sub tz_to_s_offset { return ($1 * 60) + ($tz * 3600); } -sub setup_pager { # translated to Perl from pager.c - return unless (-t *STDOUT); - my $pager = $ENV{PAGER}; - if (!defined $pager) { - $pager = 'less'; - } elsif (length $pager == 0 || $pager eq 'cat') { - return; +# adapted from pager.c +sub config_pager { + $_pager ||= $ENV{GIT_PAGER} || $ENV{PAGER}; + if (!defined $_pager) { + $_pager = 'less'; + } elsif (length $_pager == 0 || $_pager eq 'cat') { + $_pager = undef; } +} + +sub run_pager { + return unless -t *STDOUT; pipe my $rfd, my $wfd or return; defined(my $pid = fork) or croak $!; if (!$pid) { @@ -2553,8 +2581,8 @@ sub setup_pager { # translated to Perl from pager.c return; } open STDIN, '<&', $rfd or croak $!; - $ENV{LESS} ||= '-S'; - exec $pager or croak "Can't run pager: $!\n";; + $ENV{LESS} ||= 'FRSX'; + exec $_pager or croak "Can't run pager: $! ($_pager)\n"; } sub get_author_info { @@ -2680,29 +2708,202 @@ sub libsvn_load { require SVN::Ra; require SVN::Delta; push @SVN::Git::Editor::ISA, 'SVN::Delta::Editor'; + push @SVN::Git::Fetcher::ISA, 'SVN::Delta::Editor'; + *SVN::Git::Fetcher::process_rm = *process_rm; + *SVN::Git::Fetcher::safe_qx = *safe_qx; my $kill_stupid_warnings = $SVN::Node::none.$SVN::Node::file. $SVN::Node::dir.$SVN::Node::unknown. $SVN::Node::none.$SVN::Node::file. - $SVN::Node::dir.$SVN::Node::unknown; + $SVN::Node::dir.$SVN::Node::unknown. + $SVN::Auth::SSL::CNMISMATCH. + $SVN::Auth::SSL::NOTYETVALID. + $SVN::Auth::SSL::EXPIRED. + $SVN::Auth::SSL::UNKNOWNCA. + $SVN::Auth::SSL::OTHER; 1; }; } +sub _simple_prompt { + my ($cred, $realm, $default_username, $may_save, $pool) = @_; + $may_save = undef if $_no_auth_cache; + $default_username = $_username if defined $_username; + if (defined $default_username && length $default_username) { + if (defined $realm && length $realm) { + print "Authentication realm: $realm\n"; + } + $cred->username($default_username); + } else { + _username_prompt($cred, $realm, $may_save, $pool); + } + $cred->password(_read_password("Password for '" . + $cred->username . "': ", $realm)); + $cred->may_save($may_save); + $SVN::_Core::SVN_NO_ERROR; +} + +sub _ssl_server_trust_prompt { + my ($cred, $realm, $failures, $cert_info, $may_save, $pool) = @_; + $may_save = undef if $_no_auth_cache; + print "Error validating server certificate for '$realm':\n"; + if ($failures & $SVN::Auth::SSL::UNKNOWNCA) { + print " - The certificate is not issued by a trusted ", + "authority. Use the\n", + " fingerprint to validate the certificate manually!\n"; + } + if ($failures & $SVN::Auth::SSL::CNMISMATCH) { + print " - The certificate hostname does not match.\n"; + } + if ($failures & $SVN::Auth::SSL::NOTYETVALID) { + print " - The certificate is not yet valid.\n"; + } + if ($failures & $SVN::Auth::SSL::EXPIRED) { + print " - The certificate has expired.\n"; + } + if ($failures & $SVN::Auth::SSL::OTHER) { + print " - The certificate has an unknown error.\n"; + } + printf( "Certificate information:\n". + " - Hostname: %s\n". + " - Valid: from %s until %s\n". + " - Issuer: %s\n". + " - Fingerprint: %s\n", + map $cert_info->$_, qw(hostname valid_from valid_until + issuer_dname fingerprint) ); + my $choice; +prompt: + print $may_save ? + "(R)eject, accept (t)emporarily or accept (p)ermanently? " : + "(R)eject or accept (t)emporarily? 
"; + $choice = lc(substr(<STDIN> || 'R', 0, 1)); + if ($choice =~ /^t$/i) { + $cred->may_save(undef); + } elsif ($choice =~ /^r$/i) { + return -1; + } elsif ($may_save && $choice =~ /^p$/i) { + $cred->may_save($may_save); + } else { + goto prompt; + } + $cred->accepted_failures($failures); + $SVN::_Core::SVN_NO_ERROR; +} + +sub _ssl_client_cert_prompt { + my ($cred, $realm, $may_save, $pool) = @_; + $may_save = undef if $_no_auth_cache; + print "Client certificate filename: "; + chomp(my $filename = <STDIN>); + $cred->cert_file($filename); + $cred->may_save($may_save); + $SVN::_Core::SVN_NO_ERROR; +} + +sub _ssl_client_cert_pw_prompt { + my ($cred, $realm, $may_save, $pool) = @_; + $may_save = undef if $_no_auth_cache; + $cred->password(_read_password("Password: ", $realm)); + $cred->may_save($may_save); + $SVN::_Core::SVN_NO_ERROR; +} + +sub _username_prompt { + my ($cred, $realm, $may_save, $pool) = @_; + $may_save = undef if $_no_auth_cache; + if (defined $realm && length $realm) { + print "Authentication realm: $realm\n"; + } + my $username; + if (defined $_username) { + $username = $_username; + } else { + print "Username: "; + chomp($username = <STDIN>); + } + $cred->username($username); + $cred->may_save($may_save); + $SVN::_Core::SVN_NO_ERROR; +} + +sub _read_password { + my ($prompt, $realm) = @_; + print $prompt; + require Term::ReadKey; + Term::ReadKey::ReadMode('noecho'); + my $password = ''; + while (defined(my $key = Term::ReadKey::ReadKey(0))) { + last if $key =~ /[\012\015]/; # \n\r + $password .= $key; + } + Term::ReadKey::ReadMode('restore'); + print "\n"; + $password; +} + sub libsvn_connect { my ($url) = @_; - my $auth = SVN::Core::auth_open([SVN::Client::get_simple_provider(), - SVN::Client::get_ssl_server_trust_file_provider(), - SVN::Client::get_username_provider()]); - my $s = eval { SVN::Ra->new(url => $url, auth => $auth) }; - return $s; + SVN::_Core::svn_config_ensure($_config_dir, undef); + my ($baton, $callbacks) = SVN::Core::auth_open_helper([ + SVN::Client::get_simple_provider(), + SVN::Client::get_ssl_server_trust_file_provider(), + SVN::Client::get_simple_prompt_provider( + \&_simple_prompt, 2), + SVN::Client::get_ssl_client_cert_prompt_provider( + \&_ssl_client_cert_prompt, 2), + SVN::Client::get_ssl_client_cert_pw_prompt_provider( + \&_ssl_client_cert_pw_prompt, 2), + SVN::Client::get_username_provider(), + SVN::Client::get_ssl_server_trust_prompt_provider( + \&_ssl_server_trust_prompt), + SVN::Client::get_username_prompt_provider( + \&_username_prompt, 2), + ]); + my $config = SVN::Core::config_get_config($_config_dir); + my $ra = SVN::Ra->new(url => $url, auth => $baton, + config => $config, + pool => SVN::Pool->new, + auth_provider_callbacks => $callbacks); + + my $df = $ENV{GIT_SVN_DELTA_FETCH}; + if (defined $df) { + $_xfer_delta = $df; + } else { + $_xfer_delta = ($url =~ m#^file://#) ? 
undef : 1; + } + $ra->{svn_path} = $url; + $ra->{repos_root} = $ra->get_repos_root; + $ra->{svn_path} =~ s#^\Q$ra->{repos_root}\E/*##; + push @repo_path_split_cache, qr/^(\Q$ra->{repos_root}\E)/; + return $ra; +} + +sub libsvn_can_do_switch { + unless (defined $_svn_can_do_switch) { + my $pool = SVN::Pool->new; + my $rep = eval { + $SVN->do_switch(1, '', 0, $SVN->{url}, + SVN::Delta::Editor->new, $pool); + }; + if ($@) { + $_svn_can_do_switch = 0; + } else { + $rep->abort_report($pool); + $_svn_can_do_switch = 1; + } + $pool->clear; + } + $_svn_can_do_switch; +} + +sub libsvn_dup_ra { + my ($ra) = @_; + SVN::Ra->new(map { $_ => $ra->{$_} } qw/config url + auth auth_provider_callbacks repos_root svn_path/); } sub libsvn_get_file { my ($gui, $f, $rev, $chg) = @_; - my $p = $f; - if (length $SVN_PATH > 0) { - return unless ($p =~ s#^\Q$SVN_PATH\E/##); - } + $f =~ s#^/##; print "\t$chg\t$f\n" unless $_q; my ($hash, $pid, $in, $out); @@ -2739,7 +2940,7 @@ sub libsvn_get_file { waitpid $pid, 0; $hash =~ /^$sha1$/o or die "not a sha1: $hash\n"; } - print $gui $mode,' ',$hash,"\t",$p,"\0" or croak $!; + print $gui $mode,' ',$hash,"\t",$f,"\0" or croak $!; } sub libsvn_log_entry { @@ -2756,8 +2957,7 @@ sub libsvn_log_entry { } sub process_rm { - my ($gui, $last_commit, $f) = @_; - $f =~ s#^\Q$SVN_PATH\E/?## or return; + my ($gui, $last_commit, $f, $q) = @_; # remove entire directories. if (safe_qx('git-ls-tree',$last_commit,'--',$f) =~ /^040000 tree/) { defined(my $pid = open my $ls, '-|') or croak $!; @@ -2768,23 +2968,51 @@ sub process_rm { local $/ = "\0"; while (<$ls>) { print $gui '0 ',0 x 40,"\t",$_ or croak $!; + print "\tD\t$_\n" unless $q; } + print "\tD\t$f/\n" unless $q; close $ls or croak $?; } else { print $gui '0 ',0 x 40,"\t",$f,"\0" or croak $!; + print "\tD\t$f\n" unless $q; } } sub libsvn_fetch { + $_xfer_delta ? libsvn_fetch_delta(@_) : libsvn_fetch_full(@_); +} + +sub libsvn_fetch_delta { + my ($last_commit, $paths, $rev, $author, $date, $msg) = @_; + my $pool = SVN::Pool->new; + my $ed = SVN::Git::Fetcher->new({ c => $last_commit, q => $_q }); + my $reporter = $SVN->do_update($rev, '', 1, $ed, $pool); + my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : (); + my (undef, $last_rev, undef) = cmt_metadata($last_commit); + $reporter->set_path('', $last_rev, 0, @lock, $pool); + $reporter->finish_report($pool); + $pool->clear; + unless ($ed->{git_commit_ok}) { + die "SVN connection failed somewhere...\n"; + } + libsvn_log_entry($rev, $author, $date, $msg, [$last_commit]); +} + +sub libsvn_fetch_full { my ($last_commit, $paths, $rev, $author, $date, $msg) = @_; open my $gui, '| git-update-index -z --index-info' or croak $!; my %amr; + my $p = $SVN->{svn_path}; foreach my $f (keys %$paths) { my $m = $paths->{$f}->action(); - $f =~ s#^/+##; + if (length $p) { + $f =~ s#^/\Q$p\E/##; + next if $f =~ m#^/#; + } else { + $f =~ s#^/##; + } if ($m =~ /^[DR]$/) { - print "\t$m\t$f\n" unless $_q; - process_rm($gui, $last_commit, $f); + process_rm($gui, $last_commit, $f, $_q); next if $m eq 'D'; # 'R' can be file replacements, too, right? } @@ -2871,9 +3099,9 @@ sub libsvn_parse_revision { sub libsvn_traverse { my ($gui, $pfx, $path, $rev, $files) = @_; - my $cwd = "$pfx/$path"; + my $cwd = length $pfx ? 
"$pfx/$path" : $path; my $pool = SVN::Pool->new; - $cwd =~ s#^/+##g; + $cwd =~ s#^\Q$SVN->{svn_path}\E##; my ($dirent, $r, $props) = $SVN->get_dir($cwd, $rev, $pool); foreach my $d (keys %$dirent) { my $t = $dirent->{$d}->kind; @@ -2897,7 +3125,7 @@ sub libsvn_traverse_ignore { my $pool = SVN::Pool->new; my ($dirent, undef, $props) = $SVN->get_dir($path, $r, $pool); my $p = $path; - $p =~ s#^\Q$SVN_PATH\E/?##; + $p =~ s#^\Q$SVN->{svn_path}\E/##; print $fh length $p ? "\n# $p\n" : "\n# /\n"; if (my $s = $props->{'svn:ignore'}) { $s =~ s/[\r\n]+/\n/g; @@ -2924,8 +3152,8 @@ sub revisions_eq { if ($_use_lib) { # should be OK to use Pool here (r1 - r0) should be small my $pool = SVN::Pool->new; - libsvn_get_log($SVN, "/$path", $r0, $r1, - 0, 1, 1, sub {$nr++}, $pool); + libsvn_get_log($SVN, [$path], $r0, $r1, + 0, 0, 1, sub {$nr++}, $pool); $pool->clear; } else { my ($url, undef) = repo_path_split($SVN_URL); @@ -2939,7 +3167,7 @@ sub revisions_eq { sub libsvn_find_parent_branch { my ($paths, $rev, $author, $date, $msg) = @_; - my $svn_path = '/'.$SVN_PATH; + my $svn_path = '/'.$SVN->{svn_path}; # look for a parent from another branch: my $i = $paths->{$svn_path} or return; @@ -2950,7 +3178,7 @@ sub libsvn_find_parent_branch { $branch_from =~ s#^/##; my $l_map = {}; read_url_paths_all($l_map, '', "$GIT_DIR/svn"); - my $url = $SVN->{url}; + my $url = $SVN->{repos_root}; defined $l_map->{$url} or return; my $id = $l_map->{$url}->{$branch_from}; if (!defined $id && $_follow_parent) { @@ -2972,7 +3200,7 @@ sub libsvn_find_parent_branch { $GIT_SVN = $ENV{GIT_SVN_ID} = $id; init_vars(); $SVN_URL = "$url/$branch_from"; - $SVN_LOG = $SVN = undef; + $SVN = undef; setup_git_svn(); # we can't assume SVN_URL exists at r+1: $_revision = "0:$r"; @@ -2988,8 +3216,26 @@ sub libsvn_find_parent_branch { unlink $GIT_SVN_INDEX; print STDERR "Found branch parent: ($GIT_SVN) $parent\n"; sys(qw/git-read-tree/, $parent); - return libsvn_fetch($parent, $paths, $rev, - $author, $date, $msg); + unless (libsvn_can_do_switch()) { + return libsvn_fetch_full($parent, $paths, $rev, + $author, $date, $msg); + } + # do_switch works with svn/trunk >= r22312, but that is not + # included with SVN 1.4.2 (the latest version at the moment), + # so we can't rely on it. + my $ra = libsvn_connect("$url/$branch_from"); + my $ed = SVN::Git::Fetcher->new({c => $parent, q => $_q}); + my $pool = SVN::Pool->new; + my $reporter = $ra->do_switch($rev, '', 1, $SVN->{url}, + $ed, $pool); + my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : (); + $reporter->set_path('', $r0, 0, @lock, $pool); + $reporter->finish_report($pool); + $pool->clear; + unless ($ed->{git_commit_ok}) { + die "SVN connection failed somewhere...\n"; + } + return libsvn_log_entry($rev, $author, $date, $msg, [$parent]); } print STDERR "Nope, branch point not imported or unknown\n"; return undef; @@ -2997,6 +3243,7 @@ sub libsvn_find_parent_branch { sub libsvn_get_log { my ($ra, @args) = @_; + $args[4]-- if $args[4] && $_xfer_delta && ! $_follow_parent; if ($SVN::Core::VERSION le '1.2.0') { splice(@args, 3, 1); } @@ -3008,9 +3255,22 @@ sub libsvn_new_tree { return $log_entry; } my ($paths, $rev, $author, $date, $msg) = @_; - open my $gui, '| git-update-index -z --index-info' or croak $!; - libsvn_traverse($gui, '', $SVN_PATH, $rev); - close $gui or croak $?; + if ($_xfer_delta) { + my $pool = SVN::Pool->new; + my $ed = SVN::Git::Fetcher->new({q => $_q}); + my $reporter = $SVN->do_update($rev, '', 1, $ed, $pool); + my @lock = $SVN::Core::VERSION ge '1.2.0' ? 
(undef) : (); + $reporter->set_path('', $rev, 1, @lock, $pool); + $reporter->finish_report($pool); + $pool->clear; + unless ($ed->{git_commit_ok}) { + die "SVN connection failed somewhere...\n"; + } + } else { + open my $gui, '| git-update-index -z --index-info' or croak $!; + libsvn_traverse($gui, '', $SVN->{svn_path}, $rev); + close $gui or croak $?; + } return libsvn_log_entry($rev, $author, $date, $msg); } @@ -3094,12 +3354,11 @@ sub libsvn_commit_cb { sub libsvn_ls_fullurl { my $fullurl = shift; - my ($repo, $path) = repo_path_split($fullurl); - $SVN ||= libsvn_connect($repo); + my $ra = libsvn_connect($fullurl); my @ret; my $pool = SVN::Pool->new; - my ($dirent, undef, undef) = $SVN->get_dir($path, - $SVN->get_latest_revnum, $pool); + my $r = defined $_revision ? $_revision : $ra->get_latest_revnum; + my ($dirent, undef, undef) = $ra->get_dir('', $r, $pool); foreach my $d (keys %$dirent) { if ($dirent->{$d}->kind == $SVN::Node::dir) { push @ret, "$d/"; # add '/' for compat with cli svn @@ -3120,8 +3379,9 @@ sub libsvn_skip_unknown_revs { # Wonderfully consistent library, eh? # 160013 - svn:// and file:// # 175002 - http(s):// + # 175007 - http(s):// (this repo required authorization, too...) # More codes may be discovered later... - if ($errno == 175002 || $errno == 160013) { + if ($errno == 175007 || $errno == 175002 || $errno == 160013) { return; } croak "Error from SVN, ($errno): ", $err->expanded_message,"\n"; @@ -3180,6 +3440,145 @@ sub copy_remote_ref { "refs/remotes/$GIT_SVN on $origin\n"; } } +package SVN::Git::Fetcher; +use vars qw/@ISA/; +use strict; +use warnings; +use Carp qw/croak/; +use IO::File qw//; + +# file baton members: path, mode_a, mode_b, pool, fh, blob, base +sub new { + my ($class, $git_svn) = @_; + my $self = SVN::Delta::Editor->new; + bless $self, $class; + open my $gui, '| git-update-index -z --index-info' or croak $!; + $self->{gui} = $gui; + $self->{c} = $git_svn->{c} if exists $git_svn->{c}; + $self->{q} = $git_svn->{q}; + require Digest::MD5; + $self; +} + +sub delete_entry { + my ($self, $path, $rev, $pb) = @_; + process_rm($self->{gui}, $self->{c}, $path, $self->{q}); + undef; +} + +sub open_file { + my ($self, $path, $pb, $rev) = @_; + my ($mode, $blob) = (safe_qx('git-ls-tree',$self->{c},'--',$path) + =~ /^(\d{6}) blob ([a-f\d]{40})\t/); + unless (defined $mode && defined $blob) { + die "$path was not found in commit $self->{c} (r$rev)\n"; + } + { path => $path, mode_a => $mode, mode_b => $mode, blob => $blob, + pool => SVN::Pool->new, action => 'M' }; +} + +sub add_file { + my ($self, $path, $pb, $cp_path, $cp_rev) = @_; + { path => $path, mode_a => 100644, mode_b => 100644, + pool => SVN::Pool->new, action => 'A' }; +} + +sub change_file_prop { + my ($self, $fb, $prop, $value) = @_; + if ($prop eq 'svn:executable') { + if ($fb->{mode_b} != 120000) { + $fb->{mode_b} = defined $value ? 100755 : 100644; + } + } elsif ($prop eq 'svn:special') { + $fb->{mode_b} = defined $value ? 
120000 : 100644; + } + undef; +} + +sub apply_textdelta { + my ($self, $fb, $exp) = @_; + my $fh = IO::File->new_tmpfile; + $fh->autoflush(1); + # $fh gets auto-closed() by SVN::TxDelta::apply(), + # (but $base does not,) so dup() it for reading in close_file + open my $dup, '<&', $fh or croak $!; + my $base = IO::File->new_tmpfile; + $base->autoflush(1); + if ($fb->{blob}) { + defined (my $pid = fork) or croak $!; + if (!$pid) { + open STDOUT, '>&', $base or croak $!; + print STDOUT 'link ' if ($fb->{mode_a} == 120000); + exec qw/git-cat-file blob/, $fb->{blob} or croak $!; + } + waitpid $pid, 0; + croak $? if $?; + + if (defined $exp) { + seek $base, 0, 0 or croak $!; + my $md5 = Digest::MD5->new; + $md5->addfile($base); + my $got = $md5->hexdigest; + die "Checksum mismatch: $fb->{path} $fb->{blob}\n", + "expected: $exp\n", + " got: $got\n" if ($got ne $exp); + } + } + seek $base, 0, 0 or croak $!; + $fb->{fh} = $dup; + $fb->{base} = $base; + [ SVN::TxDelta::apply($base, $fh, undef, $fb->{path}, $fb->{pool}) ]; +} + +sub close_file { + my ($self, $fb, $exp) = @_; + my $hash; + my $path = $fb->{path}; + if (my $fh = $fb->{fh}) { + seek($fh, 0, 0) or croak $!; + my $md5 = Digest::MD5->new; + $md5->addfile($fh); + my $got = $md5->hexdigest; + die "Checksum mismatch: $path\n", + "expected: $exp\n got: $got\n" if ($got ne $exp); + seek($fh, 0, 0) or croak $!; + if ($fb->{mode_b} == 120000) { + read($fh, my $buf, 5) == 5 or croak $!; + $buf eq 'link ' or die "$path has mode 120000", + "but is not a link\n"; + } + defined(my $pid = open my $out,'-|') or die "Can't fork: $!\n"; + if (!$pid) { + open STDIN, '<&', $fh or croak $!; + exec qw/git-hash-object -w --stdin/ or croak $!; + } + chomp($hash = do { local $/; <$out> }); + close $out or croak $!; + close $fh or croak $!; + $hash =~ /^[a-f\d]{40}$/ or die "not a sha1: $hash\n"; + close $fb->{base} or croak $!; + } else { + $hash = $fb->{blob} or die "no blob information\n"; + } + $fb->{pool}->clear; + my $gui = $self->{gui}; + print $gui "$fb->{mode_b} $hash\t$path\0" or croak $!; + print "\t$fb->{action}\t$path\n" if $fb->{action} && ! $self->{q}; + undef; +} + +sub abort_edit { + my $self = shift; + close $self->{gui}; + $self->SUPER::abort_edit(@_); +} + +sub close_edit { + my $self = shift; + close $self->{gui} or croak $!; + $self->{git_commit_ok} = 1; + $self->SUPER::close_edit(@_); +} package SVN::Git::Editor; use vars qw/@ISA/; @@ -3209,8 +3608,7 @@ sub split_path { } sub repo_path { - (defined $_[1] && length $_[1]) ? "$_[0]->{svn_path}/$_[1]" - : $_[0]->{svn_path} + (defined $_[1] && length $_[1]) ? $_[1] : '' } sub url_path { @@ -3242,10 +3640,9 @@ sub rmdirs { exec qw/git-ls-tree --name-only -r -z/, $self->{c} or croak $!; } local $/ = "\0"; - my @svn_path = split m#/#, $self->{svn_path}; while (<$fh>) { chomp; - my @dn = (@svn_path, (split m#/#, $_)); + my @dn = split m#/#, $_; while (pop @dn) { delete $rm->{join '/', @dn}; } diff --git a/git-tag.sh b/git-tag.sh index ac269e3277..d53f94cd9c 100755 --- a/git-tag.sh +++ b/git-tag.sh @@ -5,6 +5,7 @@ USAGE='-l [<pattern>] | [-a | -s | -u <key-id>] [-f | -d] [-m <msg>] <tagname> [ SUBDIRECTORY_OK='Yes' . 
git-sh-setup +message_given= annotate= signed= force= @@ -37,6 +38,12 @@ do annotate=1 shift message="$1" + if test "$#" = "0"; then + die "error: option -m needs an argument" + exit 2 + else + message_given=1 + fi ;; -u) annotate=1 @@ -83,7 +90,7 @@ tagger=$(git-var GIT_COMMITTER_IDENT) || exit 1 trap 'rm -f "$GIT_DIR"/TAG_TMP* "$GIT_DIR"/TAG_FINALMSG "$GIT_DIR"/TAG_EDITMSG' 0 if [ "$annotate" ]; then - if [ -z "$message" ]; then + if [ -z "$message_given" ]; then ( echo "#" echo "# Write a tag message" echo "#" ) > "$GIT_DIR"/TAG_EDITMSG @@ -95,7 +102,7 @@ if [ "$annotate" ]; then grep -v '^#' <"$GIT_DIR"/TAG_EDITMSG | git-stripspace >"$GIT_DIR"/TAG_FINALMSG - [ -s "$GIT_DIR"/TAG_FINALMSG ] || { + [ -s "$GIT_DIR"/TAG_FINALMSG -o -n "$message_given" ] || { echo >&2 "No tag message?" exit 1 } @@ -260,6 +260,7 @@ static void handle_internal_command(int argc, const char **argv, char **envp) { "rev-parse", cmd_rev_parse, RUN_SETUP }, { "rm", cmd_rm, RUN_SETUP }, { "runstatus", cmd_runstatus, RUN_SETUP }, + { "shortlog", cmd_shortlog, RUN_SETUP | USE_PAGER }, { "show-branch", cmd_show_branch, RUN_SETUP }, { "show", cmd_show, RUN_SETUP | USE_PAGER }, { "stripspace", cmd_stripspace }, diff --git a/git.spec.in b/git.spec.in index 83268fc9d9..f2374b7331 100644 --- a/git.spec.in +++ b/git.spec.in @@ -24,7 +24,7 @@ This is a dummy package which brings in all subpackages. %package core Summary: Core git tools Group: Development/Tools -Requires: zlib >= 1.2, rsync, rcs, curl, less, openssh-clients, python >= 2.3, expat +Requires: zlib >= 1.2, rsync, rcs, curl, less, openssh-clients, expat %description core This is a stupid (but extremely fast) directory content manager. It doesn't do a whole lot, but what it _does_ do is track directory diff --git a/gitMergeCommon.py b/gitMergeCommon.py deleted file mode 100644 index fdbf9e4778..0000000000 --- a/gitMergeCommon.py +++ /dev/null @@ -1,275 +0,0 @@ -# -# Copyright (C) 2005 Fredrik Kuivinen -# - -import sys, re, os, traceback -from sets import Set - -def die(*args): - printList(args, sys.stderr) - sys.exit(2) - -def printList(list, file=sys.stdout): - for x in list: - file.write(str(x)) - file.write(' ') - file.write('\n') - -import subprocess - -# Debugging machinery -# ------------------- - -DEBUG = 0 -functionsToDebug = Set() - -def addDebug(func): - if type(func) == str: - functionsToDebug.add(func) - else: - functionsToDebug.add(func.func_name) - -def debug(*args): - if DEBUG: - funcName = traceback.extract_stack()[-2][2] - if funcName in functionsToDebug: - printList(args) - -# Program execution -# ----------------- - -class ProgramError(Exception): - def __init__(self, progStr, error): - self.progStr = progStr - self.error = error - - def __str__(self): - return self.progStr + ': ' + self.error - -addDebug('runProgram') -def runProgram(prog, input=None, returnCode=False, env=None, pipeOutput=True): - debug('runProgram prog:', str(prog), 'input:', str(input)) - if type(prog) is str: - progStr = prog - else: - progStr = ' '.join(prog) - - try: - if pipeOutput: - stderr = subprocess.STDOUT - stdout = subprocess.PIPE - else: - stderr = None - stdout = None - pop = subprocess.Popen(prog, - shell = type(prog) is str, - stderr=stderr, - stdout=stdout, - stdin=subprocess.PIPE, - env=env) - except OSError, e: - debug('strerror:', e.strerror) - raise ProgramError(progStr, e.strerror) - - if input != None: - pop.stdin.write(input) - pop.stdin.close() - - if pipeOutput: - out = pop.stdout.read() - else: - out = '' - - code = pop.wait() - if returnCode: - ret = 
[out, code] - else: - ret = out - if code != 0 and not returnCode: - debug('error output:', out) - debug('prog:', prog) - raise ProgramError(progStr, out) -# debug('output:', out.replace('\0', '\n')) - return ret - -# Code for computing common ancestors -# ----------------------------------- - -currentId = 0 -def getUniqueId(): - global currentId - currentId += 1 - return currentId - -# The 'virtual' commit objects have SHAs which are integers -shaRE = re.compile('^[0-9a-f]{40}$') -def isSha(obj): - return (type(obj) is str and bool(shaRE.match(obj))) or \ - (type(obj) is int and obj >= 1) - -class Commit(object): - __slots__ = ['parents', 'firstLineMsg', 'children', '_tree', 'sha', - 'virtual'] - - def __init__(self, sha, parents, tree=None): - self.parents = parents - self.firstLineMsg = None - self.children = [] - - if tree: - tree = tree.rstrip() - assert(isSha(tree)) - self._tree = tree - - if not sha: - self.sha = getUniqueId() - self.virtual = True - self.firstLineMsg = 'virtual commit' - assert(isSha(tree)) - else: - self.virtual = False - self.sha = sha.rstrip() - assert(isSha(self.sha)) - - def tree(self): - self.getInfo() - assert(self._tree != None) - return self._tree - - def shortInfo(self): - self.getInfo() - return str(self.sha) + ' ' + self.firstLineMsg - - def __str__(self): - return self.shortInfo() - - def getInfo(self): - if self.virtual or self.firstLineMsg != None: - return - else: - info = runProgram(['git-cat-file', 'commit', self.sha]) - info = info.split('\n') - msg = False - for l in info: - if msg: - self.firstLineMsg = l - break - else: - if l.startswith('tree'): - self._tree = l[5:].rstrip() - elif l == '': - msg = True - -class Graph: - def __init__(self): - self.commits = [] - self.shaMap = {} - - def addNode(self, node): - assert(isinstance(node, Commit)) - self.shaMap[node.sha] = node - self.commits.append(node) - for p in node.parents: - p.children.append(node) - return node - - def reachableNodes(self, n1, n2): - res = {} - def traverse(n): - res[n] = True - for p in n.parents: - traverse(p) - - traverse(n1) - traverse(n2) - return res - - def fixParents(self, node): - for x in range(0, len(node.parents)): - node.parents[x] = self.shaMap[node.parents[x]] - -# addDebug('buildGraph') -def buildGraph(heads): - debug('buildGraph heads:', heads) - for h in heads: - assert(isSha(h)) - - g = Graph() - - out = runProgram(['git-rev-list', '--parents'] + heads) - for l in out.split('\n'): - if l == '': - continue - shas = l.split(' ') - - # This is a hack, we temporarily use the 'parents' attribute - # to contain a list of SHA1:s. They are later replaced by proper - # Commit objects. 
- c = Commit(shas[0], shas[1:]) - - g.commits.append(c) - g.shaMap[c.sha] = c - - for c in g.commits: - g.fixParents(c) - - for c in g.commits: - for p in c.parents: - p.children.append(c) - return g - -# Write the empty tree to the object database and return its SHA1 -def writeEmptyTree(): - tmpIndex = os.environ.get('GIT_DIR', '.git') + '/merge-tmp-index' - def delTmpIndex(): - try: - os.unlink(tmpIndex) - except OSError: - pass - delTmpIndex() - newEnv = os.environ.copy() - newEnv['GIT_INDEX_FILE'] = tmpIndex - res = runProgram(['git-write-tree'], env=newEnv).rstrip() - delTmpIndex() - return res - -def addCommonRoot(graph): - roots = [] - for c in graph.commits: - if len(c.parents) == 0: - roots.append(c) - - superRoot = Commit(sha=None, parents=[], tree=writeEmptyTree()) - graph.addNode(superRoot) - for r in roots: - r.parents = [superRoot] - superRoot.children = roots - return superRoot - -def getCommonAncestors(graph, commit1, commit2): - '''Find the common ancestors for commit1 and commit2''' - assert(isinstance(commit1, Commit) and isinstance(commit2, Commit)) - - def traverse(start, set): - stack = [start] - while len(stack) > 0: - el = stack.pop() - set.add(el) - for p in el.parents: - if p not in set: - stack.append(p) - h1Set = Set() - h2Set = Set() - traverse(commit1, h1Set) - traverse(commit2, h2Set) - shared = h1Set.intersection(h2Set) - - if len(shared) == 0: - shared = [addCommonRoot(graph)] - - res = Set() - - for s in shared: - if len([c for c in s.children if c in shared]) == 0: - res.add(s) - return list(res) @@ -554,7 +554,7 @@ proc makewindow {} { pack .ctop.top.lbar.vlabel -side left -fill y global viewhlmenu selectedhlview set viewhlmenu [tk_optionMenu .ctop.top.lbar.vhl selectedhlview None] - $viewhlmenu entryconf 0 -command delvhighlight + $viewhlmenu entryconf None -command delvhighlight $viewhlmenu conf -font $uifont .ctop.top.lbar.vhl conf -font $uifont pack .ctop.top.lbar.vhl -side left -fill y @@ -1474,7 +1474,7 @@ proc doviewmenu {m first cmd op argv} { proc allviewmenus {n op args} { global viewhlmenu - doviewmenu .bar.view 7 [list showview $n] $op $args + doviewmenu .bar.view 5 [list showview $n] $op $args doviewmenu $viewhlmenu 1 [list addvhighlight $n] $op $args } @@ -1516,7 +1516,7 @@ proc newviewok {top n} { set viewperm($n) $newviewperm($n) if {$newviewname($n) ne $viewname($n)} { set viewname($n) $newviewname($n) - doviewmenu .bar.view 7 [list showview $n] \ + doviewmenu .bar.view 5 [list showview $n] \ entryconf [list -label $viewname($n)] doviewmenu $viewhlmenu 1 [list addvhighlight $n] \ entryconf [list -label $viewname($n) -value $viewname($n)] @@ -1632,8 +1632,8 @@ proc showview {n} { set curview $n set selectedview $n - .bar.view entryconf 2 -state [expr {$n == 0? "disabled": "normal"}] - .bar.view entryconf 3 -state [expr {$n == 0? "disabled": "normal"}] + .bar.view entryconf Edit* -state [expr {$n == 0? "disabled": "normal"}] + .bar.view entryconf Delete* -state [expr {$n == 0? 
"disabled": "normal"}] if {![info exists viewdata($n)]} { set pending_select $selid @@ -4899,9 +4899,9 @@ proc rowmenu {x y id} { } else { set state normal } - $rowctxmenu entryconfigure 0 -state $state - $rowctxmenu entryconfigure 1 -state $state - $rowctxmenu entryconfigure 2 -state $state + $rowctxmenu entryconfigure "Diff this*" -state $state + $rowctxmenu entryconfigure "Diff selected*" -state $state + $rowctxmenu entryconfigure "Make patch" -state $state set rowmenuid $id tk_popup $rowctxmenu $x $y } @@ -6305,8 +6305,8 @@ if {$cmdline_files ne {} || $revtreeargs ne {}} { set viewargs(1) $revtreeargs set viewperm(1) 0 addviewmenu 1 - .bar.view entryconf 2 -state normal - .bar.view entryconf 3 -state normal + .bar.view entryconf Edit* -state normal + .bar.view entryconf Delete* -state normal } if {[info exists permviews]} { diff --git a/gitweb/gitweb.css b/gitweb/gitweb.css index 974b47f19c..7177c6e86b 100644 --- a/gitweb/gitweb.css +++ b/gitweb/gitweb.css @@ -334,11 +334,13 @@ div.diff.extended_header { padding: 2px 0px 2px 0px; } +div.diff a.list, div.diff a.path, div.diff a.hash { text-decoration: none; } +div.diff a.list:hover, div.diff a.path:hover, div.diff a.hash:hover { text-decoration: underline; @@ -362,14 +364,25 @@ div.diff.rem { color: #cc0000; } +div.diff.chunk_header a, div.diff.chunk_header { color: #990099; +} +div.diff.chunk_header { border: dotted #ffe0ff; border-width: 1px 0px 0px 0px; margin-top: 2px; } +div.diff.chunk_header span.chunk_info { + background-color: #ffeeff; +} + +div.diff.chunk_header span.section { + color: #aa22aa; +} + div.diff.incomplete { color: #cccccc; } diff --git a/gitweb/gitweb.perl b/gitweb/gitweb.perl index 758759576c..5ea3fda540 100755 --- a/gitweb/gitweb.perl +++ b/gitweb/gitweb.perl @@ -120,7 +120,7 @@ our %feature = ( # To disable system wide have in $GITWEB_CONFIG # $feature{'snapshot'}{'default'} = [undef]; # To have project specific config enable override in $GITWEB_CONFIG - # $feature{'blame'}{'override'} = 1; + # $feature{'snapshot'}{'override'} = 1; # and in project config gitweb.snapshot = none|gzip|bzip2; 'snapshot' => { 'sub' => \&feature_snapshot, @@ -425,6 +425,7 @@ my %actions = ( "history" => \&git_history, "log" => \&git_log, "rss" => \&git_rss, + "atom" => \&git_atom, "search" => \&git_search, "search_help" => \&git_search_help, "shortlog" => \&git_shortlog, @@ -459,7 +460,8 @@ exit; sub href(%) { my %params = @_; - my $href = $my_uri; + # default is to use -absolute url() i.e. $my_uri + my $href = $params{-full} ? $my_url : $my_uri; # XXX: Warning: If you touch this, check the search form for updating, # too. @@ -583,7 +585,21 @@ sub esc_html ($;%) { return $str; } -# Make control characterss "printable". +# quote control characters and escape filename to HTML +sub esc_path { + my $str = shift; + my %opts = @_; + + $str = to_utf8($str); + $str = escapeHTML($str); + if ($opts{'-nbsp'}) { + $str =~ s/ / /g; + } + $str =~ s|([[:cntrl:]])|quot_cec($1)|eg; + return $str; +} + +# Make control characters "printable", using character escape codes (CEC) sub quot_cec { my $cntrl = shift; my %es = ( # character escape codes, aka escape sequences @@ -603,22 +619,14 @@ sub quot_cec { return "<span class=\"cntrl\">$chr</span>"; } -# Alternatively use unicode control pictures codepoints. 
+# Alternatively use unicode control pictures codepoints, +# Unicode "printable representation" (PR) sub quot_upr { my $cntrl = shift; my $chr = sprintf('&#%04d;', 0x2400+ord($cntrl)); return "<span class=\"cntrl\">$chr</span>"; } -# quote control characters and escape filename to HTML -sub esc_path { - my $str = shift; - - $str = esc_html($str); - $str =~ s|([[:cntrl:]])|quot_cec($1)|eg; - return $str; -} - # git may return quoted and escaped filenames sub unquote { my $str = shift; @@ -874,8 +882,10 @@ sub format_subject_html { } } +# format patch (diff) line (rather not to be used for diff headers) sub format_diff_line { my $line = shift; + my ($from, $to) = @_; my $char = substr($line, 0, 1); my $diff_class = ""; @@ -891,6 +901,25 @@ sub format_diff_line { $diff_class = " incomplete"; } $line = untabify($line); + if ($from && $to && $line =~ m/^\@{2} /) { + my ($from_text, $from_start, $from_lines, $to_text, $to_start, $to_lines, $section) = + $line =~ m/^\@{2} (-(\d+)(?:,(\d+))?) (\+(\d+)(?:,(\d+))?) \@{2}(.*)$/; + + $from_lines = 0 unless defined $from_lines; + $to_lines = 0 unless defined $to_lines; + + if ($from->{'href'}) { + $from_text = $cgi->a({-href=>"$from->{'href'}#l$from_start", + -class=>"list"}, $from_text); + } + if ($to->{'href'}) { + $to_text = $cgi->a({-href=>"$to->{'href'}#l$to_start", + -class=>"list"}, $to_text); + } + $line = "<span class=\"chunk_info\">@@ $from_text $to_text @@</span>" . + "<span class=\"section\">" . esc_html($section, -nbsp=>1) . "</span>"; + return "<div class=\"diff$diff_class\">$line</div>\n"; + } return "<div class=\"diff$diff_class\">" . esc_html($line, -nbsp=>1) . "</div>\n"; } @@ -1125,14 +1154,15 @@ sub git_get_last_activity { sub git_get_references { my $type = shift || ""; my %refs; - # 5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11 - # c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11^{} - open my $fd, "-|", $GIT, "peek-remote", "$projectroot/$project/" + # 5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11 + # c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11^{} + open my $fd, "-|", git_cmd(), "show-ref", "--dereference", + ($type ? ("--", "refs/$type") : ()) # use -- <pattern> if $type or return; while (my $line = <$fd>) { chomp $line; - if ($line =~ m/^([0-9a-fA-F]{40})\trefs\/($type\/?[^\^]+)/) { + if ($line =~ m!^([0-9a-fA-F]{40})\srefs/($type/?[^^]+)!) 
{ if (defined $refs{$1}) { push @{$refs{$1}}, $2; } else { @@ -1176,10 +1206,12 @@ sub parse_date { $date{'mday'} = $mday; $date{'day'} = $days[$wday]; $date{'month'} = $months[$mon]; - $date{'rfc2822'} = sprintf "%s, %d %s %4d %02d:%02d:%02d +0000", - $days[$wday], $mday, $months[$mon], 1900+$year, $hour ,$min, $sec; + $date{'rfc2822'} = sprintf "%s, %d %s %4d %02d:%02d:%02d +0000", + $days[$wday], $mday, $months[$mon], 1900+$year, $hour ,$min, $sec; $date{'mday-time'} = sprintf "%d %s %02d:%02d", $mday, $months[$mon], $hour ,$min; + $date{'iso-8601'} = sprintf "%04d-%02d-%02dT%02d:%02d:%02dZ", + 1900+$year, $mon, $mday, $hour ,$min, $sec; $tz =~ m/^([+\-][0-9][0-9])([0-9][0-9])$/; my $local = $epoch + ((int $1 + ($2/60)) * 3600); @@ -1187,9 +1219,9 @@ sub parse_date { $date{'hour_local'} = $hour; $date{'minute_local'} = $min; $date{'tz_local'} = $tz; - $date{'iso-tz'} = sprintf ("%04d-%02d-%02d %02d:%02d:%02d %s", - 1900+$year, $mon+1, $mday, - $hour, $min, $sec, $tz); + $date{'iso-tz'} = sprintf("%04d-%02d-%02d %02d:%02d:%02d %s", + 1900+$year, $mon+1, $mday, + $hour, $min, $sec, $tz); return %date; } @@ -1262,8 +1294,9 @@ sub parse_commit { $co{'author'} = $1; $co{'author_epoch'} = $2; $co{'author_tz'} = $3; - if ($co{'author'} =~ m/^([^<]+) </) { - $co{'author_name'} = $1; + if ($co{'author'} =~ m/^([^<]+) <([^>]*)>/) { + $co{'author_name'} = $1; + $co{'author_email'} = $2; } else { $co{'author_name'} = $co{'author'}; } @@ -1272,7 +1305,12 @@ sub parse_commit { $co{'committer_epoch'} = $2; $co{'committer_tz'} = $3; $co{'committer_name'} = $co{'committer'}; - $co{'committer_name'} =~ s/ <.*//; + if ($co{'committer'} =~ m/^([^<]+) <([^>]*)>/) { + $co{'committer_name'} = $1; + $co{'committer_email'} = $2; + } else { + $co{'committer_name'} = $co{'committer'}; + } } } if (!defined $co{'tree'}) { @@ -1650,14 +1688,17 @@ EOF } } if (defined $project) { - printf('<link rel="alternate" title="%s log" '. - 'href="%s" type="application/rss+xml"/>'."\n", + printf('<link rel="alternate" title="%s log RSS feed" '. + 'href="%s" type="application/rss+xml" />'."\n", esc_param($project), href(action=>"rss")); + printf('<link rel="alternate" title="%s log Atom feed" '. + 'href="%s" type="application/atom+xml" />'."\n", + esc_param($project), href(action=>"atom")); } else { printf('<link rel="alternate" title="%s projects list" '. 'href="%s" type="text/plain; charset=utf-8"/>'."\n", $site_name, href(project=>undef, action=>"project_index")); - printf('<link rel="alternate" title="%s projects logs" '. + printf('<link rel="alternate" title="%s projects feeds" '. 'href="%s" type="text/x-opml"/>'."\n", $site_name, href(project=>undef, action=>"opml")); } @@ -1723,7 +1764,9 @@ sub git_footer_html { print "<div class=\"page_footer_text\">" . esc_html($descr) . "</div>\n"; } print $cgi->a({-href => href(action=>"rss"), - -class => "rss_logo"}, "RSS") . "\n"; + -class => "rss_logo"}, "RSS") . " "; + print $cgi->a({-href => href(action=>"atom"), + -class => "rss_logo"}, "Atom") . "\n"; } else { print $cgi->a({-href => href(project=>undef, action=>"opml"), -class => "rss_logo"}, "OPML") . " "; @@ -2062,7 +2105,11 @@ sub git_difftree_body { # link to patch $patchno++; print $cgi->a({-href => "#patch$patchno"}, "patch"); + print " | "; } + print $cgi->a({-href => href(action=>"blob", hash=>$diff{'to_id'}, + hash_base=>$hash, file_name=>$diff{'file'})}, + "blob") . 
" | "; print "</td>\n"; } elsif ($diff{'status'} eq "D") { # deleted @@ -2082,13 +2129,11 @@ sub git_difftree_body { } print $cgi->a({-href => href(action=>"blob", hash=>$diff{'from_id'}, hash_base=>$parent, file_name=>$diff{'file'})}, - "blob") . " | "; + "blob") . " | "; if ($have_blame) { - print $cgi->a({-href => - href(action=>"blame", - hash_base=>$parent, - file_name=>$diff{'file'})}, - "blame") . " | "; + print $cgi->a({-href => href(action=>"blame", hash_base=>$parent, + file_name=>$diff{'file'})}, + "blame") . " | "; } print $cgi->a({-href => href(action=>"history", hash_base=>$parent, file_name=>$diff{'file'})}, @@ -2133,13 +2178,12 @@ sub git_difftree_body { " | "; } print $cgi->a({-href => href(action=>"blob", hash=>$diff{'to_id'}, - hash_base=>$hash, file_name=>$diff{'file'})}, - "blob") . " | "; + hash_base=>$hash, file_name=>$diff{'file'})}, + "blob") . " | "; if ($have_blame) { - print $cgi->a({-href => href(action=>"blame", - hash_base=>$hash, - file_name=>$diff{'file'})}, - "blame") . " | "; + print $cgi->a({-href => href(action=>"blame", hash_base=>$hash, + file_name=>$diff{'file'})}, + "blame") . " | "; } print $cgi->a({-href => href(action=>"history", hash_base=>$hash, file_name=>$diff{'file'})}, @@ -2178,17 +2222,16 @@ sub git_difftree_body { "diff") . " | "; } - print $cgi->a({-href => href(action=>"blob", hash=>$diff{'from_id'}, - hash_base=>$parent, file_name=>$diff{'from_file'})}, - "blob") . " | "; + print $cgi->a({-href => href(action=>"blob", hash=>$diff{'to_id'}, + hash_base=>$parent, file_name=>$diff{'to_file'})}, + "blob") . " | "; if ($have_blame) { - print $cgi->a({-href => href(action=>"blame", - hash_base=>$hash, - file_name=>$diff{'to_file'})}, - "blame") . " | "; + print $cgi->a({-href => href(action=>"blame", hash_base=>$hash, + file_name=>$diff{'to_file'})}, + "blame") . " | "; } - print $cgi->a({-href => href(action=>"history", hash_base=>$parent, - file_name=>$diff{'from_file'})}, + print $cgi->a({-href => href(action=>"history", hash_base=>$hash, + file_name=>$diff{'to_file'})}, "history"); print "</td>\n"; @@ -2202,31 +2245,56 @@ sub git_patchset_body { my ($fd, $difftree, $hash, $hash_parent) = @_; my $patch_idx = 0; - my $in_header = 0; - my $patch_found = 0; + my $patch_line; my $diffinfo; my (%from, %to); + my ($from_id, $to_id); print "<div class=\"patchset\">\n"; - LINE: - while (my $patch_line = <$fd>) { + # skip to first patch + while ($patch_line = <$fd>) { chomp $patch_line; - if ($patch_line =~ m/^diff /) { # "git diff" header - # beginning of patch (in patchset) - if ($patch_found) { - # close extended header for previous empty patch - if ($in_header) { - print "</div>\n" # class="diff extended_header" - } - # close previous patch - print "</div>\n"; # class="patch" - } else { - # first patch in patchset - $patch_found = 1; + last if ($patch_line =~ m/^diff /); + } + + PATCH: + while ($patch_line) { + my @diff_header; + + # git diff header + #assert($patch_line =~ m/^diff /) if DEBUG; + #assert($patch_line !~ m!$/$!) if DEBUG; # is chomp-ed + push @diff_header, $patch_line; + + # extended diff header + EXTENDED_HEADER: + while ($patch_line = <$fd>) { + chomp $patch_line; + + last EXTENDED_HEADER if ($patch_line =~ m/^--- /); + + if ($patch_line =~ m/^index ([0-9a-fA-F]{40})..([0-9a-fA-F]{40})/) { + $from_id = $1; + $to_id = $2; } - print "<div class=\"patch\" id=\"patch". 
($patch_idx+1) ."\">\n"; + + push @diff_header, $patch_line; + } + #last PATCH unless $patch_line; + my $last_patch_line = $patch_line; + + # check if current patch belong to current raw line + # and parse raw git-diff line if needed + if (defined $diffinfo && + $diffinfo->{'from_id'} eq $from_id && + $diffinfo->{'to_id'} eq $to_id) { + # this is split patch + print "<div class=\"patch cont\">\n"; + } else { + # advance raw git-diff output if needed + $patch_idx++ if defined $diffinfo; # read and prepare patch information if (ref($difftree->[$patch_idx]) eq "HASH") { @@ -2247,100 +2315,112 @@ sub git_patchset_body { hash=>$diffinfo->{'to_id'}, file_name=>$to{'file'}); } - $patch_idx++; - - # print "git diff" header - $patch_line =~ s!^(diff (.*?) )"?a/.*$!$1!; - if ($from{'href'}) { - $patch_line .= $cgi->a({-href => $from{'href'}, -class => "path"}, - 'a/' . esc_path($from{'file'})); - } else { # file was added - $patch_line .= 'a/' . esc_path($from{'file'}); - } - $patch_line .= ' '; - if ($to{'href'}) { - $patch_line .= $cgi->a({-href => $to{'href'}, -class => "path"}, - 'b/' . esc_path($to{'file'})); - } else { # file was deleted - $patch_line .= 'b/' . esc_path($to{'file'}); - } - - print "<div class=\"diff header\">$patch_line</div>\n"; - print "<div class=\"diff extended_header\">\n"; - $in_header = 1; - next LINE; + # this is first patch for raw difftree line with $patch_idx index + # we index @$difftree array from 0, but number patches from 1 + print "<div class=\"patch\" id=\"patch". ($patch_idx+1) ."\">\n"; } - if ($in_header) { - if ($patch_line !~ m/^---/) { - # match <path> - if ($patch_line =~ s!^((copy|rename) from ).*$!$1! && $from{'href'}) { - $patch_line .= $cgi->a({-href=>$from{'href'}, -class=>"path"}, - esc_path($from{'file'})); - } - if ($patch_line =~ s!^((copy|rename) to ).*$!$1! && $to{'href'}) { - $patch_line = $cgi->a({-href=>$to{'href'}, -class=>"path"}, - esc_path($to{'file'})); - } - # match <mode> - if ($patch_line =~ m/\s(\d{6})$/) { - $patch_line .= '<span class="info"> (' . - file_type_long($1) . - ')</span>'; + # print "git diff" header + $patch_line = shift @diff_header; + $patch_line =~ s!^(diff (.*?) )"?a/.*$!$1!; + if ($from{'href'}) { + $patch_line .= $cgi->a({-href => $from{'href'}, -class => "path"}, + 'a/' . esc_path($from{'file'})); + } else { # file was added + $patch_line .= 'a/' . esc_path($from{'file'}); + } + $patch_line .= ' '; + if ($to{'href'}) { + $patch_line .= $cgi->a({-href => $to{'href'}, -class => "path"}, + 'b/' . esc_path($to{'file'})); + } else { # file was deleted + $patch_line .= 'b/' . esc_path($to{'file'}); + } + print "<div class=\"diff header\">$patch_line</div>\n"; + + # print extended diff header + print "<div class=\"diff extended_header\">\n" if (@diff_header > 0); + EXTENDED_HEADER: + foreach $patch_line (@diff_header) { + # match <path> + if ($patch_line =~ s!^((copy|rename) from ).*$!$1! && $from{'href'}) { + $patch_line .= $cgi->a({-href=>$from{'href'}, -class=>"path"}, + esc_path($from{'file'})); + } + if ($patch_line =~ s!^((copy|rename) to ).*$!$1! && $to{'href'}) { + $patch_line = $cgi->a({-href=>$to{'href'}, -class=>"path"}, + esc_path($to{'file'})); + } + # match <mode> + if ($patch_line =~ m/\s(\d{6})$/) { + $patch_line .= '<span class="info"> (' . + file_type_long($1) . 
+ ')</span>'; + } + # match <hash> + if ($patch_line =~ m/^index/) { + my ($from_link, $to_link); + if ($from{'href'}) { + $from_link = $cgi->a({-href=>$from{'href'}, -class=>"hash"}, + substr($diffinfo->{'from_id'},0,7)); + } else { + $from_link = '0' x 7; } - # match <hash> - if ($patch_line =~ m/^index/) { - my ($from_link, $to_link); - if ($from{'href'}) { - $from_link = $cgi->a({-href=>$from{'href'}, -class=>"hash"}, - substr($diffinfo->{'from_id'},0,7)); - } else { - $from_link = '0' x 7; - } - if ($to{'href'}) { - $to_link = $cgi->a({-href=>$to{'href'}, -class=>"hash"}, - substr($diffinfo->{'to_id'},0,7)); - } else { - $to_link = '0' x 7; - } - my ($from_id, $to_id) = ($diffinfo->{'from_id'}, $diffinfo->{'to_id'}); - $patch_line =~ s!$from_id\.\.$to_id!$from_link..$to_link!; + if ($to{'href'}) { + $to_link = $cgi->a({-href=>$to{'href'}, -class=>"hash"}, + substr($diffinfo->{'to_id'},0,7)); + } else { + $to_link = '0' x 7; } - print $patch_line . "<br/>\n"; - - } else { - #$in_header && $patch_line =~ m/^---/; - print "</div>\n"; # class="diff extended_header" - $in_header = 0; + #affirm { + # my ($from_hash, $to_hash) = + # ($patch_line =~ m/^index ([0-9a-fA-F]{40})..([0-9a-fA-F]{40})/); + # my ($from_id, $to_id) = + # ($diffinfo->{'from_id'}, $diffinfo->{'to_id'}); + # ($from_hash eq $from_id) && ($to_hash eq $to_id); + #} if DEBUG; + my ($from_id, $to_id) = ($diffinfo->{'from_id'}, $diffinfo->{'to_id'}); + $patch_line =~ s!$from_id\.\.$to_id!$from_link..$to_link!; + } + print $patch_line . "<br/>\n"; + } + print "</div>\n" if (@diff_header > 0); # class="diff extended_header" + + # from-file/to-file diff header + $patch_line = $last_patch_line; + #assert($patch_line =~ m/^---/) if DEBUG; + if ($from{'href'}) { + $patch_line = '--- a/' . + $cgi->a({-href=>$from{'href'}, -class=>"path"}, + esc_path($from{'file'})); + } + print "<div class=\"diff from_file\">$patch_line</div>\n"; - if ($from{'href'}) { - $patch_line = '--- a/' . - $cgi->a({-href=>$from{'href'}, -class=>"path"}, - esc_path($from{'file'})); - } - print "<div class=\"diff from_file\">$patch_line</div>\n"; + $patch_line = <$fd>; + #last PATCH unless $patch_line; + chomp $patch_line; - $patch_line = <$fd>; - chomp $patch_line; + #assert($patch_line =~ m/^+++/) if DEBUG; + if ($to{'href'}) { + $patch_line = '+++ b/' . + $cgi->a({-href=>$to{'href'}, -class=>"path"}, + esc_path($to{'file'})); + } + print "<div class=\"diff to_file\">$patch_line</div>\n"; - #$patch_line =~ m/^+++/; - if ($to{'href'}) { - $patch_line = '+++ b/' . - $cgi->a({-href=>$to{'href'}, -class=>"path"}, - esc_path($to{'file'})); - } - print "<div class=\"diff to_file\">$patch_line</div>\n"; + # the patch itself + LINE: + while ($patch_line = <$fd>) { + chomp $patch_line; - } + next PATCH if ($patch_line =~ m/^diff /); - next LINE; + print format_diff_line($patch_line, \%from, \%to); } - print format_diff_line($patch_line); + } continue { + print "</div>\n"; # class="patch" } - print "</div>\n" if $in_header; # extended header - - print "</div>\n" if $patch_found; # class="patch" print "</div>\n"; # class="patchset" } @@ -2361,6 +2441,7 @@ sub git_project_list_body { ($pr->{'age'}, $pr->{'age_string'}) = @aa; if (!defined $pr->{'descr'}) { my $descr = git_get_project_description($pr->{'path'}) || ""; + $pr->{'descr_long'} = to_utf8($descr); $pr->{'descr'} = chop_str($descr, 25, 5); } if (!defined $pr->{'owner'}) { @@ -2396,7 +2477,7 @@ sub git_project_list_body { } else { print "<th>" . 
$cgi->a({-href => href(project=>undef, order=>'project'), - -class => "header"}, "Project") . + -class => "header"}, "Project") . "</th>\n"; } if ($order eq "descr") { @@ -2405,7 +2486,7 @@ sub git_project_list_body { } else { print "<th>" . $cgi->a({-href => href(project=>undef, order=>'descr'), - -class => "header"}, "Description") . + -class => "header"}, "Description") . "</th>\n"; } if ($order eq "owner") { @@ -2414,7 +2495,7 @@ sub git_project_list_body { } else { print "<th>" . $cgi->a({-href => href(project=>undef, order=>'owner'), - -class => "header"}, "Owner") . + -class => "header"}, "Owner") . "</th>\n"; } if ($order eq "age") { @@ -2423,7 +2504,7 @@ sub git_project_list_body { } else { print "<th>" . $cgi->a({-href => href(project=>undef, order=>'age'), - -class => "header"}, "Last Change") . + -class => "header"}, "Last Change") . "</th>\n"; } print "<th></th>\n" . @@ -2448,7 +2529,9 @@ sub git_project_list_body { } print "<td>" . $cgi->a({-href => href(project=>$pr->{'path'}, action=>"summary"), -class => "list"}, esc_html($pr->{'path'})) . "</td>\n" . - "<td>" . esc_html($pr->{'descr'}) . "</td>\n" . + "<td>" . $cgi->a({-href => href(project=>$pr->{'path'}, action=>"summary"), + -class => "list", -title => $pr->{'descr_long'}}, + esc_html($pr->{'descr'})) . "</td>\n" . "<td><i>" . chop_str($pr->{'owner'}, 15) . "</i></td>\n"; print "<td class=\"". age_class($pr->{'age'}) . "\">" . $pr->{'age_string'} . "</td>\n" . @@ -2851,8 +2934,8 @@ sub git_tag { print "<div class=\"page_body\">"; my $comment = $tag{'comment'}; foreach my $line (@$comment) { - chomp($line); - print esc_html($line) . "<br/>\n"; + chomp $line; + print esc_html($line, -nbsp=>1) . "<br/>\n"; } print "</div>\n"; git_footer_html(); @@ -2921,7 +3004,7 @@ HTML } } my $data = $_; - chomp($data); + chomp $data; my $rev = substr($full_rev, 0, 8); my $author = $meta->{'author'}; my %date = parse_date($meta->{'author-time'}, @@ -3146,10 +3229,13 @@ sub git_blob { open my $fd, "-|", git_cmd(), "cat-file", "blob", $hash or die_error(undef, "Couldn't cat $file_name, $hash"); my $mimetype = blob_mimetype($fd, $file_name); - if ($mimetype !~ m/^text\//) { + if ($mimetype !~ m!^(?:text/|image/(?:gif|png|jpeg)$)!) { close $fd; return git_blob_plain($mimetype); } + # we can have blame only for text/* mimetype + $have_blame &&= ($mimetype =~ m!^text/!); + git_header_html(undef, $expires); my $formats_nav = ''; if (defined $hash_base && (my %co = parse_commit($hash_base))) { @@ -3186,13 +3272,24 @@ sub git_blob { } git_print_page_path($file_name, "blob", $hash_base); print "<div class=\"page_body\">\n"; - my $nr; - while (my $line = <$fd>) { - chomp $line; - $nr++; - $line = untabify($line); - printf "<div class=\"pre\"><a id=\"l%i\" href=\"#l%i\" class=\"linenr\">%4i</a> %s</div>\n", - $nr, $nr, $nr, esc_html($line, -nbsp=>1); + if ($mimetype =~ m!^text/!) { + my $nr; + while (my $line = <$fd>) { + chomp $line; + $nr++; + $line = untabify($line); + printf "<div class=\"pre\"><a id=\"l%i\" href=\"#l%i\" class=\"linenr\">%4i</a> %s</div>\n", + $nr, $nr, $nr, esc_html($line, -nbsp=>1); + } + } elsif ($mimetype =~ m!^image/!) { + print qq!<img type="$mimetype"!; + if ($file_name) { + print qq! alt="$file_name" title="$file_name"!; + } + print qq! src="! . + href(action=>"blob_plain", hash=>$hash, + hash_base=>$hash_base, file_name=>$file_name) . + qq!" 
/>\n!; } close $fd or print "Reading blob failed.\n"; @@ -3392,6 +3489,7 @@ sub git_log { } sub git_commit { + $hash ||= $hash_base || "HEAD"; my %co = parse_commit($hash); if (!%co) { die_error(undef, "Unknown commit object"); @@ -3669,6 +3767,7 @@ sub git_blobdiff_plain { sub git_commitdiff { my $format = shift || 'html'; + $hash ||= $hash_base || "HEAD"; my %co = parse_commit($hash); if (!%co) { die_error(undef, "Unknown commit object"); @@ -3731,7 +3830,8 @@ sub git_commitdiff { $hash_parent, $hash, "--" or die_error(undef, "Open git-diff-tree failed"); - while (chomp(my $line = <$fd>)) { + while (my $line = <$fd>) { + chomp $line; # empty line ends raw part of diff-tree output last unless $line; push @difftree, $line; @@ -4088,70 +4188,237 @@ sub git_shortlog { } ## ...................................................................... -## feeds (RSS, OPML) +## feeds (RSS, Atom; OPML) -sub git_rss { - # http://www.notestips.com/80256B3A007F2692/1/NAMO5P9UPQ +sub git_feed { + my $format = shift || 'atom'; + my ($have_blame) = gitweb_check_feature('blame'); + + # Atom: http://www.atomenabled.org/developers/syndication/ + # RSS: http://www.notestips.com/80256B3A007F2692/1/NAMO5P9UPQ + if ($format ne 'rss' && $format ne 'atom') { + die_error(undef, "Unknown web feed format"); + } + + # log/feed of current (HEAD) branch, log of given branch, history of file/directory + my $head = $hash || 'HEAD'; open my $fd, "-|", git_cmd(), "rev-list", "--max-count=150", - git_get_head_hash($project), "--" + $head, "--", (defined $file_name ? $file_name : ()) or die_error(undef, "Open git-rev-list failed"); my @revlist = map { chomp; $_ } <$fd>; close $fd or die_error(undef, "Reading git-rev-list failed"); - print $cgi->header(-type => 'text/xml', -charset => 'utf-8'); - print <<XML; -<?xml version="1.0" encoding="utf-8"?> + + my %latest_commit; + my %latest_date; + my $content_type = "application/$format+xml"; + if (defined $cgi->http('HTTP_ACCEPT') && + $cgi->Accept('text/xml') > $cgi->Accept($content_type)) { + # browser (feed reader) prefers text/xml + $content_type = 'text/xml'; + } + if (defined($revlist[0])) { + %latest_commit = parse_commit($revlist[0]); + %latest_date = parse_date($latest_commit{'author_epoch'}); + print $cgi->header( + -type => $content_type, + -charset => 'utf-8', + -last_modified => $latest_date{'rfc2822'}); + } else { + print $cgi->header( + -type => $content_type, + -charset => 'utf-8'); + } + + # Optimization: skip generating the body if client asks only + # for Last-Modified date. + return if ($cgi->request_method() eq 'HEAD'); + + # header variables + my $title = "$site_name - $project/$action"; + my $feed_type = 'log'; + if (defined $hash) { + $title .= " - '$hash'"; + $feed_type = 'branch log'; + if (defined $file_name) { + $title .= " :: $file_name"; + $feed_type = 'history'; + } + } elsif (defined $file_name) { + $title .= " - $file_name"; + $feed_type = 'history'; + } + $title .= " $feed_type"; + my $descr = git_get_project_description($project); + if (defined $descr) { + $descr = esc_html($descr); + } else { + $descr = "$project " . + ($format eq 'rss' ? 'RSS' : 'Atom') . 
+ " feed"; + } + my $owner = git_get_project_owner($project); + $owner = esc_html($owner); + + #header + my $alt_url; + if (defined $file_name) { + $alt_url = href(-full=>1, action=>"history", hash=>$hash, file_name=>$file_name); + } elsif (defined $hash) { + $alt_url = href(-full=>1, action=>"log", hash=>$hash); + } else { + $alt_url = href(-full=>1, action=>"summary"); + } + print qq!<?xml version="1.0" encoding="utf-8"?>\n!; + if ($format eq 'rss') { + print <<XML; <rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/"> <channel> -<title>$project $my_uri $my_url</title> -<link>${\esc_html("$my_url?p=$project;a=summary")}</link> -<description>$project log</description> -<language>en</language> XML + print "<title>$title</title>\n" . + "<link>$alt_url</link>\n" . + "<description>$descr</description>\n" . + "<language>en</language>\n"; + } elsif ($format eq 'atom') { + print <<XML; +<feed xmlns="http://www.w3.org/2005/Atom"> +XML + print "<title>$title</title>\n" . + "<subtitle>$descr</subtitle>\n" . + '<link rel="alternate" type="text/html" href="' . + $alt_url . '" />' . "\n" . + '<link rel="self" type="' . $content_type . '" href="' . + $cgi->self_url() . '" />' . "\n" . + "<id>" . href(-full=>1) . "</id>\n" . + # use project owner for feed author + "<author><name>$owner</name></author>\n"; + if (defined $favicon) { + print "<icon>" . esc_url($favicon) . "</icon>\n"; + } + if (defined $logo_url) { + # not twice as wide as tall: 72 x 27 pixels + print "<logo>" . esc_url($logo) . "</logo>\n"; + } + if (! %latest_date) { + # dummy date to keep the feed valid until commits trickle in: + print "<updated>1970-01-01T00:00:00Z</updated>\n"; + } else { + print "<updated>$latest_date{'iso-8601'}</updated>\n"; + } + } + # contents for (my $i = 0; $i <= $#revlist; $i++) { my $commit = $revlist[$i]; my %co = parse_commit($commit); # we read 150, we always show 30 and the ones more recent than 48 hours - if (($i >= 20) && ((time - $co{'committer_epoch'}) > 48*60*60)) { + if (($i >= 20) && ((time - $co{'author_epoch'}) > 48*60*60)) { last; } - my %cd = parse_date($co{'committer_epoch'}); + my %cd = parse_date($co{'author_epoch'}); + + # get list of changed files open $fd, "-|", git_cmd(), "diff-tree", '-r', @diff_opts, - $co{'parent'}, $co{'id'}, "--" + $co{'parent'}, $co{'id'}, "--", (defined $file_name ? $file_name : ()) or next; my @difftree = map { chomp; $_ } <$fd>; close $fd or next; - print "<item>\n" . - "<title>" . - sprintf("%d %s %02d:%02d", $cd{'mday'}, $cd{'month'}, $cd{'hour'}, $cd{'minute'}) . " - " . esc_html($co{'title'}) . - "</title>\n" . - "<author>" . esc_html($co{'author'}) . "</author>\n" . - "<pubDate>$cd{'rfc2822'}</pubDate>\n" . - "<guid isPermaLink=\"true\">" . esc_html("$my_url?p=$project;a=commit;h=$commit") . "</guid>\n" . - "<link>" . esc_html("$my_url?p=$project;a=commit;h=$commit") . "</link>\n" . - "<description>" . esc_html($co{'title'}) . "</description>\n" . - "<content:encoded>" . - "<![CDATA[\n"; + + # print element (entry, item) + my $co_url = href(-full=>1, action=>"commit", hash=>$commit); + if ($format eq 'rss') { + print "<item>\n" . + "<title>" . esc_html($co{'title'}) . "</title>\n" . + "<author>" . esc_html($co{'author'}) . "</author>\n" . + "<pubDate>$cd{'rfc2822'}</pubDate>\n" . + "<guid isPermaLink=\"true\">$co_url</guid>\n" . + "<link>$co_url</link>\n" . + "<description>" . esc_html($co{'title'}) . "</description>\n" . + "<content:encoded>" . + "<![CDATA[\n"; + } elsif ($format eq 'atom') { + print "<entry>\n" . 
+ "<title type=\"html\">" . esc_html($co{'title'}) . "</title>\n" . + "<updated>$cd{'iso-8601'}</updated>\n" . + "<author>\n" . + " <name>" . esc_html($co{'author_name'}) . "</name>\n"; + if ($co{'author_email'}) { + print " <email>" . esc_html($co{'author_email'}) . "</email>\n"; + } + print "</author>\n" . + # use committer for contributor + "<contributor>\n" . + " <name>" . esc_html($co{'committer_name'}) . "</name>\n"; + if ($co{'committer_email'}) { + print " <email>" . esc_html($co{'committer_email'}) . "</email>\n"; + } + print "</contributor>\n" . + "<published>$cd{'iso-8601'}</published>\n" . + "<link rel=\"alternate\" type=\"text/html\" href=\"$co_url\" />\n" . + "<id>$co_url</id>\n" . + "<content type=\"xhtml\" xml:base=\"" . esc_url($my_url) . "\">\n" . + "<div xmlns=\"http://www.w3.org/1999/xhtml\">\n"; + } my $comment = $co{'comment'}; + print "<pre>\n"; foreach my $line (@$comment) { - $line = to_utf8($line); - print "$line<br/>\n"; + $line = esc_html($line); + print "$line\n"; } - print "<br/>\n"; - foreach my $line (@difftree) { - if (!($line =~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)([0-9]{0,3})\t(.*)$/)) { - next; + print "</pre><ul>\n"; + foreach my $difftree_line (@difftree) { + my %difftree = parse_difftree_raw_line($difftree_line); + next if !$difftree{'from_id'}; + + my $file = $difftree{'file'} || $difftree{'to_file'}; + + print "<li>" . + "[" . + $cgi->a({-href => href(-full=>1, action=>"blobdiff", + hash=>$difftree{'to_id'}, hash_parent=>$difftree{'from_id'}, + hash_base=>$co{'id'}, hash_parent_base=>$co{'parent'}, + file_name=>$file, file_parent=>$difftree{'from_file'}), + -title => "diff"}, 'D'); + if ($have_blame) { + print $cgi->a({-href => href(-full=>1, action=>"blame", + file_name=>$file, hash_base=>$commit), + -title => "blame"}, 'B'); } - my $file = esc_path(unquote($7)); - $file = to_utf8($file); - print "$file<br/>\n"; + # if this is not a feed of a file history + if (!defined $file_name || $file_name ne $file) { + print $cgi->a({-href => href(-full=>1, action=>"history", + file_name=>$file, hash=>$commit), + -title => "history"}, 'H'); + } + $file = esc_path($file); + print "] ". + "$file</li>\n"; + } + if ($format eq 'rss') { + print "</ul>]]>\n" . + "</content:encoded>\n" . + "</item>\n"; + } elsif ($format eq 'atom') { + print "</ul>\n</div>\n" . + "</content>\n" . + "</entry>\n"; } - print "]]>\n" . - "</content:encoded>\n" . 
- "</item>\n"; } - print "</channel></rss>"; + + # end of feed + if ($format eq 'rss') { + print "</channel>\n</rss>\n"; + } elsif ($format eq 'atom') { + print "</feed>\n"; + } +} + +sub git_rss { + git_feed('rss'); +} + +sub git_atom { + git_feed('atom'); } sub git_opml { @@ -158,12 +158,17 @@ static int copy(char *buf, int size, int offset, const char *src) static const char au_env[] = "GIT_AUTHOR_NAME"; static const char co_env[] = "GIT_COMMITTER_NAME"; static const char *env_hint = -"\n*** Environment problem:\n" +"\n" "*** Your name cannot be determined from your system services (gecos).\n" -"*** You would need to set %s and %s\n" -"*** environment variables; otherwise you won't be able to perform\n" -"*** certain operations because of \"empty ident\" errors.\n" -"*** Alternatively, you can use user.name configuration variable.\n\n"; +"\n" +"Run\n" +"\n" +" git repo-config user.email \"you@email.com\"\n" +" git repo-config user.name \"Your Name\"\n" +"\n" +"To set the identity in this repository.\n" +"Add --global to set your account\'s default\n" +"\n"; static const char *get_ident(const char *name, const char *email, const char *date_str, int error_on_no_name) diff --git a/perl/.gitignore b/perl/.gitignore index e990caeea7..98b24772c7 100644 --- a/perl/.gitignore +++ b/perl/.gitignore @@ -1,4 +1,5 @@ -Makefile +perl.mak +perl.mak.old blib blibdirs pm_to_blib diff --git a/perl/Makefile b/perl/Makefile new file mode 100644 index 0000000000..099beda873 --- /dev/null +++ b/perl/Makefile @@ -0,0 +1,39 @@ +# +# Makefile for perl support modules and routine +# +makfile:=perl.mak + +PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH)) +prefix_SQ = $(subst ','\'',$(prefix)) + +all install instlibdir: $(makfile) + $(MAKE) -f $(makfile) $@ + +clean: + test -f $(makfile) && $(MAKE) -f $(makfile) $@ || exit 0 + $(RM) ppport.h + $(RM) $(makfile) + $(RM) $(makfile).old + +ifdef NO_PERL_MAKEMAKER +instdir_SQ = $(subst ','\'',$(prefix)/lib) +$(makfile): ../GIT-CFLAGS Makefile + echo all: > $@ + echo ' :' >> $@ + echo install: >> $@ + echo ' mkdir -p $(instdir_SQ)' >> $@ + echo ' $(RM) $(instdir_SQ)/Git.pm; cp Git.pm $(instdir_SQ)' >> $@ + echo ' $(RM) $(instdir_SQ)/Error.pm; \ + cp private-Error.pm $(instdir_SQ)/Error.pm' >> $@ + echo instlibdir: >> $@ + echo ' echo $(instdir_SQ)' >> $@ +else +$(makfile): Makefile.PL ../GIT-CFLAGS + '$(PERL_PATH_SQ)' $< PREFIX='$(prefix_SQ)' +endif + +# this is just added comfort for calling make directly in perl dir +# (even though GIT-CFLAGS aren't used yet. If ever) +../GIT-CFLAGS: + $(MAKE) -C .. 
GIT-CFLAGS + diff --git a/perl/Makefile.PL b/perl/Makefile.PL index de73235e4c..41687757a7 100644 --- a/perl/Makefile.PL +++ b/perl/Makefile.PL @@ -24,5 +24,6 @@ WriteMakefile( NAME => 'Git', VERSION_FROM => 'Git.pm', PM => \%pm, + MAKEFILE => 'perl.mak', %extra ); diff --git a/receive-pack.c b/receive-pack.c index f18915117e..e76d9aea31 100644 --- a/receive-pack.c +++ b/receive-pack.c @@ -11,10 +11,10 @@ static const char receive_pack_usage[] = "git-receive-pack <git-dir>"; static int deny_non_fast_forwards = 0; -static int unpack_limit = 5000; +static int unpack_limit = 100; static int report_status; -static char capabilities[] = "report-status"; +static char capabilities[] = " report-status delete-refs "; static int capabilities_sent; static int receive_pack_config(const char *var, const char *value) @@ -113,12 +113,14 @@ static int update(struct command *cmd) strcpy(new_hex, sha1_to_hex(new_sha1)); strcpy(old_hex, sha1_to_hex(old_sha1)); - if (!has_sha1_file(new_sha1)) { + + if (!is_null_sha1(new_sha1) && !has_sha1_file(new_sha1)) { cmd->error_string = "bad pack"; return error("unpack should have generated %s, " "but I can't find it!", new_hex); } - if (deny_non_fast_forwards && !is_null_sha1(old_sha1) && + if (deny_non_fast_forwards && !is_null_sha1(new_sha1) && + !is_null_sha1(old_sha1) && !strncmp(name, "refs/heads/", 11)) { struct commit *old_commit, *new_commit; struct commit_list *bases, *ent; @@ -139,14 +141,22 @@ static int update(struct command *cmd) return error("hook declined to update %s", name); } - lock = lock_any_ref_for_update(name, old_sha1); - if (!lock) { - cmd->error_string = "failed to lock"; - return error("failed to lock %s", name); + if (is_null_sha1(new_sha1)) { + if (delete_ref(name, old_sha1)) { + cmd->error_string = "failed to delete"; + return error("failed to delete %s", name); + } + fprintf(stderr, "%s: %s -> deleted\n", name, old_hex); + } + else { + lock = lock_any_ref_for_update(name, old_sha1); + if (!lock) { + cmd->error_string = "failed to lock"; + return error("failed to lock %s", name); + } + write_ref_sha1(lock, new_sha1, "push"); + fprintf(stderr, "%s: %s -> %s\n", name, old_hex, new_hex); } - write_ref_sha1(lock, new_sha1, "push"); - - fprintf(stderr, "%s: %s -> %s\n", name, old_hex, new_hex); return 0; } @@ -376,6 +386,16 @@ static void report(const char *unpack_status) packet_flush(1); } +static int delete_only(struct command *cmd) +{ + while (cmd) { + if (!is_null_sha1(cmd->new_sha1)) + return 0; + cmd = cmd->next; + } + return 1; +} + int main(int argc, char **argv) { int i; @@ -409,7 +429,10 @@ int main(int argc, char **argv) read_head_info(); if (commands) { - const char *unpack_status = unpack(); + const char *unpack_status = NULL; + + if (!delete_only(commands)) + unpack_status = unpack(); if (!unpack_status) execute_commands(); if (pack_lockfile) @@ -1,12 +1,18 @@ #include "refs.h" #include "cache.h" +#include "object.h" +#include "tag.h" #include <errno.h> +/* ISSYMREF=01 and ISPACKED=02 are public interfaces */ +#define REF_KNOWS_PEELED 04 + struct ref_list { struct ref_list *next; unsigned char flag; /* ISSYMREF? ISPACKED? 
*/ unsigned char sha1[20]; + unsigned char peeled[20]; char name[FLEX_ARRAY]; }; @@ -34,11 +40,13 @@ static const char *parse_ref_line(char *line, unsigned char *sha1) if (line[len] != '\n') return NULL; line[len] = 0; + return line; } static struct ref_list *add_ref(const char *name, const unsigned char *sha1, - int flag, struct ref_list *list) + int flag, struct ref_list *list, + struct ref_list **new_entry) { int len; struct ref_list **p = &list, *entry; @@ -50,8 +58,11 @@ static struct ref_list *add_ref(const char *name, const unsigned char *sha1, break; /* Same as existing entry? */ - if (!cmp) + if (!cmp) { + if (new_entry) + *new_entry = entry; return list; + } p = &entry->next; } @@ -59,10 +70,13 @@ static struct ref_list *add_ref(const char *name, const unsigned char *sha1, len = strlen(name) + 1; entry = xmalloc(sizeof(struct ref_list) + len); hashcpy(entry->sha1, sha1); + hashclr(entry->peeled); memcpy(entry->name, name, len); entry->flag = flag; entry->next = *p; *p = entry; + if (new_entry) + *new_entry = entry; return list; } @@ -98,25 +112,50 @@ static void invalidate_cached_refs(void) ca->did_loose = ca->did_packed = 0; } +static void read_packed_refs(FILE *f, struct cached_refs *cached_refs) +{ + struct ref_list *list = NULL; + struct ref_list *last = NULL; + char refline[PATH_MAX]; + int flag = REF_ISPACKED; + + while (fgets(refline, sizeof(refline), f)) { + unsigned char sha1[20]; + const char *name; + static const char header[] = "# pack-refs with:"; + + if (!strncmp(refline, header, sizeof(header)-1)) { + const char *traits = refline + sizeof(header) - 1; + if (strstr(traits, " peeled ")) + flag |= REF_KNOWS_PEELED; + /* perhaps other traits later as well */ + continue; + } + + name = parse_ref_line(refline, sha1); + if (name) { + list = add_ref(name, sha1, flag, list, &last); + continue; + } + if (last && + refline[0] == '^' && + strlen(refline) == 42 && + refline[41] == '\n' && + !get_sha1_hex(refline + 1, sha1)) + hashcpy(last->peeled, sha1); + } + cached_refs->packed = list; +} + static struct ref_list *get_packed_refs(void) { if (!cached_refs.did_packed) { - struct ref_list *refs = NULL; FILE *f = fopen(git_path("packed-refs"), "r"); + cached_refs.packed = NULL; if (f) { - struct ref_list *list = NULL; - char refline[PATH_MAX]; - while (fgets(refline, sizeof(refline), f)) { - unsigned char sha1[20]; - const char *name = parse_ref_line(refline, sha1); - if (!name) - continue; - list = add_ref(name, sha1, REF_ISPACKED, list); - } + read_packed_refs(f, &cached_refs); fclose(f); - refs = list; } - cached_refs.packed = refs; cached_refs.did_packed = 1; } return cached_refs.packed; @@ -159,7 +198,7 @@ static struct ref_list *get_ref_dir(const char *base, struct ref_list *list) error("%s points nowhere!", ref); continue; } - list = add_ref(ref, sha1, flag, list); + list = add_ref(ref, sha1, flag, list, NULL); } free(ref); closedir(dir); @@ -336,6 +375,43 @@ static int do_one_ref(const char *base, each_ref_fn fn, int trim, return fn(entry->name + trim, entry->sha1, entry->flag, cb_data); } +int peel_ref(const char *ref, unsigned char *sha1) +{ + int flag; + unsigned char base[20]; + struct object *o; + + if (!resolve_ref(ref, base, 1, &flag)) + return -1; + + if ((flag & REF_ISPACKED)) { + struct ref_list *list = get_packed_refs(); + + while (list) { + if (!strcmp(list->name, ref)) { + if (list->flag & REF_KNOWS_PEELED) { + hashcpy(sha1, list->peeled); + return 0; + } + /* older pack-refs did not leave peeled ones */ + break; + } + list = list->next; + } + } + + /* 
fallback - callers should not call this for unpacked refs */ + o = parse_object(base); + if (o->type == OBJ_TAG) { + o = deref_tag(o, ref, 0); + if (o) { + hashcpy(sha1, o->sha1); + return 0; + } + } + return -1; +} + static int do_for_each_ref(const char *base, each_ref_fn fn, int trim, void *cb_data) { @@ -10,12 +10,13 @@ struct ref_lock { int force_write; }; +#define REF_ISSYMREF 01 +#define REF_ISPACKED 02 + /* * Calls the specified function for each ref file until it returns nonzero, * and returns the value */ -#define REF_ISSYMREF 01 -#define REF_ISPACKED 02 typedef int each_ref_fn(const char *refname, const unsigned char *sha1, int flags, void *cb_data); extern int head_ref(each_ref_fn, void *); extern int for_each_ref(each_ref_fn, void *); @@ -23,6 +24,8 @@ extern int for_each_tag_ref(each_ref_fn, void *); extern int for_each_branch_ref(each_ref_fn, void *); extern int for_each_remote_ref(each_ref_fn, void *); +extern int peel_ref(const char *, unsigned char *); + /** Reads the refs file specified into sha1 **/ extern int get_ref_sha1(const char *ref, unsigned char *sha1); diff --git a/send-pack.c b/send-pack.c index 447666665b..328dbbc16a 100644 --- a/send-pack.c +++ b/send-pack.c @@ -271,6 +271,7 @@ static int send_pack(int in, int out, int nr_refspec, char **refspec) int new_refs; int ret = 0; int ask_for_status_report = 0; + int allow_deleting_refs = 0; int expect_status_report = 0; /* No funny business with the matcher */ @@ -280,6 +281,8 @@ static int send_pack(int in, int out, int nr_refspec, char **refspec) /* Does the other end support the reporting? */ if (server_supports("report-status")) ask_for_status_report = 1; + if (server_supports("delete-refs")) + allow_deleting_refs = 1; /* match them up */ if (!remote_tail) @@ -299,9 +302,19 @@ static int send_pack(int in, int out, int nr_refspec, char **refspec) new_refs = 0; for (ref = remote_refs; ref; ref = ref->next) { char old_hex[60], *new_hex; + int delete_ref; + if (!ref->peer_ref) continue; - if (!hashcmp(ref->old_sha1, ref->peer_ref->new_sha1)) { + + delete_ref = is_null_sha1(ref->peer_ref->new_sha1); + if (delete_ref && !allow_deleting_refs) { + error("remote does not support deleting refs"); + ret = -2; + continue; + } + if (!delete_ref && + !hashcmp(ref->old_sha1, ref->peer_ref->new_sha1)) { if (verbose) fprintf(stderr, "'%s': up-to-date\n", ref->name); continue; @@ -321,9 +334,13 @@ static int send_pack(int in, int out, int nr_refspec, char **refspec) * * (3) if both new and old are commit-ish, and new is a * descendant of old, it is OK. + * + * (4) regardless of all of the above, removing :B is + * always allowed. 
*/ if (!force_update && + !delete_ref && !is_zero_sha1(ref->old_sha1) && !ref->force) { if (!has_sha1_file(ref->old_sha1) || @@ -347,12 +364,8 @@ static int send_pack(int in, int out, int nr_refspec, char **refspec) } } hashcpy(ref->new_sha1, ref->peer_ref->new_sha1); - if (is_zero_sha1(ref->new_sha1)) { - error("cannot happen anymore"); - ret = -3; - continue; - } - new_refs++; + if (!delete_ref) + new_refs++; strcpy(old_hex, sha1_to_hex(ref->old_sha1)); new_hex = sha1_to_hex(ref->new_sha1); @@ -366,10 +379,16 @@ static int send_pack(int in, int out, int nr_refspec, char **refspec) else packet_write(out, "%s %s %s", old_hex, new_hex, ref->name); - fprintf(stderr, "updating '%s'", ref->name); - if (strcmp(ref->name, ref->peer_ref->name)) - fprintf(stderr, " using '%s'", ref->peer_ref->name); - fprintf(stderr, "\n from %s\n to %s\n", old_hex, new_hex); + if (delete_ref) + fprintf(stderr, "deleting '%s'\n", ref->name); + else { + fprintf(stderr, "updating '%s'", ref->name); + if (strcmp(ref->name, ref->peer_ref->name)) + fprintf(stderr, " using '%s'", + ref->peer_ref->name); + fprintf(stderr, "\n from %s\n to %s\n", + old_hex, new_hex); + } } packet_flush(out); diff --git a/sha1_file.c b/sha1_file.c index 09456d23f8..63f416bb5a 100644 --- a/sha1_file.c +++ b/sha1_file.c @@ -1261,7 +1261,7 @@ struct packed_git *find_sha1_pack(const unsigned char *sha1, } -int sha1_object_info(const unsigned char *sha1, char *type, unsigned long *sizep) +static int sha1_loose_object_info(const unsigned char *sha1, char *type, unsigned long *sizep) { int status; unsigned long mapsize, size; @@ -1270,20 +1270,8 @@ int sha1_object_info(const unsigned char *sha1, char *type, unsigned long *sizep char hdr[128]; map = map_sha1_file(sha1, &mapsize); - if (!map) { - struct pack_entry e; - - if (!find_pack_entry(sha1, &e, NULL)) { - reprepare_packed_git(); - if (!find_pack_entry(sha1, &e, NULL)) - return error("unable to find %s", sha1_to_hex(sha1)); - } - if (use_packed_git(e.p)) - die("cannot map packed file"); - status = packed_object_info(e.p, e.offset, type, sizep); - unuse_packed_git(e.p); - return status; - } + if (!map) + return error("unable to find %s", sha1_to_hex(sha1)); if (unpack_sha1_header(&stream, map, mapsize, hdr, sizeof(hdr)) < 0) status = error("unable to unpack %s header", sha1_to_hex(sha1)); @@ -1299,6 +1287,23 @@ int sha1_object_info(const unsigned char *sha1, char *type, unsigned long *sizep return status; } +int sha1_object_info(const unsigned char *sha1, char *type, unsigned long *sizep) +{ + int status; + struct pack_entry e; + + if (!find_pack_entry(sha1, &e, NULL)) { + reprepare_packed_git(); + if (!find_pack_entry(sha1, &e, NULL)) + return sha1_loose_object_info(sha1, type, sizep); + } + if (use_packed_git(e.p)) + die("cannot map packed file"); + status = packed_object_info(e.p, e.offset, type, sizep); + unuse_packed_git(e.p); + return status; +} + static void *read_packed_sha1(const unsigned char *sha1, char *type, unsigned long *size) { struct pack_entry e; diff --git a/t/Makefile b/t/Makefile index 89835093fb..c9bd9a4690 100644 --- a/t/Makefile +++ b/t/Makefile @@ -13,10 +13,6 @@ SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH)) T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh) TSVN = $(wildcard t91[0-9][0-9]-*.sh) -ifdef NO_PYTHON - GIT_TEST_OPTS += --no-python -endif - all: $(T) clean $(T): @@ -27,8 +23,9 @@ clean: # we can test NO_OPTIMIZE_COMMITS independently of LC_ALL full-svn-test: + $(MAKE) $(TSVN) GIT_SVN_NO_LIB=0 GIT_SVN_DELTA_FETCH=1 \ + GIT_SVN_NO_OPTIMIZE_COMMITS=1 LC_ALL=C $(MAKE) 
$(TSVN) GIT_SVN_NO_LIB=1 GIT_SVN_NO_OPTIMIZE_COMMITS=1 LC_ALL=C - $(MAKE) $(TSVN) GIT_SVN_NO_LIB=0 GIT_SVN_NO_OPTIMIZE_COMMITS=1 LC_ALL=C $(MAKE) $(TSVN) GIT_SVN_NO_LIB=1 GIT_SVN_NO_OPTIMIZE_COMMITS=0 \ LC_ALL=en_US.UTF-8 $(MAKE) $(TSVN) GIT_SVN_NO_LIB=0 GIT_SVN_NO_OPTIMIZE_COMMITS=0 \ diff --git a/t/lib-git-svn.sh b/t/lib-git-svn.sh index 29a1e72c61..63c670304f 100644 --- a/t/lib-git-svn.sh +++ b/t/lib-git-svn.sh @@ -45,6 +45,6 @@ else svnadmin create "$svnrepo" fi -svnrepo="file://$svnrepo/test-git-svn" +svnrepo="file://$svnrepo" diff --git a/t/t0000-basic.sh b/t/t0000-basic.sh index 6aff0b808c..81f3bedc90 100755 --- a/t/t0000-basic.sh +++ b/t/t0000-basic.sh @@ -20,10 +20,10 @@ modification *should* take notice and update the test vectors here. ################################################################ # It appears that people are getting bitten by not installing -# 'merge' (usually part of RCS package in binary distributions) -# or have too old python without subprocess. Check them and error -# out before running any tests. Also catch the bogosity of trying -# to run tests without building while we are at it. +# 'merge' (usually part of RCS package in binary distributions). +# Check this and error out before running any tests. Also catch +# the bogosity of trying to run tests without building while we +# are at it. ../git >/dev/null if test $? != 1 @@ -42,12 +42,6 @@ fi . ./test-lib.sh -test "$no_python" || "$PYTHON" -c 'import subprocess' || { - echo >&2 'Your python seem to lack "subprocess" module. -Please check INSTALL document.' - exit 1 -} - ################################################################ # init-db has been done in an empty repository. # make sure it is empty. diff --git a/t/t4013-diff-various.sh b/t/t4013-diff-various.sh index 71c454356f..ed37141b6e 100755 --- a/t/t4013-diff-various.sh +++ b/t/t4013-diff-various.sh @@ -73,6 +73,7 @@ test_expect_success setup ' for i in 1 2; do echo $i; done >>dir/sub && git update-index file0 dir/sub && + git repo-config log.showroot false && git commit --amend && git show-branch ' diff --git a/t/t4015-diff-whitespace.sh b/t/t4015-diff-whitespace.sh index 1bc5b7a412..adf4993bac 100755 --- a/t/t4015-diff-whitespace.sh +++ b/t/t4015-diff-whitespace.sh @@ -109,12 +109,10 @@ index d99af23..8b32fb5 100644 + whitespace at beginning whitespace change -whitespace in the middle --whitespace at end +white space in the middle -+whitespace at end + whitespace at end unchanged line --CR at endQ -+CR at end + CR at endQ EOF git-diff -b > out test_expect_success 'another test, with -b' 'diff -u expect out' diff --git a/t/t5400-send-pack.sh b/t/t5400-send-pack.sh index 8afb899717..28744b35e1 100755 --- a/t/t5400-send-pack.sh +++ b/t/t5400-send-pack.sh @@ -64,6 +64,16 @@ test_expect_success \ cmp victim/.git/refs/heads/master .git/refs/heads/master ' +test_expect_success \ + 'push can be used to delete a ref' ' + cd victim && + git branch extra master && + cd .. && + test -f victim/.git/refs/heads/extra && + git-send-pack ./victim/.git/ :extra master && + ! test -f victim/.git/refs/heads/extra +' + unset GIT_CONFIG GIT_CONFIG_LOCAL HOME=`pwd`/no-such-directory export HOME ;# this way we force the victim/.git/config to be used. 
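The new t5400 test above exercises the ref-deletion support added to send-pack.c and receive-pack.c earlier in this patch: pushing an empty source side of a refspec removes the matching ref on the receiving end, provided the receiver advertises the "delete-refs" capability. As a rough sketch only (not part of the patch), the same flow outside the test harness, reusing the victim repository layout from the test:

------------------------------------------------
# in the receiving repository: create a branch to be deleted
$ (cd victim && git branch extra master)

# from the pushing repository: an empty source side (":extra")
# asks the receiver to delete refs/heads/extra
$ git-send-pack ./victim/.git/ :extra master

# the loose ref file is gone once the deletion is accepted
$ test -f victim/.git/refs/heads/extra || echo "extra deleted"
------------------------------------------------

If the receiver does not advertise "delete-refs", send-pack refuses with "remote does not support deleting refs" instead of sending the update.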
diff --git a/t/t9100-git-svn-basic.sh b/t/t9100-git-svn-basic.sh index 34a3ccd31c..f9de232366 100755 --- a/t/t9100-git-svn-basic.sh +++ b/t/t9100-git-svn-basic.sh @@ -228,6 +228,11 @@ tree 56a30b966619b863674f5978696f4a3594f2fca9 tree d667270a1f7b109f5eb3aaea21ede14b56bfdd6e tree 8f51f74cf0163afc9ad68a4b1537288c4558b5a4 EOF + +if test -z "$GIT_SVN_NO_LIB" || test "$GIT_SVN_NO_LIB" -eq 0; then + echo tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904 >> expected +fi + test_expect_success "$name" "diff -u a expected" test_done diff --git a/t/t9103-git-svn-graft-branches.sh b/t/t9103-git-svn-graft-branches.sh index cc62d4ece8..293b98f928 100755 --- a/t/t9103-git-svn-graft-branches.sh +++ b/t/t9103-git-svn-graft-branches.sh @@ -1,6 +1,8 @@ test_description='git-svn graft-branches' . ./lib-git-svn.sh +svnrepo="$svnrepo/test-git-svn" + test_expect_success 'initialize repo' " mkdir import && cd import && diff --git a/t/t9200-git-cvsexportcommit.sh b/t/t9200-git-cvsexportcommit.sh index 6e566d4409..c1024790e4 100755 --- a/t/t9200-git-cvsexportcommit.sh +++ b/t/t9200-git-cvsexportcommit.sh @@ -142,4 +142,20 @@ test_expect_success \ diff F/newfile6.png ../F/newfile6.png )' +test_expect_success 'Retain execute bit' ' + mkdir G && + echo executeon >G/on && + chmod +x G/on && + echo executeoff >G/off && + git add G/on && + git add G/off && + git commit -a -m "Execute test" && + ( + cd "$CVSWORK" && + git-cvsexportcommit -c HEAD + test -x G/on && + ! test -x G/off + ) +' + test_done diff --git a/t/test-lib.sh b/t/test-lib.sh index 3895f16709..ac7be769b4 100755 --- a/t/test-lib.sh +++ b/t/test-lib.sh @@ -76,7 +76,8 @@ do -v|--v|--ve|--ver|--verb|--verbo|--verbos|--verbose) verbose=t; shift ;; --no-python) - no_python=t; shift ;; + # noop now... + shift ;; *) break ;; esac @@ -210,18 +211,6 @@ GIT_EXEC_PATH=$(pwd)/.. HOME=$(pwd)/trash export PATH GIT_EXEC_PATH HOME -# Similarly use ../compat/subprocess.py if our python does not -# have subprocess.py on its own. -PYTHON=`sed -e '1{ - s/^#!// - q -}' ../git-merge-recursive-old` || { - error "You haven't built things yet, have you?" 
-} -"$PYTHON" -c 'import subprocess' 2>/dev/null || { - PYTHONPATH=$(pwd)/../compat - export PYTHONPATH -} GITPERLLIB=$(pwd)/../perl/blib/lib:$(pwd)/../perl/blib/arch/auto/Git export GITPERLLIB test -d ../templates/blt || { diff --git a/upload-pack.c b/upload-pack.c index ddaa72f0a9..4572fff07c 100644 --- a/upload-pack.c +++ b/upload-pack.c @@ -12,9 +12,15 @@ static const char upload_pack_usage[] = "git-upload-pack [--strict] [--timeout=nn] <dir>"; -#define THEY_HAVE (1U << 0) -#define OUR_REF (1U << 1) -#define WANTED (1U << 2) +/* bits #0..7 in revision.h, #8..10 in commit.c */ +#define THEY_HAVE (1u << 11) +#define OUR_REF (1u << 12) +#define WANTED (1u << 13) +#define COMMON_KNOWN (1u << 14) +#define REACHABLE (1u << 15) + +static unsigned long oldest_have; + static int multi_ack, nr_our_refs; static int use_thin_pack, use_ofs_delta; static struct object_array have_obj; @@ -303,11 +309,12 @@ static void create_pack_file(void) static int got_sha1(char *hex, unsigned char *sha1) { struct object *o; + int we_knew_they_have = 0; if (get_sha1_hex(hex, sha1)) die("git-upload-pack: expected SHA1 object, got '%s'", hex); if (!has_sha1_file(sha1)) - return 0; + return -1; o = lookup_object(sha1); if (!(o && o->parsed)) @@ -316,15 +323,84 @@ static int got_sha1(char *hex, unsigned char *sha1) die("oops (%s)", sha1_to_hex(sha1)); if (o->type == OBJ_COMMIT) { struct commit_list *parents; + struct commit *commit = (struct commit *)o; if (o->flags & THEY_HAVE) - return 0; - o->flags |= THEY_HAVE; - for (parents = ((struct commit*)o)->parents; + we_knew_they_have = 1; + else + o->flags |= THEY_HAVE; + if (!oldest_have || (commit->date < oldest_have)) + oldest_have = commit->date; + for (parents = commit->parents; parents; parents = parents->next) parents->item->object.flags |= THEY_HAVE; } - add_object_array(o, NULL, &have_obj); + if (!we_knew_they_have) { + add_object_array(o, NULL, &have_obj); + return 1; + } + return 0; +} + +static int reachable(struct commit *want) +{ + struct commit_list *work = NULL; + + insert_by_date(want, &work); + while (work) { + struct commit_list *list = work->next; + struct commit *commit = work->item; + free(work); + work = list; + + if (commit->object.flags & THEY_HAVE) { + want->object.flags |= COMMON_KNOWN; + break; + } + if (!commit->object.parsed) + parse_object(commit->object.sha1); + if (commit->object.flags & REACHABLE) + continue; + commit->object.flags |= REACHABLE; + if (commit->date < oldest_have) + continue; + for (list = commit->parents; list; list = list->next) { + struct commit *parent = list->item; + if (!(parent->object.flags & REACHABLE)) + insert_by_date(parent, &work); + } + } + want->object.flags |= REACHABLE; + clear_commit_marks(want, REACHABLE); + free_commit_list(work); + return (want->object.flags & COMMON_KNOWN); +} + +static int ok_to_give_up(void) +{ + int i; + + if (!have_obj.nr) + return 0; + + for (i = 0; i < want_obj.nr; i++) { + struct object *want = want_obj.objects[i].item; + + if (want->flags & COMMON_KNOWN) + continue; + want = deref_tag(want, "a want line", 0); + if (!want || want->type != OBJ_COMMIT) { + /* no way to tell if this is reachable by + * looking at the ancestry chain alone, so + * leave a note to ourselves not to worry about + * this object anymore. 
+ */ + want_obj.objects[i].item->flags |= COMMON_KNOWN; + continue; + } + if (!reachable((struct commit *)want)) + return 0; + } return 1; } @@ -349,7 +425,13 @@ static int get_common_commits(void) } len = strip(line, len); if (!strncmp(line, "have ", 5)) { - if (got_sha1(line+5, sha1)) { + switch (got_sha1(line+5, sha1)) { + case -1: /* they have what we do not */ + if (multi_ack && ok_to_give_up()) + packet_write(1, "ACK %s continue\n", + sha1_to_hex(sha1)); + break; + default: memcpy(hex, sha1_to_hex(sha1), 41); if (multi_ack) { const char *msg = "ACK %s continue\n"; @@ -358,6 +440,7 @@ static int get_common_commits(void) } else if (have_obj.nr == 1) packet_write(1, "ACK %s\n", hex); + break; } continue; } diff --git a/xdiff/xutils.c b/xdiff/xutils.c index 9e4bb47ee9..1b899f32c4 100644 --- a/xdiff/xutils.c +++ b/xdiff/xutils.c @@ -230,7 +230,8 @@ unsigned long xdl_hash_record(char const **data, char const *top, long flags) { while (ptr + 1 < top && isspace(ptr[1]) && ptr[1] != '\n') ptr++; - if (flags & XDF_IGNORE_WHITESPACE_CHANGE) { + if (flags & XDF_IGNORE_WHITESPACE_CHANGE + && ptr[1] != '\n') { ha += (ha << 5); ha ^= (unsigned long) ' '; } |
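The xdl_hash_record() change at the end is the whitespace fix that the updated t4015-diff-whitespace.sh expectations rely on: a run of blanks that reaches the end of the line is no longer folded into the hash under XDF_IGNORE_WHITESPACE_CHANGE, so lines differing only in trailing whitespace compare equal with `git diff -b`. A rough sketch of the resulting behaviour, assuming a scratch repository with no other modifications (illustration only, not part of the patch):

------------------------------------------------
$ echo 'whitespace at end' >file
$ git update-index --add file

# change only the amount of trailing whitespace
$ echo 'whitespace at end   ' >file

$ git diff        # reports the line as changed
$ git diff -b     # with this fix, the trailing-blank change is not shown
------------------------------------------------

This matches the edited expectation in t4015, where "whitespace at end" and "CR at endQ" become context lines in the -b output.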