Diffstat (limited to 'contrib')
-rwxr-xr-x | contrib/completion/git-completion.bash | 499
-rw-r--r-- | contrib/convert-objects/git-convert-objects.txt | 1
-rw-r--r-- | contrib/credential/osxkeychain/.gitignore | 1
-rw-r--r-- | contrib/credential/osxkeychain/Makefile | 14
-rw-r--r-- | contrib/credential/osxkeychain/git-credential-osxkeychain.c | 173
-rw-r--r-- | contrib/diff-highlight/README | 57
-rwxr-xr-x | contrib/diff-highlight/diff-highlight | 124
-rwxr-xr-x | contrib/fast-import/git-p4 | 985
-rw-r--r-- | contrib/fast-import/git-p4.txt | 215
-rw-r--r-- | contrib/git-jump/README | 92
-rwxr-xr-x | contrib/git-jump/git-jump | 69
-rw-r--r-- | contrib/gitview/gitview.txt | 1
-rwxr-xr-x | contrib/hooks/post-receive-email | 13
-rwxr-xr-x | contrib/mw-to-git/git-remote-mediawiki | 827
-rw-r--r-- | contrib/mw-to-git/git-remote-mediawiki.txt | 7
15 files changed, 2345 insertions, 733 deletions
diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash index 840ae38760..b0062bac22 100755 --- a/contrib/completion/git-completion.bash +++ b/contrib/completion/git-completion.bash @@ -1,6 +1,6 @@ #!bash # -# bash completion support for core Git. +# bash/zsh completion support for core Git. # # Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org> # Conceptually based on gitcompletion (http://gitweb.hawaga.org.uk/). @@ -18,16 +18,12 @@ # To use these routines: # # 1) Copy this file to somewhere (e.g. ~/.git-completion.sh). -# 2) Added the following line to your .bashrc: -# source ~/.git-completion.sh -# -# Or, add the following lines to your .zshrc: -# autoload bashcompinit -# bashcompinit +# 2) Add the following line to your .bashrc/.zshrc: # source ~/.git-completion.sh # # 3) Consider changing your PS1 to also show the current branch: -# PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ ' +# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ ' +# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ ' # # The argument to __git_ps1 will be displayed only if you # are currently in a git repository. The %s token will be @@ -77,6 +73,10 @@ # git@vger.kernel.org # +if [[ -n ${ZSH_VERSION-} ]]; then + autoload -U +X bashcompinit && bashcompinit +fi + case "$COMP_WORDBREAKS" in *:*) : great ;; *) COMP_WORDBREAKS="$COMP_WORDBREAKS:" @@ -110,7 +110,8 @@ __git_ps1_show_upstream () local upstream=git legacy="" verbose="" # get some config options from git-config - while read key value; do + local output="$(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ')" + while read -r key value; do case "$key" in bash.showupstream) GIT_PS1_SHOWUPSTREAM="$value" @@ -125,7 +126,7 @@ __git_ps1_show_upstream () upstream=svn+git # default upstream is SVN if available, else git ;; esac - done < <(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ') + done <<< "$output" # parse configuration values for option in ${GIT_PS1_SHOWUPSTREAM}; do @@ -485,16 +486,21 @@ _get_comp_words_by_ref () fi fi -# __gitcomp accepts 1, 2, 3, or 4 arguments -# generates completion reply with compgen +# Generates completion reply with compgen, appending a space to possible +# completion words, if necessary. +# It accepts 1 to 4 arguments: +# 1: List of possible completion words. +# 2: A prefix to be added to each possible completion word (optional). +# 3: Generate possible completion matches for this word (optional). +# 4: A suffix to be appended to each possible completion word (optional). __gitcomp () { - local cur - _get_comp_words_by_ref -n =: cur + local cur_="$cur" + if [ $# -gt 2 ]; then - cur="$3" + cur_="$3" fi - case "$cur" in + case "$cur_" in --*=) COMPREPLY=() ;; @@ -502,47 +508,54 @@ __gitcomp () local IFS=$'\n' COMPREPLY=($(compgen -P "${2-}" \ -W "$(__gitcomp_1 "${1-}" "${4-}")" \ - -- "$cur")) + -- "$cur_")) ;; esac } -# __git_heads accepts 0 or 1 arguments (to pass to __gitdir) +# Generates completion reply with compgen from newline-separated possible +# completion words by appending a space to all of them. +# It accepts 1 to 4 arguments: +# 1: List of possible completion words, separated by a single newline. +# 2: A prefix to be added to each possible completion word (optional). +# 3: Generate possible completion matches for this word (optional). +# 4: A suffix to be appended to each possible completion word instead of +# the default space (optional). If specified but empty, nothing is +# appended. 
+__gitcomp_nl () +{ + local s=$'\n' IFS=' '$'\t'$'\n' + local cur_="$cur" suffix=" " + + if [ $# -gt 2 ]; then + cur_="$3" + if [ $# -gt 3 ]; then + suffix="$4" + fi + fi + + IFS=$s + COMPREPLY=($(compgen -P "${2-}" -S "$suffix" -W "$1" -- "$cur_")) +} + __git_heads () { - local cmd i is_hash=y dir="$(__gitdir "${1-}")" + local dir="$(__gitdir)" if [ -d "$dir" ]; then git --git-dir="$dir" for-each-ref --format='%(refname:short)' \ refs/heads return fi - for i in $(git ls-remote "${1-}" 2>/dev/null); do - case "$is_hash,$i" in - y,*) is_hash=n ;; - n,*^{}) is_hash=y ;; - n,refs/heads/*) is_hash=y; echo "${i#refs/heads/}" ;; - n,*) is_hash=y; echo "$i" ;; - esac - done } -# __git_tags accepts 0 or 1 arguments (to pass to __gitdir) __git_tags () { - local cmd i is_hash=y dir="$(__gitdir "${1-}")" + local dir="$(__gitdir)" if [ -d "$dir" ]; then git --git-dir="$dir" for-each-ref --format='%(refname:short)' \ refs/tags return fi - for i in $(git ls-remote "${1-}" 2>/dev/null); do - case "$is_hash,$i" in - y,*) is_hash=n ;; - n,*^{}) is_hash=y ;; - n,refs/tags/*) is_hash=y; echo "${i#refs/tags/}" ;; - n,*) is_hash=y; echo "$i" ;; - esac - done } # __git_refs accepts 0, 1 (to pass to __gitdir), or 2 arguments @@ -550,9 +563,8 @@ __git_tags () # by checkout for tracking branches __git_refs () { - local i is_hash=y dir="$(__gitdir "${1-}")" track="${2-}" - local cur format refs - _get_comp_words_by_ref -n =: cur + local i hash dir="$(__gitdir "${1-}")" track="${2-}" + local format refs if [ -d "$dir" ]; then case "$cur" in refs|refs/*) @@ -577,7 +589,7 @@ __git_refs () local ref entry git --git-dir="$dir" for-each-ref --shell --format="ref=%(refname:short)" \ "refs/remotes/" | \ - while read entry; do + while read -r entry; do eval "$entry" ref="${ref#*/}" if [[ "$ref" == "$cur"* ]]; then @@ -587,16 +599,27 @@ __git_refs () fi return fi - for i in $(git ls-remote "$dir" 2>/dev/null); do - case "$is_hash,$i" in - y,*) is_hash=n ;; - n,*^{}) is_hash=y ;; - n,refs/tags/*) is_hash=y; echo "${i#refs/tags/}" ;; - n,refs/heads/*) is_hash=y; echo "${i#refs/heads/}" ;; - n,refs/remotes/*) is_hash=y; echo "${i#refs/remotes/}" ;; - n,*) is_hash=y; echo "$i" ;; - esac - done + case "$cur" in + refs|refs/*) + git ls-remote "$dir" "$cur*" 2>/dev/null | \ + while read -r hash i; do + case "$i" in + *^{}) ;; + *) echo "$i" ;; + esac + done + ;; + *) + git ls-remote "$dir" HEAD ORIG_HEAD 'refs/tags/*' 'refs/heads/*' 'refs/remotes/*' 2>/dev/null | \ + while read -r hash i; do + case "$i" in + *^{}) ;; + refs/*) echo "${i#refs/*/}" ;; + *) echo "$i" ;; + esac + done + ;; + esac } # __git_refs2 requires 1 argument (to pass to __git_refs) @@ -611,30 +634,22 @@ __git_refs2 () # __git_refs_remotes requires 1 argument (to pass to ls-remote) __git_refs_remotes () { - local cmd i is_hash=y - for i in $(git ls-remote "$1" 2>/dev/null); do - case "$is_hash,$i" in - n,refs/heads/*) - is_hash=y - echo "$i:refs/remotes/$1/${i#refs/heads/}" - ;; - y,*) is_hash=n ;; - n,*^{}) is_hash=y ;; - n,refs/tags/*) is_hash=y;; - n,*) is_hash=y; ;; - esac + local i hash + git ls-remote "$1" 'refs/heads/*' 2>/dev/null | \ + while read -r hash i; do + echo "$i:refs/remotes/$1/${i#refs/heads/}" done } __git_remotes () { local i ngoff IFS=$'\n' d="$(__gitdir)" - shopt -q nullglob || ngoff=1 - shopt -s nullglob + __git_shopt -q nullglob || ngoff=1 + __git_shopt -s nullglob for i in "$d/remotes"/*; do echo ${i#$d/remotes/} done - [ "$ngoff" ] && shopt -u nullglob + [ "$ngoff" ] && __git_shopt -u nullglob for i in $(git --git-dir="$d" config 
--get-regexp 'remote\..*\.url' 2>/dev/null); do i="${i#remote.}" echo "${i/.url*/}" @@ -666,19 +681,18 @@ __git_compute_merge_strategies () __git_complete_revlist_file () { - local pfx ls ref cur - _get_comp_words_by_ref -n =: cur - case "$cur" in + local pfx ls ref cur_="$cur" + case "$cur_" in *..?*:*) return ;; ?*:*) - ref="${cur%%:*}" - cur="${cur#*:}" - case "$cur" in + ref="${cur_%%:*}" + cur_="${cur_#*:}" + case "$cur_" in ?*/*) - pfx="${cur%/*}" - cur="${cur##*/}" + pfx="${cur_%/*}" + cur_="${cur_##*/}" ls="$ref:$pfx" pfx="$pfx/" ;; @@ -708,20 +722,20 @@ __git_complete_revlist_file () s,$,/, } s/^.* //')" \ - -- "$cur")) + -- "$cur_")) ;; *...*) - pfx="${cur%...*}..." - cur="${cur#*...}" - __gitcomp "$(__git_refs)" "$pfx" "$cur" + pfx="${cur_%...*}..." + cur_="${cur_#*...}" + __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_" ;; *..*) - pfx="${cur%..*}.." - cur="${cur#*..}" - __gitcomp "$(__git_refs)" "$pfx" "$cur" + pfx="${cur_%..*}.." + cur_="${cur_#*..}" + __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_" ;; *) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; esac } @@ -739,9 +753,7 @@ __git_complete_revlist () __git_complete_remote_or_refspec () { - local cur words cword - _get_comp_words_by_ref -n =: cur words cword - local cmd="${words[1]}" + local cur_="$cur" cmd="${words[1]}" local i c=2 remote="" pfx="" lhs=1 no_complete_refspec=0 while [ $c -lt $cword ]; do i="${words[c]}" @@ -763,7 +775,7 @@ __git_complete_remote_or_refspec () c=$((++c)) done if [ -z "$remote" ]; then - __gitcomp "$(__git_remotes)" + __gitcomp_nl "$(__git_remotes)" return fi if [ $no_complete_refspec = 1 ]; then @@ -771,40 +783,40 @@ __git_complete_remote_or_refspec () return fi [ "$remote" = "." ] && remote= - case "$cur" in + case "$cur_" in *:*) case "$COMP_WORDBREAKS" in *:*) : great ;; - *) pfx="${cur%%:*}:" ;; + *) pfx="${cur_%%:*}:" ;; esac - cur="${cur#*:}" + cur_="${cur_#*:}" lhs=0 ;; +*) pfx="+" - cur="${cur#+}" + cur_="${cur_#+}" ;; esac case "$cmd" in fetch) if [ $lhs = 1 ]; then - __gitcomp "$(__git_refs2 "$remote")" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs2 "$remote")" "$pfx" "$cur_" else - __gitcomp "$(__git_refs)" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_" fi ;; pull) if [ $lhs = 1 ]; then - __gitcomp "$(__git_refs "$remote")" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_" else - __gitcomp "$(__git_refs)" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_" fi ;; push) if [ $lhs = 1 ]; then - __gitcomp "$(__git_refs)" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_" else - __gitcomp "$(__git_refs "$remote")" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_" fi ;; esac @@ -812,8 +824,6 @@ __git_complete_remote_or_refspec () __git_complete_strategy () { - local cur prev - _get_comp_words_by_ref -n =: cur prev __git_compute_merge_strategies case "$prev" in -s|--strategy) @@ -991,8 +1001,7 @@ __git_aliased_command () # __git_find_on_cmdline requires 1 argument __git_find_on_cmdline () { - local word subcommand c=1 words cword - _get_comp_words_by_ref -n =: words cword + local word subcommand c=1 while [ $c -lt $cword ]; do word="${words[c]}" for subcommand in $1; do @@ -1007,8 +1016,7 @@ __git_find_on_cmdline () __git_has_doubledash () { - local c=1 words cword - _get_comp_words_by_ref -n =: words cword + local c=1 while [ $c -lt $cword ]; do if [ "--" = "${words[c]}" ]; then return 0 @@ -1022,8 +1030,7 @@ __git_whitespacelist="nowarn warn error error-all fix" _git_am () { - local cur dir="$(__gitdir)" - 
_get_comp_words_by_ref -n =: cur + local dir="$(__gitdir)" if [ -d "$dir"/rebase-apply ]; then __gitcomp "--skip --continue --resolved --abort" return @@ -1047,8 +1054,6 @@ _git_am () _git_apply () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --whitespace=*) __gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}" @@ -1071,8 +1076,6 @@ _git_add () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -1086,15 +1089,13 @@ _git_add () _git_archive () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --format=*) __gitcomp "$(git archive --list)" "" "${cur##--format=}" return ;; --remote=*) - __gitcomp "$(__git_remotes)" "" "${cur##--remote=}" + __gitcomp_nl "$(__git_remotes)" "" "${cur##--remote=}" return ;; --*) @@ -1125,7 +1126,7 @@ _git_bisect () case "$subcommand" in bad|good|reset|skip|start) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; *) COMPREPLY=() @@ -1135,9 +1136,8 @@ _git_bisect () _git_branch () { - local i c=1 only_local_ref="n" has_r="n" cur words cword + local i c=1 only_local_ref="n" has_r="n" - _get_comp_words_by_ref -n =: cur words cword while [ $c -lt $cword ]; do i="${words[c]}" case "$i" in @@ -1157,9 +1157,9 @@ _git_branch () ;; *) if [ $only_local_ref = "y" -a $has_r = "n" ]; then - __gitcomp "$(__git_heads)" + __gitcomp_nl "$(__git_heads)" else - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" fi ;; esac @@ -1167,8 +1167,6 @@ _git_branch () _git_bundle () { - local words cword - _get_comp_words_by_ref -n =: words cword local cmd="${words[2]}" case "$cword" in 2) @@ -1191,8 +1189,6 @@ _git_checkout () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --conflict=*) __gitcomp "diff3 merge" "" "${cur##--conflict=}" @@ -1210,7 +1206,7 @@ _git_checkout () if [ -n "$(__git_find_on_cmdline "$flags")" ]; then track='' fi - __gitcomp "$(__git_refs '' $track)" + __gitcomp_nl "$(__git_refs '' $track)" ;; esac } @@ -1222,14 +1218,12 @@ _git_cherry () _git_cherry_pick () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--edit --no-commit" ;; *) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; esac } @@ -1238,8 +1232,6 @@ _git_clean () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--dry-run --quiet" @@ -1251,8 +1243,6 @@ _git_clean () _git_clone () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -1279,20 +1269,15 @@ _git_commit () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --cleanup=*) __gitcomp "default strip verbatim whitespace " "" "${cur##--cleanup=}" return ;; - --reuse-message=*) - __gitcomp "$(__git_refs)" "" "${cur##--reuse-message=}" - return - ;; - --reedit-message=*) - __gitcomp "$(__git_refs)" "" "${cur##--reedit-message=}" + --reuse-message=*|--reedit-message=*|\ + --fixup=*|--squash=*) + __gitcomp_nl "$(__git_refs)" "" "${cur#*=}" return ;; --untracked-files=*) @@ -1306,7 +1291,7 @@ _git_commit () --dry-run --reuse-message= --reedit-message= --reset-author --file= --message= --template= --cleanup= --untracked-files --untracked-files= - --verbose --quiet + --verbose --quiet --fixup= --squash= " return esac @@ -1315,8 +1300,6 @@ _git_commit () _git_describe () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -1325,7 +1308,7 @@ _git_describe () " return esac - __gitcomp 
"$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } __git_diff_common_options="--stat --numstat --shortstat --summary @@ -1348,8 +1331,6 @@ _git_diff () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--cached --staged --pickaxe-all --pickaxe-regex @@ -1370,8 +1351,6 @@ _git_difftool () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --tool=*) __gitcomp "$__git_mergetools_common kompare" "" "${cur##--tool=}" @@ -1396,8 +1375,6 @@ __git_fetch_options=" _git_fetch () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "$__git_fetch_options" @@ -1409,8 +1386,6 @@ _git_fetch () _git_format_patch () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --thread=*) __gitcomp " @@ -1442,8 +1417,6 @@ _git_format_patch () _git_fsck () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -1458,8 +1431,6 @@ _git_fsck () _git_gc () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--prune --aggressive" @@ -1474,19 +1445,22 @@ _git_gitk () _gitk } +__git_match_ctag() { + awk "/^${1////\\/}/ { print \$1 }" "$2" +} + _git_grep () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " --cached --text --ignore-case --word-regexp --invert-match - --full-name + --full-name --line-number --extended-regexp --basic-regexp --fixed-strings + --perl-regexp --files-with-matches --name-only --files-without-match --max-depth @@ -1497,13 +1471,20 @@ _git_grep () ;; esac - __gitcomp "$(__git_refs)" + case "$cword,$prev" in + 2,*|*,-*) + if test -r tags; then + __gitcomp_nl "$(__git_match_ctag "$cur" tags)" + return + fi + ;; + esac + + __gitcomp_nl "$(__git_refs)" } _git_help () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--all --info --man --web" @@ -1514,15 +1495,13 @@ _git_help () __gitcomp "$__git_all_commands $(__git_aliases) attributes cli core-tutorial cvs-migration diffcore gitk glossary hooks ignore modules - repository-layout tutorial tutorial-2 + namespaces repository-layout tutorial tutorial-2 workflows " } _git_init () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --shared=*) __gitcomp " @@ -1542,8 +1521,6 @@ _git_ls_files () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--cached --deleted --modified --others --ignored @@ -1561,7 +1538,7 @@ _git_ls_files () _git_ls_remote () { - __gitcomp "$(__git_remotes)" + __gitcomp_nl "$(__git_remotes)" } _git_ls_tree () @@ -1584,7 +1561,7 @@ __git_log_common_options=" __git_log_gitk_options=" --dense --sparse --full-history --simplify-merges --simplify-by-decoration - --left-right + --left-right --notes --no-notes " # Options that go well for log and shortlog (not gitk) __git_log_shortlog_options=" @@ -1604,17 +1581,10 @@ _git_log () if [ -f "$g/MERGE_HEAD" ]; then merge="--merge" fi - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in - --pretty=*) + --pretty=*|--format=*) __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases) - " "" "${cur##--pretty=}" - return - ;; - --format=*) - __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases) - " "" "${cur##--format=}" + " "" "${cur#*=}" return ;; --date=*) @@ -1659,20 +1629,16 @@ _git_merge () { __git_complete_strategy && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "$__git_merge_options" 
return esac - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_mergetool () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --tool=*) __gitcomp "$__git_mergetools_common tortoisemerge" "" "${cur##--tool=}" @@ -1688,13 +1654,11 @@ _git_mergetool () _git_merge_base () { - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_mv () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--dry-run" @@ -1713,8 +1677,6 @@ _git_notes () { local subcommands='add append copy edit list prune remove show' local subcommand="$(__git_find_on_cmdline "$subcommands")" - local cur words cword - _get_comp_words_by_ref -n =: cur words cword case "$subcommand,$cur" in ,--*) @@ -1723,18 +1685,16 @@ _git_notes () ,*) case "${words[cword-1]}" in --ref) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; *) __gitcomp "$subcommands --ref" ;; esac ;; - add,--reuse-message=*|append,--reuse-message=*) - __gitcomp "$(__git_refs)" "" "${cur##--reuse-message=}" - ;; + add,--reuse-message=*|append,--reuse-message=*|\ add,--reedit-message=*|append,--reedit-message=*) - __gitcomp "$(__git_refs)" "" "${cur##--reedit-message=}" + __gitcomp_nl "$(__git_refs)" "" "${cur#*=}" ;; add,--*|append,--*) __gitcomp '--file= --message= --reedit-message= @@ -1753,7 +1713,7 @@ _git_notes () -m|-F) ;; *) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; esac ;; @@ -1764,8 +1724,6 @@ _git_pull () { __git_complete_strategy && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -1781,22 +1739,20 @@ _git_pull () _git_push () { - local cur prev - _get_comp_words_by_ref -n =: cur prev case "$prev" in --repo) - __gitcomp "$(__git_remotes)" + __gitcomp_nl "$(__git_remotes)" return esac case "$cur" in --repo=*) - __gitcomp "$(__git_remotes)" "" "${cur##--repo=}" + __gitcomp_nl "$(__git_remotes)" "" "${cur##--repo=}" return ;; --*) __gitcomp " --all --mirror --tags --dry-run --force --verbose - --receive-pack= --repo= + --receive-pack= --repo= --set-upstream " return ;; @@ -1807,8 +1763,6 @@ _git_push () _git_rebase () { local dir="$(__gitdir)" - local cur - _get_comp_words_by_ref -n =: cur if [ -d "$dir"/rebase-apply ] || [ -d "$dir"/rebase-merge ]; then __gitcomp "--continue --skip --abort" return @@ -1830,7 +1784,7 @@ _git_rebase () return esac - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_reflog () @@ -1841,7 +1795,7 @@ _git_reflog () if [ -z "$subcommand" ]; then __gitcomp "$subcommands" else - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" fi } @@ -1850,8 +1804,6 @@ __git_send_email_suppresscc_options="author self cc bodycc sob cccmd body all" _git_send_email () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --confirm=*) __gitcomp " @@ -1893,8 +1845,6 @@ _git_stage () __git_config_get_set_variables () { - local words cword - _get_comp_words_by_ref -n =: words cword local prevword word config_file= c=$cword while [ $c -gt 1 ]; do word="${words[c]}" @@ -1913,7 +1863,7 @@ __git_config_get_set_variables () done git --git-dir="$(__gitdir)" config $config_file --list 2>/dev/null | - while read line + while read -r line do case "$line" in *.*=*) @@ -1925,27 +1875,29 @@ __git_config_get_set_variables () _git_config () { - local cur prev - _get_comp_words_by_ref -n =: cur prev case "$prev" in branch.*.remote) - __gitcomp "$(__git_remotes)" + __gitcomp_nl "$(__git_remotes)" return ;; branch.*.merge) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" return ;; remote.*.fetch) 
local remote="${prev#remote.}" remote="${remote%.fetch}" - __gitcomp "$(__git_refs_remotes "$remote")" + if [ -z "$cur" ]; then + COMPREPLY=("refs/heads/") + return + fi + __gitcomp_nl "$(__git_refs_remotes "$remote")" return ;; remote.*.push) local remote="${prev#remote.}" remote="${remote%.push}" - __gitcomp "$(git --git-dir="$(__gitdir)" \ + __gitcomp_nl "$(git --git-dir="$(__gitdir)" \ for-each-ref --format='%(refname):%(refname)' \ refs/heads)" return @@ -1992,7 +1944,7 @@ _git_config () return ;; --get|--get-all|--unset|--unset-all) - __gitcomp "$(__git_config_get_set_variables)" + __gitcomp_nl "$(__git_config_get_set_variables)" return ;; *.*) @@ -2012,70 +1964,60 @@ _git_config () return ;; branch.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" - __gitcomp "remote merge mergeoptions rebase" "$pfx" "$cur" + local pfx="${cur%.*}." cur_="${cur##*.}" + __gitcomp "remote merge mergeoptions rebase" "$pfx" "$cur_" return ;; branch.*) - local pfx="${cur%.*}." - cur="${cur#*.}" - __gitcomp "$(__git_heads)" "$pfx" "$cur" "." + local pfx="${cur%.*}." cur_="${cur#*.}" + __gitcomp_nl "$(__git_heads)" "$pfx" "$cur_" "." return ;; guitool.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" + local pfx="${cur%.*}." cur_="${cur##*.}" __gitcomp " argprompt cmd confirm needsfile noconsole norescan prompt revprompt revunmerged title - " "$pfx" "$cur" + " "$pfx" "$cur_" return ;; difftool.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" - __gitcomp "cmd path" "$pfx" "$cur" + local pfx="${cur%.*}." cur_="${cur##*.}" + __gitcomp "cmd path" "$pfx" "$cur_" return ;; man.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" - __gitcomp "cmd path" "$pfx" "$cur" + local pfx="${cur%.*}." cur_="${cur##*.}" + __gitcomp "cmd path" "$pfx" "$cur_" return ;; mergetool.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" - __gitcomp "cmd path trustExitCode" "$pfx" "$cur" + local pfx="${cur%.*}." cur_="${cur##*.}" + __gitcomp "cmd path trustExitCode" "$pfx" "$cur_" return ;; pager.*) - local pfx="${cur%.*}." - cur="${cur#*.}" + local pfx="${cur%.*}." cur_="${cur#*.}" __git_compute_all_commands - __gitcomp "$__git_all_commands" "$pfx" "$cur" + __gitcomp_nl "$__git_all_commands" "$pfx" "$cur_" return ;; remote.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" + local pfx="${cur%.*}." cur_="${cur##*.}" __gitcomp " url proxy fetch push mirror skipDefaultUpdate receivepack uploadpack tagopt pushurl - " "$pfx" "$cur" + " "$pfx" "$cur_" return ;; remote.*) - local pfx="${cur%.*}." - cur="${cur#*.}" - __gitcomp "$(__git_remotes)" "$pfx" "$cur" "." + local pfx="${cur%.*}." cur_="${cur#*.}" + __gitcomp_nl "$(__git_remotes)" "$pfx" "$cur_" "." return ;; url.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" - __gitcomp "insteadOf pushInsteadOf" "$pfx" "$cur" + local pfx="${cur%.*}." 
cur_="${cur##*.}" + __gitcomp "insteadOf pushInsteadOf" "$pfx" "$cur_" return ;; esac @@ -2139,7 +2081,7 @@ _git_config () color.ui commit.status commit.template - core.abbrevguard + core.abbrev core.askpass core.attributesfile core.autocrlf @@ -2371,7 +2313,7 @@ _git_remote () case "$subcommand" in rename|rm|show|prune) - __gitcomp "$(__git_remotes)" + __gitcomp_nl "$(__git_remotes)" ;; update) local i c='' IFS=$'\n' @@ -2389,43 +2331,37 @@ _git_remote () _git_replace () { - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_reset () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--merge --mixed --hard --soft --patch" return ;; esac - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_revert () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--edit --mainline --no-edit --no-commit --signoff" return ;; esac - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_rm () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--cached --dry-run --ignore-unmatch --quiet" @@ -2439,8 +2375,6 @@ _git_shortlog () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -2458,17 +2392,10 @@ _git_show () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in - --pretty=*) + --pretty=*|--format=*) __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases) - " "" "${cur##--pretty=}" - return - ;; - --format=*) - __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases) - " "" "${cur##--format=}" + " "" "${cur#*=}" return ;; --*) @@ -2483,8 +2410,6 @@ _git_show () _git_show_branch () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -2501,8 +2426,6 @@ _git_show_branch () _git_stash () { - local cur - _get_comp_words_by_ref -n =: cur local save_opts='--keep-index --no-keep-index --quiet --patch' local subcommands='save list show apply clear drop pop create branch' local subcommand="$(__git_find_on_cmdline "$subcommands")" @@ -2531,7 +2454,7 @@ _git_stash () COMPREPLY=() ;; show,*|apply,*|drop,*|pop,*|branch,*) - __gitcomp "$(git --git-dir="$(__gitdir)" stash list \ + __gitcomp_nl "$(git --git-dir="$(__gitdir)" stash list \ | sed -n -e 's/:.*//p')" ;; *) @@ -2547,8 +2470,6 @@ _git_submodule () local subcommands="add status init update summary foreach sync" if [ -z "$(__git_find_on_cmdline "$subcommands")" ]; then - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--quiet --cached" @@ -2592,8 +2513,6 @@ _git_svn () --edit --rmdir --find-copies-harder --copy-similarity= " - local cur - _get_comp_words_by_ref -n =: cur case "$subcommand,$cur" in fetch,--*) __gitcomp "--revision= --fetch-all $fc_opts" @@ -2665,13 +2584,11 @@ _git_svn () _git_tag () { local i c=1 f=0 - local words cword prev - _get_comp_words_by_ref -n =: words cword prev while [ $c -lt $cword ]; do i="${words[c]}" case "$i" in -d|-v) - __gitcomp "$(__git_tags)" + __gitcomp_nl "$(__git_tags)" return ;; -f) @@ -2687,13 +2604,13 @@ _git_tag () ;; -*|tag) if [ $f = 1 ]; then - __gitcomp "$(__git_tags)" + __gitcomp_nl "$(__git_tags)" else COMPREPLY=() fi ;; *) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; esac } @@ -2710,10 +2627,14 @@ _git () if [[ -n ${ZSH_VERSION-} ]]; then emulate -L bash setopt KSH_TYPESET + + # workaround zsh's bug that leaves 'words' as a special + # variable in 
versions < 4.3.12 + typeset -h words fi - local cur words cword - _get_comp_words_by_ref -n =: cur words cword + local cur words cword prev + _get_comp_words_by_ref -n =: cur words cword prev while [ $c -lt $cword ]; do i="${words[c]}" case "$i" in @@ -2737,6 +2658,7 @@ _git () --exec-path --html-path --work-tree= + --namespace= --help " ;; @@ -2761,17 +2683,22 @@ _gitk () if [[ -n ${ZSH_VERSION-} ]]; then emulate -L bash setopt KSH_TYPESET + + # workaround zsh's bug that leaves 'words' as a special + # variable in versions < 4.3.12 + typeset -h words fi + local cur words cword prev + _get_comp_words_by_ref -n =: cur words cword prev + __git_has_doubledash && return - local cur local g="$(__gitdir)" local merge="" if [ -f "$g/MERGE_HEAD" ]; then merge="--merge" fi - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -2800,7 +2727,7 @@ complete -o bashdefault -o default -o nospace -F _git git.exe 2>/dev/null \ fi if [[ -n ${ZSH_VERSION-} ]]; then - shopt () { + __git_shopt () { local option if [ $# -ne 2 ]; then echo "USAGE: $0 (-q|-s|-u) <option>" >&2 @@ -2823,4 +2750,8 @@ if [[ -n ${ZSH_VERSION-} ]]; then return 1 esac } +else + __git_shopt () { + shopt "$@" + } fi diff --git a/contrib/convert-objects/git-convert-objects.txt b/contrib/convert-objects/git-convert-objects.txt index 9718abf86d..0565d83fc4 100644 --- a/contrib/convert-objects/git-convert-objects.txt +++ b/contrib/convert-objects/git-convert-objects.txt @@ -8,6 +8,7 @@ git-convert-objects - Converts old-style git repository SYNOPSIS -------- +[verse] 'git-convert-objects' DESCRIPTION diff --git a/contrib/credential/osxkeychain/.gitignore b/contrib/credential/osxkeychain/.gitignore new file mode 100644 index 0000000000..6c5b7026c5 --- /dev/null +++ b/contrib/credential/osxkeychain/.gitignore @@ -0,0 +1 @@ +git-credential-osxkeychain diff --git a/contrib/credential/osxkeychain/Makefile b/contrib/credential/osxkeychain/Makefile new file mode 100644 index 0000000000..75c07f8be4 --- /dev/null +++ b/contrib/credential/osxkeychain/Makefile @@ -0,0 +1,14 @@ +all:: git-credential-osxkeychain + +CC = gcc +RM = rm -f +CFLAGS = -g -Wall + +git-credential-osxkeychain: git-credential-osxkeychain.o + $(CC) -o $@ $< -Wl,-framework -Wl,Security + +git-credential-osxkeychain.o: git-credential-osxkeychain.c + $(CC) -c $(CFLAGS) $< + +clean: + $(RM) git-credential-osxkeychain git-credential-osxkeychain.o diff --git a/contrib/credential/osxkeychain/git-credential-osxkeychain.c b/contrib/credential/osxkeychain/git-credential-osxkeychain.c new file mode 100644 index 0000000000..6beed123ab --- /dev/null +++ b/contrib/credential/osxkeychain/git-credential-osxkeychain.c @@ -0,0 +1,173 @@ +#include <stdio.h> +#include <string.h> +#include <stdlib.h> +#include <Security/Security.h> + +static SecProtocolType protocol; +static char *host; +static char *path; +static char *username; +static char *password; +static UInt16 port; + +static void die(const char *err, ...) +{ + char msg[4096]; + va_list params; + va_start(params, err); + vsnprintf(msg, sizeof(msg), err, params); + fprintf(stderr, "%s\n", msg); + va_end(params); + exit(1); +} + +static void *xstrdup(const char *s1) +{ + void *ret = strdup(s1); + if (!ret) + die("Out of memory"); + return ret; +} + +#define KEYCHAIN_ITEM(x) (x ? 
strlen(x) : 0), x +#define KEYCHAIN_ARGS \ + NULL, /* default keychain */ \ + KEYCHAIN_ITEM(host), \ + 0, NULL, /* account domain */ \ + KEYCHAIN_ITEM(username), \ + KEYCHAIN_ITEM(path), \ + port, \ + protocol, \ + kSecAuthenticationTypeDefault + +static void write_item(const char *what, const char *buf, int len) +{ + printf("%s=", what); + fwrite(buf, 1, len, stdout); + putchar('\n'); +} + +static void find_username_in_item(SecKeychainItemRef item) +{ + SecKeychainAttributeList list; + SecKeychainAttribute attr; + + list.count = 1; + list.attr = &attr; + attr.tag = kSecAccountItemAttr; + + if (SecKeychainItemCopyContent(item, NULL, &list, NULL, NULL)) + return; + + write_item("username", attr.data, attr.length); + SecKeychainItemFreeContent(&list, NULL); +} + +static void find_internet_password(void) +{ + void *buf; + UInt32 len; + SecKeychainItemRef item; + + if (SecKeychainFindInternetPassword(KEYCHAIN_ARGS, &len, &buf, &item)) + return; + + write_item("password", buf, len); + if (!username) + find_username_in_item(item); + + SecKeychainItemFreeContent(NULL, buf); +} + +static void delete_internet_password(void) +{ + SecKeychainItemRef item; + + /* + * Require at least a protocol and host for removal, which is what git + * will give us; if you want to do something more fancy, use the + * Keychain manager. + */ + if (!protocol || !host) + return; + + if (SecKeychainFindInternetPassword(KEYCHAIN_ARGS, 0, NULL, &item)) + return; + + SecKeychainItemDelete(item); +} + +static void add_internet_password(void) +{ + /* Only store complete credentials */ + if (!protocol || !host || !username || !password) + return; + + if (SecKeychainAddInternetPassword( + KEYCHAIN_ARGS, + KEYCHAIN_ITEM(password), + NULL)) + return; +} + +static void read_credential(void) +{ + char buf[1024]; + + while (fgets(buf, sizeof(buf), stdin)) { + char *v; + + if (!strcmp(buf, "\n")) + break; + buf[strlen(buf)-1] = '\0'; + + v = strchr(buf, '='); + if (!v) + die("bad input: %s", buf); + *v++ = '\0'; + + if (!strcmp(buf, "protocol")) { + if (!strcmp(v, "https")) + protocol = kSecProtocolTypeHTTPS; + else if (!strcmp(v, "http")) + protocol = kSecProtocolTypeHTTP; + else /* we don't yet handle other protocols */ + exit(0); + } + else if (!strcmp(buf, "host")) { + char *colon = strchr(v, ':'); + if (colon) { + *colon++ = '\0'; + port = atoi(colon); + } + host = xstrdup(v); + } + else if (!strcmp(buf, "path")) + path = xstrdup(v); + else if (!strcmp(buf, "username")) + username = xstrdup(v); + else if (!strcmp(buf, "password")) + password = xstrdup(v); + } +} + +int main(int argc, const char **argv) +{ + const char *usage = + "Usage: git credential-osxkeychain <get|store|erase>"; + + if (!argv[1]) + die(usage); + + read_credential(); + + if (!strcmp(argv[1], "get")) + find_internet_password(); + else if (!strcmp(argv[1], "store")) + add_internet_password(); + else if (!strcmp(argv[1], "erase")) + delete_internet_password(); + /* otherwise, ignore unknown action */ + + return 0; +} diff --git a/contrib/diff-highlight/README b/contrib/diff-highlight/README new file mode 100644 index 0000000000..1b7b6df8eb --- /dev/null +++ b/contrib/diff-highlight/README @@ -0,0 +1,57 @@ +diff-highlight +============== + +Line oriented diffs are great for reviewing code, because for most +hunks, you want to see the old and the new segments of code next to each +other. Sometimes, though, when an old line and a new line are very +similar, it's hard to immediately see the difference. 
+ +You can use "--color-words" to highlight only the changed portions of +lines. However, this can often be hard to read for code, as it loses +the line structure, and you end up with oddly formatted bits. + +Instead, this script post-processes the line-oriented diff, finds pairs +of lines, and highlights the differing segments. It's currently very +simple and stupid about doing these tasks. In particular: + + 1. It will only highlight a pair of lines if they are the only two + lines in a hunk. It could instead try to match up "before" and + "after" lines for a given hunk into pairs of similar lines. + However, this may end up visually distracting, as the paired + lines would have other highlighted lines in between them. And in + practice, the lines which most need attention called to their + small, hard-to-see changes are touching only a single line. + + 2. It will find the common prefix and suffix of two lines, and + consider everything in the middle to be "different". It could + instead do a real diff of the characters between the two lines and + find common subsequences. However, the point of the highlight is to + call attention to a certain area. Even if some small subset of the + highlighted area actually didn't change, that's OK. In practice it + ends up being more readable to just have a single blob on the line + showing the interesting bit. + +The goal of the script is therefore not to be exact about highlighting +changes, but to call attention to areas of interest without being +visually distracting. Non-diff lines and existing diff coloration is +preserved; the intent is that the output should look exactly the same as +the input, except for the occasional highlight. + +Use +--- + +You can try out the diff-highlight program with: + +--------------------------------------------- +git log -p --color | /path/to/diff-highlight +--------------------------------------------- + +If you want to use it all the time, drop it in your $PATH and put the +following in your git configuration: + +--------------------------------------------- +[pager] + log = diff-highlight | less + show = diff-highlight | less + diff = diff-highlight | less +--------------------------------------------- diff --git a/contrib/diff-highlight/diff-highlight b/contrib/diff-highlight/diff-highlight new file mode 100755 index 0000000000..d8938982e4 --- /dev/null +++ b/contrib/diff-highlight/diff-highlight @@ -0,0 +1,124 @@ +#!/usr/bin/perl + +# Highlight by reversing foreground and background. You could do +# other things like bold or underline if you prefer. +my $HIGHLIGHT = "\x1b[7m"; +my $UNHIGHLIGHT = "\x1b[27m"; +my $COLOR = qr/\x1b\[[0-9;]*m/; + +my @window; + +while (<>) { + # We highlight only single-line changes, so we need + # a 4-line window to make a decision on whether + # to highlight. + push @window, $_; + next if @window < 4; + if ($window[0] =~ /^$COLOR*(\@| )/ && + $window[1] =~ /^$COLOR*-/ && + $window[2] =~ /^$COLOR*\+/ && + $window[3] !~ /^$COLOR*\+/) { + print shift @window; + show_pair(shift @window, shift @window); + } + else { + print shift @window; + } + + # Most of the time there is enough output to keep things streaming, + # but for something like "git log -Sfoo", you can get one early + # commit and then many seconds of nothing. We want to show + # that one commit as soon as possible. + # + # Since we can receive arbitrary input, there's no optimal + # place to flush. Flushing on a blank line is a heuristic that + # happens to match git-log output. 
+ if (!length) { + local $| = 1; + } +} + +# Special case a single-line hunk at the end of file. +if (@window == 3 && + $window[0] =~ /^$COLOR*(\@| )/ && + $window[1] =~ /^$COLOR*-/ && + $window[2] =~ /^$COLOR*\+/) { + print shift @window; + show_pair(shift @window, shift @window); +} + +# And then flush any remaining lines. +while (@window) { + print shift @window; +} + +exit 0; + +sub show_pair { + my @a = split_line(shift); + my @b = split_line(shift); + + # Find common prefix, taking care to skip any ansi + # color codes. + my $seen_plusminus; + my ($pa, $pb) = (0, 0); + while ($pa < @a && $pb < @b) { + if ($a[$pa] =~ /$COLOR/) { + $pa++; + } + elsif ($b[$pb] =~ /$COLOR/) { + $pb++; + } + elsif ($a[$pa] eq $b[$pb]) { + $pa++; + $pb++; + } + elsif (!$seen_plusminus && $a[$pa] eq '-' && $b[$pb] eq '+') { + $seen_plusminus = 1; + $pa++; + $pb++; + } + else { + last; + } + } + + # Find common suffix, ignoring colors. + my ($sa, $sb) = ($#a, $#b); + while ($sa >= $pa && $sb >= $pb) { + if ($a[$sa] =~ /$COLOR/) { + $sa--; + } + elsif ($b[$sb] =~ /$COLOR/) { + $sb--; + } + elsif ($a[$sa] eq $b[$sb]) { + $sa--; + $sb--; + } + else { + last; + } + } + + print highlight(\@a, $pa, $sa); + print highlight(\@b, $pb, $sb); +} + +sub split_line { + local $_ = shift; + return map { /$COLOR/ ? $_ : (split //) } + split /($COLOR*)/; +} + +sub highlight { + my ($line, $prefix, $suffix) = @_; + + return join('', + @{$line}[0..($prefix-1)], + $HIGHLIGHT, + @{$line}[$prefix..$suffix], + $UNHIGHLIGHT, + @{$line}[($suffix+1)..$#$line] + ); +} diff --git a/contrib/fast-import/git-p4 b/contrib/fast-import/git-p4 index 78e5b3aaf4..3e1aa276cf 100755 --- a/contrib/fast-import/git-p4 +++ b/contrib/fast-import/git-p4 @@ -22,37 +22,41 @@ def p4_build_cmd(cmd): location. It means that hooking into the environment, or other configuration can be done more easily. """ - real_cmd = "%s " % "p4" + real_cmd = ["p4"] user = gitConfig("git-p4.user") if len(user) > 0: - real_cmd += "-u %s " % user + real_cmd += ["-u",user] password = gitConfig("git-p4.password") if len(password) > 0: - real_cmd += "-P %s " % password + real_cmd += ["-P", password] port = gitConfig("git-p4.port") if len(port) > 0: - real_cmd += "-p %s " % port + real_cmd += ["-p", port] host = gitConfig("git-p4.host") if len(host) > 0: - real_cmd += "-h %s " % host + real_cmd += ["-h", host] client = gitConfig("git-p4.client") if len(client) > 0: - real_cmd += "-c %s " % client + real_cmd += ["-c", client] - real_cmd += "%s" % (cmd) - if verbose: - print real_cmd + + if isinstance(cmd,basestring): + real_cmd = ' '.join(real_cmd) + ' ' + cmd + else: + real_cmd += cmd return real_cmd def chdir(dir): - if os.name == 'nt': - os.environ['PWD']=dir + # P4 uses the PWD environment variable rather than getcwd(). Since we're + # not using the shell, we have to set it ourselves. This path could + # be relative, so go there first, then figure out where we ended up. 
os.chdir(dir) + os.environ['PWD'] = os.getcwd() def die(msg): if verbose: @@ -61,29 +65,34 @@ def die(msg): sys.stderr.write(msg + "\n") sys.exit(1) -def write_pipe(c, str): +def write_pipe(c, stdin): if verbose: - sys.stderr.write('Writing pipe: %s\n' % c) + sys.stderr.write('Writing pipe: %s\n' % str(c)) - pipe = os.popen(c, 'w') - val = pipe.write(str) - if pipe.close(): - die('Command failed: %s' % c) + expand = isinstance(c,basestring) + p = subprocess.Popen(c, stdin=subprocess.PIPE, shell=expand) + pipe = p.stdin + val = pipe.write(stdin) + pipe.close() + if p.wait(): + die('Command failed: %s' % str(c)) return val -def p4_write_pipe(c, str): +def p4_write_pipe(c, stdin): real_cmd = p4_build_cmd(c) - return write_pipe(real_cmd, str) + return write_pipe(real_cmd, stdin) def read_pipe(c, ignore_error=False): if verbose: - sys.stderr.write('Reading pipe: %s\n' % c) + sys.stderr.write('Reading pipe: %s\n' % str(c)) - pipe = os.popen(c, 'rb') + expand = isinstance(c,basestring) + p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand) + pipe = p.stdout val = pipe.read() - if pipe.close() and not ignore_error: - die('Command failed: %s' % c) + if p.wait() and not ignore_error: + die('Command failed: %s' % str(c)) return val @@ -93,12 +102,14 @@ def p4_read_pipe(c, ignore_error=False): def read_pipe_lines(c): if verbose: - sys.stderr.write('Reading pipe: %s\n' % c) - ## todo: check return status - pipe = os.popen(c, 'rb') + sys.stderr.write('Reading pipe: %s\n' % str(c)) + + expand = isinstance(c, basestring) + p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand) + pipe = p.stdout val = pipe.readlines() - if pipe.close(): - die('Command failed: %s' % c) + if pipe.close() or p.wait(): + die('Command failed: %s' % str(c)) return val @@ -108,23 +119,73 @@ def p4_read_pipe_lines(c): return read_pipe_lines(real_cmd) def system(cmd): + expand = isinstance(cmd,basestring) if verbose: - sys.stderr.write("executing %s\n" % cmd) - if os.system(cmd) != 0: - die("command failed: %s" % cmd) + sys.stderr.write("executing %s\n" % str(cmd)) + subprocess.check_call(cmd, shell=expand) def p4_system(cmd): """Specifically invoke p4 as the system command. """ real_cmd = p4_build_cmd(cmd) - return system(real_cmd) + expand = isinstance(real_cmd, basestring) + subprocess.check_call(real_cmd, shell=expand) + +def p4_integrate(src, dest): + p4_system(["integrate", "-Dt", src, dest]) + +def p4_sync(path): + p4_system(["sync", path]) + +def p4_add(f): + p4_system(["add", f]) + +def p4_delete(f): + p4_system(["delete", f]) + +def p4_edit(f): + p4_system(["edit", f]) + +def p4_revert(f): + p4_system(["revert", f]) + +def p4_reopen(type, file): + p4_system(["reopen", "-t", type, file]) -def isP4Exec(kind): - """Determine if a Perforce 'kind' should have execute permission +# +# Canonicalize the p4 type and return a tuple of the +# base type, plus any modifiers. See "p4 help filetypes" +# for a list and explanation. 
+# +def split_p4_type(p4type): + + p4_filetypes_historical = { + "ctempobj": "binary+Sw", + "ctext": "text+C", + "cxtext": "text+Cx", + "ktext": "text+k", + "kxtext": "text+kx", + "ltext": "text+F", + "tempobj": "binary+FSw", + "ubinary": "binary+F", + "uresource": "resource+F", + "uxbinary": "binary+Fx", + "xbinary": "binary+x", + "xltext": "text+Fx", + "xtempobj": "binary+Swx", + "xtext": "text+x", + "xunicode": "unicode+x", + "xutf16": "utf16+x", + } + if p4type in p4_filetypes_historical: + p4type = p4_filetypes_historical[p4type] + mods = "" + s = p4type.split("+") + base = s[0] + mods = "" + if len(s) > 1: + mods = s[1] + return (base, mods) - 'p4 help filetypes' gives a list of the types. If it starts with 'x', - or x follows one of a few letters. Otherwise, if there is an 'x' after - a plus sign, it is also executable""" - return (re.search(r"(^[cku]?x)|\+.*x", kind) != None) def setP4ExecBit(file, mode): # Reopens an already open file and changes the execute bit to match @@ -139,12 +200,12 @@ def setP4ExecBit(file, mode): if p4Type[-1] == "+": p4Type = p4Type[0:-1] - p4_system("reopen -t %s %s" % (p4Type, file)) + p4_reopen(p4Type, file) def getP4OpenedType(file): # Returns the perforce file type for the given file. - result = p4_read_pipe("opened %s" % file) + result = p4_read_pipe(["opened", file]) match = re.match(".*\((.+)\)\r?$", result) if match: return match.group(1) @@ -200,9 +261,17 @@ def isModeExecChanged(src_mode, dst_mode): return isModeExec(src_mode) != isModeExec(dst_mode) def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None): - cmd = p4_build_cmd("-G %s" % (cmd)) + + if isinstance(cmd,basestring): + cmd = "-G " + cmd + expand = True + else: + cmd = ["-G"] + cmd + expand = False + + cmd = p4_build_cmd(cmd) if verbose: - sys.stderr.write("Opening pipe: %s\n" % cmd) + sys.stderr.write("Opening pipe: %s\n" % str(cmd)) # Use a temporary file to avoid deadlocks without # subprocess.communicate(), which would put another copy @@ -210,11 +279,16 @@ def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None): stdin_file = None if stdin is not None: stdin_file = tempfile.TemporaryFile(prefix='p4-stdin', mode=stdin_mode) - stdin_file.write(stdin) + if isinstance(stdin,basestring): + stdin_file.write(stdin) + else: + for i in stdin: + stdin_file.write(i + '\n') stdin_file.flush() stdin_file.seek(0) - p4 = subprocess.Popen(cmd, shell=True, + p4 = subprocess.Popen(cmd, + shell=expand, stdin=stdin_file, stdout=subprocess.PIPE) @@ -247,7 +321,7 @@ def p4Where(depotPath): if not depotPath.endswith("/"): depotPath += "/" depotPath = depotPath + "..." 
- outputList = p4CmdList("where %s" % depotPath) + outputList = p4CmdList(["where", depotPath]) output = None for entry in outputList: if "depotFile" in entry: @@ -288,6 +362,11 @@ def isValidGitDir(path): def parseRevision(ref): return read_pipe("git rev-parse %s" % ref).strip() +def branchExists(ref): + rev = read_pipe(["git", "rev-parse", "-q", "--verify", ref], + ignore_error=True) + return len(rev) > 0 + def extractLogMessageFromGitCommit(commit): logMessage = "" @@ -342,6 +421,11 @@ def gitConfig(key, args = None): # set args to "--bool", for instance _gitConfig[key] = read_pipe(cmd, ignore_error=True).strip() return _gitConfig[key] +def gitConfigList(key): + if not _gitConfig.has_key(key): + _gitConfig[key] = read_pipe("git config --get-all %s" % key, ignore_error=True).strip().split(os.linesep) + return _gitConfig[key] + def p4BranchesInGit(branchesAreInRemotes = True): branches = {} @@ -444,8 +528,10 @@ def originP4BranchesExist(): def p4ChangesForPaths(depotPaths, changeRange): assert depotPaths - output = p4_read_pipe_lines("changes " + ' '.join (["%s...%s" % (p, changeRange) - for p in depotPaths])) + cmd = ['changes'] + for p in depotPaths: + cmd += ["%s...%s" % (p, changeRange)] + output = p4_read_pipe_lines(cmd) changes = {} for line in output: @@ -474,6 +560,47 @@ class Command: self.usage = "usage: %prog [options]" self.needsGit = True +class P4UserMap: + def __init__(self): + self.userMapFromPerforceServer = False + + def getUserCacheFilename(self): + home = os.environ.get("HOME", os.environ.get("USERPROFILE")) + return home + "/.gitp4-usercache.txt" + + def getUserMapFromPerforceServer(self): + if self.userMapFromPerforceServer: + return + self.users = {} + self.emails = {} + + for output in p4CmdList("users"): + if not output.has_key("User"): + continue + self.users[output["User"]] = output["FullName"] + " <" + output["Email"] + ">" + self.emails[output["Email"]] = output["User"] + + + s = '' + for (key, val) in self.users.items(): + s += "%s\t%s\n" % (key.expandtabs(1), val.expandtabs(1)) + + open(self.getUserCacheFilename(), "wb").write(s) + self.userMapFromPerforceServer = True + + def loadUserMapFromCache(self): + self.users = {} + self.userMapFromPerforceServer = False + try: + cache = open(self.getUserCacheFilename(), "rb") + lines = cache.readlines() + cache.close() + for line in lines: + entry = line.strip().split("\t") + self.users[entry[0]] = entry[1] + except IOError: + self.getUserMapFromPerforceServer() + class P4Debug(Command): def __init__(self): Command.__init__(self) @@ -487,7 +614,7 @@ class P4Debug(Command): def run(self, args): j = 0 - for output in p4CmdList(" ".join(args)): + for output in p4CmdList(args): print 'Element: %d' % j j += 1 print output @@ -554,13 +681,16 @@ class P4RollBack(Command): return True -class P4Submit(Command): +class P4Submit(Command, P4UserMap): def __init__(self): Command.__init__(self) + P4UserMap.__init__(self) self.options = [ optparse.make_option("--verbose", dest="verbose", action="store_true"), optparse.make_option("--origin", dest="origin"), optparse.make_option("-M", dest="detectRenames", action="store_true"), + # preserve the user, requires relevant p4 permissions + optparse.make_option("--preserve-user", dest="preserveUser", action="store_true"), ] self.description = "Submit changes from git to the perforce depot." 
self.usage += " [name of git branch to submit into perforce depot]" @@ -568,7 +698,9 @@ class P4Submit(Command): self.origin = "" self.detectRenames = False self.verbose = False + self.preserveUser = gitConfig("git-p4.preserveUser").lower() == "true" self.isWindows = (platform.system() == "Windows") + self.myP4UserId = None def check(self): if len(p4CmdList("opened ...")) > 0: @@ -602,11 +734,104 @@ class P4Submit(Command): return result + def p4UserForCommit(self,id): + # Return the tuple (perforce user,git email) for a given git commit id + self.getUserMapFromPerforceServer() + gitEmail = read_pipe("git log --max-count=1 --format='%%ae' %s" % id) + gitEmail = gitEmail.strip() + if not self.emails.has_key(gitEmail): + return (None,gitEmail) + else: + return (self.emails[gitEmail],gitEmail) + + def checkValidP4Users(self,commits): + # check if any git authors cannot be mapped to p4 users + for id in commits: + (user,email) = self.p4UserForCommit(id) + if not user: + msg = "Cannot find p4 user for email %s in commit %s." % (email, id) + if gitConfig('git-p4.allowMissingP4Users').lower() == "true": + print "%s" % msg + else: + die("Error: %s\nSet git-p4.allowMissingP4Users to true to allow this." % msg) + + def lastP4Changelist(self): + # Get back the last changelist number submitted in this client spec. This + # then gets used to patch up the username in the change. If the same + # client spec is being used by multiple processes then this might go + # wrong. + results = p4CmdList("client -o") # find the current client + client = None + for r in results: + if r.has_key('Client'): + client = r['Client'] + break + if not client: + die("could not get client spec") + results = p4CmdList(["changes", "-c", client, "-m", "1"]) + for r in results: + if r.has_key('change'): + return r['change'] + die("Could not get changelist number for last submit - cannot patch up user details") + + def modifyChangelistUser(self, changelist, newUser): + # fixup the user field of a changelist after it has been submitted. + changes = p4CmdList("change -o %s" % changelist) + if len(changes) != 1: + die("Bad output from p4 change modifying %s to user %s" % + (changelist, newUser)) + + c = changes[0] + if c['User'] == newUser: return # nothing to do + c['User'] = newUser + input = marshal.dumps(c) + + result = p4CmdList("change -f -i", stdin=input) + for r in result: + if r.has_key('code'): + if r['code'] == 'error': + die("Could not modify user field of changelist %s to %s:%s" % (changelist, newUser, r['data'])) + if r.has_key('data'): + print("Updated user field for changelist %s to %s" % (changelist, newUser)) + return + die("Could not modify user field of changelist %s to %s" % (changelist, newUser)) + + def canChangeChangelists(self): + # check to see if we have p4 admin or super-user permissions, either of + # which are required to modify changelists. 
+ results = p4CmdList("protects %s" % self.depotPath) + for r in results: + if r.has_key('perm'): + if r['perm'] == 'admin': + return 1 + if r['perm'] == 'super': + return 1 + return 0 + + def p4UserId(self): + if self.myP4UserId: + return self.myP4UserId + + results = p4CmdList("user -o") + for r in results: + if r.has_key('User'): + self.myP4UserId = r['User'] + return r['User'] + die("Could not find your p4 user id") + + def p4UserIsMe(self, p4User): + # return True if the given p4 user is actually me + me = self.p4UserId() + if not p4User or p4User != me: + return False + else: + return True + def prepareSubmitTemplate(self): # remove lines in the Files section that show changes to files outside the depot path we're committing into template = "" inFilesSection = False - for line in p4_read_pipe_lines("change -o"): + for line in p4_read_pipe_lines(['change', '-o']): if line.endswith("\r\n"): line = line[:-2] + "\n" if inFilesSection: @@ -628,22 +853,64 @@ class P4Submit(Command): return template + def edit_template(self, template_file): + """Invoke the editor to let the user change the submission + message. Return true if okay to continue with the submit.""" + + # if configured to skip the editing part, just submit + if gitConfig("git-p4.skipSubmitEdit") == "true": + return True + + # look at the modification time, to check later if the user saved + # the file + mtime = os.stat(template_file).st_mtime + + # invoke the editor + if os.environ.has_key("P4EDITOR"): + editor = os.environ.get("P4EDITOR") + else: + editor = read_pipe("git var GIT_EDITOR").strip() + system(editor + " " + template_file) + + # If the file was not saved, prompt to see if this patch should + # be skipped. But skip this verification step if configured so. + if gitConfig("git-p4.skipSubmitEditCheck") == "true": + return True + + # modification time updated means user saved the file + if os.stat(template_file).st_mtime > mtime: + return True + + while True: + response = raw_input("Submit template unchanged. Submit anyway? 
[y]es, [n]o (skip this patch) ") + if response == 'y': + return True + if response == 'n': + return False + def applyCommit(self, id): print "Applying %s" % (read_pipe("git log --max-count=1 --pretty=oneline %s" % id)) + (p4User, gitEmail) = self.p4UserForCommit(id) + if not self.detectRenames: # If not explicitly set check the config variable - self.detectRenames = gitConfig("git-p4.detectRenames").lower() == "true" + self.detectRenames = gitConfig("git-p4.detectRenames") - if self.detectRenames: + if self.detectRenames.lower() == "false" or self.detectRenames == "": + diffOpts = "" + elif self.detectRenames.lower() == "true": diffOpts = "-M" else: - diffOpts = "" + diffOpts = "-M%s" % self.detectRenames - if gitConfig("git-p4.detectCopies").lower() == "true": + detectCopies = gitConfig("git-p4.detectCopies") + if detectCopies.lower() == "true": diffOpts += " -C" + elif detectCopies != "" and detectCopies.lower() != "false": + diffOpts += " -C%s" % detectCopies - if gitConfig("git-p4.detectCopiesHarder").lower() == "true": + if gitConfig("git-p4.detectCopiesHarder", "--bool") == "true": diffOpts += " --find-copies-harder" diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (diffOpts, id, id)) @@ -656,7 +923,7 @@ class P4Submit(Command): modifier = diff['status'] path = diff['src'] if modifier == "M": - p4_system("edit \"%s\"" % path) + p4_edit(path) if isModeExecChanged(diff['src_mode'], diff['dst_mode']): filesToChangeExecBit[path] = diff['dst_mode'] editedFiles.add(path) @@ -671,21 +938,21 @@ class P4Submit(Command): filesToAdd.remove(path) elif modifier == "C": src, dest = diff['src'], diff['dst'] - p4_system("integrate -Dt \"%s\" \"%s\"" % (src, dest)) + p4_integrate(src, dest) if diff['src_sha1'] != diff['dst_sha1']: - p4_system("edit \"%s\"" % (dest)) + p4_edit(dest) if isModeExecChanged(diff['src_mode'], diff['dst_mode']): - p4_system("edit \"%s\"" % (dest)) + p4_edit(dest) filesToChangeExecBit[dest] = diff['dst_mode'] os.unlink(dest) editedFiles.add(dest) elif modifier == "R": src, dest = diff['src'], diff['dst'] - p4_system("integrate -Dt \"%s\" \"%s\"" % (src, dest)) + p4_integrate(src, dest) if diff['src_sha1'] != diff['dst_sha1']: - p4_system("edit \"%s\"" % (dest)) + p4_edit(dest) if isModeExecChanged(diff['src_mode'], diff['dst_mode']): - p4_system("edit \"%s\"" % (dest)) + p4_edit(dest) filesToChangeExecBit[dest] = diff['dst_mode'] os.unlink(dest) editedFiles.add(dest) @@ -708,9 +975,9 @@ class P4Submit(Command): if response == "s": print "Skipping! Good luck with the next patches..." 
for f in editedFiles: - p4_system("revert \"%s\"" % f); + p4_revert(f) for f in filesToAdd: - system("rm %s" %f) + os.remove(f) return elif response == "a": os.system(applyPatchCmd) @@ -731,10 +998,10 @@ class P4Submit(Command): system(applyPatchCmd) for f in filesToAdd: - p4_system("add \"%s\"" % f) + p4_add(f) for f in filesToDelete: - p4_system("revert \"%s\"" % f) - p4_system("delete \"%s\"" % f) + p4_revert(f) + p4_delete(f) # Set/clear executable bits for f in filesToChangeExecBit.keys(): @@ -748,11 +1015,15 @@ class P4Submit(Command): if self.interactive: submitTemplate = self.prepareLogMessage(template, logMessage) + + if self.preserveUser: + submitTemplate = submitTemplate + ("\n######## Actual user %s, modified after commit\n" % p4User) + if os.environ.has_key("P4DIFF"): del(os.environ["P4DIFF"]) diff = "" for editedFile in editedFiles: - diff += p4_read_pipe("diff -du %r" % editedFile) + diff += p4_read_pipe(['diff', '-du', editedFile]) newdiff = "" for newFile in filesToAdd: @@ -764,9 +1035,14 @@ class P4Submit(Command): newdiff += "+" + line f.close() + if self.checkAuthorship and not self.p4UserIsMe(p4User): + submitTemplate += "######## git author %s does not match your p4 account.\n" % gitEmail + submitTemplate += "######## Use git-p4 option --preserve-user to modify authorship\n" + submitTemplate += "######## Use git-p4 config git-p4.skipUserNameCheck hides this message.\n" + separatorLine = "######## everything below this line is just the diff #######\n" - [handle, fileName] = tempfile.mkstemp() + (handle, fileName) = tempfile.mkstemp() tmpFile = os.fdopen(handle, "w+") if self.isWindows: submitTemplate = submitTemplate.replace("\n", "\r\n") @@ -774,33 +1050,32 @@ class P4Submit(Command): newdiff = newdiff.replace("\n", "\r\n") tmpFile.write(submitTemplate + separatorLine + diff + newdiff) tmpFile.close() - mtime = os.stat(fileName).st_mtime - if os.environ.has_key("P4EDITOR"): - editor = os.environ.get("P4EDITOR") - else: - editor = read_pipe("git var GIT_EDITOR").strip() - system(editor + " " + fileName) - - response = "y" - if os.stat(fileName).st_mtime <= mtime: - response = "x" - while response != "y" and response != "n": - response = raw_input("Submit template unchanged. Submit anyway? [y]es, [n]o (skip this patch) ") - if response == "y": + if self.edit_template(fileName): + # read the edited message and submit tmpFile = open(fileName, "rb") message = tmpFile.read() tmpFile.close() submitTemplate = message[:message.index(separatorLine)] if self.isWindows: submitTemplate = submitTemplate.replace("\r\n", "\n") - p4_write_pipe("submit -i", submitTemplate) + p4_write_pipe(['submit', '-i'], submitTemplate) + + if self.preserveUser: + if p4User: + # Get last changelist number. Cannot easily get it from + # the submit command output as the output is + # unmarshalled. + changelist = self.lastP4Changelist() + self.modifyChangelistUser(changelist, p4User) else: + # skip this patch + print "Submission cancelled, undoing p4 changes." 
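When --preserve-user is in effect, the code above follows a successful `p4 submit -i` by looking up the newest changelist and rewriting its User field; lastP4Changelist() and modifyChangelistUser() are defined elsewhere in the script. A rough standalone sketch of the underlying p4 plumbing, assuming admin rights and a `p4` binary on PATH (written in the same Python 2 style as git-p4, not the script's own helper):

    import subprocess

    def change_user_of_changelist(changelist, new_user):
        """Rewrite the User field of a submitted changelist (admin only)."""
        spec = subprocess.check_output(["p4", "change", "-o", str(changelist)])
        lines = []
        for line in spec.splitlines(True):
            if line.startswith("User:"):
                line = "User:\t%s\n" % new_user
            lines.append(line)
        # -f lets an administrator update fields of an already submitted change
        p = subprocess.Popen(["p4", "change", "-f", "-i"], stdin=subprocess.PIPE)
        p.communicate("".join(lines))
        if p.returncode != 0:
            raise RuntimeError("p4 change -f -i failed for change %s" % changelist)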
for f in editedFiles: - p4_system("revert \"%s\"" % f); + p4_revert(f) for f in filesToAdd: - p4_system("revert \"%s\"" % f); - system("rm %s" %f) + p4_revert(f) + os.remove(f) os.remove(fileName) else: @@ -819,6 +1094,8 @@ class P4Submit(Command): die("Detecting current git branch failed!") elif len(args) == 1: self.master = args[0] + if not branchExists(self.master): + die("Branch %s does not exist" % self.master) else: return False @@ -831,6 +1108,10 @@ class P4Submit(Command): if len(self.origin) == 0: self.origin = upstream + if self.preserveUser: + if not self.canChangeChangelists(): + die("Cannot preserve user names without p4 super-user or admin permissions") + if self.verbose: print "Origin branch is " + self.origin @@ -847,10 +1128,13 @@ class P4Submit(Command): print "Perforce checkout for depot path %s located at %s" % (self.depotPath, self.clientPath) self.oldWorkingDirectory = os.getcwd() + # ensure the clientPath exists + if not os.path.exists(self.clientPath): + os.makedirs(self.clientPath) + chdir(self.clientPath) print "Synchronizing p4 checkout..." - p4_system("sync ...") - + p4_sync("...") self.check() commits = [] @@ -858,6 +1142,14 @@ class P4Submit(Command): commits.append(line.strip()) commits.reverse() + if self.preserveUser or (gitConfig("git-p4.skipUserNameCheck") == "true"): + self.checkAuthorship = False + else: + self.checkAuthorship = True + + if self.preserveUser: + self.checkValidP4Users(commits) + while len(commits) > 0: commit = commits[0] commits = commits[1:] @@ -877,11 +1169,224 @@ class P4Submit(Command): return True -class P4Sync(Command): +class View(object): + """Represent a p4 view ("p4 help views"), and map files in a + repo according to the view.""" + + class Path(object): + """A depot or client path, possibly containing wildcards. + The only one supported is ... at the end, currently. + Initialize with the full path, with //depot or //client.""" + + def __init__(self, path, is_depot): + self.path = path + self.is_depot = is_depot + self.find_wildcards() + # remember the prefix bit, useful for relative mappings + m = re.match("(//[^/]+/)", self.path) + if not m: + die("Path %s does not start with //prefix/" % self.path) + prefix = m.group(1) + if not self.is_depot: + # strip //client/ on client paths + self.path = self.path[len(prefix):] + + def find_wildcards(self): + """Make sure wildcards are valid, and set up internal + variables.""" + + self.ends_triple_dot = False + # There are three wildcards allowed in p4 views + # (see "p4 help views"). This code knows how to + # handle "..." (only at the end), but cannot deal with + # "%%n" or "*". Only check the depot_side, as p4 should + # validate that the client_side matches too. + if re.search(r'%%[1-9]', self.path): + die("Can't handle %%n wildcards in view: %s" % self.path) + if self.path.find("*") >= 0: + die("Can't handle * wildcards in view: %s" % self.path) + triple_dot_index = self.path.find("...") + if triple_dot_index >= 0: + if not self.path.endswith("..."): + die("Can handle ... wildcard only at end of path: %s" % + self.path) + self.ends_triple_dot = True + + def ensure_compatible(self, other_path): + """Make sure the wildcards agree.""" + if self.ends_triple_dot != other_path.ends_triple_dot: + die("Both paths must end with ... if either does;\n" + + "paths: %s %s" % (self.path, other_path.path)) + + def match_wildcards(self, test_path): + """See if this test_path matches us, and fill in the value + of the wildcards if so. Returns a tuple of + (True|False, wildcards[]). 
For now, only the ... at end + is supported, so at most one wildcard.""" + if self.ends_triple_dot: + dotless = self.path[:-3] + if test_path.startswith(dotless): + wildcard = test_path[len(dotless):] + return (True, [ wildcard ]) + else: + if test_path == self.path: + return (True, []) + return (False, []) + + def match(self, test_path): + """Just return if it matches; don't bother with the wildcards.""" + b, _ = self.match_wildcards(test_path) + return b + + def fill_in_wildcards(self, wildcards): + """Return the relative path, with the wildcards filled in + if there are any.""" + if self.ends_triple_dot: + return self.path[:-3] + wildcards[0] + else: + return self.path + + class Mapping(object): + def __init__(self, depot_side, client_side, overlay, exclude): + # depot_side is without the trailing /... if it had one + self.depot_side = View.Path(depot_side, is_depot=True) + self.client_side = View.Path(client_side, is_depot=False) + self.overlay = overlay # started with "+" + self.exclude = exclude # started with "-" + assert not (self.overlay and self.exclude) + self.depot_side.ensure_compatible(self.client_side) + + def __str__(self): + c = " " + if self.overlay: + c = "+" + if self.exclude: + c = "-" + return "View.Mapping: %s%s -> %s" % \ + (c, self.depot_side, self.client_side) + + def map_depot_to_client(self, depot_path): + """Calculate the client path if using this mapping on the + given depot path; does not consider the effect of other + mappings in a view. Even excluded mappings are returned.""" + matches, wildcards = self.depot_side.match_wildcards(depot_path) + if not matches: + return "" + client_path = self.client_side.fill_in_wildcards(wildcards) + return client_path + + # + # View methods + # + def __init__(self): + self.mappings = [] + + def append(self, view_line): + """Parse a view line, splitting it into depot and client + sides. Append to self.mappings, preserving order.""" + + # Split the view line into exactly two words. P4 enforces + # structure on these lines that simplifies this quite a bit. + # + # Either or both words may be double-quoted. + # Single quotes do not matter. + # Double-quote marks cannot occur inside the words. + # A + or - prefix is also inside the quotes. + # There are no quotes unless they contain a space. + # The line is already white-space stripped. + # The two words are separated by a single space. + # + if view_line[0] == '"': + # First word is double quoted. Find its end. + close_quote_index = view_line.find('"', 1) + if close_quote_index <= 0: + die("No first-word closing quote found: %s" % view_line) + depot_side = view_line[1:close_quote_index] + # skip closing quote and space + rhs_index = close_quote_index + 1 + 1 + else: + space_index = view_line.find(" ") + if space_index <= 0: + die("No word-splitting space found: %s" % view_line) + depot_side = view_line[0:space_index] + rhs_index = space_index + 1 + + if view_line[rhs_index] == '"': + # Second word is double quoted. Make sure there is a + # double quote at the end too. 
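Path.match_wildcards() above reduces to a prefix test when the view line ends in "...": everything matched by the wildcard is captured and later spliced back into the client side by fill_in_wildcards(). A tiny self-contained illustration of that idea (the helper below is written just for this example, not the class above):

    def map_depot_to_client(depot_side, client_side, depot_path):
        """Prefix-match a '...' view mapping and rewrite the matched path."""
        assert depot_side.endswith("...") and client_side.endswith("...")
        prefix = depot_side[:-3]
        if not depot_path.startswith(prefix):
            return ""                      # this mapping does not cover the path
        tail = depot_path[len(prefix):]    # the part matched by "..."
        return client_side[:-3] + tail

    # map_depot_to_client("//depot/proj/...", "proj/...", "//depot/proj/src/a.c")
    #   -> 'proj/src/a.c'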
+ if not view_line.endswith('"'): + die("View line with rhs quote should end with one: %s" % + view_line) + # skip the quotes + client_side = view_line[rhs_index+1:-1] + else: + client_side = view_line[rhs_index:] + + # prefix + means overlay on previous mapping + overlay = False + if depot_side.startswith("+"): + overlay = True + depot_side = depot_side[1:] + + # prefix - means exclude this path + exclude = False + if depot_side.startswith("-"): + exclude = True + depot_side = depot_side[1:] + + m = View.Mapping(depot_side, client_side, overlay, exclude) + self.mappings.append(m) + + def map_in_client(self, depot_path): + """Return the relative location in the client where this + depot file should live. Returns "" if the file should + not be mapped in the client.""" + + paths_filled = [] + client_path = "" + + # look at later entries first + for m in self.mappings[::-1]: + + # see where will this path end up in the client + p = m.map_depot_to_client(depot_path) + + if p == "": + # Depot path does not belong in client. Must remember + # this, as previous items should not cause files to + # exist in this path either. Remember that the list is + # being walked from the end, which has higher precedence. + # Overlap mappings do not exclude previous mappings. + if not m.overlay: + paths_filled.append(m.client_side) + + else: + # This mapping matched; no need to search any further. + # But, the mapping could be rejected if the client path + # has already been claimed by an earlier mapping. + already_mapped_in_client = False + for f in paths_filled: + # this is View.Path.match + if f.match(p): + already_mapped_in_client = True + break + if not already_mapped_in_client: + # Include this file, unless it is from a line that + # explicitly said to exclude it. + if not m.exclude: + client_path = p + + # a match, even if rejected, always stops the search + break + + return client_path + +class P4Sync(Command, P4UserMap): delete_actions = ( "delete", "move/delete", "purge" ) def __init__(self): Command.__init__(self) + P4UserMap.__init__(self) self.options = [ optparse.make_option("--branch", dest="branch"), optparse.make_option("--detect-branches", dest="detectBranches", action="store_true"), @@ -923,7 +1428,7 @@ class P4Sync(Command): self.p4BranchesInGit = [] self.cloneExclude = [] self.useClientSpec = False - self.clientSpecDirs = [] + self.clientSpecDirs = None if gitConfig("git-p4.syncFromOrigin") == "false": self.syncWithOrigin = False @@ -974,20 +1479,7 @@ class P4Sync(Command): def stripRepoPath(self, path, prefixes): if self.useClientSpec: - - # if using the client spec, we use the output directory - # specified in the client. For example, a view - # //depot/foo/branch/... //client/branch/foo/... - # will end up putting all foo/branch files into - # branch/foo/ - for val in self.clientSpecDirs: - if path.startswith(val[0]): - # replace the depot path with the client path - path = path.replace(val[0], val[1][1]) - # now strip out the client (//client/...) - path = re.sub("^(//[^/]+/)", '', path) - # the rest is all path - return path + return self.clientSpecDirs.map_in_client(path) if self.keepRepoPath: prefixes = [re.sub("^(//[^/]+/).*", r'\1', prefixes[0])] @@ -1033,38 +1525,66 @@ class P4Sync(Command): # - helper for streamP4Files def streamOneP4File(self, file, contents): - if file["type"] == "apple": - print "\nfile %s is a strange apple file that forks. 
Ignoring" % \ - file['depotFile'] - return - relPath = self.stripRepoPath(file['depotFile'], self.branchPrefixes) relPath = self.wildcard_decode(relPath) if verbose: sys.stderr.write("%s\n" % relPath) - mode = "644" - if isP4Exec(file["type"]): - mode = "755" - elif file["type"] == "symlink": - mode = "120000" - # p4 print on a symlink contains "target\n", so strip it off + (type_base, type_mods) = split_p4_type(file["type"]) + + git_mode = "100644" + if "x" in type_mods: + git_mode = "100755" + if type_base == "symlink": + git_mode = "120000" + # p4 print on a symlink contains "target\n"; remove the newline data = ''.join(contents) contents = [data[:-1]] - if self.isWindows and file["type"].endswith("text"): + if type_base == "utf16": + # p4 delivers different text in the python output to -G + # than it does when using "print -o", or normal p4 client + # operations. utf16 is converted to ascii or utf8, perhaps. + # But ascii text saved as -t utf16 is completely mangled. + # Invoke print -o to get the real contents. + text = p4_read_pipe(['print', '-q', '-o', '-', file['depotFile']]) + contents = [ text ] + + if type_base == "apple": + # Apple filetype files will be streamed as a concatenation of + # its appledouble header and the contents. This is useless + # on both macs and non-macs. If using "print -q -o xx", it + # will create "xx" with the data, and "%xx" with the header. + # This is also not very useful. + # + # Ideally, someday, this script can learn how to generate + # appledouble files directly and import those to git, but + # non-mac machines can never find a use for apple filetype. + print "\nIgnoring apple filetype file %s" % file['depotFile'] + return + + # Perhaps windows wants unicode, utf16 newlines translated too; + # but this is not doing it. + if self.isWindows and type_base == "text": mangled = [] for data in contents: data = data.replace("\r\n", "\n") mangled.append(data) contents = mangled - if file['type'] in ('text+ko', 'unicode+ko', 'binary+ko'): - contents = map(lambda text: re.sub(r'(?i)\$(Id|Header):[^$]*\$',r'$\1$', text), contents) - elif file['type'] in ('text+k', 'ktext', 'kxtext', 'unicode+k', 'binary+k'): - contents = map(lambda text: re.sub(r'\$(Id|Header|Author|Date|DateTime|Change|File|Revision):[^$\n]*\$',r'$\1$', text), contents) + # Note that we do not try to de-mangle keywords on utf16 files, + # even though in theory somebody may want that. + if type_base in ("text", "unicode", "binary"): + if "ko" in type_mods: + text = ''.join(contents) + text = re.sub(r'\$(Id|Header):[^$]*\$', r'$\1$', text) + contents = [ text ] + elif "k" in type_mods: + text = ''.join(contents) + text = re.sub(r'\$(Id|Header|Author|Date|DateTime|Change|File|Revision):[^$]*\$', r'$\1$', text) + contents = [ text ] - self.gitStream.write("M %s inline %s\n" % (mode, relPath)) + self.gitStream.write("M %s inline %s\n" % (git_mode, relPath)) # total length... 
length = 0 @@ -1109,19 +1629,17 @@ class P4Sync(Command): filesToDelete = [] for f in files: - includeFile = True - for val in self.clientSpecDirs: - if f['path'].startswith(val[0]): - if val[1][0] <= 0: - includeFile = False - break + # if using a client spec, only add the files that have + # a path in the client + if self.clientSpecDirs: + if self.clientSpecDirs.map_in_client(f['path']) == "": + continue - if includeFile: - filesForCommit.append(f) - if f['action'] in self.delete_actions: - filesToDelete.append(f) - else: - filesToRead.append(f) + filesForCommit.append(f) + if f['action'] in self.delete_actions: + filesToDelete.append(f) + else: + filesToRead.append(f) # deleted files... for f in filesToDelete: @@ -1136,10 +1654,11 @@ class P4Sync(Command): def streamP4FilesCbSelf(entry): self.streamP4FilesCb(entry) - p4CmdList("-x - print", - '\n'.join(['%s#%s' % (f['path'], f['rev']) - for f in filesToRead]), - cb=streamP4FilesCbSelf) + fileArgs = ['%s#%s' % (f['path'], f['rev']) for f in filesToRead] + + p4CmdList(["-x", "-", "print"], + stdin=fileArgs, + cb=streamP4FilesCbSelf) # do the last chunk if self.stream_file.has_key('depotFile'): @@ -1200,8 +1719,8 @@ class P4Sync(Command): if self.verbose: print "Change %s is labelled %s" % (change, labelDetails) - files = p4CmdList("files " + ' '.join (["%s...@%s" % (p, change) - for p in branchPrefixes])) + files = p4CmdList(["files"] + ["%s...@%s" % (p, change) + for p in branchPrefixes]) if len(files) == len(labelRevisions): @@ -1236,41 +1755,6 @@ class P4Sync(Command): print ("Tag %s does not match with change %s: file count is different." % (labelDetails["label"], change)) - def getUserCacheFilename(self): - home = os.environ.get("HOME", os.environ.get("USERPROFILE")) - return home + "/.gitp4-usercache.txt" - - def getUserMapFromPerforceServer(self): - if self.userMapFromPerforceServer: - return - self.users = {} - - for output in p4CmdList("users"): - if not output.has_key("User"): - continue - self.users[output["User"]] = output["FullName"] + " <" + output["Email"] + ">" - - - s = '' - for (key, val) in self.users.items(): - s += "%s\t%s\n" % (key.expandtabs(1), val.expandtabs(1)) - - open(self.getUserCacheFilename(), "wb").write(s) - self.userMapFromPerforceServer = True - - def loadUserMapFromCache(self): - self.users = {} - self.userMapFromPerforceServer = False - try: - cache = open(self.getUserCacheFilename(), "rb") - lines = cache.readlines() - cache.close() - for line in lines: - entry = line.strip().split("\t") - self.users[entry[0]] = entry[1] - except IOError: - self.getUserMapFromPerforceServer() - def getLabels(self): self.labels = {} @@ -1284,9 +1768,9 @@ class P4Sync(Command): newestChange = 0 if self.verbose: print "Querying files for label %s" % label - for file in p4CmdList("files " - + ' '.join (["%s...@%s" % (p, label) - for p in self.depotPaths])): + for file in p4CmdList(["files"] + + ["%s...@%s" % (p, label) + for p in self.depotPaths]): revisions[file["depotFile"]] = file["rev"] change = int(file["change"]) if change > newestChange: @@ -1309,7 +1793,13 @@ class P4Sync(Command): def getBranchMapping(self): lostAndFoundBranches = set() - for info in p4CmdList("branches"): + user = gitConfig("git-p4.branchUser") + if len(user) > 0: + command = "branches -u %s" % user + else: + command = "branches" + + for info in p4CmdList(command): details = p4Cmd("branch -o %s" % info["branch"]) viewIdx = 0 while details.has_key("View%s" % viewIdx): @@ -1338,6 +1828,25 @@ class P4Sync(Command): if source not in 
self.knownBranches: lostAndFoundBranches.add(source) + # Perforce does not strictly require branches to be defined, so we also + # check git config for a branch list. + # + # Example of branch definition in git config file: + # [git-p4] + # branchList=main:branchA + # branchList=main:branchB + # branchList=branchA:branchC + configBranches = gitConfigList("git-p4.branchList") + for branch in configBranches: + if branch: + (source, destination) = branch.split(":") + self.knownBranches[destination] = source + + lostAndFoundBranches.discard(destination) + + if source not in self.knownBranches: + lostAndFoundBranches.add(source) + for branch in lostAndFoundBranches: self.knownBranches[branch] = branch @@ -1508,17 +2017,17 @@ class P4Sync(Command): def importHeadRevision(self, revision): print "Doing initial import of %s from revision %s into %s" % (' '.join(self.depotPaths), revision, self.branch) - details = { "user" : "git perforce import user", "time" : int(time.time()) } + details = {} + details["user"] = "git perforce import user" details["desc"] = ("Initial import of %s from the state at revision %s\n" % (' '.join(self.depotPaths), revision)) details["change"] = revision newestRevision = 0 fileCnt = 0 - for info in p4CmdList("files " - + ' '.join(["%s...%s" - % (p, revision) - for p in self.depotPaths])): + fileArgs = ["%s...%s" % (p,revision) for p in self.depotPaths] + + for info in p4CmdList(["files"] + fileArgs): if 'code' in info and info['code'] == 'error': sys.stderr.write("p4 returned an error: %s\n" @@ -1548,6 +2057,18 @@ class P4Sync(Command): fileCnt = fileCnt + 1 details["change"] = newestRevision + + # Use time from top-most change so that all git-p4 clones of + # the same p4 repo have the same commit SHA1s. + res = p4CmdList("describe -s %d" % newestRevision) + newestTime = None + for r in res: + if r.has_key('time'): + newestTime = int(r['time']) + if newestTime is None: + die("\"describe -s\" on newest change %d did not give a time") + details["time"] = newestTime + self.updateOptionDict(details) try: self.commit(details, self.extractFilesFromCommit(details), self.branch, self.depotPaths) @@ -1557,50 +2078,31 @@ class P4Sync(Command): def getClientSpec(self): - specList = p4CmdList( "client -o" ) - temp = {} - for entry in specList: - for k,v in entry.iteritems(): - if k.startswith("View"): - - # p4 has these %%1 to %%9 arguments in specs to - # reorder paths; which we can't handle (yet :) - if re.match('%%\d', v) != None: - print "Sorry, can't handle %%n arguments in client specs" - sys.exit(1) - - if v.startswith('"'): - start = 1 - else: - start = 0 - index = v.find("...") - - # save the "client view"; i.e the RHS of the view - # line that tells the client where to put the - # files for this view. - cv = v[index+3:].strip() # +3 to remove previous '...' - - # if the client view doesn't end with a - # ... wildcard, then we're going to mess up the - # output directory, so fail gracefully. - if not cv.endswith('...'): - print 'Sorry, client view in "%s" needs to end with wildcard' % (k) - sys.exit(1) - cv=cv[:-3] - - # now save the view; +index means included, -index - # means it should be filtered out. 
- v = v[start:index] - if v.startswith("-"): - v = v[1:] - include = -len(v) - else: - include = len(v) + specList = p4CmdList("client -o") + if len(specList) != 1: + die('Output from "client -o" is %d lines, expecting 1' % + len(specList)) + + # dictionary of all client parameters + entry = specList[0] + + # just the keys that start with "View" + view_keys = [ k for k in entry.keys() if k.startswith("View") ] + + # hold this new View + view = View() - temp[v] = (include, cv) + # append the lines, in order, to the view + for view_num in range(len(view_keys)): + k = "View%d" % view_num + if k not in view_keys: + die("Expected view key %s missing" % k) + view.append(entry[k]) - self.clientSpecDirs = temp.items() - self.clientSpecDirs.sort( lambda x, y: abs( y[1][0] ) - abs( x[1][0] ) ) + self.clientSpecDirs = view + if self.verbose: + for i, m in enumerate(self.clientSpecDirs.mappings): + print "clientSpecDirs %d: %s" % (i, str(m)) def run(self, args): self.depotPaths = [] @@ -1634,7 +2136,10 @@ class P4Sync(Command): if not gitBranchExists(self.refPrefix + "HEAD") and self.importIntoRemotes and gitBranchExists(self.branch): system("git symbolic-ref %sHEAD %s" % (self.refPrefix, self.branch)) - if self.useClientSpec or gitConfig("git-p4.useclientspec") == "true": + if not self.useClientSpec: + if gitConfig("git-p4.useclientspec", "--bool") == "true": + self.useClientSpec = True + if self.useClientSpec: self.getClientSpec() # TODO: should always look at previous commits, @@ -1670,12 +2175,14 @@ class P4Sync(Command): else: paths = [] for (prev, cur) in zip(self.previousDepotPaths, depotPaths): - for i in range(0, min(len(cur), len(prev))): - if cur[i] <> prev[i]: + prev_list = prev.split("/") + cur_list = cur.split("/") + for i in range(0, min(len(cur_list), len(prev_list))): + if cur_list[i] <> prev_list[i]: i = i - 1 break - paths.append (cur[:i + 1]) + paths.append ("/".join(cur_list[:i + 1])) self.previousDepotPaths = paths @@ -1705,6 +2212,17 @@ class P4Sync(Command): revision = "" self.users = {} + # Make sure no revision specifiers are used when --changesfile + # is specified. 
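A related fix further down compares the previous and current depot paths component-wise (splitting on "/") rather than character-wise, so the computed common prefix can no longer end in the middle of a directory name. A small illustration of the difference (the helper is written only for this example):

    def common_depot_prefix(prev, cur):
        """Longest common prefix of two depot paths, whole components only."""
        common = []
        for a, b in zip(prev.split("/"), cur.split("/")):
            if a != b:
                break
            common.append(a)
        return "/".join(common)

    # Character-wise, "//depot/release1/" and "//depot/release2/" would share
    # "//depot/release"; component-wise the prefix stops at the directory:
    # common_depot_prefix("//depot/release1/", "//depot/release2/") -> '//depot'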
+ bad_changesfile = False + if len(self.changesFile) > 0: + for p in self.depotPaths: + if p.find("@") >= 0 or p.find("#") >= 0: + bad_changesfile = True + break + if bad_changesfile: + die("Option --changesfile is incompatible with revision specifiers") + newPaths = [] for p in self.depotPaths: if p.find("@") != -1: @@ -1721,7 +2239,10 @@ class P4Sync(Command): revision = p[hashIdx:] p = p[:hashIdx] elif self.previousDepotPaths == []: - revision = "#head" + # pay attention to changesfile, if given, else import + # the entire p4 tree at the head revision + if len(self.changesFile) == 0: + revision = "#head" p = re.sub ("\.\.\.$", "", p) if not p.endswith("/"): @@ -2016,7 +2537,8 @@ def main(): args = sys.argv[2:] if len(options) > 0: - options.append(optparse.make_option("--git-dir", dest="gitdir")) + if cmd.needsGit: + options.append(optparse.make_option("--git-dir", dest="gitdir")) parser = optparse.OptionParser(cmd.usage.replace("%prog", "%prog " + cmdName), options, @@ -2046,6 +2568,7 @@ def main(): if not cmd.run(args): parser.print_help() + sys.exit(2) if __name__ == '__main__': diff --git a/contrib/fast-import/git-p4.txt b/contrib/fast-import/git-p4.txt deleted file mode 100644 index e09da445b6..0000000000 --- a/contrib/fast-import/git-p4.txt +++ /dev/null @@ -1,215 +0,0 @@ -git-p4 - Perforce <-> Git converter using git-fast-import - -Usage -===== - -git-p4 can be used in two different ways: - -1) To import changes from Perforce to a Git repository, using "git-p4 sync". - -2) To submit changes from Git back to Perforce, using "git-p4 submit". - -Importing -========= - -Simply start with - - git-p4 clone //depot/path/project - -or - - git-p4 clone //depot/path/project myproject - -This will: - -1) Create an empty git repository in a subdirectory called "project" (or -"myproject" with the second command) - -2) Import the head revision from the given Perforce path into a git branch -called "p4" (remotes/p4 actually) - -3) Create a master branch based on it and check it out. - -If you want the entire history (not just the head revision) then you can simply -append a "@all" to the depot path: - - git-p4 clone //depot/project/main@all myproject - - - -If you want more control you can also use the git-p4 sync command directly: - - mkdir repo-git - cd repo-git - git init - git-p4 sync //path/in/your/perforce/depot - -This will import the current head revision of the specified depot path into a -"remotes/p4/master" branch of your git repository. You can use the ---branch=mybranch option to import into a different branch. - -If you want to import the entire history of a given depot path simply use: - - git-p4 sync //path/in/depot@all - - -Note: - -To achieve optimal compression you may want to run 'git repack -a -d -f' after -a big import. This may take a while. - -Incremental Imports -=================== - -After an initial import you can continue to synchronize your git repository -with newer changes from the Perforce depot by just calling - - git-p4 sync - -in your git repository. By default the "remotes/p4/master" branch is updated. - -Advanced Setup -============== - -Suppose you have a periodically updated git repository somewhere, containing a -complete import of a Perforce project. This repository can be cloned and used -with git-p4. When updating the cloned repository with the "sync" command, -git-p4 will try to fetch changes from the original repository first. The git -protocol used with this is usually faster than importing from Perforce -directly. 
- -This behaviour can be disabled by setting the "git-p4.syncFromOrigin" git -configuration variable to "false". - -Updating -======== - -A common working pattern is to fetch the latest changes from the Perforce depot -and merge them with local uncommitted changes. The recommended way is to use -git's rebase mechanism to preserve linear history. git-p4 provides a convenient - - git-p4 rebase - -command that calls git-p4 sync followed by git rebase to rebase the current -working branch. - -Submitting -========== - -git-p4 has support for submitting changes from a git repository back to the -Perforce depot. This requires a Perforce checkout separate from your git -repository. To submit all changes that are in the current git branch but not in -the "p4" branch (or "origin" if "p4" doesn't exist) simply call - - git-p4 submit - -in your git repository. If you want to submit changes in a specific branch that -is not your current git branch you can also pass that as an argument: - - git-p4 submit mytopicbranch - -You can override the reference branch with the --origin=mysourcebranch option. - -If a submit fails you may have to "p4 resolve" and submit manually. You can -continue importing the remaining changes with - - git-p4 submit --continue - -Example -======= - -# Clone a repository - git-p4 clone //depot/path/project -# Enter the newly cloned directory - cd project -# Do some work... - vi foo.h -# ... and commit locally to gi - git commit foo.h -# In the meantime somebody submitted changes to the Perforce depot. Rebase your latest -# changes against the latest changes in Perforce: - git-p4 rebase -# Submit your locally committed changes back to Perforce - git-p4 submit -# ... and synchronize with Perforce - git-p4 rebase - - -Configuration parameters -======================== - -git-p4.user ($P4USER) - -Allows you to specify the username to use to connect to the Perforce repository. - - git config [--global] git-p4.user public - -git-p4.password ($P4PASS) - -Allows you to specify the password to use to connect to the Perforce repository. -Warning this password will be visible on the command-line invocation of the p4 binary. - - git config [--global] git-p4.password public1234 - -git-p4.port ($P4PORT) - -Specify the port to be used to contact the Perforce server. As this will be passed -directly to the p4 binary, it may be in the format host:port as well. - - git config [--global] git-p4.port codes.zimbra.com:2666 - -git-p4.host ($P4HOST) - -Specify the host to contact for a Perforce repository. - - git config [--global] git-p4.host perforce.example.com - -git-p4.client ($P4CLIENT) - -Specify the client name to use - - git config [--global] git-p4.client public-view - -git-p4.allowSubmit - - git config [--global] git-p4.allowSubmit false - -git-p4.syncFromOrigin - -A useful setup may be that you have a periodically updated git repository -somewhere that contains a complete import of a Perforce project. That git -repository can be used to clone the working repository from and one would -import from Perforce directly after cloning using git-p4. If the connection to -the Perforce server is slow and the working repository hasn't been synced for a -while it may be desirable to fetch changes from the origin git repository using -the efficient git protocol. git-p4 supports this setup by calling "git fetch origin" -by default if there is an origin branch. 
You can disable this using: - - git config [--global] git-p4.syncFromOrigin false - -git-p4.useclientspec - - git config [--global] git-p4.useclientspec false - -The P4CLIENT environment variable should be correctly set for p4 to be -able to find the relevant client. This client spec will be used to -both filter the files cloned by git and set the directory layout as -specified in the client (this implies --keep-path style semantics). - -Implementation Details... -========================= - -* Changesets from Perforce are imported using git fast-import. -* The import does not require anything from the Perforce client view as it just uses - "p4 print //depot/path/file#revision" to get the actual file contents. -* Every imported changeset has a special [git-p4...] line at the - end of the log message that gives information about the corresponding - Perforce change number and is also used by git-p4 itself to find out - where to continue importing when doing incremental imports. - Basically when syncing it extracts the perforce change number of the - latest commit in the "p4" branch and uses "p4 changes //depot/path/...@changenum,#head" - to find out which changes need to be imported. -* git-p4 submit uses "git rev-list" to pick the commits between the "p4" branch - and the current branch. - The commits themselves are applied using git diff/format-patch ... | git apply - diff --git a/contrib/git-jump/README b/contrib/git-jump/README new file mode 100644 index 0000000000..1cebc328cb --- /dev/null +++ b/contrib/git-jump/README @@ -0,0 +1,92 @@ +git-jump +======== + +Git-jump is a script for helping you jump to "interesting" parts of your +project in your editor. It works by outputting a set of interesting +spots in the "quickfix" format, which editors like vim can use as a +queue of places to visit (this feature is usually used to jump to errors +produced by a compiler). For example, given a diff like this: + +------------------------------------ +diff --git a/foo.c b/foo.c +index a655540..5a59044 100644 +--- a/foo.c ++++ b/foo.c +@@ -1,3 +1,3 @@ + int main(void) { +- printf("hello word!\n"); ++ printf("hello world!\n"); + } +----------------------------------- + +git-jump will feed this to the editor: + +----------------------------------- +foo.c:2: printf("hello word!\n"); +----------------------------------- + +Obviously this trivial case isn't that interesting; you could just open +`foo.c` yourself. But when you have many changes scattered across a +project, you can use the editor's support to "jump" from point to point. + +Git-jump can generate three types of interesting lists: + + 1. The beginning of any diff hunks. + + 2. The beginning of any merge conflict markers. + + 3. Any grep matches. + + +Using git-jump +-------------- + +To use it, just drop git-jump in your PATH, and then invoke it like +this: + +-------------------------------------------------- +# jump to changes not yet staged for commit +git jump diff + +# jump to changes that are staged for commit; you can give +# arbitrary diff options +git jump diff --cached + +# jump to merge conflicts +git jump merge + +# jump to all instances of foo_bar +git jump grep foo_bar + +# same as above, but case-insensitive; you can give +# arbitrary grep options +git jump grep -i foo_bar +-------------------------------------------------- + + +Related Programs +---------------- + +You can accomplish some of the same things with individual tools. For +example, you can use `git mergetool` to start vimdiff on each unmerged +file. 
`git jump merge` is for the vim-wielding luddite who just wants to +jump straight to the conflict text with no fanfare. + +As of git v1.7.2, `git grep` knows the `--open-files-in-pager` option, +which does something similar to `git jump grep`. However, it is limited +to positioning the cursor to the correct line in only the first file, +leaving you to locate subsequent hits in that file or other files using +the editor or pager. By contrast, git-jump provides the editor with a +complete list of files and line numbers for each match. + + +Limitations +----------- + +This scripts was written and tested with vim. Given that the quickfix +format is the same as what gcc produces, I expect emacs users have a +similar feature for iterating through the list, but I know nothing about +how to activate it. + +The shell snippets to generate the quickfix lines will almost certainly +choke on filenames with exotic characters (like newlines). diff --git a/contrib/git-jump/git-jump b/contrib/git-jump/git-jump new file mode 100755 index 0000000000..a33674e47a --- /dev/null +++ b/contrib/git-jump/git-jump @@ -0,0 +1,69 @@ +#!/bin/sh + +usage() { + cat <<\EOF +usage: git jump <mode> [<args>] + +Jump to interesting elements in an editor. +The <mode> parameter is one of: + +diff: elements are diff hunks. Arguments are given to diff. + +merge: elements are merge conflicts. Arguments are ignored. + +grep: elements are grep hits. Arguments are given to grep. +EOF +} + +open_editor() { + editor=`git var GIT_EDITOR` + eval "$editor -q \$1" +} + +mode_diff() { + git diff --relative "$@" | + perl -ne ' + if (m{^\+\+\+ b/(.*)}) { $file = $1; next } + defined($file) or next; + if (m/^@@ .*\+(\d+)/) { $line = $1; next } + defined($line) or next; + if (/^ /) { $line++; next } + if (/^[-+]\s*(.*)/) { + print "$file:$line: $1\n"; + $line = undef; + } + ' +} + +mode_merge() { + git ls-files -u | + perl -pe 's/^.*?\t//' | + sort -u | + while IFS= read fn; do + grep -Hn '^<<<<<<<' "$fn" + done +} + +# Grep -n generates nice quickfix-looking lines by itself, +# but let's clean up extra whitespace, so they look better if the +# editor shows them to us in the status bar. +mode_grep() { + git grep -n "$@" | + perl -pe ' + s/[ \t]+/ /g; + s/^ *//; + ' +} + +if test $# -lt 1; then + usage >&2 + exit 1 +fi +mode=$1; shift + +trap 'rm -f "$tmp"' 0 1 2 3 15 +tmp=`mktemp -t git-jump.XXXXXX` || exit 1 +type "mode_$mode" >/dev/null 2>&1 || { usage >&2; exit 1; } +"mode_$mode" "$@" >"$tmp" +test -s "$tmp" || exit 0 +open_editor "$tmp" diff --git a/contrib/gitview/gitview.txt b/contrib/gitview/gitview.txt index 77c29de305..9e12f97842 100644 --- a/contrib/gitview/gitview.txt +++ b/contrib/gitview/gitview.txt @@ -7,6 +7,7 @@ gitview - A GTK based repository browser for git SYNOPSIS -------- +[verse] 'gitview' [options] [args] DESCRIPTION diff --git a/contrib/hooks/post-receive-email b/contrib/hooks/post-receive-email index 21989fc6ab..ba077c13f9 100755 --- a/contrib/hooks/post-receive-email +++ b/contrib/hooks/post-receive-email @@ -11,11 +11,11 @@ # will have put this somewhere standard. You should make this script # executable then link to it in the repository you would like to use it in. 
# For example, on debian the hook is stored in -# /usr/share/doc/git-core/contrib/hooks/post-receive-email: +# /usr/share/git-core/contrib/hooks/post-receive-email: # # chmod a+x post-receive-email # cd /path/to/your/repository.git -# ln -sf /usr/share/doc/git-core/contrib/hooks/post-receive-email hooks/post-receive +# ln -sf /usr/share/git-core/contrib/hooks/post-receive-email hooks/post-receive # # This hook script assumes it is enabled on the central repository of a # project, with all users pushing only to it and not between each other. It @@ -60,6 +60,11 @@ # email body. If not specified, there is no limit. # Lines beyond the limit are suppressed and counted, and a final # line is added indicating the number of suppressed lines. +# hooks.diffopts +# Alternate options for the git diff-tree invocation that shows changes. +# Default is "--stat --summary --find-copies-harder". Add -p to those +# options to include a unified diff of changes in addition to the usual +# summary output. # # Notes # ----- @@ -446,7 +451,7 @@ generate_update_branch_email() # non-fast-forward updates. echo "" echo "Summary of changes:" - git diff-tree --stat --summary --find-copies-harder $oldrev..$newrev + git diff-tree $diffopts $oldrev..$newrev } # @@ -723,6 +728,8 @@ envelopesender=$(git config hooks.envelopesender) emailprefix=$(git config hooks.emailprefix || echo '[SCM] ') custom_showrev=$(git config hooks.showrev) maxlines=$(git config hooks.emailmaxlines) +diffopts=$(git config hooks.diffopts) +: ${diffopts:="--stat --summary --find-copies-harder"} # --- Main loop # Allow dual mode: run from the command line just like the update hook, or diff --git a/contrib/mw-to-git/git-remote-mediawiki b/contrib/mw-to-git/git-remote-mediawiki new file mode 100755 index 0000000000..c18bfa1f15 --- /dev/null +++ b/contrib/mw-to-git/git-remote-mediawiki @@ -0,0 +1,827 @@ +#! /usr/bin/perl + +# Copyright (C) 2011 +# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr> +# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr> +# Claire Fousse <claire.fousse@ensimag.imag.fr> +# David Amouyal <david.amouyal@ensimag.imag.fr> +# Matthieu Moy <matthieu.moy@grenoble-inp.fr> +# License: GPL v2 or later + +# Gateway between Git and MediaWiki. +# https://github.com/Bibzball/Git-Mediawiki/wiki +# +# Known limitations: +# +# - Only wiki pages are managed, no support for [[File:...]] +# attachments. +# +# - Poor performance in the best case: it takes forever to check +# whether we're up-to-date (on fetch or push) or to fetch a few +# revisions from a large wiki, because we use exclusively a +# page-based synchronization. We could switch to a wiki-wide +# synchronization when the synchronization involves few revisions +# but the wiki is large. +# +# - Git renames could be turned into MediaWiki renames (see TODO +# below) +# +# - login/password support requires the user to write the password +# cleartext in a file (see TODO below). +# +# - No way to import "one page, and all pages included in it" +# +# - Multiple remote MediaWikis have not been very well tested. + +use strict; +use MediaWiki::API; +use DateTime::Format::ISO8601; +use encoding 'utf8'; + +# use encoding 'utf8' doesn't change STDERROR +# but we're going to output UTF-8 filenames to STDERR +binmode STDERR, ":utf8"; + +use URI::Escape; +use warnings; + +# Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced +use constant SLASH_REPLACEMENT => "%2F"; + +# It's not always possible to delete pages (may require some +# priviledges). 
Deleted pages are replaced with this content. +use constant DELETED_CONTENT => "[[Category:Deleted]]\n"; + +# It's not possible to create empty pages. New empty files in Git are +# sent with this content instead. +use constant EMPTY_CONTENT => "<!-- empty page -->\n"; + +# used to reflect file creation or deletion in diff. +use constant NULL_SHA1 => "0000000000000000000000000000000000000000"; + +my $remotename = $ARGV[0]; +my $url = $ARGV[1]; + +# Accept both space-separated and multiple keys in config file. +# Spaces should be written as _ anyway because we'll use chomp. +my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages")); +chomp(@tracked_pages); + +# Just like @tracked_pages, but for MediaWiki categories. +my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories")); +chomp(@tracked_categories); + +my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin"); +# TODO: ideally, this should be able to read from keyboard, but we're +# inside a remote helper, so our stdin is connect to git, not to a +# terminal. +my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword"); +my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain"); +chomp($wiki_login); +chomp($wiki_passwd); +chomp($wiki_domain); + +# Import only last revisions (both for clone and fetch) +my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow"); +chomp($shallow_import); +$shallow_import = ($shallow_import eq "true"); + +# Dumb push: don't update notes and mediawiki ref to reflect the last push. +# +# Configurable with mediawiki.dumbPush, or per-remote with +# remote.<remotename>.dumbPush. +# +# This means the user will have to re-import the just-pushed +# revisions. On the other hand, this means that the Git revisions +# corresponding to MediaWiki revisions are all imported from the wiki, +# regardless of whether they were initially created in Git or from the +# web interface, hence all users will get the same history (i.e. if +# the push from Git to MediaWiki loses some information, everybody +# will get the history with information lost). If the import is +# deterministic, this means everybody gets the same sha1 for each +# MediaWiki revision. +my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush"); +unless ($dumb_push) { + $dumb_push = run_git("config --get --bool mediawiki.dumbPush"); +} +chomp($dumb_push); +$dumb_push = ($dumb_push eq "true"); + +my $wiki_name = $url; +$wiki_name =~ s/[^\/]*:\/\///; +# If URL is like http://user:password@example.com/, we clearly don't +# want the password in $wiki_name. 
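These two substitutions (this one and the `s/^.*@//` just below) reduce the remote URL to a bare host/path that is then used to build author e-mail addresses. The same transformation in Python, purely as an illustration with a made-up URL:

    import re

    url = "https://user:password@wiki.example.com/w"   # hypothetical remote URL
    wiki_name = re.sub(r'^[^/]*://', '', url)    # drop the scheme, as above
    wiki_name = re.sub(r'^.*@', '', wiki_name)   # drop credentials and '@'
    print wiki_name                              # -> wiki.example.com/w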
While we're there, also remove user +# and '@' sign, to avoid author like MWUser@HTTPUser@host.com +$wiki_name =~ s/^.*@//; + +# Commands parser +my $entry; +my @cmd; +while (<STDIN>) { + chomp; + @cmd = split(/ /); + if (defined($cmd[0])) { + # Line not blank + if ($cmd[0] eq "capabilities") { + die("Too many arguments for capabilities") unless (!defined($cmd[1])); + mw_capabilities(); + } elsif ($cmd[0] eq "list") { + die("Too many arguments for list") unless (!defined($cmd[2])); + mw_list($cmd[1]); + } elsif ($cmd[0] eq "import") { + die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2])); + mw_import($cmd[1]); + } elsif ($cmd[0] eq "option") { + die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3])); + mw_option($cmd[1],$cmd[2]); + } elsif ($cmd[0] eq "push") { + mw_push($cmd[1]); + } else { + print STDERR "Unknown command. Aborting...\n"; + last; + } + } else { + # blank line: we should terminate + last; + } + + BEGIN { $| = 1 } # flush STDOUT, to make sure the previous + # command is fully processed. +} + +########################## Functions ############################## + +# MediaWiki API instance, created lazily. +my $mediawiki; + +sub mw_connect_maybe { + if ($mediawiki) { + return; + } + $mediawiki = MediaWiki::API->new; + $mediawiki->{config}->{api_url} = "$url/api.php"; + if ($wiki_login) { + if (!$mediawiki->login({ + lgname => $wiki_login, + lgpassword => $wiki_passwd, + lgdomain => $wiki_domain, + })) { + print STDERR "Failed to log in mediawiki user \"$wiki_login\" on $url\n"; + print STDERR "(error " . + $mediawiki->{error}->{code} . ': ' . + $mediawiki->{error}->{details} . ")\n"; + exit 1; + } else { + print STDERR "Logged in with user \"$wiki_login\".\n"; + } + } +} + +sub get_mw_first_pages { + my $some_pages = shift; + my @some_pages = @{$some_pages}; + + my $pages = shift; + + # pattern 'page1|page2|...' required by the API + my $titles = join('|', @some_pages); + + my $mw_pages = $mediawiki->api({ + action => 'query', + titles => $titles, + }); + if (!defined($mw_pages)) { + print STDERR "fatal: could not query the list of wiki pages.\n"; + print STDERR "fatal: '$url' does not appear to be a mediawiki\n"; + print STDERR "fatal: make sure '$url/api.php' is a valid page.\n"; + exit 1; + } + while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) { + if ($id < 0) { + print STDERR "Warning: page $page->{title} not found on wiki\n"; + } else { + $pages->{$page->{title}} = $page; + } + } +} + +sub get_mw_pages { + mw_connect_maybe(); + + my %pages; # hash on page titles to avoid duplicates + my $user_defined; + if (@tracked_pages) { + $user_defined = 1; + # The user provided a list of pages titles, but we + # still need to query the API to get the page IDs. + + my @some_pages = @tracked_pages; + while (@some_pages) { + my $last = 50; + if ($#some_pages < $last) { + $last = $#some_pages; + } + my @slice = @some_pages[0..$last]; + get_mw_first_pages(\@slice, \%pages); + @some_pages = @some_pages[51..$#some_pages]; + } + } + if (@tracked_categories) { + $user_defined = 1; + foreach my $category (@tracked_categories) { + if (index($category, ':') < 0) { + # Mediawiki requires the Category + # prefix, but let's not force the user + # to specify it. + $category = "Category:" . $category; + } + my $mw_pages = $mediawiki->list( { + action => 'query', + list => 'categorymembers', + cmtitle => $category, + cmlimit => 'max' } ) + || die $mediawiki->{error}->{code} . ': ' . 
$mediawiki->{error}->{details}; + foreach my $page (@{$mw_pages}) { + $pages{$page->{title}} = $page; + } + } + } + if (!$user_defined) { + # No user-provided list, get the list of pages from + # the API. + my $mw_pages = $mediawiki->list({ + action => 'query', + list => 'allpages', + aplimit => 500, + }); + if (!defined($mw_pages)) { + print STDERR "fatal: could not get the list of wiki pages.\n"; + print STDERR "fatal: '$url' does not appear to be a mediawiki\n"; + print STDERR "fatal: make sure '$url/api.php' is a valid page.\n"; + exit 1; + } + foreach my $page (@{$mw_pages}) { + $pages{$page->{title}} = $page; + } + } + return values(%pages); +} + +sub run_git { + open(my $git, "-|:encoding(UTF-8)", "git " . $_[0]); + my $res = do { local $/; <$git> }; + close($git); + + return $res; +} + + +sub get_last_local_revision { + # Get note regarding last mediawiki revision + my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null"); + my @note_info = split(/ /, $note); + + my $lastrevision_number; + if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) { + print STDERR "No previous mediawiki revision found"; + $lastrevision_number = 0; + } else { + # Notes are formatted : mediawiki_revision: #number + $lastrevision_number = $note_info[1]; + chomp($lastrevision_number); + print STDERR "Last local mediawiki revision found is $lastrevision_number"; + } + return $lastrevision_number; +} + +# Remember the timestamp corresponding to a revision id. +my %basetimestamps; + +sub get_last_remote_revision { + mw_connect_maybe(); + + my @pages = get_mw_pages(); + + my $max_rev_num = 0; + + foreach my $page (@pages) { + my $id = $page->{pageid}; + + my $query = { + action => 'query', + prop => 'revisions', + rvprop => 'ids|timestamp', + pageids => $id, + }; + + my $result = $mediawiki->api($query); + + my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}}); + + $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp}; + + $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num); + } + + print STDERR "Last remote revision found is $max_rev_num.\n"; + return $max_rev_num; +} + +# Clean content before sending it to MediaWiki +sub mediawiki_clean { + my $string = shift; + my $page_created = shift; + # Mediawiki does not allow blank space at the end of a page and ends with a single \n. + # This function right trims a string and adds a \n at the end to follow this rule + $string =~ s/\s+$//; + if ($string eq "" && $page_created) { + # Creating empty pages is forbidden. + $string = EMPTY_CONTENT; + } + return $string."\n"; +} + +# Filter applied on MediaWiki data before adding them to Git +sub mediawiki_smudge { + my $string = shift; + if ($string eq EMPTY_CONTENT) { + $string = ""; + } + # This \n is important. This is due to mediawiki's way to handle end of files. + return $string."\n"; +} + +sub mediawiki_clean_filename { + my $filename = shift; + $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g; + # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded. + # Do a variant of URL-encoding, i.e. looks like URL-encoding, + # but with _ added to prevent MediaWiki from thinking this is + # an actual special character. 
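The `_%%_%x` escaping performed just below (and undone in mediawiki_smudge_filename) turns each character MediaWiki forbids into an underscore-prefixed hex code, so the filename survives the round trip between git and the wiki. A quick Python illustration of the same encode/decode pair (not the script's own code):

    import re

    def clean(name):    # git -> wiki direction
        return re.sub(r'[\[\]{}|]', lambda m: "_%%_%x" % ord(m.group(0)), name)

    def smudge(name):   # wiki -> git direction
        return re.sub(r'_%_([0-9a-fA-F]{2})',
                      lambda m: chr(int(m.group(1), 16)), name)

    print clean("Notes|2011.mw")                       # -> Notes_%_7c2011.mw
    assert smudge(clean("Notes|2011.mw")) == "Notes|2011.mw"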
+ $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge; + # If we use the uri escape before + # we should unescape here, before anything + + return $filename; +} + +sub mediawiki_smudge_filename { + my $filename = shift; + $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g; + $filename =~ s/ /_/g; + # Decode forbidden characters encoded in mediawiki_clean_filename + $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge; + return $filename; +} + +sub literal_data { + my ($content) = @_; + print STDOUT "data ", bytes::length($content), "\n", $content; +} + +sub mw_capabilities { + # Revisions are imported to the private namespace + # refs/mediawiki/$remotename/ by the helper and fetched into + # refs/remotes/$remotename later by fetch. + print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n"; + print STDOUT "import\n"; + print STDOUT "list\n"; + print STDOUT "push\n"; + print STDOUT "\n"; +} + +sub mw_list { + # MediaWiki do not have branches, we consider one branch arbitrarily + # called master, and HEAD pointing to it. + print STDOUT "? refs/heads/master\n"; + print STDOUT "\@refs/heads/master HEAD\n"; + print STDOUT "\n"; +} + +sub mw_option { + print STDERR "remote-helper command 'option $_[0]' not yet implemented\n"; + print STDOUT "unsupported\n"; +} + +sub fetch_mw_revisions_for_page { + my $page = shift; + my $id = shift; + my $fetch_from = shift; + my @page_revs = (); + my $query = { + action => 'query', + prop => 'revisions', + rvprop => 'ids', + rvdir => 'newer', + rvstartid => $fetch_from, + rvlimit => 500, + pageids => $id, + }; + + my $revnum = 0; + # Get 500 revisions at a time due to the mediawiki api limit + while (1) { + my $result = $mediawiki->api($query); + + # Parse each of those 500 revisions + foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) { + my $page_rev_ids; + $page_rev_ids->{pageid} = $page->{pageid}; + $page_rev_ids->{revid} = $revision->{revid}; + push(@page_revs, $page_rev_ids); + $revnum++; + } + last unless $result->{'query-continue'}; + $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid}; + } + if ($shallow_import && @page_revs) { + print STDERR " Found 1 revision (shallow import).\n"; + @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs); + return $page_revs[0]; + } + print STDERR " Found ", $revnum, " revision(s).\n"; + return @page_revs; +} + +sub fetch_mw_revisions { + my $pages = shift; my @pages = @{$pages}; + my $fetch_from = shift; + + my @revisions = (); + my $n = 1; + foreach my $page (@pages) { + my $id = $page->{pageid}; + + print STDERR "page $n/", scalar(@pages), ": ". 
$page->{title} ."\n"; + $n++; + my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from); + @revisions = (@page_revs, @revisions); + } + + return ($n, @revisions); +} + +sub import_file_revision { + my $commit = shift; + my %commit = %{$commit}; + my $full_import = shift; + my $n = shift; + + my $title = $commit{title}; + my $comment = $commit{comment}; + my $content = $commit{content}; + my $author = $commit{author}; + my $date = $commit{date}; + + print STDOUT "commit refs/mediawiki/$remotename/master\n"; + print STDOUT "mark :$n\n"; + print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n"; + literal_data($comment); + + # If it's not a clone, we need to know where to start from + if (!$full_import && $n == 1) { + print STDOUT "from refs/mediawiki/$remotename/master^0\n"; + } + if ($content ne DELETED_CONTENT) { + print STDOUT "M 644 inline $title.mw\n"; + literal_data($content); + print STDOUT "\n\n"; + } else { + print STDOUT "D $title.mw\n"; + } + + # mediawiki revision number in the git note + if ($full_import && $n == 1) { + print STDOUT "reset refs/notes/$remotename/mediawiki\n"; + } + print STDOUT "commit refs/notes/$remotename/mediawiki\n"; + print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n"; + literal_data("Note added by git-mediawiki during import"); + if (!$full_import && $n == 1) { + print STDOUT "from refs/notes/$remotename/mediawiki^0\n"; + } + print STDOUT "N inline :$n\n"; + literal_data("mediawiki_revision: " . $commit{mw_revision}); + print STDOUT "\n\n"; +} + +# parse a sequence of +# <cmd> <arg1> +# <cmd> <arg2> +# \n +# (like batch sequence of import and sequence of push statements) +sub get_more_refs { + my $cmd = shift; + my @refs; + while (1) { + my $line = <STDIN>; + if ($line =~ m/^$cmd (.*)$/) { + push(@refs, $1); + } elsif ($line eq "\n") { + return @refs; + } else { + die("Invalid command in a '$cmd' batch: ". $_); + } + } +} + +sub mw_import { + # multiple import commands can follow each other. + my @refs = (shift, get_more_refs("import")); + foreach my $ref (@refs) { + mw_import_ref($ref); + } + print STDOUT "done\n"; +} + +sub mw_import_ref { + my $ref = shift; + # The remote helper will call "import HEAD" and + # "import refs/heads/master". + # Since HEAD is a symbolic ref to master (by convention, + # followed by the output of the command "list" that we gave), + # we don't need to do anything in this case. 
+ if ($ref eq "HEAD") { + return; + } + + mw_connect_maybe(); + + my @pages = get_mw_pages(); + + print STDERR "Searching revisions...\n"; + my $last_local = get_last_local_revision(); + my $fetch_from = $last_local + 1; + if ($fetch_from == 1) { + print STDERR ", fetching from beginning.\n"; + } else { + print STDERR ", fetching from here.\n"; + } + my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from); + + # Creation of the fast-import stream + print STDERR "Fetching & writing export data...\n"; + + $n = 0; + my $last_timestamp = 0; # Placeholer in case $rev->timestamp is undefined + + foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) { + # fetch the content of the pages + my $query = { + action => 'query', + prop => 'revisions', + rvprop => 'content|timestamp|comment|user|ids', + revids => $pagerevid->{revid}, + }; + + my $result = $mediawiki->api($query); + + my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}}); + + $n++; + + my %commit; + $commit{author} = $rev->{user} || 'Anonymous'; + $commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*'; + $commit{title} = mediawiki_smudge_filename( + $result->{query}->{pages}->{$pagerevid->{pageid}}->{title} + ); + $commit{mw_revision} = $pagerevid->{revid}; + $commit{content} = mediawiki_smudge($rev->{'*'}); + + if (!defined($rev->{timestamp})) { + $last_timestamp++; + } else { + $last_timestamp = $rev->{timestamp}; + } + $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp); + + print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n"; + + import_file_revision(\%commit, ($fetch_from == 1), $n); + } + + if ($fetch_from == 1 && $n == 0) { + print STDERR "You appear to have cloned an empty MediaWiki.\n"; + # Something has to be done remote-helper side. If nothing is done, an error is + # thrown saying that HEAD is refering to unknown object 0000000000000000000 + # and the clone fails. + } +} + +sub error_non_fast_forward { + my $advice = run_git("config --bool advice.pushNonFastForward"); + chomp($advice); + if ($advice ne "false") { + # Native git-push would show this after the summary. + # We can't ask it to display it cleanly, so print it + # ourselves before. + print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n"; + print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n"; + print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n"; + } + print STDOUT "error $_[0] \"non-fast-forward\"\n"; + return 0; +} + +sub mw_push_file { + my $diff_info = shift; + # $diff_info contains a string in this format: + # 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status> + my @diff_info_split = split(/[ \t]/, $diff_info); + + # Filename, including .mw extension + my $complete_file_name = shift; + # Commit message + my $summary = shift; + # MediaWiki revision number. Keep the previous one by default, + # in case there's no edit to perform. + my $newrevid = shift; + + my $new_sha1 = $diff_info_split[3]; + my $old_sha1 = $diff_info_split[2]; + my $page_created = ($old_sha1 eq NULL_SHA1); + my $page_deleted = ($new_sha1 eq NULL_SHA1); + $complete_file_name = mediawiki_clean_filename($complete_file_name); + + if (substr($complete_file_name,-3) eq ".mw") { + my $title = substr($complete_file_name,0,-3); + + my $file_content; + if ($page_deleted) { + # Deleting a page usually requires + # special priviledges. 
+			# A common convention is to replace
+			# the page with this content instead:
+			$file_content = DELETED_CONTENT;
+		} else {
+			$file_content = run_git("cat-file blob $new_sha1");
+		}
+
+		mw_connect_maybe();
+
+		my $result = $mediawiki->edit( {
+			action => 'edit',
+			summary => $summary,
+			title => $title,
+			basetimestamp => $basetimestamps{$newrevid},
+			text => mediawiki_clean($file_content, $page_created),
+		}, {
+			skip_encoding => 1 # Helps with names with accented characters
+		});
+		if (!$result) {
+			if ($mediawiki->{error}->{code} == 3) {
+				# edit conflicts, considered as non-fast-forward
+				print STDERR 'Warning: Error ' .
+					$mediawiki->{error}->{code} .
+					' from mediawiki: ' . $mediawiki->{error}->{details} .
+					".\n";
+				return ($newrevid, "non-fast-forward");
+			} else {
+				# Other errors. Shouldn't happen => just die()
+				die 'Fatal: Error ' .
+					$mediawiki->{error}->{code} .
+					' from mediawiki: ' . $mediawiki->{error}->{details};
+			}
+		}
+		$newrevid = $result->{edit}->{newrevid};
+		print STDERR "Pushed file: $new_sha1 - $title\n";
+	} else {
+		print STDERR "$complete_file_name not a mediawiki file (not pushable on this version of git-remote-mediawiki).\n";
+	}
+	return ($newrevid, "ok");
+}
+
+sub mw_push {
+	# multiple push statements can follow each other
+	my @refsspecs = (shift, get_more_refs("push"));
+	my $pushed;
+	for my $refspec (@refsspecs) {
+		my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
+			or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
+		if ($force) {
+			print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
+		}
+		if ($local eq "") {
+			print STDERR "Cannot delete remote branch on a MediaWiki\n";
+			print STDOUT "error $remote cannot delete\n";
+			next;
+		}
+		if ($remote ne "refs/heads/master") {
+			print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
+			print STDOUT "error $remote only master allowed\n";
+			next;
+		}
+		if (mw_push_revision($local, $remote)) {
+			$pushed = 1;
+		}
+	}
+
+	# Notify Git that the push is done
+	print STDOUT "\n";
+
+	if ($pushed && $dumb_push) {
+		print STDERR "Just pushed some revisions to MediaWiki.\n";
+		print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
+		print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
+		print STDERR "\n";
+		print STDERR "  git pull --rebase\n";
+		print STDERR "\n";
+	}
+}
+
+sub mw_push_revision {
+	my $local = shift;
+	my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
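+	# Overview of the algorithm below:
+	#  1. Compare the last MediaWiki revision known locally with the last
+	#     revision on the wiki, and refuse non-fast-forward pushes.
+	#  2. Build the list of (child, commit) pairs to export, either by
+	#     walking "rev-list --children" from the remote-tracking commit
+	#     up to $local, or by exporting the whole --first-parent history
+	#     when the wiki holds no imported revision yet.
+	#  3. For each commit, push every touched *.mw blob with
+	#     mw_push_file(), then (unless this is a dumb push) record the
+	#     new wiki revision in a git note and advance
+	#     refs/mediawiki/$remotename/master.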
+	my $last_local_revid = get_last_local_revision();
+	print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
+	my $last_remote_revid = get_last_remote_revision();
+	my $mw_revision = $last_remote_revid;
+
+	# Get sha1 of commit pointed to by local HEAD
+	my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null");
+	chomp($HEAD_sha1);
+	# Get sha1 of commit pointed to by remotes/$remotename/master
+	my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
+	chomp($remoteorigin_sha1);
+
+	if ($last_local_revid > 0 &&
+	    $last_local_revid < $last_remote_revid) {
+		return error_non_fast_forward($remote);
+	}
+
+	if ($HEAD_sha1 eq $remoteorigin_sha1) {
+		# nothing to push
+		return 0;
+	}
+
+	# Get every commit in between HEAD and refs/remotes/origin/master,
+	# including HEAD and refs/remotes/origin/master
+	my @commit_pairs = ();
+	if ($last_local_revid > 0) {
+		my $parsed_sha1 = $remoteorigin_sha1;
+		# Find a path from last MediaWiki commit to pushed commit
+		while ($parsed_sha1 ne $HEAD_sha1) {
+			my @commit_info = grep(/^$parsed_sha1/, split(/\n/, run_git("rev-list --children $local")));
+			if (!@commit_info) {
+				return error_non_fast_forward($remote);
+			}
+			my @commit_info_split = split(/ |\n/, $commit_info[0]);
+			# $commit_info_split[1] is the sha1 of the commit to export
+			# $commit_info_split[0] is the sha1 of its direct child
+			push(@commit_pairs, \@commit_info_split);
+			$parsed_sha1 = $commit_info_split[1];
+		}
+	} else {
+		# No remote mediawiki revision. Export the whole
+		# history (linearized with --first-parent)
+		print STDERR "Warning: no common ancestor, pushing complete history\n";
+		my $history = run_git("rev-list --first-parent --children $local");
+		my @history = split('\n', $history);
+		@history = @history[1..$#history];
+		foreach my $line (reverse @history) {
+			my @commit_info_split = split(/ |\n/, $line);
+			push(@commit_pairs, \@commit_info_split);
+		}
+	}
+
+	foreach my $commit_info_split (@commit_pairs) {
+		my $sha1_child = @{$commit_info_split}[0];
+		my $sha1_commit = @{$commit_info_split}[1];
+		my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
+		# TODO: we could detect renames and encode them with a #redirect on the wiki.
+		# TODO: for now, it's just a delete+add
+		my @diff_info_list = split(/\0/, $diff_infos);
+		# Keep the first line of the commit message as the mediawiki comment for the revision
+		my $commit_msg = (split(/\n/, run_git("show --pretty=format:\"%s\" $sha1_commit")))[0];
+		chomp($commit_msg);
+		# Push every blob
+		while (@diff_info_list) {
+			my $status;
+			# git diff-tree -z gives an output like
+			# <metadata>\0<filename1>\0
+			# <metadata>\0<filename2>\0
+			# and we've split on \0.
+			my $info = shift(@diff_info_list);
+			my $file = shift(@diff_info_list);
+			($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
+			if ($status eq "non-fast-forward") {
+				# we may already have sent part of the
+				# commit to MediaWiki, but it's too
+				# late to cancel it. Stop the push in
+				# the middle, but still give an
+				# accurate error message.
+				return error_non_fast_forward($remote);
+			}
+			if ($status ne "ok") {
+				die("Unknown error from mw_push_file()");
+			}
+		}
+		unless ($dumb_push) {
+			run_git("notes --ref=$remotename/mediawiki add -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
+			run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
+		}
+	}
+
+	print STDOUT "ok $remote\n";
+	return 1;
+}
diff --git a/contrib/mw-to-git/git-remote-mediawiki.txt b/contrib/mw-to-git/git-remote-mediawiki.txt
new file mode 100644
index 0000000000..4d211f5b81
--- /dev/null
+++ b/contrib/mw-to-git/git-remote-mediawiki.txt
@@ -0,0 +1,7 @@
+Git-Mediawiki is a project which aims to create a gateway
+between git and mediawiki, allowing git users to push to and
+pull from a mediawiki just as they would with a classic git
+repository, thanks to remote helpers.
+
+For more information, visit the wiki at
+https://github.com/Bibzball/Git-Mediawiki/wiki
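A minimal usage sketch (the wiki URL is hypothetical; it assumes the helper
script is installed on the PATH as git-remote-mediawiki, so that git's
remote-helper mechanism resolves the "mediawiki::" prefix to it):

	git clone mediawiki::http://example.com/wiki wiki-mirror
	cd wiki-mirror
	# wiki pages show up as <Page_title>.mw files; edit as usual
	git commit -am "Update Some_Page"
	git push origin master
	git pull --rebase    # re-import pushed revisions when the helper asks for it

As implemented in mw_push() above, only pushes to the 'master' branch are
supported, branch deletion is refused, and a forced push only produces a
warning.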