Diffstat (limited to 'contrib')
22 files changed, 3188 insertions, 852 deletions
diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash index 893b7716ca..31f714da92 100755 --- a/contrib/completion/git-completion.bash +++ b/contrib/completion/git-completion.bash @@ -1,6 +1,6 @@ #!bash # -# bash completion support for core Git. +# bash/zsh completion support for core Git. # # Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org> # Conceptually based on gitcompletion (http://gitweb.hawaga.org.uk/). @@ -18,16 +18,12 @@ # To use these routines: # # 1) Copy this file to somewhere (e.g. ~/.git-completion.sh). -# 2) Added the following line to your .bashrc: -# source ~/.git-completion.sh -# -# Or, add the following lines to your .zshrc: -# autoload bashcompinit -# bashcompinit +# 2) Add the following line to your .bashrc/.zshrc: # source ~/.git-completion.sh # # 3) Consider changing your PS1 to also show the current branch: -# PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ ' +# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ ' +# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ ' # # The argument to __git_ps1 will be displayed only if you # are currently in a git repository. The %s token will be @@ -64,18 +60,10 @@ # per-repository basis by setting the bash.showUpstream config # variable. # -# -# To submit patches: -# -# *) Read Documentation/SubmittingPatches -# *) Send all patches to the current maintainer: -# -# "Shawn O. Pearce" <spearce@spearce.org> -# -# *) Always CC the Git mailing list: -# -# git@vger.kernel.org -# + +if [[ -n ${ZSH_VERSION-} ]]; then + autoload -U +X bashcompinit && bashcompinit +fi case "$COMP_WORDBREAKS" in *:*) : great ;; @@ -106,11 +94,13 @@ __gitdir () __git_ps1_show_upstream () { local key value - local svn_remote=() svn_url_pattern count n + local svn_remote svn_url_pattern count n local upstream=git legacy="" verbose="" + svn_remote=() # get some config options from git-config - while read key value; do + local output="$(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ')" + while read -r key value; do case "$key" in bash.showupstream) GIT_PS1_SHOWUPSTREAM="$value" @@ -125,7 +115,7 @@ __git_ps1_show_upstream () upstream=svn+git # default upstream is SVN if available, else git ;; esac - done < <(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ') + done <<< "$output" # parse configuration values for option in ${GIT_PS1_SHOWUPSTREAM}; do @@ -148,7 +138,7 @@ __git_ps1_show_upstream () svn_upstream=${svn_upstream[ ${#svn_upstream[@]} - 2 ]} svn_upstream=${svn_upstream%@*} local n_stop="${#svn_remote[@]}" - for ((n=1; n <= n_stop; ++n)); do + for ((n=1; n <= n_stop; n++)); do svn_upstream=${svn_upstream#${svn_remote[$n]}} done @@ -177,10 +167,8 @@ __git_ps1_show_upstream () for commit in $commits do case "$commit" in - "<"*) let ++behind - ;; - *) let ++ahead - ;; + "<"*) ((behind++)) ;; + *) ((ahead++)) ;; esac done count="$behind $ahead" @@ -246,6 +234,8 @@ __git_ps1 () fi elif [ -f "$g/MERGE_HEAD" ]; then r="|MERGING" + elif [ -f "$g/CHERRY_PICK_HEAD" ]; then + r="|CHERRY-PICKING" elif [ -f "$g/BISECT_LOG" ]; then r="|BISECTING" fi @@ -295,13 +285,13 @@ __git_ps1 () fi fi if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then - git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$" + git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$" fi if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ]; then - if [ -n "$(git ls-files --others --exclude-standard)" ]; then - u="%" - fi + if [ -n "$(git ls-files --others --exclude-standard)" 
]; then + u="%" + fi fi if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then @@ -310,7 +300,7 @@ __git_ps1 () fi local f="$w$i$s$u" - printf "${1:- (%s)}" "$c${b##refs/heads/}${f:+ $f}$r$p" + printf -- "${1:- (%s)}" "$c${b##refs/heads/}${f:+ $f}$r$p" fi } @@ -483,16 +473,18 @@ _get_comp_words_by_ref () fi fi -# __gitcomp accepts 1, 2, 3, or 4 arguments -# generates completion reply with compgen +# Generates completion reply with compgen, appending a space to possible +# completion words, if necessary. +# It accepts 1 to 4 arguments: +# 1: List of possible completion words. +# 2: A prefix to be added to each possible completion word (optional). +# 3: Generate possible completion matches for this word (optional). +# 4: A suffix to be appended to each possible completion word (optional). __gitcomp () { - local cur - _get_comp_words_by_ref -n =: cur - if [ $# -gt 2 ]; then - cur="$3" - fi - case "$cur" in + local cur_="${3-$cur}" + + case "$cur_" in --*=) COMPREPLY=() ;; @@ -500,47 +492,44 @@ __gitcomp () local IFS=$'\n' COMPREPLY=($(compgen -P "${2-}" \ -W "$(__gitcomp_1 "${1-}" "${4-}")" \ - -- "$cur")) + -- "$cur_")) ;; esac } -# __git_heads accepts 0 or 1 arguments (to pass to __gitdir) +# Generates completion reply with compgen from newline-separated possible +# completion words by appending a space to all of them. +# It accepts 1 to 4 arguments: +# 1: List of possible completion words, separated by a single newline. +# 2: A prefix to be added to each possible completion word (optional). +# 3: Generate possible completion matches for this word (optional). +# 4: A suffix to be appended to each possible completion word instead of +# the default space (optional). If specified but empty, nothing is +# appended. +__gitcomp_nl () +{ + local IFS=$'\n' + COMPREPLY=($(compgen -P "${2-}" -S "${4- }" -W "$1" -- "${3-$cur}")) +} + __git_heads () { - local cmd i is_hash=y dir="$(__gitdir "${1-}")" + local dir="$(__gitdir)" if [ -d "$dir" ]; then git --git-dir="$dir" for-each-ref --format='%(refname:short)' \ refs/heads return fi - for i in $(git ls-remote "${1-}" 2>/dev/null); do - case "$is_hash,$i" in - y,*) is_hash=n ;; - n,*^{}) is_hash=y ;; - n,refs/heads/*) is_hash=y; echo "${i#refs/heads/}" ;; - n,*) is_hash=y; echo "$i" ;; - esac - done } -# __git_tags accepts 0 or 1 arguments (to pass to __gitdir) __git_tags () { - local cmd i is_hash=y dir="$(__gitdir "${1-}")" + local dir="$(__gitdir)" if [ -d "$dir" ]; then git --git-dir="$dir" for-each-ref --format='%(refname:short)' \ refs/tags return fi - for i in $(git ls-remote "${1-}" 2>/dev/null); do - case "$is_hash,$i" in - y,*) is_hash=n ;; - n,*^{}) is_hash=y ;; - n,refs/tags/*) is_hash=y; echo "${i#refs/tags/}" ;; - n,*) is_hash=y; echo "$i" ;; - esac - done } # __git_refs accepts 0, 1 (to pass to __gitdir), or 2 arguments @@ -548,9 +537,8 @@ __git_tags () # by checkout for tracking branches __git_refs () { - local i is_hash=y dir="$(__gitdir "${1-}")" track="${2-}" - local cur format refs - _get_comp_words_by_ref -n =: cur + local i hash dir="$(__gitdir "${1-}")" track="${2-}" + local format refs if [ -d "$dir" ]; then case "$cur" in refs|refs/*) @@ -575,7 +563,7 @@ __git_refs () local ref entry git --git-dir="$dir" for-each-ref --shell --format="ref=%(refname:short)" \ "refs/remotes/" | \ - while read entry; do + while read -r entry; do eval "$entry" ref="${ref#*/}" if [[ "$ref" == "$cur"* ]]; then @@ -585,16 +573,27 @@ __git_refs () fi return fi - for i in $(git ls-remote "$dir" 2>/dev/null); do - case "$is_hash,$i" in - y,*) is_hash=n ;; - n,*^{}) 
is_hash=y ;; - n,refs/tags/*) is_hash=y; echo "${i#refs/tags/}" ;; - n,refs/heads/*) is_hash=y; echo "${i#refs/heads/}" ;; - n,refs/remotes/*) is_hash=y; echo "${i#refs/remotes/}" ;; - n,*) is_hash=y; echo "$i" ;; - esac - done + case "$cur" in + refs|refs/*) + git ls-remote "$dir" "$cur*" 2>/dev/null | \ + while read -r hash i; do + case "$i" in + *^{}) ;; + *) echo "$i" ;; + esac + done + ;; + *) + git ls-remote "$dir" HEAD ORIG_HEAD 'refs/tags/*' 'refs/heads/*' 'refs/remotes/*' 2>/dev/null | \ + while read -r hash i; do + case "$i" in + *^{}) ;; + refs/*) echo "${i#refs/*/}" ;; + *) echo "$i" ;; + esac + done + ;; + esac } # __git_refs2 requires 1 argument (to pass to __git_refs) @@ -609,30 +608,17 @@ __git_refs2 () # __git_refs_remotes requires 1 argument (to pass to ls-remote) __git_refs_remotes () { - local cmd i is_hash=y - for i in $(git ls-remote "$1" 2>/dev/null); do - case "$is_hash,$i" in - n,refs/heads/*) - is_hash=y - echo "$i:refs/remotes/$1/${i#refs/heads/}" - ;; - y,*) is_hash=n ;; - n,*^{}) is_hash=y ;; - n,refs/tags/*) is_hash=y;; - n,*) is_hash=y; ;; - esac + local i hash + git ls-remote "$1" 'refs/heads/*' 2>/dev/null | \ + while read -r hash i; do + echo "$i:refs/remotes/$1/${i#refs/heads/}" done } __git_remotes () { - local i ngoff IFS=$'\n' d="$(__gitdir)" - shopt -q nullglob || ngoff=1 - shopt -s nullglob - for i in "$d/remotes"/*; do - echo ${i#$d/remotes/} - done - [ "$ngoff" ] && shopt -u nullglob + local i IFS=$'\n' d="$(__gitdir)" + test -d "$d/remotes" && ls -1 "$d/remotes" for i in $(git --git-dir="$d" config --get-regexp 'remote\..*\.url' 2>/dev/null); do i="${i#remote.}" echo "${i/.url*/}" @@ -659,28 +645,31 @@ __git_merge_strategies= # is needed. __git_compute_merge_strategies () { - : ${__git_merge_strategies:=$(__git_list_merge_strategies)} + test -n "$__git_merge_strategies" || + __git_merge_strategies=$(__git_list_merge_strategies) } -__git_complete_file () +__git_complete_revlist_file () { - local pfx ls ref cur - _get_comp_words_by_ref -n =: cur - case "$cur" in + local pfx ls ref cur_="$cur" + case "$cur_" in + *..?*:*) + return + ;; ?*:*) - ref="${cur%%:*}" - cur="${cur#*:}" - case "$cur" in + ref="${cur_%%:*}" + cur_="${cur_#*:}" + case "$cur_" in ?*/*) - pfx="${cur%/*}" - cur="${cur##*/}" + pfx="${cur_%/*}" + cur_="${cur_##*/}" ls="$ref:$pfx" pfx="$pfx/" ;; *) ls="$ref" ;; - esac + esac case "$COMP_WORDBREAKS" in *:*) : great ;; @@ -703,41 +692,42 @@ __git_complete_file () s,$,/, } s/^.* //')" \ - -- "$cur")) - ;; - *) - __gitcomp "$(__git_refs)" + -- "$cur_")) ;; - esac -} - -__git_complete_revlist () -{ - local pfx cur - _get_comp_words_by_ref -n =: cur - case "$cur" in *...*) - pfx="${cur%...*}..." - cur="${cur#*...}" - __gitcomp "$(__git_refs)" "$pfx" "$cur" + pfx="${cur_%...*}..." + cur_="${cur_#*...}" + __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_" ;; *..*) - pfx="${cur%..*}.." - cur="${cur#*..}" - __gitcomp "$(__git_refs)" "$pfx" "$cur" + pfx="${cur_%..*}.." 
+ cur_="${cur_#*..}" + __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_" ;; *) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; esac } + +__git_complete_file () +{ + __git_complete_revlist_file +} + +__git_complete_revlist () +{ + __git_complete_revlist_file +} + __git_complete_remote_or_refspec () { - local cur words cword - _get_comp_words_by_ref -n =: cur words cword - local cmd="${words[1]}" + local cur_="$cur" cmd="${words[1]}" local i c=2 remote="" pfx="" lhs=1 no_complete_refspec=0 + if [ "$cmd" = "remote" ]; then + ((c++)) + fi while [ $c -lt $cword ]; do i="${words[c]}" case "$i" in @@ -755,10 +745,10 @@ __git_complete_remote_or_refspec () -*) ;; *) remote="$i"; break ;; esac - c=$((++c)) + ((c++)) done if [ -z "$remote" ]; then - __gitcomp "$(__git_remotes)" + __gitcomp_nl "$(__git_remotes)" return fi if [ $no_complete_refspec = 1 ]; then @@ -766,40 +756,40 @@ __git_complete_remote_or_refspec () return fi [ "$remote" = "." ] && remote= - case "$cur" in + case "$cur_" in *:*) case "$COMP_WORDBREAKS" in *:*) : great ;; - *) pfx="${cur%%:*}:" ;; + *) pfx="${cur_%%:*}:" ;; esac - cur="${cur#*:}" + cur_="${cur_#*:}" lhs=0 ;; +*) pfx="+" - cur="${cur#+}" + cur_="${cur_#+}" ;; esac case "$cmd" in fetch) if [ $lhs = 1 ]; then - __gitcomp "$(__git_refs2 "$remote")" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs2 "$remote")" "$pfx" "$cur_" else - __gitcomp "$(__git_refs)" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_" fi ;; - pull) + pull|remote) if [ $lhs = 1 ]; then - __gitcomp "$(__git_refs "$remote")" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_" else - __gitcomp "$(__git_refs)" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_" fi ;; push) if [ $lhs = 1 ]; then - __gitcomp "$(__git_refs)" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_" else - __gitcomp "$(__git_refs "$remote")" "$pfx" "$cur" + __gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_" fi ;; esac @@ -807,8 +797,6 @@ __git_complete_remote_or_refspec () __git_complete_strategy () { - local cur prev - _get_comp_words_by_ref -n =: cur prev __git_compute_merge_strategies case "$prev" in -s|--strategy) @@ -839,7 +827,8 @@ __git_list_all_commands () __git_all_commands= __git_compute_all_commands () { - : ${__git_all_commands:=$(__git_list_all_commands)} + test -n "$__git_all_commands" || + __git_all_commands=$(__git_list_all_commands) } __git_list_porcelain_commands () @@ -932,7 +921,8 @@ __git_porcelain_commands= __git_compute_porcelain_commands () { __git_compute_all_commands - : ${__git_porcelain_commands:=$(__git_list_porcelain_commands)} + test -n "$__git_porcelain_commands" || + __git_porcelain_commands=$(__git_list_porcelain_commands) } __git_pretty_aliases () @@ -986,8 +976,7 @@ __git_aliased_command () # __git_find_on_cmdline requires 1 argument __git_find_on_cmdline () { - local word subcommand c=1 words cword - _get_comp_words_by_ref -n =: words cword + local word subcommand c=1 while [ $c -lt $cword ]; do word="${words[c]}" for subcommand in $1; do @@ -996,19 +985,18 @@ __git_find_on_cmdline () return fi done - c=$((++c)) + ((c++)) done } __git_has_doubledash () { - local c=1 words cword - _get_comp_words_by_ref -n =: words cword + local c=1 while [ $c -lt $cword ]; do if [ "--" = "${words[c]}" ]; then return 0 fi - c=$((++c)) + ((c++)) done return 1 } @@ -1017,8 +1005,7 @@ __git_whitespacelist="nowarn warn error error-all fix" _git_am () { - local cur dir="$(__gitdir)" - _get_comp_words_by_ref -n =: cur + local dir="$(__gitdir)" if [ -d "$dir"/rebase-apply 
]; then __gitcomp "--skip --continue --resolved --abort" return @@ -1042,8 +1029,6 @@ _git_am () _git_apply () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --whitespace=*) __gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}" @@ -1066,8 +1051,6 @@ _git_add () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -1081,15 +1064,13 @@ _git_add () _git_archive () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --format=*) __gitcomp "$(git archive --list)" "" "${cur##--format=}" return ;; --remote=*) - __gitcomp "$(__git_remotes)" "" "${cur##--remote=}" + __gitcomp_nl "$(__git_remotes)" "" "${cur##--remote=}" return ;; --*) @@ -1120,7 +1101,7 @@ _git_bisect () case "$subcommand" in bad|good|reset|skip|start) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; *) COMPREPLY=() @@ -1130,16 +1111,15 @@ _git_bisect () _git_branch () { - local i c=1 only_local_ref="n" has_r="n" cur words cword + local i c=1 only_local_ref="n" has_r="n" - _get_comp_words_by_ref -n =: cur words cword while [ $c -lt $cword ]; do i="${words[c]}" case "$i" in -d|-m) only_local_ref="y" ;; -r) has_r="y" ;; esac - c=$((++c)) + ((c++)) done case "$cur" in @@ -1147,14 +1127,14 @@ _git_branch () __gitcomp " --color --no-color --verbose --abbrev= --no-abbrev --track --no-track --contains --merged --no-merged - --set-upstream + --set-upstream --edit-description --list " ;; *) if [ $only_local_ref = "y" -a $has_r = "n" ]; then - __gitcomp "$(__git_heads)" + __gitcomp_nl "$(__git_heads)" else - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" fi ;; esac @@ -1162,8 +1142,6 @@ _git_branch () _git_bundle () { - local words cword - _get_comp_words_by_ref -n =: words cword local cmd="${words[2]}" case "$cword" in 2) @@ -1186,8 +1164,6 @@ _git_checkout () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --conflict=*) __gitcomp "diff3 merge" "" "${cur##--conflict=}" @@ -1205,7 +1181,7 @@ _git_checkout () if [ -n "$(__git_find_on_cmdline "$flags")" ]; then track='' fi - __gitcomp "$(__git_refs '' $track)" + __gitcomp_nl "$(__git_refs '' $track)" ;; esac } @@ -1217,14 +1193,12 @@ _git_cherry () _git_cherry_pick () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--edit --no-commit" ;; *) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; esac } @@ -1233,8 +1207,6 @@ _git_clean () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--dry-run --quiet" @@ -1246,8 +1218,6 @@ _git_clean () _git_clone () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -1274,20 +1244,15 @@ _git_commit () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --cleanup=*) __gitcomp "default strip verbatim whitespace " "" "${cur##--cleanup=}" return ;; - --reuse-message=*) - __gitcomp "$(__git_refs)" "" "${cur##--reuse-message=}" - return - ;; - --reedit-message=*) - __gitcomp "$(__git_refs)" "" "${cur##--reedit-message=}" + --reuse-message=*|--reedit-message=*|\ + --fixup=*|--squash=*) + __gitcomp_nl "$(__git_refs)" "" "${cur#*=}" return ;; --untracked-files=*) @@ -1301,7 +1266,7 @@ _git_commit () --dry-run --reuse-message= --reedit-message= --reset-author --file= --message= --template= --cleanup= --untracked-files --untracked-files= - --verbose --quiet + --verbose --quiet --fixup= --squash= " return esac @@ -1310,8 +1275,6 @@ 
_git_commit () _git_describe () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -1320,7 +1283,7 @@ _git_describe () " return esac - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } __git_diff_common_options="--stat --numstat --shortstat --summary @@ -1343,8 +1306,6 @@ _git_diff () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--cached --staged --pickaxe-all --pickaxe-regex @@ -1354,19 +1315,17 @@ _git_diff () return ;; esac - __git_complete_file + __git_complete_revlist_file } __git_mergetools_common="diffuse ecmerge emerge kdiff3 meld opendiff - tkdiff vimdiff gvimdiff xxdiff araxis p4merge + tkdiff vimdiff gvimdiff xxdiff araxis p4merge bc3 " _git_difftool () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --tool=*) __gitcomp "$__git_mergetools_common kompare" "" "${cur##--tool=}" @@ -1391,8 +1350,6 @@ __git_fetch_options=" _git_fetch () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "$__git_fetch_options" @@ -1404,8 +1361,6 @@ _git_fetch () _git_format_patch () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --thread=*) __gitcomp " @@ -1437,8 +1392,6 @@ _git_format_patch () _git_fsck () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -1453,8 +1406,6 @@ _git_fsck () _git_gc () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--prune --aggressive" @@ -1469,19 +1420,22 @@ _git_gitk () _gitk } +__git_match_ctag() { + awk "/^${1////\\/}/ { print \$1 }" "$2" +} + _git_grep () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " --cached --text --ignore-case --word-regexp --invert-match - --full-name + --full-name --line-number --extended-regexp --basic-regexp --fixed-strings + --perl-regexp --files-with-matches --name-only --files-without-match --max-depth @@ -1492,13 +1446,20 @@ _git_grep () ;; esac - __gitcomp "$(__git_refs)" + case "$cword,$prev" in + 2,*|*,-*) + if test -r tags; then + __gitcomp_nl "$(__git_match_ctag "$cur" tags)" + return + fi + ;; + esac + + __gitcomp_nl "$(__git_refs)" } _git_help () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--all --info --man --web" @@ -1506,18 +1467,16 @@ _git_help () ;; esac __git_compute_all_commands - __gitcomp "$__git_all_commands + __gitcomp "$__git_all_commands $(__git_aliases) attributes cli core-tutorial cvs-migration diffcore gitk glossary hooks ignore modules - repository-layout tutorial tutorial-2 + namespaces repository-layout tutorial tutorial-2 workflows " } _git_init () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --shared=*) __gitcomp " @@ -1537,8 +1496,6 @@ _git_ls_files () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--cached --deleted --modified --others --ignored @@ -1556,7 +1513,7 @@ _git_ls_files () _git_ls_remote () { - __gitcomp "$(__git_remotes)" + __gitcomp_nl "$(__git_remotes)" } _git_ls_tree () @@ -1572,12 +1529,14 @@ __git_log_common_options=" --max-count= --max-age= --since= --after= --min-age= --until= --before= + --min-parents= --max-parents= + --no-min-parents --no-max-parents " # Options that go well for log and gitk (not shortlog) __git_log_gitk_options=" --dense --sparse --full-history --simplify-merges --simplify-by-decoration - --left-right + --left-right --notes 
--no-notes " # Options that go well for log and shortlog (not gitk) __git_log_shortlog_options=" @@ -1597,17 +1556,10 @@ _git_log () if [ -f "$g/MERGE_HEAD" ]; then merge="--merge" fi - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in - --pretty=*) + --pretty=*|--format=*) __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases) - " "" "${cur##--pretty=}" - return - ;; - --format=*) - __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases) - " "" "${cur##--format=}" + " "" "${cur#*=}" return ;; --date=*) @@ -1645,27 +1597,23 @@ _git_log () __git_merge_options=" --no-commit --no-stat --log --no-log --squash --strategy - --commit --stat --no-squash --ff --no-ff --ff-only + --commit --stat --no-squash --ff --no-ff --ff-only --edit --no-edit " _git_merge () { __git_complete_strategy && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "$__git_merge_options" return esac - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_mergetool () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --tool=*) __gitcomp "$__git_mergetools_common tortoisemerge" "" "${cur##--tool=}" @@ -1681,13 +1629,11 @@ _git_mergetool () _git_merge_base () { - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_mv () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--dry-run" @@ -1706,8 +1652,6 @@ _git_notes () { local subcommands='add append copy edit list prune remove show' local subcommand="$(__git_find_on_cmdline "$subcommands")" - local cur words cword - _get_comp_words_by_ref -n =: cur words cword case "$subcommand,$cur" in ,--*) @@ -1716,18 +1660,16 @@ _git_notes () ,*) case "${words[cword-1]}" in --ref) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; *) __gitcomp "$subcommands --ref" ;; esac ;; - add,--reuse-message=*|append,--reuse-message=*) - __gitcomp "$(__git_refs)" "" "${cur##--reuse-message=}" - ;; + add,--reuse-message=*|append,--reuse-message=*|\ add,--reedit-message=*|append,--reedit-message=*) - __gitcomp "$(__git_refs)" "" "${cur##--reedit-message=}" + __gitcomp_nl "$(__git_refs)" "" "${cur#*=}" ;; add,--*|append,--*) __gitcomp '--file= --message= --reedit-message= @@ -1746,7 +1688,7 @@ _git_notes () -m|-F) ;; *) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; esac ;; @@ -1757,8 +1699,6 @@ _git_pull () { __git_complete_strategy && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -1774,22 +1714,20 @@ _git_pull () _git_push () { - local cur prev - _get_comp_words_by_ref -n =: cur prev case "$prev" in --repo) - __gitcomp "$(__git_remotes)" + __gitcomp_nl "$(__git_remotes)" return esac case "$cur" in --repo=*) - __gitcomp "$(__git_remotes)" "" "${cur##--repo=}" + __gitcomp_nl "$(__git_remotes)" "" "${cur##--repo=}" return ;; --*) __gitcomp " --all --mirror --tags --dry-run --force --verbose - --receive-pack= --repo= + --receive-pack= --repo= --set-upstream " return ;; @@ -1800,8 +1738,6 @@ _git_push () _git_rebase () { local dir="$(__gitdir)" - local cur - _get_comp_words_by_ref -n =: cur if [ -d "$dir"/rebase-apply ] || [ -d "$dir"/rebase-merge ]; then __gitcomp "--continue --skip --abort" return @@ -1823,7 +1759,7 @@ _git_rebase () return esac - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_reflog () @@ -1834,7 +1770,7 @@ _git_reflog () if [ -z "$subcommand" ]; then __gitcomp "$subcommands" else - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" fi } @@ -1843,8 +1779,6 @@ 
__git_send_email_suppresscc_options="author self cc bodycc sob cccmd body all" _git_send_email () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --confirm=*) __gitcomp " @@ -1886,8 +1820,6 @@ _git_stage () __git_config_get_set_variables () { - local words cword - _get_comp_words_by_ref -n =: words cword local prevword word config_file= c=$cword while [ $c -gt 1 ]; do word="${words[c]}" @@ -1906,7 +1838,7 @@ __git_config_get_set_variables () done git --git-dir="$(__gitdir)" config $config_file --list 2>/dev/null | - while read line + while read -r line do case "$line" in *.*=*) @@ -1918,27 +1850,29 @@ __git_config_get_set_variables () _git_config () { - local cur prev - _get_comp_words_by_ref -n =: cur prev case "$prev" in branch.*.remote) - __gitcomp "$(__git_remotes)" + __gitcomp_nl "$(__git_remotes)" return ;; branch.*.merge) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" return ;; remote.*.fetch) local remote="${prev#remote.}" remote="${remote%.fetch}" - __gitcomp "$(__git_refs_remotes "$remote")" + if [ -z "$cur" ]; then + COMPREPLY=("refs/heads/") + return + fi + __gitcomp_nl "$(__git_refs_remotes "$remote")" return ;; remote.*.push) local remote="${prev#remote.}" remote="${remote%.push}" - __gitcomp "$(git --git-dir="$(__gitdir)" \ + __gitcomp_nl "$(git --git-dir="$(__gitdir)" \ for-each-ref --format='%(refname):%(refname)' \ refs/heads)" return @@ -1985,7 +1919,7 @@ _git_config () return ;; --get|--get-all|--unset|--unset-all) - __gitcomp "$(__git_config_get_set_variables)" + __gitcomp_nl "$(__git_config_get_set_variables)" return ;; *.*) @@ -2005,70 +1939,60 @@ _git_config () return ;; branch.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" - __gitcomp "remote merge mergeoptions rebase" "$pfx" "$cur" + local pfx="${cur%.*}." cur_="${cur##*.}" + __gitcomp "remote merge mergeoptions rebase" "$pfx" "$cur_" return ;; branch.*) - local pfx="${cur%.*}." - cur="${cur#*.}" - __gitcomp "$(__git_heads)" "$pfx" "$cur" "." + local pfx="${cur%.*}." cur_="${cur#*.}" + __gitcomp_nl "$(__git_heads)" "$pfx" "$cur_" "." return ;; guitool.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" + local pfx="${cur%.*}." cur_="${cur##*.}" __gitcomp " argprompt cmd confirm needsfile noconsole norescan prompt revprompt revunmerged title - " "$pfx" "$cur" + " "$pfx" "$cur_" return ;; difftool.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" - __gitcomp "cmd path" "$pfx" "$cur" + local pfx="${cur%.*}." cur_="${cur##*.}" + __gitcomp "cmd path" "$pfx" "$cur_" return ;; man.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" - __gitcomp "cmd path" "$pfx" "$cur" + local pfx="${cur%.*}." cur_="${cur##*.}" + __gitcomp "cmd path" "$pfx" "$cur_" return ;; mergetool.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" - __gitcomp "cmd path trustExitCode" "$pfx" "$cur" + local pfx="${cur%.*}." cur_="${cur##*.}" + __gitcomp "cmd path trustExitCode" "$pfx" "$cur_" return ;; pager.*) - local pfx="${cur%.*}." - cur="${cur#*.}" + local pfx="${cur%.*}." cur_="${cur#*.}" __git_compute_all_commands - __gitcomp "$__git_all_commands" "$pfx" "$cur" + __gitcomp_nl "$__git_all_commands" "$pfx" "$cur_" return ;; remote.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" + local pfx="${cur%.*}." cur_="${cur##*.}" __gitcomp " url proxy fetch push mirror skipDefaultUpdate receivepack uploadpack tagopt pushurl - " "$pfx" "$cur" + " "$pfx" "$cur_" return ;; remote.*) - local pfx="${cur%.*}." - cur="${cur#*.}" - __gitcomp "$(__git_remotes)" "$pfx" "$cur" "." + local pfx="${cur%.*}." 
cur_="${cur#*.}" + __gitcomp_nl "$(__git_remotes)" "$pfx" "$cur_" "." return ;; url.*.*) - local pfx="${cur%.*}." - cur="${cur##*.}" - __gitcomp "insteadOf pushInsteadOf" "$pfx" "$cur" + local pfx="${cur%.*}." cur_="${cur##*.}" + __gitcomp "insteadOf pushInsteadOf" "$pfx" "$cur_" return ;; esac @@ -2132,7 +2056,7 @@ _git_config () color.ui commit.status commit.template - core.abbrevguard + core.abbrev core.askpass core.attributesfile core.autocrlf @@ -2169,6 +2093,7 @@ _git_config () core.whitespace core.worktree diff.autorefreshindex + diff.statGraphWidth diff.external diff.ignoreSubmodules diff.mnemonicprefix @@ -2355,7 +2280,7 @@ _git_config () _git_remote () { - local subcommands="add rename rm show prune update set-head" + local subcommands="add rename rm set-head set-branches set-url show prune update" local subcommand="$(__git_find_on_cmdline "$subcommands")" if [ -z "$subcommand" ]; then __gitcomp "$subcommands" @@ -2363,8 +2288,11 @@ _git_remote () fi case "$subcommand" in - rename|rm|show|prune) - __gitcomp "$(__git_remotes)" + rename|rm|set-url|show|prune) + __gitcomp_nl "$(__git_remotes)" + ;; + set-head|set-branches) + __git_complete_remote_or_refspec ;; update) local i c='' IFS=$'\n' @@ -2382,43 +2310,37 @@ _git_remote () _git_replace () { - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_reset () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--merge --mixed --hard --soft --patch" return ;; esac - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_revert () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--edit --mainline --no-edit --no-commit --signoff" return ;; esac - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" } _git_rm () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--cached --dry-run --ignore-unmatch --quiet" @@ -2432,8 +2354,6 @@ _git_shortlog () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -2451,17 +2371,10 @@ _git_show () { __git_has_doubledash && return - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in - --pretty=*) + --pretty=*|--format=*) __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases) - " "" "${cur##--pretty=}" - return - ;; - --format=*) - __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases) - " "" "${cur##--format=}" + " "" "${cur#*=}" return ;; --*) @@ -2476,8 +2389,6 @@ _git_show () _git_show_branch () { - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -2494,8 +2405,6 @@ _git_show_branch () _git_stash () { - local cur - _get_comp_words_by_ref -n =: cur local save_opts='--keep-index --no-keep-index --quiet --patch' local subcommands='save list show apply clear drop pop create branch' local subcommand="$(__git_find_on_cmdline "$subcommands")" @@ -2524,7 +2433,7 @@ _git_stash () COMPREPLY=() ;; show,*|apply,*|drop,*|pop,*|branch,*) - __gitcomp "$(git --git-dir="$(__gitdir)" stash list \ + __gitcomp_nl "$(git --git-dir="$(__gitdir)" stash list \ | sed -n -e 's/:.*//p')" ;; *) @@ -2540,8 +2449,6 @@ _git_submodule () local subcommands="add status init update summary foreach sync" if [ -z "$(__git_find_on_cmdline "$subcommands")" ]; then - local cur - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp "--quiet --cached" @@ -2585,8 +2492,6 @@ _git_svn () --edit --rmdir --find-copies-harder --copy-similarity= " - local 
cur - _get_comp_words_by_ref -n =: cur case "$subcommand,$cur" in fetch,--*) __gitcomp "--revision= --fetch-all $fc_opts" @@ -2601,7 +2506,7 @@ _git_svn () __gitcomp " --merge --strategy= --verbose --dry-run --fetch-all --no-rebase --commit-url - --revision $cmt_opts $fc_opts + --revision --interactive $cmt_opts $fc_opts " ;; set-tree,--*) @@ -2658,20 +2563,18 @@ _git_svn () _git_tag () { local i c=1 f=0 - local words cword prev - _get_comp_words_by_ref -n =: words cword prev while [ $c -lt $cword ]; do i="${words[c]}" case "$i" in -d|-v) - __gitcomp "$(__git_tags)" + __gitcomp_nl "$(__git_tags)" return ;; -f) f=1 ;; esac - c=$((++c)) + ((c++)) done case "$prev" in @@ -2680,13 +2583,13 @@ _git_tag () ;; -*|tag) if [ $f = 1 ]; then - __gitcomp "$(__git_tags)" + __gitcomp_nl "$(__git_tags)" else COMPREPLY=() fi ;; *) - __gitcomp "$(__git_refs)" + __gitcomp_nl "$(__git_refs)" ;; esac } @@ -2703,10 +2606,18 @@ _git () if [[ -n ${ZSH_VERSION-} ]]; then emulate -L bash setopt KSH_TYPESET + + # workaround zsh's bug that leaves 'words' as a special + # variable in versions < 4.3.12 + typeset -h words + + # workaround zsh's bug that quotes spaces in the COMPREPLY + # array if IFS doesn't contain spaces. + typeset -h IFS fi - local cur words cword - _get_comp_words_by_ref -n =: cur words cword + local cur words cword prev + _get_comp_words_by_ref -n =: cur words cword prev while [ $c -lt $cword ]; do i="${words[c]}" case "$i" in @@ -2716,7 +2627,7 @@ _git () --help) command="help"; break ;; *) command="$i"; break ;; esac - c=$((++c)) + ((c++)) done if [ -z "$command" ]; then @@ -2730,6 +2641,7 @@ _git () --exec-path --html-path --work-tree= + --namespace= --help " ;; @@ -2754,17 +2666,26 @@ _gitk () if [[ -n ${ZSH_VERSION-} ]]; then emulate -L bash setopt KSH_TYPESET + + # workaround zsh's bug that leaves 'words' as a special + # variable in versions < 4.3.12 + typeset -h words + + # workaround zsh's bug that quotes spaces in the COMPREPLY + # array if IFS doesn't contain spaces. 
+ typeset -h IFS fi + local cur words cword prev + _get_comp_words_by_ref -n =: cur words cword prev + __git_has_doubledash && return - local cur local g="$(__gitdir)" local merge="" if [ -f "$g/MERGE_HEAD" ]; then merge="--merge" fi - _get_comp_words_by_ref -n =: cur case "$cur" in --*) __gitcomp " @@ -2791,29 +2712,3 @@ if [ Cygwin = "$(uname -o 2>/dev/null)" ]; then complete -o bashdefault -o default -o nospace -F _git git.exe 2>/dev/null \ || complete -o default -o nospace -F _git git.exe fi - -if [[ -n ${ZSH_VERSION-} ]]; then - shopt () { - local option - if [ $# -ne 2 ]; then - echo "USAGE: $0 (-q|-s|-u) <option>" >&2 - return 1 - fi - case "$2" in - nullglob) - option="$2" - ;; - *) - echo "$0: invalid option: $2" >&2 - return 1 - esac - case "$1" in - -q) setopt | grep -q "$option" ;; - -u) unsetopt "$option" ;; - -s) setopt "$option" ;; - *) - echo "$0: invalid flag: $1" >&2 - return 1 - esac - } -fi diff --git a/contrib/convert-objects/git-convert-objects.txt b/contrib/convert-objects/git-convert-objects.txt index 9718abf86d..0565d83fc4 100644 --- a/contrib/convert-objects/git-convert-objects.txt +++ b/contrib/convert-objects/git-convert-objects.txt @@ -8,6 +8,7 @@ git-convert-objects - Converts old-style git repository SYNOPSIS -------- +[verse] 'git-convert-objects' DESCRIPTION diff --git a/contrib/credential/osxkeychain/.gitignore b/contrib/credential/osxkeychain/.gitignore new file mode 100644 index 0000000000..6c5b7026c5 --- /dev/null +++ b/contrib/credential/osxkeychain/.gitignore @@ -0,0 +1 @@ +git-credential-osxkeychain diff --git a/contrib/credential/osxkeychain/Makefile b/contrib/credential/osxkeychain/Makefile new file mode 100644 index 0000000000..4b3a08a2ba --- /dev/null +++ b/contrib/credential/osxkeychain/Makefile @@ -0,0 +1,17 @@ +all:: git-credential-osxkeychain + +CC = gcc +RM = rm -f +CFLAGS = -g -O2 -Wall + +-include ../../../config.mak.autogen +-include ../../../config.mak + +git-credential-osxkeychain: git-credential-osxkeychain.o + $(CC) $(CFLAGS) -o $@ $< $(LDFLAGS) -Wl,-framework -Wl,Security + +git-credential-osxkeychain.o: git-credential-osxkeychain.c + $(CC) -c $(CFLAGS) $< + +clean: + $(RM) git-credential-osxkeychain git-credential-osxkeychain.o diff --git a/contrib/credential/osxkeychain/git-credential-osxkeychain.c b/contrib/credential/osxkeychain/git-credential-osxkeychain.c new file mode 100644 index 0000000000..6beed123ab --- /dev/null +++ b/contrib/credential/osxkeychain/git-credential-osxkeychain.c @@ -0,0 +1,173 @@ +#include <stdio.h> +#include <string.h> +#include <stdlib.h> +#include <Security/Security.h> + +static SecProtocolType protocol; +static char *host; +static char *path; +static char *username; +static char *password; +static UInt16 port; + +static void die(const char *err, ...) +{ + char msg[4096]; + va_list params; + va_start(params, err); + vsnprintf(msg, sizeof(msg), err, params); + fprintf(stderr, "%s\n", msg); + va_end(params); + exit(1); +} + +static void *xstrdup(const char *s1) +{ + void *ret = strdup(s1); + if (!ret) + die("Out of memory"); + return ret; +} + +#define KEYCHAIN_ITEM(x) (x ? 
strlen(x) : 0), x +#define KEYCHAIN_ARGS \ + NULL, /* default keychain */ \ + KEYCHAIN_ITEM(host), \ + 0, NULL, /* account domain */ \ + KEYCHAIN_ITEM(username), \ + KEYCHAIN_ITEM(path), \ + port, \ + protocol, \ + kSecAuthenticationTypeDefault + +static void write_item(const char *what, const char *buf, int len) +{ + printf("%s=", what); + fwrite(buf, 1, len, stdout); + putchar('\n'); +} + +static void find_username_in_item(SecKeychainItemRef item) +{ + SecKeychainAttributeList list; + SecKeychainAttribute attr; + + list.count = 1; + list.attr = &attr; + attr.tag = kSecAccountItemAttr; + + if (SecKeychainItemCopyContent(item, NULL, &list, NULL, NULL)) + return; + + write_item("username", attr.data, attr.length); + SecKeychainItemFreeContent(&list, NULL); +} + +static void find_internet_password(void) +{ + void *buf; + UInt32 len; + SecKeychainItemRef item; + + if (SecKeychainFindInternetPassword(KEYCHAIN_ARGS, &len, &buf, &item)) + return; + + write_item("password", buf, len); + if (!username) + find_username_in_item(item); + + SecKeychainItemFreeContent(NULL, buf); +} + +static void delete_internet_password(void) +{ + SecKeychainItemRef item; + + /* + * Require at least a protocol and host for removal, which is what git + * will give us; if you want to do something more fancy, use the + * Keychain manager. + */ + if (!protocol || !host) + return; + + if (SecKeychainFindInternetPassword(KEYCHAIN_ARGS, 0, NULL, &item)) + return; + + SecKeychainItemDelete(item); +} + +static void add_internet_password(void) +{ + /* Only store complete credentials */ + if (!protocol || !host || !username || !password) + return; + + if (SecKeychainAddInternetPassword( + KEYCHAIN_ARGS, + KEYCHAIN_ITEM(password), + NULL)) + return; +} + +static void read_credential(void) +{ + char buf[1024]; + + while (fgets(buf, sizeof(buf), stdin)) { + char *v; + + if (!strcmp(buf, "\n")) + break; + buf[strlen(buf)-1] = '\0'; + + v = strchr(buf, '='); + if (!v) + die("bad input: %s", buf); + *v++ = '\0'; + + if (!strcmp(buf, "protocol")) { + if (!strcmp(v, "https")) + protocol = kSecProtocolTypeHTTPS; + else if (!strcmp(v, "http")) + protocol = kSecProtocolTypeHTTP; + else /* we don't yet handle other protocols */ + exit(0); + } + else if (!strcmp(buf, "host")) { + char *colon = strchr(v, ':'); + if (colon) { + *colon++ = '\0'; + port = atoi(colon); + } + host = xstrdup(v); + } + else if (!strcmp(buf, "path")) + path = xstrdup(v); + else if (!strcmp(buf, "username")) + username = xstrdup(v); + else if (!strcmp(buf, "password")) + password = xstrdup(v); + } +} + +int main(int argc, const char **argv) +{ + const char *usage = + "Usage: git credential-osxkeychain <get|store|erase>"; + + if (!argv[1]) + die(usage); + + read_credential(); + + if (!strcmp(argv[1], "get")) + find_internet_password(); + else if (!strcmp(argv[1], "store")) + add_internet_password(); + else if (!strcmp(argv[1], "erase")) + delete_internet_password(); + /* otherwise, ignore unknown action */ + + return 0; +} diff --git a/contrib/diff-highlight/README b/contrib/diff-highlight/README new file mode 100644 index 0000000000..502e03b305 --- /dev/null +++ b/contrib/diff-highlight/README @@ -0,0 +1,152 @@ +diff-highlight +============== + +Line oriented diffs are great for reviewing code, because for most +hunks, you want to see the old and the new segments of code next to each +other. Sometimes, though, when an old line and a new line are very +similar, it's hard to immediately see the difference. 
+ +You can use "--color-words" to highlight only the changed portions of +lines. However, this can often be hard to read for code, as it loses +the line structure, and you end up with oddly formatted bits. + +Instead, this script post-processes the line-oriented diff, finds pairs +of lines, and highlights the differing segments. It's currently very +simple and stupid about doing these tasks. In particular: + + 1. It will only highlight hunks in which the number of removed and + added lines is the same, and it will pair lines within the hunk by + position (so the first removed line is compared to the first added + line, and so forth). This is simple and tends to work well in + practice. More complex changes don't highlight well, so we tend to + exclude them due to the "same number of removed and added lines" + restriction. Or even if we do try to highlight them, they end up + not highlighting because of our "don't highlight if the whole line + would be highlighted" rule. + + 2. It will find the common prefix and suffix of two lines, and + consider everything in the middle to be "different". It could + instead do a real diff of the characters between the two lines and + find common subsequences. However, the point of the highlight is to + call attention to a certain area. Even if some small subset of the + highlighted area actually didn't change, that's OK. In practice it + ends up being more readable to just have a single blob on the line + showing the interesting bit. + +The goal of the script is therefore not to be exact about highlighting +changes, but to call attention to areas of interest without being +visually distracting. Non-diff lines and existing diff coloration is +preserved; the intent is that the output should look exactly the same as +the input, except for the occasional highlight. + +Use +--- + +You can try out the diff-highlight program with: + +--------------------------------------------- +git log -p --color | /path/to/diff-highlight +--------------------------------------------- + +If you want to use it all the time, drop it in your $PATH and put the +following in your git configuration: + +--------------------------------------------- +[pager] + log = diff-highlight | less + show = diff-highlight | less + diff = diff-highlight | less +--------------------------------------------- + +Bugs +---- + +Because diff-highlight relies on heuristics to guess which parts of +changes are important, there are some cases where the highlighting is +more distracting than useful. Fortunately, these cases are rare in +practice, and when they do occur, the worst case is simply a little +extra highlighting. This section documents some cases known to be +sub-optimal, in case somebody feels like working on improving the +heuristics. + +1. Two changes on the same line get highlighted in a blob. 
For example, + highlighting: + +---------------------------------------------- +-foo(buf, size); ++foo(obj->buf, obj->size); +---------------------------------------------- + + yields (where the inside of "+{}" would be highlighted): + +---------------------------------------------- +-foo(buf, size); ++foo(+{obj->buf, obj->}size); +---------------------------------------------- + + whereas a more semantically meaningful output would be: + +---------------------------------------------- +-foo(buf, size); ++foo(+{obj->}buf, +{obj->}size); +---------------------------------------------- + + Note that doing this right would probably involve a set of + content-specific boundary patterns, similar to word-diff. Otherwise + you get junk like: + +----------------------------------------------------- +-this line has some -{i}nt-{ere}sti-{ng} text on it ++this line has some +{fa}nt+{a}sti+{c} text on it +----------------------------------------------------- + + which is less readable than the current output. + +2. The multi-line matching assumes that lines in the pre- and post-image + match by position. This is often the case, but can be fooled when a + line is removed from the top and a new one added at the bottom (or + vice versa). Unless the lines in the middle are also changed, diffs + will show this as two hunks, and it will not get highlighted at all + (which is good). But if the lines in the middle are changed, the + highlighting can be misleading. Here's a pathological case: + +----------------------------------------------------- +-one +-two +-three +-four ++two 2 ++three 3 ++four 4 ++five 5 +----------------------------------------------------- + + which gets highlighted as: + +----------------------------------------------------- +-one +-t-{wo} +-three +-f-{our} ++two 2 ++t+{hree 3} ++four 4 ++f+{ive 5} +----------------------------------------------------- + + because it matches "two" to "three 3", and so forth. It would be + nicer as: + +----------------------------------------------------- +-one +-two +-three +-four ++two +{2} ++three +{3} ++four +{4} ++five 5 +----------------------------------------------------- + + which would probably involve pre-matching the lines into pairs + according to some heuristic. diff --git a/contrib/diff-highlight/diff-highlight b/contrib/diff-highlight/diff-highlight new file mode 100755 index 0000000000..c4404d49c9 --- /dev/null +++ b/contrib/diff-highlight/diff-highlight @@ -0,0 +1,173 @@ +#!/usr/bin/perl + +use warnings FATAL => 'all'; +use strict; + +# Highlight by reversing foreground and background. You could do +# other things like bold or underline if you prefer. +my $HIGHLIGHT = "\x1b[7m"; +my $UNHIGHLIGHT = "\x1b[27m"; +my $COLOR = qr/\x1b\[[0-9;]*m/; +my $BORING = qr/$COLOR|\s/; + +my @removed; +my @added; +my $in_hunk; + +while (<>) { + if (!$in_hunk) { + print; + $in_hunk = /^$COLOR*\@/; + } + elsif (/^$COLOR*-/) { + push @removed, $_; + } + elsif (/^$COLOR*\+/) { + push @added, $_; + } + else { + show_hunk(\@removed, \@added); + @removed = (); + @added = (); + + print; + $in_hunk = /^$COLOR*[\@ ]/; + } + + # Most of the time there is enough output to keep things streaming, + # but for something like "git log -Sfoo", you can get one early + # commit and then many seconds of nothing. We want to show + # that one commit as soon as possible. + # + # Since we can receive arbitrary input, there's no optimal + # place to flush. Flushing on a blank line is a heuristic that + # happens to match git-log output. 
+ if (!length) { + local $| = 1; + } +} + +# Flush any queued hunk (this can happen when there is no trailing context in +# the final diff of the input). +show_hunk(\@removed, \@added); + +exit 0; + +sub show_hunk { + my ($a, $b) = @_; + + # If one side is empty, then there is nothing to compare or highlight. + if (!@$a || !@$b) { + print @$a, @$b; + return; + } + + # If we have mismatched numbers of lines on each side, we could try to + # be clever and match up similar lines. But for now we are simple and + # stupid, and only handle multi-line hunks that remove and add the same + # number of lines. + if (@$a != @$b) { + print @$a, @$b; + return; + } + + my @queue; + for (my $i = 0; $i < @$a; $i++) { + my ($rm, $add) = highlight_pair($a->[$i], $b->[$i]); + print $rm; + push @queue, $add; + } + print @queue; +} + +sub highlight_pair { + my @a = split_line(shift); + my @b = split_line(shift); + + # Find common prefix, taking care to skip any ansi + # color codes. + my $seen_plusminus; + my ($pa, $pb) = (0, 0); + while ($pa < @a && $pb < @b) { + if ($a[$pa] =~ /$COLOR/) { + $pa++; + } + elsif ($b[$pb] =~ /$COLOR/) { + $pb++; + } + elsif ($a[$pa] eq $b[$pb]) { + $pa++; + $pb++; + } + elsif (!$seen_plusminus && $a[$pa] eq '-' && $b[$pb] eq '+') { + $seen_plusminus = 1; + $pa++; + $pb++; + } + else { + last; + } + } + + # Find common suffix, ignoring colors. + my ($sa, $sb) = ($#a, $#b); + while ($sa >= $pa && $sb >= $pb) { + if ($a[$sa] =~ /$COLOR/) { + $sa--; + } + elsif ($b[$sb] =~ /$COLOR/) { + $sb--; + } + elsif ($a[$sa] eq $b[$sb]) { + $sa--; + $sb--; + } + else { + last; + } + } + + if (is_pair_interesting(\@a, $pa, $sa, \@b, $pb, $sb)) { + return highlight_line(\@a, $pa, $sa), + highlight_line(\@b, $pb, $sb); + } + else { + return join('', @a), + join('', @b); + } +} + +sub split_line { + local $_ = shift; + return map { /$COLOR/ ? $_ : (split //) } + split /($COLOR*)/; +} + +sub highlight_line { + my ($line, $prefix, $suffix) = @_; + + return join('', + @{$line}[0..($prefix-1)], + $HIGHLIGHT, + @{$line}[$prefix..$suffix], + $UNHIGHLIGHT, + @{$line}[($suffix+1)..$#$line] + ); +} + +# Pairs are interesting to highlight only if we are going to end up +# highlighting a subset (i.e., not the whole line). Otherwise, the highlighting +# is just useless noise. We can detect this by finding either a matching prefix +# or suffix (disregarding boring bits like whitespace and colorization). +sub is_pair_interesting { + my ($a, $pa, $sa, $b, $pb, $sb) = @_; + my $prefix_a = join('', @$a[0..($pa-1)]); + my $prefix_b = join('', @$b[0..($pb-1)]); + my $suffix_a = join('', @$a[($sa+1)..$#$a]); + my $suffix_b = join('', @$b[($sb+1)..$#$b]); + + return $prefix_a !~ /^$COLOR*-$BORING*$/ || + $prefix_b !~ /^$COLOR*\+$BORING*$/ || + $suffix_a !~ /^$BORING*$/ || + $suffix_b !~ /^$BORING*$/; +} diff --git a/contrib/diffall/README b/contrib/diffall/README new file mode 100644 index 0000000000..507f17dcd6 --- /dev/null +++ b/contrib/diffall/README @@ -0,0 +1,31 @@ +The git-diffall script provides a directory based diff mechanism +for git. + +To determine what diff viewer is used, the script requires either +the 'diff.tool' or 'merge.tool' configuration option to be set. + +This script is compatible with most common forms used to specify a +range of revisions to diff: + + 1. git diffall: shows diff between working tree and staged changes + 2. git diffall --cached [<commit>]: shows diff between staged + changes and HEAD (or other named commit) + 3. 
git diffall <commit>: shows diff between working tree and named + commit + 4. git diffall <commit> <commit>: show diff between two named commits + 5. git diffall <commit>..<commit>: same as above + 6. git diffall <commit>...<commit>: show the changes on the branch + containing and up to the second, starting at a common ancestor + of both <commit> + +Note: all forms take an optional path limiter [-- <path>*] + +The '--extcmd=<command>' option allows the user to specify a custom +command for viewing diffs. When given, configured defaults are +ignored and the script runs $command $LOCAL $REMOTE. Additionally, +$BASE is set in the environment. + +This script is based on an example provided by Thomas Rast on the +Git list [1]: + +[1] http://thread.gmane.org/gmane.comp.version-control.git/124807 diff --git a/contrib/diffall/git-diffall b/contrib/diffall/git-diffall new file mode 100755 index 0000000000..84f2b654d7 --- /dev/null +++ b/contrib/diffall/git-diffall @@ -0,0 +1,257 @@ +#!/bin/sh +# Copyright 2010 - 2012, Tim Henigan <tim.henigan@gmail.com> +# +# Perform a directory diff between commits in the repository using +# the external diff or merge tool specified in the user's config. + +USAGE='[--cached] [--copy-back] [-x|--extcmd=<command>] <commit>{0,2} [-- <path>*] + + --cached Compare to the index rather than the working tree. + + --copy-back Copy files back to the working tree when the diff + tool exits (in case they were modified by the + user). This option is only valid if the diff + compared with the working tree. + + -x=<command> + --extcmd=<command> Specify a custom command for viewing diffs. + git-diffall ignores the configured defaults and + runs $command $LOCAL $REMOTE when this option is + specified. Additionally, $BASE is set in the + environment. +' + +SUBDIRECTORY_OK=1 +. "$(git --exec-path)/git-sh-setup" + +TOOL_MODE=diff +. "$(git --exec-path)/git-mergetool--lib" + +merge_tool="$(get_merge_tool)" +if test -z "$merge_tool" +then + echo "Error: Either the 'diff.tool' or 'merge.tool' option must be set." + usage +fi + +start_dir=$(pwd) + +# All the file paths returned by the diff command are relative to the root +# of the working copy. So if the script is called from a subdirectory, it +# must switch to the root of working copy before trying to use those paths. 
+cdup=$(git rev-parse --show-cdup) && +cd "$cdup" || { + echo >&2 "Cannot chdir to $cdup, the toplevel of the working tree" + exit 1 +} + +# set up temp dir +tmp=$(perl -e 'use File::Temp qw(tempdir); + $t=tempdir("/tmp/git-diffall.XXXXX") or exit(1); + print $t') || exit 1 +trap 'rm -rf "$tmp"' EXIT + +left= +right= +paths= +dashdash_seen= +compare_staged= +merge_base= +left_dir= +right_dir= +diff_tool= +copy_back= + +while test $# != 0 +do + case "$1" in + -h|--h|--he|--hel|--help) + usage + ;; + --cached) + compare_staged=1 + ;; + --copy-back) + copy_back=1 + ;; + -x|--e|--ex|--ext|--extc|--extcm|--extcmd) + if test $# = 1 + then + echo You must specify the tool for use with --extcmd + usage + else + diff_tool=$2 + shift + fi + ;; + --) + dashdash_seen=1 + ;; + -*) + echo Invalid option: "$1" + usage + ;; + *) + # could be commit, commit range or path limiter + case "$1" in + *...*) + left=${1%...*} + right=${1#*...} + merge_base=1 + ;; + *..*) + left=${1%..*} + right=${1#*..} + ;; + *) + if test -n "$dashdash_seen" + then + paths="$paths$1 " + elif test -z "$left" + then + left=$1 + elif test -z "$right" + then + right=$1 + else + paths="$paths$1 " + fi + ;; + esac + ;; + esac + shift +done + +# Determine the set of files which changed +if test -n "$left" && test -n "$right" +then + left_dir="cmt-$(git rev-parse --short $left)" + right_dir="cmt-$(git rev-parse --short $right)" + + if test -n "$compare_staged" + then + usage + elif test -n "$merge_base" + then + git diff --name-only "$left"..."$right" -- $paths >"$tmp/filelist" + else + git diff --name-only "$left" "$right" -- $paths >"$tmp/filelist" + fi +elif test -n "$left" +then + left_dir="cmt-$(git rev-parse --short $left)" + + if test -n "$compare_staged" + then + right_dir="staged" + git diff --name-only --cached "$left" -- $paths >"$tmp/filelist" + else + right_dir="working_tree" + git diff --name-only "$left" -- $paths >"$tmp/filelist" + fi +else + left_dir="HEAD" + + if test -n "$compare_staged" + then + right_dir="staged" + git diff --name-only --cached -- $paths >"$tmp/filelist" + else + right_dir="working_tree" + git diff --name-only -- $paths >"$tmp/filelist" + fi +fi + +# Exit immediately if there are no diffs +if test ! -s "$tmp/filelist" +then + exit 0 +fi + +if test -n "$copy_back" && test "$right_dir" != "working_tree" +then + echo "--copy-back is only valid when diff includes the working tree." 
+ exit 1 +fi + +# Create the named tmp directories that will hold the files to be compared +mkdir -p "$tmp/$left_dir" "$tmp/$right_dir" + +# Populate the tmp/right_dir directory with the files to be compared +while read name +do + if test -n "$right" + then + ls_list=$(git ls-tree $right "$name") + if test -n "$ls_list" + then + mkdir -p "$tmp/$right_dir/$(dirname "$name")" + git show "$right":"$name" >"$tmp/$right_dir/$name" || true + fi + elif test -n "$compare_staged" + then + ls_list=$(git ls-files -- "$name") + if test -n "$ls_list" + then + mkdir -p "$tmp/$right_dir/$(dirname "$name")" + git show :"$name" >"$tmp/$right_dir/$name" + fi + else + if test -e "$name" + then + mkdir -p "$tmp/$right_dir/$(dirname "$name")" + cp "$name" "$tmp/$right_dir/$name" + fi + fi +done < "$tmp/filelist" + +# Populate the tmp/left_dir directory with the files to be compared +while read name +do + if test -n "$left" + then + ls_list=$(git ls-tree $left "$name") + if test -n "$ls_list" + then + mkdir -p "$tmp/$left_dir/$(dirname "$name")" + git show "$left":"$name" >"$tmp/$left_dir/$name" || true + fi + else + if test -n "$compare_staged" + then + ls_list=$(git ls-tree HEAD "$name") + if test -n "$ls_list" + then + mkdir -p "$tmp/$left_dir/$(dirname "$name")" + git show HEAD:"$name" >"$tmp/$left_dir/$name" + fi + else + mkdir -p "$tmp/$left_dir/$(dirname "$name")" + git show :"$name" >"$tmp/$left_dir/$name" + fi + fi +done < "$tmp/filelist" + +LOCAL="$tmp/$left_dir" +REMOTE="$tmp/$right_dir" + +if test -n "$diff_tool" +then + export BASE + eval $diff_tool '"$LOCAL"' '"$REMOTE"' +else + run_merge_tool "$merge_tool" false +fi + +# Copy files back to the working dir, if requested +if test -n "$copy_back" && test "$right_dir" = "working_tree" +then + cd "$start_dir" + git_top_dir=$(git rev-parse --show-toplevel) + find "$tmp/$right_dir" -type f | + while read file + do + cp "$file" "$git_top_dir/${file#$tmp/$right_dir/}" + done +fi diff --git a/contrib/emacs/git.el b/contrib/emacs/git.el index 214930a021..65c95d9d5a 100644 --- a/contrib/emacs/git.el +++ b/contrib/emacs/git.el @@ -1310,6 +1310,13 @@ The FILES list must be sorted." (when sign-off (git-append-sign-off committer-name committer-email))) buffer)) +(define-derived-mode git-log-edit-mode log-edit-mode "Git-Log-Edit" + "Major mode for editing git log messages. + +Set up git-specific `font-lock-keywords' for `log-edit-mode'." + (set (make-local-variable 'font-lock-defaults) + '(git-log-edit-font-lock-keywords t t))) + (defun git-commit-file () "Commit the marked file(s), asking for a commit message." (interactive) @@ -1335,9 +1342,9 @@ The FILES list must be sorted." (git-setup-log-buffer buffer (git-get-merge-heads) author-name author-email subject date)) (if (boundp 'log-edit-diff-function) (log-edit 'git-do-commit nil '((log-edit-listfun . git-log-edit-files) - (log-edit-diff-function . git-log-edit-diff)) buffer) - (log-edit 'git-do-commit nil 'git-log-edit-files buffer)) - (setq font-lock-keywords (font-lock-compile-keywords git-log-edit-font-lock-keywords)) + (log-edit-diff-function . 
git-log-edit-diff)) buffer 'git-log-edit-mode) + (log-edit 'git-do-commit nil 'git-log-edit-files buffer + 'git-log-edit-mode)) (setq paragraph-separate (concat (regexp-quote git-log-msg-separator) "$\\|Author: \\|Date: \\|Merge: \\|Signed-off-by: \\|\f\\|[ ]*$")) (setq buffer-file-coding-system coding-system) (re-search-forward (regexp-quote (concat git-log-msg-separator "\n")) nil t)))) diff --git a/contrib/fast-import/git-p4 b/contrib/fast-import/git-p4 index 04ce7e3b02..c5362c4c11 100755 --- a/contrib/fast-import/git-p4 +++ b/contrib/fast-import/git-p4 @@ -10,7 +10,7 @@ import optparse, sys, os, marshal, subprocess, shelve import tempfile, getopt, os.path, time, platform -import re +import re, shutil verbose = False @@ -22,37 +22,41 @@ def p4_build_cmd(cmd): location. It means that hooking into the environment, or other configuration can be done more easily. """ - real_cmd = "%s " % "p4" + real_cmd = ["p4"] user = gitConfig("git-p4.user") if len(user) > 0: - real_cmd += "-u %s " % user + real_cmd += ["-u",user] password = gitConfig("git-p4.password") if len(password) > 0: - real_cmd += "-P %s " % password + real_cmd += ["-P", password] port = gitConfig("git-p4.port") if len(port) > 0: - real_cmd += "-p %s " % port + real_cmd += ["-p", port] host = gitConfig("git-p4.host") if len(host) > 0: - real_cmd += "-h %s " % host + real_cmd += ["-H", host] client = gitConfig("git-p4.client") if len(client) > 0: - real_cmd += "-c %s " % client + real_cmd += ["-c", client] - real_cmd += "%s" % (cmd) - if verbose: - print real_cmd + + if isinstance(cmd,basestring): + real_cmd = ' '.join(real_cmd) + ' ' + cmd + else: + real_cmd += cmd return real_cmd def chdir(dir): - if os.name == 'nt': - os.environ['PWD']=dir + # P4 uses the PWD environment variable rather than getcwd(). Since we're + # not using the shell, we have to set it ourselves. This path could + # be relative, so go there first, then figure out where we ended up. 
os.chdir(dir) + os.environ['PWD'] = os.getcwd() def die(msg): if verbose: @@ -61,29 +65,34 @@ def die(msg): sys.stderr.write(msg + "\n") sys.exit(1) -def write_pipe(c, str): +def write_pipe(c, stdin): if verbose: - sys.stderr.write('Writing pipe: %s\n' % c) + sys.stderr.write('Writing pipe: %s\n' % str(c)) - pipe = os.popen(c, 'w') - val = pipe.write(str) - if pipe.close(): - die('Command failed: %s' % c) + expand = isinstance(c,basestring) + p = subprocess.Popen(c, stdin=subprocess.PIPE, shell=expand) + pipe = p.stdin + val = pipe.write(stdin) + pipe.close() + if p.wait(): + die('Command failed: %s' % str(c)) return val -def p4_write_pipe(c, str): +def p4_write_pipe(c, stdin): real_cmd = p4_build_cmd(c) - return write_pipe(real_cmd, str) + return write_pipe(real_cmd, stdin) def read_pipe(c, ignore_error=False): if verbose: - sys.stderr.write('Reading pipe: %s\n' % c) + sys.stderr.write('Reading pipe: %s\n' % str(c)) - pipe = os.popen(c, 'rb') + expand = isinstance(c,basestring) + p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand) + pipe = p.stdout val = pipe.read() - if pipe.close() and not ignore_error: - die('Command failed: %s' % c) + if p.wait() and not ignore_error: + die('Command failed: %s' % str(c)) return val @@ -93,12 +102,14 @@ def p4_read_pipe(c, ignore_error=False): def read_pipe_lines(c): if verbose: - sys.stderr.write('Reading pipe: %s\n' % c) - ## todo: check return status - pipe = os.popen(c, 'rb') + sys.stderr.write('Reading pipe: %s\n' % str(c)) + + expand = isinstance(c, basestring) + p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand) + pipe = p.stdout val = pipe.readlines() - if pipe.close(): - die('Command failed: %s' % c) + if pipe.close() or p.wait(): + die('Command failed: %s' % str(c)) return val @@ -108,23 +119,114 @@ def p4_read_pipe_lines(c): return read_pipe_lines(real_cmd) def system(cmd): + expand = isinstance(cmd,basestring) if verbose: - sys.stderr.write("executing %s\n" % cmd) - if os.system(cmd) != 0: - die("command failed: %s" % cmd) + sys.stderr.write("executing %s\n" % str(cmd)) + subprocess.check_call(cmd, shell=expand) def p4_system(cmd): """Specifically invoke p4 as the system command. """ real_cmd = p4_build_cmd(cmd) - return system(real_cmd) + expand = isinstance(real_cmd, basestring) + subprocess.check_call(real_cmd, shell=expand) + +def p4_integrate(src, dest): + p4_system(["integrate", "-Dt", src, dest]) + +def p4_sync(path): + p4_system(["sync", path]) + +def p4_add(f): + p4_system(["add", f]) + +def p4_delete(f): + p4_system(["delete", f]) -def isP4Exec(kind): - """Determine if a Perforce 'kind' should have execute permission +def p4_edit(f): + p4_system(["edit", f]) - 'p4 help filetypes' gives a list of the types. If it starts with 'x', - or x follows one of a few letters. Otherwise, if there is an 'x' after - a plus sign, it is also executable""" - return (re.search(r"(^[cku]?x)|\+.*x", kind) != None) +def p4_revert(f): + p4_system(["revert", f]) + +def p4_reopen(type, file): + p4_system(["reopen", "-t", type, file]) + +# +# Canonicalize the p4 type and return a tuple of the +# base type, plus any modifiers. See "p4 help filetypes" +# for a list and explanation. 
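# (Illustrative aside, not part of the patch: the helper defined just below
#  behaves roughly like
#      split_p4_type("xtext")   -> ("text", "x")    # via the historical map
#      split_p4_type("text+ko") -> ("text", "ko")
#      split_p4_type("binary")  -> ("binary", "")
#  i.e. historical one-word types are first mapped to the modern
#  "base+mods" spelling and then split on "+".)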
+# +def split_p4_type(p4type): + + p4_filetypes_historical = { + "ctempobj": "binary+Sw", + "ctext": "text+C", + "cxtext": "text+Cx", + "ktext": "text+k", + "kxtext": "text+kx", + "ltext": "text+F", + "tempobj": "binary+FSw", + "ubinary": "binary+F", + "uresource": "resource+F", + "uxbinary": "binary+Fx", + "xbinary": "binary+x", + "xltext": "text+Fx", + "xtempobj": "binary+Swx", + "xtext": "text+x", + "xunicode": "unicode+x", + "xutf16": "utf16+x", + } + if p4type in p4_filetypes_historical: + p4type = p4_filetypes_historical[p4type] + mods = "" + s = p4type.split("+") + base = s[0] + mods = "" + if len(s) > 1: + mods = s[1] + return (base, mods) + +# +# return the raw p4 type of a file (text, text+ko, etc) +# +def p4_type(file): + results = p4CmdList(["fstat", "-T", "headType", file]) + return results[0]['headType'] + +# +# Given a type base and modifier, return a regexp matching +# the keywords that can be expanded in the file +# +def p4_keywords_regexp_for_type(base, type_mods): + if base in ("text", "unicode", "binary"): + kwords = None + if "ko" in type_mods: + kwords = 'Id|Header' + elif "k" in type_mods: + kwords = 'Id|Header|Author|Date|DateTime|Change|File|Revision' + else: + return None + pattern = r""" + \$ # Starts with a dollar, followed by... + (%s) # one of the keywords, followed by... + (:[^$]+)? # possibly an old expansion, followed by... + \$ # another dollar + """ % kwords + return pattern + else: + return None + +# +# Given a file, return a regexp matching the possible +# RCS keywords that will be expanded, or None for files +# with kw expansion turned off. +# +def p4_keywords_regexp_for_file(file): + if not os.path.exists(file): + return None + else: + (type_base, type_mods) = split_p4_type(p4_type(file)) + return p4_keywords_regexp_for_type(type_base, type_mods) def setP4ExecBit(file, mode): # Reopens an already open file and changes the execute bit to match @@ -139,12 +241,12 @@ def setP4ExecBit(file, mode): if p4Type[-1] == "+": p4Type = p4Type[0:-1] - p4_system("reopen -t %s %s" % (p4Type, file)) + p4_reopen(p4Type, file) def getP4OpenedType(file): # Returns the perforce file type for the given file. 
- result = p4_read_pipe("opened %s" % file) + result = p4_read_pipe(["opened", file]) match = re.match(".*\((.+)\)\r?$", result) if match: return match.group(1) @@ -200,9 +302,17 @@ def isModeExecChanged(src_mode, dst_mode): return isModeExec(src_mode) != isModeExec(dst_mode) def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None): - cmd = p4_build_cmd("-G %s" % (cmd)) + + if isinstance(cmd,basestring): + cmd = "-G " + cmd + expand = True + else: + cmd = ["-G"] + cmd + expand = False + + cmd = p4_build_cmd(cmd) if verbose: - sys.stderr.write("Opening pipe: %s\n" % cmd) + sys.stderr.write("Opening pipe: %s\n" % str(cmd)) # Use a temporary file to avoid deadlocks without # subprocess.communicate(), which would put another copy @@ -210,11 +320,16 @@ def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None): stdin_file = None if stdin is not None: stdin_file = tempfile.TemporaryFile(prefix='p4-stdin', mode=stdin_mode) - stdin_file.write(stdin) + if isinstance(stdin,basestring): + stdin_file.write(stdin) + else: + for i in stdin: + stdin_file.write(i + '\n') stdin_file.flush() stdin_file.seek(0) - p4 = subprocess.Popen(cmd, shell=True, + p4 = subprocess.Popen(cmd, + shell=expand, stdin=stdin_file, stdout=subprocess.PIPE) @@ -222,10 +337,10 @@ def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None): try: while True: entry = marshal.load(p4.stdout) - if cb is not None: - cb(entry) - else: - result.append(entry) + if cb is not None: + cb(entry) + else: + result.append(entry) except EOFError: pass exitCode = p4.wait() @@ -247,7 +362,7 @@ def p4Where(depotPath): if not depotPath.endswith("/"): depotPath += "/" depotPath = depotPath + "..." - outputList = p4CmdList("where %s" % depotPath) + outputList = p4CmdList(["where", depotPath]) output = None for entry in outputList: if "depotFile" in entry: @@ -288,6 +403,11 @@ def isValidGitDir(path): def parseRevision(ref): return read_pipe("git rev-parse %s" % ref).strip() +def branchExists(ref): + rev = read_pipe(["git", "rev-parse", "-q", "--verify", ref], + ignore_error=True) + return len(rev) > 0 + def extractLogMessageFromGitCommit(commit): logMessage = "" @@ -333,9 +453,18 @@ def gitBranchExists(branch): return proc.wait() == 0; _gitConfig = {} -def gitConfig(key): +def gitConfig(key, args = None): # set args to "--bool", for instance + if not _gitConfig.has_key(key): + argsFilter = "" + if args != None: + argsFilter = "%s " % args + cmd = "git config %s%s" % (argsFilter, key) + _gitConfig[key] = read_pipe(cmd, ignore_error=True).strip() + return _gitConfig[key] + +def gitConfigList(key): if not _gitConfig.has_key(key): - _gitConfig[key] = read_pipe("git config %s" % key, ignore_error=True).strip() + _gitConfig[key] = read_pipe("git config --get-all %s" % key, ignore_error=True).strip().split(os.linesep) return _gitConfig[key] def p4BranchesInGit(branchesAreInRemotes = True): @@ -440,23 +569,139 @@ def originP4BranchesExist(): def p4ChangesForPaths(depotPaths, changeRange): assert depotPaths - output = p4_read_pipe_lines("changes " + ' '.join (["%s...%s" % (p, changeRange) - for p in depotPaths])) + cmd = ['changes'] + for p in depotPaths: + cmd += ["%s...%s" % (p, changeRange)] + output = p4_read_pipe_lines(cmd) changes = {} for line in output: - changeNum = int(line.split(" ")[1]) - changes[changeNum] = True + changeNum = int(line.split(" ")[1]) + changes[changeNum] = True changelist = changes.keys() changelist.sort() return changelist +def p4PathStartsWith(path, prefix): + # This method tries to remedy a potential mixed-case issue: + # + # If 
UserA adds //depot/DirA/file1 + # and UserB adds //depot/dira/file2 + # + # we may or may not have a problem. If you have core.ignorecase=true, + # we treat DirA and dira as the same directory + ignorecase = gitConfig("core.ignorecase", "--bool") == "true" + if ignorecase: + return path.lower().startswith(prefix.lower()) + return path.startswith(prefix) + +def getClientSpec(): + """Look at the p4 client spec, create a View() object that contains + all the mappings, and return it.""" + + specList = p4CmdList("client -o") + if len(specList) != 1: + die('Output from "client -o" is %d lines, expecting 1' % + len(specList)) + + # dictionary of all client parameters + entry = specList[0] + + # just the keys that start with "View" + view_keys = [ k for k in entry.keys() if k.startswith("View") ] + + # hold this new View + view = View() + + # append the lines, in order, to the view + for view_num in range(len(view_keys)): + k = "View%d" % view_num + if k not in view_keys: + die("Expected view key %s missing" % k) + view.append(entry[k]) + + return view + +def getClientRoot(): + """Grab the client directory.""" + + output = p4CmdList("client -o") + if len(output) != 1: + die('Output from "client -o" is %d lines, expecting 1' % len(output)) + + entry = output[0] + if "Root" not in entry: + die('Client has no "Root"') + + return entry["Root"] + class Command: def __init__(self): self.usage = "usage: %prog [options]" self.needsGit = True +class P4UserMap: + def __init__(self): + self.userMapFromPerforceServer = False + self.myP4UserId = None + + def p4UserId(self): + if self.myP4UserId: + return self.myP4UserId + + results = p4CmdList("user -o") + for r in results: + if r.has_key('User'): + self.myP4UserId = r['User'] + return r['User'] + die("Could not find your p4 user id") + + def p4UserIsMe(self, p4User): + # return True if the given p4 user is actually me + me = self.p4UserId() + if not p4User or p4User != me: + return False + else: + return True + + def getUserCacheFilename(self): + home = os.environ.get("HOME", os.environ.get("USERPROFILE")) + return home + "/.gitp4-usercache.txt" + + def getUserMapFromPerforceServer(self): + if self.userMapFromPerforceServer: + return + self.users = {} + self.emails = {} + + for output in p4CmdList("users"): + if not output.has_key("User"): + continue + self.users[output["User"]] = output["FullName"] + " <" + output["Email"] + ">" + self.emails[output["Email"]] = output["User"] + + + s = '' + for (key, val) in self.users.items(): + s += "%s\t%s\n" % (key.expandtabs(1), val.expandtabs(1)) + + open(self.getUserCacheFilename(), "wb").write(s) + self.userMapFromPerforceServer = True + + def loadUserMapFromCache(self): + self.users = {} + self.userMapFromPerforceServer = False + try: + cache = open(self.getUserCacheFilename(), "rb") + lines = cache.readlines() + cache.close() + for line in lines: + entry = line.strip().split("\t") + self.users[entry[0]] = entry[1] + except IOError: + self.getUserMapFromPerforceServer() + class P4Debug(Command): def __init__(self): Command.__init__(self) @@ -470,7 +715,7 @@ class P4Debug(Command): def run(self, args): j = 0 - for output in p4CmdList(" ".join(args)): + for output in p4CmdList(args): print 'Element: %d' % j j += 1 print output @@ -537,20 +782,24 @@ class P4RollBack(Command): return True -class P4Submit(Command): +class P4Submit(Command, P4UserMap): def __init__(self): Command.__init__(self) + P4UserMap.__init__(self) self.options = [ optparse.make_option("--verbose", dest="verbose", action="store_true"), 
optparse.make_option("--origin", dest="origin"), - optparse.make_option("-M", dest="detectRename", action="store_true"), + optparse.make_option("-M", dest="detectRenames", action="store_true"), + # preserve the user, requires relevant p4 permissions + optparse.make_option("--preserve-user", dest="preserveUser", action="store_true"), ] self.description = "Submit changes from git to the perforce depot." self.usage += " [name of git branch to submit into perforce depot]" self.interactive = True self.origin = "" - self.detectRename = False + self.detectRenames = False self.verbose = False + self.preserveUser = gitConfig("git-p4.preserveUser").lower() == "true" self.isWindows = (platform.system() == "Windows") def check(self): @@ -570,7 +819,7 @@ class P4Submit(Command): continue if inDescriptionSection: - if line.startswith("Files:"): + if line.startswith("Files:") or line.startswith("Jobs:"): inDescriptionSection = False else: continue @@ -585,11 +834,108 @@ class P4Submit(Command): return result + def patchRCSKeywords(self, file, pattern): + # Attempt to zap the RCS keywords in a p4 controlled file matching the given pattern + (handle, outFileName) = tempfile.mkstemp(dir='.') + try: + outFile = os.fdopen(handle, "w+") + inFile = open(file, "r") + regexp = re.compile(pattern, re.VERBOSE) + for line in inFile.readlines(): + line = regexp.sub(r'$\1$', line) + outFile.write(line) + inFile.close() + outFile.close() + # Forcibly overwrite the original file + os.unlink(file) + shutil.move(outFileName, file) + except: + # cleanup our temporary file + os.unlink(outFileName) + print "Failed to strip RCS keywords in %s" % file + raise + + print "Patched up RCS keywords in %s" % file + + def p4UserForCommit(self,id): + # Return the tuple (perforce user,git email) for a given git commit id + self.getUserMapFromPerforceServer() + gitEmail = read_pipe("git log --max-count=1 --format='%%ae' %s" % id) + gitEmail = gitEmail.strip() + if not self.emails.has_key(gitEmail): + return (None,gitEmail) + else: + return (self.emails[gitEmail],gitEmail) + + def checkValidP4Users(self,commits): + # check if any git authors cannot be mapped to p4 users + for id in commits: + (user,email) = self.p4UserForCommit(id) + if not user: + msg = "Cannot find p4 user for email %s in commit %s." % (email, id) + if gitConfig('git-p4.allowMissingP4Users').lower() == "true": + print "%s" % msg + else: + die("Error: %s\nSet git-p4.allowMissingP4Users to true to allow this." % msg) + + def lastP4Changelist(self): + # Get back the last changelist number submitted in this client spec. This + # then gets used to patch up the username in the change. If the same + # client spec is being used by multiple processes then this might go + # wrong. + results = p4CmdList("client -o") # find the current client + client = None + for r in results: + if r.has_key('Client'): + client = r['Client'] + break + if not client: + die("could not get client spec") + results = p4CmdList(["changes", "-c", client, "-m", "1"]) + for r in results: + if r.has_key('change'): + return r['change'] + die("Could not get changelist number for last submit - cannot patch up user details") + + def modifyChangelistUser(self, changelist, newUser): + # fixup the user field of a changelist after it has been submitted. 
+ changes = p4CmdList("change -o %s" % changelist) + if len(changes) != 1: + die("Bad output from p4 change modifying %s to user %s" % + (changelist, newUser)) + + c = changes[0] + if c['User'] == newUser: return # nothing to do + c['User'] = newUser + input = marshal.dumps(c) + + result = p4CmdList("change -f -i", stdin=input) + for r in result: + if r.has_key('code'): + if r['code'] == 'error': + die("Could not modify user field of changelist %s to %s:%s" % (changelist, newUser, r['data'])) + if r.has_key('data'): + print("Updated user field for changelist %s to %s" % (changelist, newUser)) + return + die("Could not modify user field of changelist %s to %s" % (changelist, newUser)) + + def canChangeChangelists(self): + # check to see if we have p4 admin or super-user permissions, either of + # which are required to modify changelists. + results = p4CmdList(["protects", self.depotPath]) + for r in results: + if r.has_key('perm'): + if r['perm'] == 'admin': + return 1 + if r['perm'] == 'super': + return 1 + return 0 + def prepareSubmitTemplate(self): # remove lines in the Files section that show changes to files outside the depot path we're committing into template = "" inFilesSection = False - for line in p4_read_pipe_lines("change -o"): + for line in p4_read_pipe_lines(['change', '-o']): if line.endswith("\r\n"): line = line[:-2] + "\n" if inFilesSection: @@ -599,7 +945,7 @@ class P4Submit(Command): lastTab = path.rfind("\t") if lastTab != -1: path = path[:lastTab] - if not path.startswith(self.depotPath): + if not p4PathStartsWith(path, self.depotPath): continue else: inFilesSection = False @@ -611,20 +957,78 @@ class P4Submit(Command): return template + def edit_template(self, template_file): + """Invoke the editor to let the user change the submission + message. Return true if okay to continue with the submit.""" + + # if configured to skip the editing part, just submit + if gitConfig("git-p4.skipSubmitEdit") == "true": + return True + + # look at the modification time, to check later if the user saved + # the file + mtime = os.stat(template_file).st_mtime + + # invoke the editor + if os.environ.has_key("P4EDITOR"): + editor = os.environ.get("P4EDITOR") + else: + editor = read_pipe("git var GIT_EDITOR").strip() + system(editor + " " + template_file) + + # If the file was not saved, prompt to see if this patch should + # be skipped. But skip this verification step if configured so. + if gitConfig("git-p4.skipSubmitEditCheck") == "true": + return True + + # modification time updated means user saved the file + if os.stat(template_file).st_mtime > mtime: + return True + + while True: + response = raw_input("Submit template unchanged. Submit anyway? 
[y]es, [n]o (skip this patch) ") + if response == 'y': + return True + if response == 'n': + return False + def applyCommit(self, id): print "Applying %s" % (read_pipe("git log --max-count=1 --pretty=oneline %s" % id)) - diffOpts = ("", "-M")[self.detectRename] + + (p4User, gitEmail) = self.p4UserForCommit(id) + + if not self.detectRenames: + # If not explicitly set check the config variable + self.detectRenames = gitConfig("git-p4.detectRenames") + + if self.detectRenames.lower() == "false" or self.detectRenames == "": + diffOpts = "" + elif self.detectRenames.lower() == "true": + diffOpts = "-M" + else: + diffOpts = "-M%s" % self.detectRenames + + detectCopies = gitConfig("git-p4.detectCopies") + if detectCopies.lower() == "true": + diffOpts += " -C" + elif detectCopies != "" and detectCopies.lower() != "false": + diffOpts += " -C%s" % detectCopies + + if gitConfig("git-p4.detectCopiesHarder", "--bool") == "true": + diffOpts += " --find-copies-harder" + diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (diffOpts, id, id)) filesToAdd = set() filesToDelete = set() editedFiles = set() filesToChangeExecBit = {} + for line in diff: diff = parseDiffTreeEntry(line) modifier = diff['status'] path = diff['src'] if modifier == "M": - p4_system("edit \"%s\"" % path) + p4_edit(path) if isModeExecChanged(diff['src_mode'], diff['dst_mode']): filesToChangeExecBit[path] = diff['dst_mode'] editedFiles.add(path) @@ -637,11 +1041,23 @@ class P4Submit(Command): filesToDelete.add(path) if path in filesToAdd: filesToAdd.remove(path) + elif modifier == "C": + src, dest = diff['src'], diff['dst'] + p4_integrate(src, dest) + if diff['src_sha1'] != diff['dst_sha1']: + p4_edit(dest) + if isModeExecChanged(diff['src_mode'], diff['dst_mode']): + p4_edit(dest) + filesToChangeExecBit[dest] = diff['dst_mode'] + os.unlink(dest) + editedFiles.add(dest) elif modifier == "R": src, dest = diff['src'], diff['dst'] - p4_system("integrate -Dt \"%s\" \"%s\"" % (src, dest)) - p4_system("edit \"%s\"" % (dest)) + p4_integrate(src, dest) + if diff['src_sha1'] != diff['dst_sha1']: + p4_edit(dest) if isModeExecChanged(diff['src_mode'], diff['dst_mode']): + p4_edit(dest) filesToChangeExecBit[dest] = diff['dst_mode'] os.unlink(dest) editedFiles.add(dest) @@ -653,9 +1069,45 @@ class P4Submit(Command): patchcmd = diffcmd + " | git apply " tryPatchCmd = patchcmd + "--check -" applyPatchCmd = patchcmd + "--check --apply -" + patch_succeeded = True if os.system(tryPatchCmd) != 0: + fixed_rcs_keywords = False + patch_succeeded = False print "Unfortunately applying the change failed!" + + # Patch failed, maybe it's just RCS keyword woes. Look through + # the patch to see if that's possible. + if gitConfig("git-p4.attemptRCSCleanup","--bool") == "true": + file = None + pattern = None + kwfiles = {} + for file in editedFiles | filesToDelete: + # did this file's delta contain RCS keywords? + pattern = p4_keywords_regexp_for_file(file) + + if pattern: + # this file is a possibility...look for RCS keywords. 
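                        # (Illustrative aside, not part of the patch: an
                        #  expanded RCS keyword in a p4-controlled file looks
                        #  roughly like
                        #      $Id: //depot/path/file.c#7 $
                        #  and the pattern built above matches both that form
                        #  and the bare "$Id$" form, so patchRCSKeywords()
                        #  can later collapse it back to "$Id$".)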
+ regexp = re.compile(pattern, re.VERBOSE) + for line in read_pipe_lines(["git", "diff", "%s^..%s" % (id, id), file]): + if regexp.search(line): + if verbose: + print "got keyword match on %s in %s in %s" % (pattern, line, file) + kwfiles[file] = pattern + break + + for file in kwfiles: + if verbose: + print "zapping %s with %s" % (line,pattern) + self.patchRCSKeywords(file, kwfiles[file]) + fixed_rcs_keywords = True + + if fixed_rcs_keywords: + print "Retrying the patch with RCS keywords cleaned up" + if os.system(tryPatchCmd) == 0: + patch_succeeded = True + + if not patch_succeeded: print "What do you want to do?" response = "x" while response != "s" and response != "a" and response != "w": @@ -664,9 +1116,9 @@ class P4Submit(Command): if response == "s": print "Skipping! Good luck with the next patches..." for f in editedFiles: - p4_system("revert \"%s\"" % f); + p4_revert(f) for f in filesToAdd: - system("rm %s" %f) + os.remove(f) return elif response == "a": os.system(applyPatchCmd) @@ -687,10 +1139,10 @@ class P4Submit(Command): system(applyPatchCmd) for f in filesToAdd: - p4_system("add \"%s\"" % f) + p4_add(f) for f in filesToDelete: - p4_system("revert \"%s\"" % f) - p4_system("delete \"%s\"" % f) + p4_revert(f) + p4_delete(f) # Set/clear executable bits for f in filesToChangeExecBit.keys(): @@ -704,11 +1156,15 @@ class P4Submit(Command): if self.interactive: submitTemplate = self.prepareLogMessage(template, logMessage) + + if self.preserveUser: + submitTemplate = submitTemplate + ("\n######## Actual user %s, modified after commit\n" % p4User) + if os.environ.has_key("P4DIFF"): del(os.environ["P4DIFF"]) diff = "" for editedFile in editedFiles: - diff += p4_read_pipe("diff -du %r" % editedFile) + diff += p4_read_pipe(['diff', '-du', editedFile]) newdiff = "" for newFile in filesToAdd: @@ -720,9 +1176,14 @@ class P4Submit(Command): newdiff += "+" + line f.close() + if self.checkAuthorship and not self.p4UserIsMe(p4User): + submitTemplate += "######## git author %s does not match your p4 account.\n" % gitEmail + submitTemplate += "######## Use git-p4 option --preserve-user to modify authorship\n" + submitTemplate += "######## Use git-p4 config git-p4.skipUserNameCheck hides this message.\n" + separatorLine = "######## everything below this line is just the diff #######\n" - [handle, fileName] = tempfile.mkstemp() + (handle, fileName) = tempfile.mkstemp() tmpFile = os.fdopen(handle, "w+") if self.isWindows: submitTemplate = submitTemplate.replace("\n", "\r\n") @@ -730,33 +1191,32 @@ class P4Submit(Command): newdiff = newdiff.replace("\n", "\r\n") tmpFile.write(submitTemplate + separatorLine + diff + newdiff) tmpFile.close() - mtime = os.stat(fileName).st_mtime - if os.environ.has_key("P4EDITOR"): - editor = os.environ.get("P4EDITOR") - else: - editor = read_pipe("git var GIT_EDITOR").strip() - system(editor + " " + fileName) - - response = "y" - if os.stat(fileName).st_mtime <= mtime: - response = "x" - while response != "y" and response != "n": - response = raw_input("Submit template unchanged. Submit anyway? 
[y]es, [n]o (skip this patch) ") - if response == "y": + if self.edit_template(fileName): + # read the edited message and submit tmpFile = open(fileName, "rb") message = tmpFile.read() tmpFile.close() submitTemplate = message[:message.index(separatorLine)] if self.isWindows: submitTemplate = submitTemplate.replace("\r\n", "\n") - p4_write_pipe("submit -i", submitTemplate) + p4_write_pipe(['submit', '-i'], submitTemplate) + + if self.preserveUser: + if p4User: + # Get last changelist number. Cannot easily get it from + # the submit command output as the output is + # unmarshalled. + changelist = self.lastP4Changelist() + self.modifyChangelistUser(changelist, p4User) else: + # skip this patch + print "Submission cancelled, undoing p4 changes." for f in editedFiles: - p4_system("revert \"%s\"" % f); + p4_revert(f) for f in filesToAdd: - p4_system("revert \"%s\"" % f); - system("rm %s" %f) + p4_revert(f) + os.remove(f) os.remove(fileName) else: @@ -775,6 +1235,8 @@ class P4Submit(Command): die("Detecting current git branch failed!") elif len(args) == 1: self.master = args[0] + if not branchExists(self.master): + die("Branch %s does not exist" % self.master) else: return False @@ -787,6 +1249,10 @@ class P4Submit(Command): if len(self.origin) == 0: self.origin = upstream + if self.preserveUser: + if not self.canChangeChangelists(): + die("Cannot preserve user names without p4 super-user or admin permissions") + if self.verbose: print "Origin branch is " + self.origin @@ -794,19 +1260,31 @@ class P4Submit(Command): print "Internal error: cannot locate perforce depot path from existing branches" sys.exit(128) - self.clientPath = p4Where(self.depotPath) + self.useClientSpec = False + if gitConfig("git-p4.useclientspec", "--bool") == "true": + self.useClientSpec = True + if self.useClientSpec: + self.clientSpecDirs = getClientSpec() + + if self.useClientSpec: + # all files are relative to the client spec + self.clientPath = getClientRoot() + else: + self.clientPath = p4Where(self.depotPath) - if len(self.clientPath) == 0: - print "Error: Cannot locate perforce checkout of %s in client view" % self.depotPath - sys.exit(128) + if self.clientPath == "": + die("Error: Cannot locate perforce checkout of %s in client view" % self.depotPath) print "Perforce checkout for depot path %s located at %s" % (self.depotPath, self.clientPath) self.oldWorkingDirectory = os.getcwd() + # ensure the clientPath exists + if not os.path.exists(self.clientPath): + os.makedirs(self.clientPath) + chdir(self.clientPath) print "Synchronizing p4 checkout..." - p4_system("sync ...") - + p4_sync("...") self.check() commits = [] @@ -814,6 +1292,14 @@ class P4Submit(Command): commits.append(line.strip()) commits.reverse() + if self.preserveUser or (gitConfig("git-p4.skipUserNameCheck") == "true"): + self.checkAuthorship = False + else: + self.checkAuthorship = True + + if self.preserveUser: + self.checkValidP4Users(commits) + while len(commits) > 0: commit = commits[0] commits = commits[1:] @@ -833,9 +1319,225 @@ class P4Submit(Command): return True -class P4Sync(Command): +class View(object): + """Represent a p4 view ("p4 help views"), and map files in a + repo according to the view.""" + + class Path(object): + """A depot or client path, possibly containing wildcards. + The only one supported is ... at the end, currently. 
+ Initialize with the full path, with //depot or //client.""" + + def __init__(self, path, is_depot): + self.path = path + self.is_depot = is_depot + self.find_wildcards() + # remember the prefix bit, useful for relative mappings + m = re.match("(//[^/]+/)", self.path) + if not m: + die("Path %s does not start with //prefix/" % self.path) + prefix = m.group(1) + if not self.is_depot: + # strip //client/ on client paths + self.path = self.path[len(prefix):] + + def find_wildcards(self): + """Make sure wildcards are valid, and set up internal + variables.""" + + self.ends_triple_dot = False + # There are three wildcards allowed in p4 views + # (see "p4 help views"). This code knows how to + # handle "..." (only at the end), but cannot deal with + # "%%n" or "*". Only check the depot_side, as p4 should + # validate that the client_side matches too. + if re.search(r'%%[1-9]', self.path): + die("Can't handle %%n wildcards in view: %s" % self.path) + if self.path.find("*") >= 0: + die("Can't handle * wildcards in view: %s" % self.path) + triple_dot_index = self.path.find("...") + if triple_dot_index >= 0: + if triple_dot_index != len(self.path) - 3: + die("Can handle only single ... wildcard, at end: %s" % + self.path) + self.ends_triple_dot = True + + def ensure_compatible(self, other_path): + """Make sure the wildcards agree.""" + if self.ends_triple_dot != other_path.ends_triple_dot: + die("Both paths must end with ... if either does;\n" + + "paths: %s %s" % (self.path, other_path.path)) + + def match_wildcards(self, test_path): + """See if this test_path matches us, and fill in the value + of the wildcards if so. Returns a tuple of + (True|False, wildcards[]). For now, only the ... at end + is supported, so at most one wildcard.""" + if self.ends_triple_dot: + dotless = self.path[:-3] + if test_path.startswith(dotless): + wildcard = test_path[len(dotless):] + return (True, [ wildcard ]) + else: + if test_path == self.path: + return (True, []) + return (False, []) + + def match(self, test_path): + """Just return if it matches; don't bother with the wildcards.""" + b, _ = self.match_wildcards(test_path) + return b + + def fill_in_wildcards(self, wildcards): + """Return the relative path, with the wildcards filled in + if there are any.""" + if self.ends_triple_dot: + return self.path[:-3] + wildcards[0] + else: + return self.path + + class Mapping(object): + def __init__(self, depot_side, client_side, overlay, exclude): + # depot_side is without the trailing /... if it had one + self.depot_side = View.Path(depot_side, is_depot=True) + self.client_side = View.Path(client_side, is_depot=False) + self.overlay = overlay # started with "+" + self.exclude = exclude # started with "-" + assert not (self.overlay and self.exclude) + self.depot_side.ensure_compatible(self.client_side) + + def __str__(self): + c = " " + if self.overlay: + c = "+" + if self.exclude: + c = "-" + return "View.Mapping: %s%s -> %s" % \ + (c, self.depot_side.path, self.client_side.path) + + def map_depot_to_client(self, depot_path): + """Calculate the client path if using this mapping on the + given depot path; does not consider the effect of other + mappings in a view. 
Even excluded mappings are returned.""" + matches, wildcards = self.depot_side.match_wildcards(depot_path) + if not matches: + return "" + client_path = self.client_side.fill_in_wildcards(wildcards) + return client_path + + # + # View methods + # + def __init__(self): + self.mappings = [] + + def append(self, view_line): + """Parse a view line, splitting it into depot and client + sides. Append to self.mappings, preserving order.""" + + # Split the view line into exactly two words. P4 enforces + # structure on these lines that simplifies this quite a bit. + # + # Either or both words may be double-quoted. + # Single quotes do not matter. + # Double-quote marks cannot occur inside the words. + # A + or - prefix is also inside the quotes. + # There are no quotes unless they contain a space. + # The line is already white-space stripped. + # The two words are separated by a single space. + # + if view_line[0] == '"': + # First word is double quoted. Find its end. + close_quote_index = view_line.find('"', 1) + if close_quote_index <= 0: + die("No first-word closing quote found: %s" % view_line) + depot_side = view_line[1:close_quote_index] + # skip closing quote and space + rhs_index = close_quote_index + 1 + 1 + else: + space_index = view_line.find(" ") + if space_index <= 0: + die("No word-splitting space found: %s" % view_line) + depot_side = view_line[0:space_index] + rhs_index = space_index + 1 + + if view_line[rhs_index] == '"': + # Second word is double quoted. Make sure there is a + # double quote at the end too. + if not view_line.endswith('"'): + die("View line with rhs quote should end with one: %s" % + view_line) + # skip the quotes + client_side = view_line[rhs_index+1:-1] + else: + client_side = view_line[rhs_index:] + + # prefix + means overlay on previous mapping + overlay = False + if depot_side.startswith("+"): + overlay = True + depot_side = depot_side[1:] + + # prefix - means exclude this path + exclude = False + if depot_side.startswith("-"): + exclude = True + depot_side = depot_side[1:] + + m = View.Mapping(depot_side, client_side, overlay, exclude) + self.mappings.append(m) + + def map_in_client(self, depot_path): + """Return the relative location in the client where this + depot file should live. Returns "" if the file should + not be mapped in the client.""" + + paths_filled = [] + client_path = "" + + # look at later entries first + for m in self.mappings[::-1]: + + # see where will this path end up in the client + p = m.map_depot_to_client(depot_path) + + if p == "": + # Depot path does not belong in client. Must remember + # this, as previous items should not cause files to + # exist in this path either. Remember that the list is + # being walked from the end, which has higher precedence. + # Overlap mappings do not exclude previous mappings. + if not m.overlay: + paths_filled.append(m.client_side) + + else: + # This mapping matched; no need to search any further. + # But, the mapping could be rejected if the client path + # has already been claimed by an earlier mapping (i.e. + # one later in the list, which we are walking backwards). + already_mapped_in_client = False + for f in paths_filled: + # this is View.Path.match + if f.match(p): + already_mapped_in_client = True + break + if not already_mapped_in_client: + # Include this file, unless it is from a line that + # explicitly said to exclude it. 
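                # (Illustrative aside, not part of the patch: for a client
                #  view such as
                #      //depot/proj/...        //client/proj/...
                #      -//depot/proj/doc/...   //client/proj/doc/...
                #  map_in_client("//depot/proj/doc/readme") returns "" because
                #  the later exclude line wins, while
                #  map_in_client("//depot/proj/src/foo.c") returns
                #  "proj/src/foo.c".)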
+ if not m.exclude: + client_path = p + + # a match, even if rejected, always stops the search + break + + return client_path + +class P4Sync(Command, P4UserMap): + delete_actions = ( "delete", "move/delete", "purge" ) + def __init__(self): Command.__init__(self) + P4UserMap.__init__(self) self.options = [ optparse.make_option("--branch", dest="branch"), optparse.make_option("--detect-branches", dest="detectBranches", action="store_true"), @@ -877,11 +1579,39 @@ class P4Sync(Command): self.p4BranchesInGit = [] self.cloneExclude = [] self.useClientSpec = False - self.clientSpecDirs = [] + self.useClientSpec_from_options = False + self.clientSpecDirs = None + self.tempBranches = [] + self.tempBranchLocation = "git-p4-tmp" if gitConfig("git-p4.syncFromOrigin") == "false": self.syncWithOrigin = False + # + # P4 wildcards are not allowed in filenames. P4 complains + # if you simply add them, but you can force it with "-f", in + # which case it translates them into %xx encoding internally. + # Search for and fix just these four characters. Do % last so + # that fixing it does not inadvertently create new %-escapes. + # + def wildcard_decode(self, path): + # Cannot have * in a filename in windows; untested as to + # what p4 would do in such a case. + if not self.isWindows: + path = path.replace("%2A", "*") + path = path.replace("%23", "#") \ + .replace("%40", "@") \ + .replace("%25", "%") + return path + + # Force a checkpoint in fast-import and wait for it to finish + def checkpoint(self): + self.gitStream.write("checkpoint\n\n") + self.gitStream.write("progress checkpoint\n\n") + out = self.gitOutput.readline() + if self.verbose: + print "checkpoint finished: " + out + def extractFilesFromCommit(self, commit): self.cloneExclude = [re.sub(r"\.\.\.$", "", path) for path in self.cloneExclude] @@ -891,11 +1621,11 @@ class P4Sync(Command): path = commit["depotFile%s" % fnum] if [p for p in self.cloneExclude - if path.startswith (p)]: + if p4PathStartsWith(path, p)]: found = False else: found = [p for p in self.depotPaths - if path.startswith (p)] + if p4PathStartsWith(path, p)] if not found: fnum = fnum + 1 continue @@ -910,11 +1640,14 @@ class P4Sync(Command): return files def stripRepoPath(self, path, prefixes): + if self.useClientSpec: + return self.clientSpecDirs.map_in_client(path) + if self.keepRepoPath: prefixes = [re.sub("^(//[^/]+/).*", r'\1', prefixes[0])] for p in prefixes: - if path.startswith(p): + if p4PathStartsWith(path, p): path = path[len(p):] return path @@ -925,7 +1658,7 @@ class P4Sync(Command): while commit.has_key("depotFile%s" % fnum): path = commit["depotFile%s" % fnum] found = [p for p in self.depotPaths - if path.startswith (p)] + if p4PathStartsWith(path, p)] if not found: fnum = fnum + 1 continue @@ -954,37 +1687,63 @@ class P4Sync(Command): # - helper for streamP4Files def streamOneP4File(self, file, contents): - if file["type"] == "apple": - print "\nfile %s is a strange apple file that forks. 
Ignoring" % \ - file['depotFile'] - return - relPath = self.stripRepoPath(file['depotFile'], self.branchPrefixes) + relPath = self.wildcard_decode(relPath) if verbose: sys.stderr.write("%s\n" % relPath) - mode = "644" - if isP4Exec(file["type"]): - mode = "755" - elif file["type"] == "symlink": - mode = "120000" - # p4 print on a symlink contains "target\n", so strip it off + (type_base, type_mods) = split_p4_type(file["type"]) + + git_mode = "100644" + if "x" in type_mods: + git_mode = "100755" + if type_base == "symlink": + git_mode = "120000" + # p4 print on a symlink contains "target\n"; remove the newline data = ''.join(contents) contents = [data[:-1]] - if self.isWindows and file["type"].endswith("text"): + if type_base == "utf16": + # p4 delivers different text in the python output to -G + # than it does when using "print -o", or normal p4 client + # operations. utf16 is converted to ascii or utf8, perhaps. + # But ascii text saved as -t utf16 is completely mangled. + # Invoke print -o to get the real contents. + text = p4_read_pipe(['print', '-q', '-o', '-', file['depotFile']]) + contents = [ text ] + + if type_base == "apple": + # Apple filetype files will be streamed as a concatenation of + # its appledouble header and the contents. This is useless + # on both macs and non-macs. If using "print -q -o xx", it + # will create "xx" with the data, and "%xx" with the header. + # This is also not very useful. + # + # Ideally, someday, this script can learn how to generate + # appledouble files directly and import those to git, but + # non-mac machines can never find a use for apple filetype. + print "\nIgnoring apple filetype file %s" % file['depotFile'] + return + + # Perhaps windows wants unicode, utf16 newlines translated too; + # but this is not doing it. + if self.isWindows and type_base == "text": mangled = [] for data in contents: data = data.replace("\r\n", "\n") mangled.append(data) contents = mangled - if file['type'] in ('text+ko', 'unicode+ko', 'binary+ko'): - contents = map(lambda text: re.sub(r'(?i)\$(Id|Header):[^$]*\$',r'$\1$', text), contents) - elif file['type'] in ('text+k', 'ktext', 'kxtext', 'unicode+k', 'binary+k'): - contents = map(lambda text: re.sub(r'\$(Id|Header|Author|Date|DateTime|Change|File|Revision):[^$\n]*\$',r'$\1$', text), contents) + # Note that we do not try to de-mangle keywords on utf16 files, + # even though in theory somebody may want that. + pattern = p4_keywords_regexp_for_type(type_base, type_mods) + if pattern: + regexp = re.compile(pattern, re.VERBOSE) + text = ''.join(contents) + text = regexp.sub(r'$\1$', text) + contents = [ text ] - self.gitStream.write("M %s inline %s\n" % (mode, relPath)) + self.gitStream.write("M %s inline %s\n" % (git_mode, relPath)) # total length... length = 0 @@ -1005,22 +1764,22 @@ class P4Sync(Command): # handle another chunk of streaming data def streamP4FilesCb(self, marshalled): - if marshalled.has_key('depotFile') and self.stream_have_file_info: - # start of a new file - output the old one first - self.streamOneP4File(self.stream_file, self.stream_contents) - self.stream_file = {} - self.stream_contents = [] - self.stream_have_file_info = False + if marshalled.has_key('depotFile') and self.stream_have_file_info: + # start of a new file - output the old one first + self.streamOneP4File(self.stream_file, self.stream_contents) + self.stream_file = {} + self.stream_contents = [] + self.stream_have_file_info = False - # pick up the new file information... 
for the - # 'data' field we need to append to our array - for k in marshalled.keys(): - if k == 'data': - self.stream_contents.append(marshalled['data']) - else: - self.stream_file[k] = marshalled[k] + # pick up the new file information... for the + # 'data' field we need to append to our array + for k in marshalled.keys(): + if k == 'data': + self.stream_contents.append(marshalled['data']) + else: + self.stream_file[k] = marshalled[k] - self.stream_have_file_info = True + self.stream_have_file_info = True # Stream directly from "p4 files" into "git fast-import" def streamP4Files(self, files): @@ -1029,19 +1788,17 @@ class P4Sync(Command): filesToDelete = [] for f in files: - includeFile = True - for val in self.clientSpecDirs: - if f['path'].startswith(val[0]): - if val[1] <= 0: - includeFile = False - break + # if using a client spec, only add the files that have + # a path in the client + if self.clientSpecDirs: + if self.clientSpecDirs.map_in_client(f['path']) == "": + continue - if includeFile: - filesForCommit.append(f) - if f['action'] not in ('delete', 'move/delete', 'purge'): - filesToRead.append(f) - else: - filesToDelete.append(f) + filesForCommit.append(f) + if f['action'] in self.delete_actions: + filesToDelete.append(f) + else: + filesToRead.append(f) # deleted files... for f in filesToDelete: @@ -1052,23 +1809,30 @@ class P4Sync(Command): self.stream_contents = [] self.stream_have_file_info = False - # curry self argument - def streamP4FilesCbSelf(entry): - self.streamP4FilesCb(entry) + # curry self argument + def streamP4FilesCbSelf(entry): + self.streamP4FilesCb(entry) - p4CmdList("-x - print", - '\n'.join(['%s#%s' % (f['path'], f['rev']) - for f in filesToRead]), - cb=streamP4FilesCbSelf) + fileArgs = ['%s#%s' % (f['path'], f['rev']) for f in filesToRead] + + p4CmdList(["-x", "-", "print"], + stdin=fileArgs, + cb=streamP4FilesCbSelf) # do the last chunk if self.stream_file.has_key('depotFile'): self.streamOneP4File(self.stream_file, self.stream_contents) + def make_email(self, userid): + if userid in self.users: + return self.users[userid] + else: + return "%s <a@b>" % userid + def commit(self, details, files, branch, branchPrefixes, parent = ""): epoch = details["time"] author = details["user"] - self.branchPrefixes = branchPrefixes + self.branchPrefixes = branchPrefixes if self.verbose: print "commit into %s" % branch @@ -1077,10 +1841,10 @@ class P4Sync(Command): # create a commit. 
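        # (Illustrative aside, not part of the patch: what follows is written
        #  in "git fast-import" stream syntax, roughly of the form
        #      commit refs/remotes/p4/master
        #      committer Full Name <user@host> 1234567890 -0500
        #      ...commit message...
        #      M 100644 inline path/in/repo
        #      data <byte count>
        #      ...file contents...
        #  with one "M ... inline" entry per file streamed from p4; the ref,
        #  name and timestamp shown here are only placeholders.)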
new_files = [] for f in files: - if [p for p in branchPrefixes if f['path'].startswith(p)]: + if [p for p in branchPrefixes if p4PathStartsWith(f['path'], p)]: new_files.append (f) else: - sys.stderr.write("Ignoring file outside of prefix: %s\n" % path) + sys.stderr.write("Ignoring file outside of prefix: %s\n" % f['path']) self.gitStream.write("commit %s\n" % branch) # gitStream.write("mark :%s\n" % details["change"]) @@ -1088,10 +1852,7 @@ class P4Sync(Command): committer = "" if author not in self.users: self.getUserMapFromPerforceServer() - if author in self.users: - committer = "%s %s %s" % (self.users[author], epoch, self.tz) - else: - committer = "%s <a@b> %s %s" % (author, epoch, self.tz) + committer = "%s %s %s" % (self.make_email(author), epoch, self.tz) self.gitStream.write("committer %s\n" % committer) @@ -1120,14 +1881,14 @@ class P4Sync(Command): if self.verbose: print "Change %s is labelled %s" % (change, labelDetails) - files = p4CmdList("files " + ' '.join (["%s...@%s" % (p, change) - for p in branchPrefixes])) + files = p4CmdList(["files"] + ["%s...@%s" % (p, change) + for p in branchPrefixes]) if len(files) == len(labelRevisions): cleanedFiles = {} for info in files: - if info["action"] in ("delete", "purge"): + if info["action"] in self.delete_actions: continue cleanedFiles[info["depotFile"]] = info["rev"] @@ -1136,15 +1897,21 @@ class P4Sync(Command): self.gitStream.write("from %s\n" % branch) owner = labelDetails["Owner"] - tagger = "" - if author in self.users: - tagger = "%s %s %s" % (self.users[owner], epoch, self.tz) + + # Try to use the owner of the p4 label, or failing that, + # the current p4 user id. + if owner: + email = self.make_email(owner) else: - tagger = "%s <a@b> %s %s" % (owner, epoch, self.tz) + email = self.make_email(self.p4UserId()) + tagger = "%s %s %s" % (email, epoch, self.tz) + self.gitStream.write("tagger %s\n" % tagger) - self.gitStream.write("data <<EOT\n") - self.gitStream.write(labelDetails["Description"]) - self.gitStream.write("EOT\n\n") + + description = labelDetails["Description"] + self.gitStream.write("data %d\n" % len(description)) + self.gitStream.write(description) + self.gitStream.write("\n") else: if not self.silent: @@ -1156,45 +1923,10 @@ class P4Sync(Command): print ("Tag %s does not match with change %s: file count is different." % (labelDetails["label"], change)) - def getUserCacheFilename(self): - home = os.environ.get("HOME", os.environ.get("USERPROFILE")) - return home + "/.gitp4-usercache.txt" - - def getUserMapFromPerforceServer(self): - if self.userMapFromPerforceServer: - return - self.users = {} - - for output in p4CmdList("users"): - if not output.has_key("User"): - continue - self.users[output["User"]] = output["FullName"] + " <" + output["Email"] + ">" - - - s = '' - for (key, val) in self.users.items(): - s += "%s\t%s\n" % (key.expandtabs(1), val.expandtabs(1)) - - open(self.getUserCacheFilename(), "wb").write(s) - self.userMapFromPerforceServer = True - - def loadUserMapFromCache(self): - self.users = {} - self.userMapFromPerforceServer = False - try: - cache = open(self.getUserCacheFilename(), "rb") - lines = cache.readlines() - cache.close() - for line in lines: - entry = line.strip().split("\t") - self.users[entry[0]] = entry[1] - except IOError: - self.getUserMapFromPerforceServer() - def getLabels(self): self.labels = {} - l = p4CmdList("labels %s..." % ' '.join (self.depotPaths)) + l = p4CmdList(["labels"] + ["%s..." 
% p for p in self.depotPaths]) if len(l) > 0 and not self.silent: print "Finding files belonging to labels in %s" % `self.depotPaths` @@ -1204,9 +1936,9 @@ class P4Sync(Command): newestChange = 0 if self.verbose: print "Querying files for label %s" % label - for file in p4CmdList("files " - + ' '.join (["%s...@%s" % (p, label) - for p in self.depotPaths])): + for file in p4CmdList(["files"] + + ["%s...@%s" % (p, label) + for p in self.depotPaths]): revisions[file["depotFile"]] = file["rev"] change = int(file["change"]) if change > newestChange: @@ -1229,8 +1961,14 @@ class P4Sync(Command): def getBranchMapping(self): lostAndFoundBranches = set() - for info in p4CmdList("branches"): - details = p4Cmd("branch -o %s" % info["branch"]) + user = gitConfig("git-p4.branchUser") + if len(user) > 0: + command = "branches -u %s" % user + else: + command = "branches" + + for info in p4CmdList(command): + details = p4Cmd(["branch", "-o", info["branch"]]) viewIdx = 0 while details.has_key("View%s" % viewIdx): paths = details["View%s" % viewIdx].split(" ") @@ -1241,7 +1979,7 @@ class P4Sync(Command): source = paths[0] destination = paths[1] ## HACK - if source.startswith(self.depotPaths[0]) and destination.startswith(self.depotPaths[0]): + if p4PathStartsWith(source, self.depotPaths[0]) and p4PathStartsWith(destination, self.depotPaths[0]): source = source[len(self.depotPaths[0]):-4] destination = destination[len(self.depotPaths[0]):-4] @@ -1258,6 +1996,25 @@ class P4Sync(Command): if source not in self.knownBranches: lostAndFoundBranches.add(source) + # Perforce does not strictly require branches to be defined, so we also + # check git config for a branch list. + # + # Example of branch definition in git config file: + # [git-p4] + # branchList=main:branchA + # branchList=main:branchB + # branchList=branchA:branchC + configBranches = gitConfigList("git-p4.branchList") + for branch in configBranches: + if branch: + (source, destination) = branch.split(":") + self.knownBranches[destination] = source + + lostAndFoundBranches.discard(destination) + + if source not in self.knownBranches: + lostAndFoundBranches.add(source) + for branch in lostAndFoundBranches: self.knownBranches[branch] = branch @@ -1349,7 +2106,7 @@ class P4Sync(Command): sourceRef = self.gitRefForBranch(sourceBranch) #print "source " + sourceBranch - branchParentChange = int(p4Cmd("changes -m 1 %s...@1,%s" % (sourceDepotPath, firstChange))["change"]) + branchParentChange = int(p4Cmd(["changes", "-m", "1", "%s...@1,%s" % (sourceDepotPath, firstChange)])["change"]) #print "branch parent: %s" % branchParentChange gitParent = self.gitCommitByP4Change(sourceRef, branchParentChange) if len(gitParent) > 0: @@ -1359,10 +2116,24 @@ class P4Sync(Command): self.importChanges(changes) return True + def searchParent(self, parent, branch, target): + parentFound = False + for blob in read_pipe_lines(["git", "rev-list", "--reverse", "--no-merges", parent]): + blob = blob.strip() + if len(read_pipe(["git", "diff-tree", blob, target])) == 0: + parentFound = True + if self.verbose: + print "Found parent of %s in commit %s" % (branch, blob) + break + if parentFound: + return blob + else: + return None + def importChanges(self, changes): cnt = 1 for change in changes: - description = p4Cmd("describe %s" % change) + description = p4Cmd(["describe", str(change)]) self.updateOptionDict(description) if not self.silent: @@ -1415,7 +2186,21 @@ class P4Sync(Command): parent = self.initialParents[branch] del self.initialParents[branch] - self.commit(description, 
filesForCommit, branch, [branchPrefix], parent) + blob = None + if len(parent) > 0: + tempBranch = os.path.join(self.tempBranchLocation, "%d" % (change)) + if self.verbose: + print "Creating temporary branch: " + tempBranch + self.commit(description, filesForCommit, tempBranch, [branchPrefix]) + self.tempBranches.append(tempBranch) + self.checkpoint() + blob = self.searchParent(parent, branch, tempBranch) + if blob: + self.commit(description, filesForCommit, branch, [branchPrefix], blob) + else: + if self.verbose: + print "Parent of %s not found. Committing into head of %s" % (branch, parent) + self.commit(description, filesForCommit, branch, [branchPrefix], parent) else: files = self.extractFilesFromCommit(description) self.commit(description, files, self.branch, self.depotPaths, @@ -1428,21 +2213,28 @@ class P4Sync(Command): def importHeadRevision(self, revision): print "Doing initial import of %s from revision %s into %s" % (' '.join(self.depotPaths), revision, self.branch) - details = { "user" : "git perforce import user", "time" : int(time.time()) } - details["desc"] = ("Initial import of %s from the state at revision %s" + details = {} + details["user"] = "git perforce import user" + details["desc"] = ("Initial import of %s from the state at revision %s\n" % (' '.join(self.depotPaths), revision)) details["change"] = revision newestRevision = 0 fileCnt = 0 - for info in p4CmdList("files " - + ' '.join(["%s...%s" - % (p, revision) - for p in self.depotPaths])): + fileArgs = ["%s...%s" % (p,revision) for p in self.depotPaths] + + for info in p4CmdList(["files"] + fileArgs): - if info['code'] == 'error': + if 'code' in info and info['code'] == 'error': sys.stderr.write("p4 returned an error: %s\n" % info['data']) + if info['data'].find("must refer to client") >= 0: + sys.stderr.write("This particular p4 error is misleading.\n") + sys.stderr.write("Perhaps the depot path was misspelled.\n"); + sys.stderr.write("Depot path: %s\n" % " ".join(self.depotPaths)) + sys.exit(1) + if 'p4ExitCode' in info: + sys.stderr.write("p4 exitcode: %s\n" % info['p4ExitCode']) sys.exit(1) @@ -1450,7 +2242,7 @@ class P4Sync(Command): if change > newestRevision: newestRevision = change - if info["action"] in ("delete", "purge"): + if info["action"] in self.delete_actions: # don't increase the file cnt, otherwise details["depotFile123"] will have gaps! #fileCnt = fileCnt + 1 continue @@ -1461,6 +2253,18 @@ class P4Sync(Command): fileCnt = fileCnt + 1 details["change"] = newestRevision + + # Use time from top-most change so that all git-p4 clones of + # the same p4 repo have the same commit SHA1s. 
+ res = p4CmdList("describe -s %d" % newestRevision) + newestTime = None + for r in res: + if r.has_key('time'): + newestTime = int(r['time']) + if newestTime is None: + die("\"describe -s\" on newest change %d did not give a time") + details["time"] = newestTime + self.updateOptionDict(details) try: self.commit(details, self.extractFilesFromCommit(details), self.branch, self.depotPaths) @@ -1469,26 +2273,6 @@ class P4Sync(Command): print self.gitError.read() - def getClientSpec(self): - specList = p4CmdList( "client -o" ) - temp = {} - for entry in specList: - for k,v in entry.iteritems(): - if k.startswith("View"): - if v.startswith('"'): - start = 1 - else: - start = 0 - index = v.find("...") - v = v[start:index] - if v.startswith("-"): - v = v[1:] - temp[v] = -len(v) - else: - temp[v] = len(v) - self.clientSpecDirs = temp.items() - self.clientSpecDirs.sort( lambda x, y: abs( y[1] ) - abs( x[1] ) ) - def run(self, args): self.depotPaths = [] self.changeRange = "" @@ -1521,8 +2305,15 @@ class P4Sync(Command): if not gitBranchExists(self.refPrefix + "HEAD") and self.importIntoRemotes and gitBranchExists(self.branch): system("git symbolic-ref %sHEAD %s" % (self.refPrefix, self.branch)) - if self.useClientSpec or gitConfig("git-p4.useclientspec") == "true": - self.getClientSpec() + # accept either the command-line option, or the configuration variable + if self.useClientSpec: + # will use this after clone to set the variable + self.useClientSpec_from_options = True + else: + if gitConfig("git-p4.useclientspec", "--bool") == "true": + self.useClientSpec = True + if self.useClientSpec: + self.clientSpecDirs = getClientSpec() # TODO: should always look at previous commits, # merge with previous imports, if possible. @@ -1557,12 +2348,14 @@ class P4Sync(Command): else: paths = [] for (prev, cur) in zip(self.previousDepotPaths, depotPaths): - for i in range(0, min(len(cur), len(prev))): - if cur[i] <> prev[i]: + prev_list = prev.split("/") + cur_list = cur.split("/") + for i in range(0, min(len(cur_list), len(prev_list))): + if cur_list[i] <> prev_list[i]: i = i - 1 break - paths.append (cur[:i + 1]) + paths.append ("/".join(cur_list[:i + 1])) self.previousDepotPaths = paths @@ -1592,6 +2385,17 @@ class P4Sync(Command): revision = "" self.users = {} + # Make sure no revision specifiers are used when --changesfile + # is specified. + bad_changesfile = False + if len(self.changesFile) > 0: + for p in self.depotPaths: + if p.find("@") >= 0 or p.find("#") >= 0: + bad_changesfile = True + break + if bad_changesfile: + die("Option --changesfile is incompatible with revision specifiers") + newPaths = [] for p in self.depotPaths: if p.find("@") != -1: @@ -1608,7 +2412,10 @@ class P4Sync(Command): revision = p[hashIdx:] p = p[:hashIdx] elif self.previousDepotPaths == []: - revision = "#head" + # pay attention to changesfile, if given, else import + # the entire p4 tree at the head revision + if len(self.changesFile) == 0: + revision = "#head" p = re.sub ("\.\.\.$", "", p) if not p.endswith("/"): @@ -1667,6 +2474,10 @@ class P4Sync(Command): changes.sort() else: + # catch "git-p4 sync" with no new branches, in a repo that + # does not have any existing git-p4 branches + if len(args) == 0 and not self.p4BranchesInGit: + die("No remote p4 branches. 
Perhaps you never did \"git p4 clone\" in here."); if self.verbose: print "Getting p4 changes for %s...%s" % (', '.join(self.depotPaths), self.changeRange) @@ -1701,6 +2512,12 @@ class P4Sync(Command): self.gitOutput.close() self.gitError.close() + # Cleanup temporary branches created during import + if self.tempBranches != []: + for branch in self.tempBranches: + read_pipe("git update-ref -d %s" % branch) + os.rmdir(os.path.join(os.environ.get("GIT_DIR", ".git"), self.tempBranchLocation)) + return True class P4Rebase(Command): @@ -1747,10 +2564,13 @@ class P4Clone(P4Sync): help="where to leave result of the clone"), optparse.make_option("-/", dest="cloneExclude", action="append", type="string", - help="exclude depot path") + help="exclude depot path"), + optparse.make_option("--bare", dest="cloneBare", + action="store_true", default=False), ] self.cloneDestination = None self.needsGit = False + self.cloneBare = False # This is required for the "append" cloneExclude action def ensure_value(self, attr, value): @@ -1790,11 +2610,16 @@ class P4Clone(P4Sync): self.cloneDestination = self.defaultDestination(args) print "Importing from %s into %s" % (', '.join(depotPaths), self.cloneDestination) + if not os.path.exists(self.cloneDestination): os.makedirs(self.cloneDestination) chdir(self.cloneDestination) - system("git init") - self.gitdir = os.getcwd() + "/.git" + + init_cmd = [ "git", "init" ] + if self.cloneBare: + init_cmd.append("--bare") + subprocess.check_call(init_cmd) + if not P4Sync.run(self, depotPaths): return False if self.branch != "master": @@ -1804,10 +2629,15 @@ class P4Clone(P4Sync): masterbranch = "refs/heads/p4/master" if gitBranchExists(masterbranch): system("git branch master %s" % masterbranch) - system("git checkout -f") + if not self.cloneBare: + system("git checkout -f") else: print "Could not detect main branch. No checkout/master branch created." + # auto-set this variable if invoked with --use-client-spec + if self.useClientSpec_from_options: + system("git config --bool git-p4.useclientspec true") + return True class P4Branches(Command): @@ -1890,7 +2720,8 @@ def main(): args = sys.argv[2:] if len(options) > 0: - options.append(optparse.make_option("--git-dir", dest="gitdir")) + if cmd.needsGit: + options.append(optparse.make_option("--git-dir", dest="gitdir")) parser = optparse.OptionParser(cmd.usage.replace("%prog", "%prog " + cmdName), options, @@ -1920,6 +2751,7 @@ def main(): if not cmd.run(args): parser.print_help() + sys.exit(2) if __name__ == '__main__': diff --git a/contrib/fast-import/git-p4.txt b/contrib/fast-import/git-p4.txt deleted file mode 100644 index 49b335921a..0000000000 --- a/contrib/fast-import/git-p4.txt +++ /dev/null @@ -1,210 +0,0 @@ -git-p4 - Perforce <-> Git converter using git-fast-import - -Usage -===== - -git-p4 can be used in two different ways: - -1) To import changes from Perforce to a Git repository, using "git-p4 sync". - -2) To submit changes from Git back to Perforce, using "git-p4 submit". - -Importing -========= - -Simply start with - - git-p4 clone //depot/path/project - -or - - git-p4 clone //depot/path/project myproject - -This will: - -1) Create an empty git repository in a subdirectory called "project" (or -"myproject" with the second command) - -2) Import the head revision from the given Perforce path into a git branch -called "p4" (remotes/p4 actually) - -3) Create a master branch based on it and check it out. 
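The reworked P4Clone.run earlier in this patch is what carries out these three steps, now with an optional --bare mode. A minimal sketch of that flow in modern Python, with the fast-import plumbing omitted; the clone_sketch name and its arguments are invented for illustration and are not part of git-p4:

    import os
    import subprocess

    def clone_sketch(dest, bare=False):
        # 1) create the destination repository, optionally bare, much as
        #    the new init_cmd/subprocess.check_call code above does
        os.makedirs(dest, exist_ok=True)
        init_cmd = ["git", "init"]
        if bare:
            init_cmd.append("--bare")
        subprocess.check_call(init_cmd, cwd=dest)
        # 2) the real code imports the head revision (or full history)
        #    by piping "p4 print" output into "git fast-import" -- omitted
        # 3) create a local master from the imported p4 branch and,
        #    unless the clone is bare, check it out
        subprocess.check_call(["git", "branch", "master",
                               "refs/remotes/p4/master"], cwd=dest)
        if not bare:
            subprocess.check_call(["git", "checkout", "-f"], cwd=dest)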
- -If you want the entire history (not just the head revision) then you can simply -append a "@all" to the depot path: - - git-p4 clone //depot/project/main@all myproject - - - -If you want more control you can also use the git-p4 sync command directly: - - mkdir repo-git - cd repo-git - git init - git-p4 sync //path/in/your/perforce/depot - -This will import the current head revision of the specified depot path into a -"remotes/p4/master" branch of your git repository. You can use the ---branch=mybranch option to import into a different branch. - -If you want to import the entire history of a given depot path simply use: - - git-p4 sync //path/in/depot@all - - -Note: - -To achieve optimal compression you may want to run 'git repack -a -d -f' after -a big import. This may take a while. - -Incremental Imports -=================== - -After an initial import you can continue to synchronize your git repository -with newer changes from the Perforce depot by just calling - - git-p4 sync - -in your git repository. By default the "remotes/p4/master" branch is updated. - -Advanced Setup -============== - -Suppose you have a periodically updated git repository somewhere, containing a -complete import of a Perforce project. This repository can be cloned and used -with git-p4. When updating the cloned repository with the "sync" command, -git-p4 will try to fetch changes from the original repository first. The git -protocol used with this is usually faster than importing from Perforce -directly. - -This behaviour can be disabled by setting the "git-p4.syncFromOrigin" git -configuration variable to "false". - -Updating -======== - -A common working pattern is to fetch the latest changes from the Perforce depot -and merge them with local uncommitted changes. The recommended way is to use -git's rebase mechanism to preserve linear history. git-p4 provides a convenient - - git-p4 rebase - -command that calls git-p4 sync followed by git rebase to rebase the current -working branch. - -Submitting -========== - -git-p4 has support for submitting changes from a git repository back to the -Perforce depot. This requires a Perforce checkout separate from your git -repository. To submit all changes that are in the current git branch but not in -the "p4" branch (or "origin" if "p4" doesn't exist) simply call - - git-p4 submit - -in your git repository. If you want to submit changes in a specific branch that -is not your current git branch you can also pass that as an argument: - - git-p4 submit mytopicbranch - -You can override the reference branch with the --origin=mysourcebranch option. - -If a submit fails you may have to "p4 resolve" and submit manually. You can -continue importing the remaining changes with - - git-p4 submit --continue - -Example -======= - -# Clone a repository - git-p4 clone //depot/path/project -# Enter the newly cloned directory - cd project -# Do some work... - vi foo.h -# ... and commit locally to gi - git commit foo.h -# In the meantime somebody submitted changes to the Perforce depot. Rebase your latest -# changes against the latest changes in Perforce: - git-p4 rebase -# Submit your locally committed changes back to Perforce - git-p4 submit -# ... and synchronize with Perforce - git-p4 rebase - - -Configuration parameters -======================== - -git-p4.user ($P4USER) - -Allows you to specify the username to use to connect to the Perforce repository. 
- - git config [--global] git-p4.user public - -git-p4.password ($P4PASS) - -Allows you to specify the password to use to connect to the Perforce repository. -Warning this password will be visible on the command-line invocation of the p4 binary. - - git config [--global] git-p4.password public1234 - -git-p4.port ($P4PORT) - -Specify the port to be used to contact the Perforce server. As this will be passed -directly to the p4 binary, it may be in the format host:port as well. - - git config [--global] git-p4.port codes.zimbra.com:2666 - -git-p4.host ($P4HOST) - -Specify the host to contact for a Perforce repository. - - git config [--global] git-p4.host perforce.example.com - -git-p4.client ($P4CLIENT) - -Specify the client name to use - - git config [--global] git-p4.client public-view - -git-p4.allowSubmit - - git config [--global] git-p4.allowSubmit false - -git-p4.syncFromOrigin - -A useful setup may be that you have a periodically updated git repository -somewhere that contains a complete import of a Perforce project. That git -repository can be used to clone the working repository from and one would -import from Perforce directly after cloning using git-p4. If the connection to -the Perforce server is slow and the working repository hasn't been synced for a -while it may be desirable to fetch changes from the origin git repository using -the efficient git protocol. git-p4 supports this setup by calling "git fetch origin" -by default if there is an origin branch. You can disable this using: - - git config [--global] git-p4.syncFromOrigin false - -git-p4.useclientspec - - git config [--global] git-p4.useclientspec false - -Implementation Details... -========================= - -* Changesets from Perforce are imported using git fast-import. -* The import does not require anything from the Perforce client view as it just uses - "p4 print //depot/path/file#revision" to get the actual file contents. -* Every imported changeset has a special [git-p4...] line at the - end of the log message that gives information about the corresponding - Perforce change number and is also used by git-p4 itself to find out - where to continue importing when doing incremental imports. - Basically when syncing it extracts the perforce change number of the - latest commit in the "p4" branch and uses "p4 changes //depot/path/...@changenum,#head" - to find out which changes need to be imported. -* git-p4 submit uses "git rev-list" to pick the commits between the "p4" branch - and the current branch. - The commits themselves are applied using git diff/format-patch ... | git apply - diff --git a/contrib/git-jump/README b/contrib/git-jump/README new file mode 100644 index 0000000000..1cebc328cb --- /dev/null +++ b/contrib/git-jump/README @@ -0,0 +1,92 @@ +git-jump +======== + +Git-jump is a script for helping you jump to "interesting" parts of your +project in your editor. It works by outputting a set of interesting +spots in the "quickfix" format, which editors like vim can use as a +queue of places to visit (this feature is usually used to jump to errors +produced by a compiler). 
For example, given a diff like this: + +------------------------------------ +diff --git a/foo.c b/foo.c +index a655540..5a59044 100644 +--- a/foo.c ++++ b/foo.c +@@ -1,3 +1,3 @@ + int main(void) { +- printf("hello word!\n"); ++ printf("hello world!\n"); + } +----------------------------------- + +git-jump will feed this to the editor: + +----------------------------------- +foo.c:2: printf("hello word!\n"); +----------------------------------- + +Obviously this trivial case isn't that interesting; you could just open +`foo.c` yourself. But when you have many changes scattered across a +project, you can use the editor's support to "jump" from point to point. + +Git-jump can generate three types of interesting lists: + + 1. The beginning of any diff hunks. + + 2. The beginning of any merge conflict markers. + + 3. Any grep matches. + + +Using git-jump +-------------- + +To use it, just drop git-jump in your PATH, and then invoke it like +this: + +-------------------------------------------------- +# jump to changes not yet staged for commit +git jump diff + +# jump to changes that are staged for commit; you can give +# arbitrary diff options +git jump diff --cached + +# jump to merge conflicts +git jump merge + +# jump to all instances of foo_bar +git jump grep foo_bar + +# same as above, but case-insensitive; you can give +# arbitrary grep options +git jump grep -i foo_bar +-------------------------------------------------- + + +Related Programs +---------------- + +You can accomplish some of the same things with individual tools. For +example, you can use `git mergetool` to start vimdiff on each unmerged +file. `git jump merge` is for the vim-wielding luddite who just wants to +jump straight to the conflict text with no fanfare. + +As of git v1.7.2, `git grep` knows the `--open-files-in-pager` option, +which does something similar to `git jump grep`. However, it is limited +to positioning the cursor to the correct line in only the first file, +leaving you to locate subsequent hits in that file or other files using +the editor or pager. By contrast, git-jump provides the editor with a +complete list of files and line numbers for each match. + + +Limitations +----------- + +This scripts was written and tested with vim. Given that the quickfix +format is the same as what gcc produces, I expect emacs users have a +similar feature for iterating through the list, but I know nothing about +how to activate it. + +The shell snippets to generate the quickfix lines will almost certainly +choke on filenames with exotic characters (like newlines). diff --git a/contrib/git-jump/git-jump b/contrib/git-jump/git-jump new file mode 100755 index 0000000000..a33674e47a --- /dev/null +++ b/contrib/git-jump/git-jump @@ -0,0 +1,69 @@ +#!/bin/sh + +usage() { + cat <<\EOF +usage: git jump <mode> [<args>] + +Jump to interesting elements in an editor. +The <mode> parameter is one of: + +diff: elements are diff hunks. Arguments are given to diff. + +merge: elements are merge conflicts. Arguments are ignored. + +grep: elements are grep hits. Arguments are given to grep. 
+EOF +} + +open_editor() { + editor=`git var GIT_EDITOR` + eval "$editor -q \$1" +} + +mode_diff() { + git diff --relative "$@" | + perl -ne ' + if (m{^\+\+\+ b/(.*)}) { $file = $1; next } + defined($file) or next; + if (m/^@@ .*\+(\d+)/) { $line = $1; next } + defined($line) or next; + if (/^ /) { $line++; next } + if (/^[-+]\s*(.*)/) { + print "$file:$line: $1\n"; + $line = undef; + } + ' +} + +mode_merge() { + git ls-files -u | + perl -pe 's/^.*?\t//' | + sort -u | + while IFS= read fn; do + grep -Hn '^<<<<<<<' "$fn" + done +} + +# Grep -n generates nice quickfix-looking lines by itself, +# but let's clean up extra whitespace, so they look better if the +# editor shows them to us in the status bar. +mode_grep() { + git grep -n "$@" | + perl -pe ' + s/[ \t]+/ /g; + s/^ *//; + ' +} + +if test $# -lt 1; then + usage >&2 + exit 1 +fi +mode=$1; shift + +trap 'rm -f "$tmp"' 0 1 2 3 15 +tmp=`mktemp -t git-jump.XXXXXX` || exit 1 +type "mode_$mode" >/dev/null 2>&1 || { usage >&2; exit 1; } +"mode_$mode" "$@" >"$tmp" +test -s "$tmp" || exit 0 +open_editor "$tmp" diff --git a/contrib/gitview/gitview.txt b/contrib/gitview/gitview.txt index 77c29de305..9e12f97842 100644 --- a/contrib/gitview/gitview.txt +++ b/contrib/gitview/gitview.txt @@ -7,6 +7,7 @@ gitview - A GTK based repository browser for git SYNOPSIS -------- +[verse] 'gitview' [options] [args] DESCRIPTION diff --git a/contrib/hooks/post-receive-email b/contrib/hooks/post-receive-email index 21989fc6ab..01af9df15e 100755 --- a/contrib/hooks/post-receive-email +++ b/contrib/hooks/post-receive-email @@ -11,11 +11,11 @@ # will have put this somewhere standard. You should make this script # executable then link to it in the repository you would like to use it in. # For example, on debian the hook is stored in -# /usr/share/doc/git-core/contrib/hooks/post-receive-email: +# /usr/share/git-core/contrib/hooks/post-receive-email: # # chmod a+x post-receive-email # cd /path/to/your/repository.git -# ln -sf /usr/share/doc/git-core/contrib/hooks/post-receive-email hooks/post-receive +# ln -sf /usr/share/git-core/contrib/hooks/post-receive-email hooks/post-receive # # This hook script assumes it is enabled on the central repository of a # project, with all users pushing only to it and not between each other. It @@ -60,6 +60,11 @@ # email body. If not specified, there is no limit. # Lines beyond the limit are suppressed and counted, and a final # line is added indicating the number of suppressed lines. +# hooks.diffopts +# Alternate options for the git diff-tree invocation that shows changes. +# Default is "--stat --summary --find-copies-harder". Add -p to those +# options to include a unified diff of changes in addition to the usual +# summary output. # # Notes # ----- @@ -80,7 +85,6 @@ prep_for_email() oldrev=$(git rev-parse $1) newrev=$(git rev-parse $2) refname="$3" - maxlines=$4 # --- Interpret # 0000->1234 (create) @@ -446,7 +450,7 @@ generate_update_branch_email() # non-fast-forward updates. 
echo "" echo "Summary of changes:" - git diff-tree --stat --summary --find-copies-harder $oldrev..$newrev + git diff-tree $diffopts $oldrev..$newrev } # @@ -456,7 +460,7 @@ generate_delete_branch_email() { echo " was $oldrev" echo "" - echo $LOGEND + echo $LOGBEGIN git show -s --pretty=oneline $oldrev echo $LOGEND } @@ -556,7 +560,7 @@ generate_delete_atag_email() { echo " was $oldrev" echo "" - echo $LOGEND + echo $LOGBEGIN git show -s --pretty=oneline $oldrev echo $LOGEND } @@ -621,7 +625,7 @@ generate_delete_general_email() { echo " was $oldrev" echo "" - echo $LOGEND + echo $LOGBEGIN git show -s --pretty=oneline $oldrev echo $LOGEND } @@ -723,6 +727,8 @@ envelopesender=$(git config hooks.envelopesender) emailprefix=$(git config hooks.emailprefix || echo '[SCM] ') custom_showrev=$(git config hooks.showrev) maxlines=$(git config hooks.emailmaxlines) +diffopts=$(git config hooks.diffopts) +: ${diffopts:="--stat --summary --find-copies-harder"} # --- Main loop # Allow dual mode: run from the command line just like the update hook, or diff --git a/contrib/mw-to-git/git-remote-mediawiki b/contrib/mw-to-git/git-remote-mediawiki new file mode 100755 index 0000000000..c18bfa1f15 --- /dev/null +++ b/contrib/mw-to-git/git-remote-mediawiki @@ -0,0 +1,827 @@ +#! /usr/bin/perl + +# Copyright (C) 2011 +# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr> +# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr> +# Claire Fousse <claire.fousse@ensimag.imag.fr> +# David Amouyal <david.amouyal@ensimag.imag.fr> +# Matthieu Moy <matthieu.moy@grenoble-inp.fr> +# License: GPL v2 or later + +# Gateway between Git and MediaWiki. +# https://github.com/Bibzball/Git-Mediawiki/wiki +# +# Known limitations: +# +# - Only wiki pages are managed, no support for [[File:...]] +# attachments. +# +# - Poor performance in the best case: it takes forever to check +# whether we're up-to-date (on fetch or push) or to fetch a few +# revisions from a large wiki, because we use exclusively a +# page-based synchronization. We could switch to a wiki-wide +# synchronization when the synchronization involves few revisions +# but the wiki is large. +# +# - Git renames could be turned into MediaWiki renames (see TODO +# below) +# +# - login/password support requires the user to write the password +# cleartext in a file (see TODO below). +# +# - No way to import "one page, and all pages included in it" +# +# - Multiple remote MediaWikis have not been very well tested. + +use strict; +use MediaWiki::API; +use DateTime::Format::ISO8601; +use encoding 'utf8'; + +# use encoding 'utf8' doesn't change STDERROR +# but we're going to output UTF-8 filenames to STDERR +binmode STDERR, ":utf8"; + +use URI::Escape; +use warnings; + +# Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced +use constant SLASH_REPLACEMENT => "%2F"; + +# It's not always possible to delete pages (may require some +# priviledges). Deleted pages are replaced with this content. +use constant DELETED_CONTENT => "[[Category:Deleted]]\n"; + +# It's not possible to create empty pages. New empty files in Git are +# sent with this content instead. +use constant EMPTY_CONTENT => "<!-- empty page -->\n"; + +# used to reflect file creation or deletion in diff. +use constant NULL_SHA1 => "0000000000000000000000000000000000000000"; + +my $remotename = $ARGV[0]; +my $url = $ARGV[1]; + +# Accept both space-separated and multiple keys in config file. +# Spaces should be written as _ anyway because we'll use chomp. 
+my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages")); +chomp(@tracked_pages); + +# Just like @tracked_pages, but for MediaWiki categories. +my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories")); +chomp(@tracked_categories); + +my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin"); +# TODO: ideally, this should be able to read from keyboard, but we're +# inside a remote helper, so our stdin is connect to git, not to a +# terminal. +my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword"); +my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain"); +chomp($wiki_login); +chomp($wiki_passwd); +chomp($wiki_domain); + +# Import only last revisions (both for clone and fetch) +my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow"); +chomp($shallow_import); +$shallow_import = ($shallow_import eq "true"); + +# Dumb push: don't update notes and mediawiki ref to reflect the last push. +# +# Configurable with mediawiki.dumbPush, or per-remote with +# remote.<remotename>.dumbPush. +# +# This means the user will have to re-import the just-pushed +# revisions. On the other hand, this means that the Git revisions +# corresponding to MediaWiki revisions are all imported from the wiki, +# regardless of whether they were initially created in Git or from the +# web interface, hence all users will get the same history (i.e. if +# the push from Git to MediaWiki loses some information, everybody +# will get the history with information lost). If the import is +# deterministic, this means everybody gets the same sha1 for each +# MediaWiki revision. +my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush"); +unless ($dumb_push) { + $dumb_push = run_git("config --get --bool mediawiki.dumbPush"); +} +chomp($dumb_push); +$dumb_push = ($dumb_push eq "true"); + +my $wiki_name = $url; +$wiki_name =~ s/[^\/]*:\/\///; +# If URL is like http://user:password@example.com/, we clearly don't +# want the password in $wiki_name. While we're there, also remove user +# and '@' sign, to avoid author like MWUser@HTTPUser@host.com +$wiki_name =~ s/^.*@//; + +# Commands parser +my $entry; +my @cmd; +while (<STDIN>) { + chomp; + @cmd = split(/ /); + if (defined($cmd[0])) { + # Line not blank + if ($cmd[0] eq "capabilities") { + die("Too many arguments for capabilities") unless (!defined($cmd[1])); + mw_capabilities(); + } elsif ($cmd[0] eq "list") { + die("Too many arguments for list") unless (!defined($cmd[2])); + mw_list($cmd[1]); + } elsif ($cmd[0] eq "import") { + die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2])); + mw_import($cmd[1]); + } elsif ($cmd[0] eq "option") { + die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3])); + mw_option($cmd[1],$cmd[2]); + } elsif ($cmd[0] eq "push") { + mw_push($cmd[1]); + } else { + print STDERR "Unknown command. Aborting...\n"; + last; + } + } else { + # blank line: we should terminate + last; + } + + BEGIN { $| = 1 } # flush STDOUT, to make sure the previous + # command is fully processed. +} + +########################## Functions ############################## + +# MediaWiki API instance, created lazily. 
+my $mediawiki; + +sub mw_connect_maybe { + if ($mediawiki) { + return; + } + $mediawiki = MediaWiki::API->new; + $mediawiki->{config}->{api_url} = "$url/api.php"; + if ($wiki_login) { + if (!$mediawiki->login({ + lgname => $wiki_login, + lgpassword => $wiki_passwd, + lgdomain => $wiki_domain, + })) { + print STDERR "Failed to log in mediawiki user \"$wiki_login\" on $url\n"; + print STDERR "(error " . + $mediawiki->{error}->{code} . ': ' . + $mediawiki->{error}->{details} . ")\n"; + exit 1; + } else { + print STDERR "Logged in with user \"$wiki_login\".\n"; + } + } +} + +sub get_mw_first_pages { + my $some_pages = shift; + my @some_pages = @{$some_pages}; + + my $pages = shift; + + # pattern 'page1|page2|...' required by the API + my $titles = join('|', @some_pages); + + my $mw_pages = $mediawiki->api({ + action => 'query', + titles => $titles, + }); + if (!defined($mw_pages)) { + print STDERR "fatal: could not query the list of wiki pages.\n"; + print STDERR "fatal: '$url' does not appear to be a mediawiki\n"; + print STDERR "fatal: make sure '$url/api.php' is a valid page.\n"; + exit 1; + } + while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) { + if ($id < 0) { + print STDERR "Warning: page $page->{title} not found on wiki\n"; + } else { + $pages->{$page->{title}} = $page; + } + } +} + +sub get_mw_pages { + mw_connect_maybe(); + + my %pages; # hash on page titles to avoid duplicates + my $user_defined; + if (@tracked_pages) { + $user_defined = 1; + # The user provided a list of pages titles, but we + # still need to query the API to get the page IDs. + + my @some_pages = @tracked_pages; + while (@some_pages) { + my $last = 50; + if ($#some_pages < $last) { + $last = $#some_pages; + } + my @slice = @some_pages[0..$last]; + get_mw_first_pages(\@slice, \%pages); + @some_pages = @some_pages[51..$#some_pages]; + } + } + if (@tracked_categories) { + $user_defined = 1; + foreach my $category (@tracked_categories) { + if (index($category, ':') < 0) { + # Mediawiki requires the Category + # prefix, but let's not force the user + # to specify it. + $category = "Category:" . $category; + } + my $mw_pages = $mediawiki->list( { + action => 'query', + list => 'categorymembers', + cmtitle => $category, + cmlimit => 'max' } ) + || die $mediawiki->{error}->{code} . ': ' . $mediawiki->{error}->{details}; + foreach my $page (@{$mw_pages}) { + $pages{$page->{title}} = $page; + } + } + } + if (!$user_defined) { + # No user-provided list, get the list of pages from + # the API. + my $mw_pages = $mediawiki->list({ + action => 'query', + list => 'allpages', + aplimit => 500, + }); + if (!defined($mw_pages)) { + print STDERR "fatal: could not get the list of wiki pages.\n"; + print STDERR "fatal: '$url' does not appear to be a mediawiki\n"; + print STDERR "fatal: make sure '$url/api.php' is a valid page.\n"; + exit 1; + } + foreach my $page (@{$mw_pages}) { + $pages{$page->{title}} = $page; + } + } + return values(%pages); +} + +sub run_git { + open(my $git, "-|:encoding(UTF-8)", "git " . 
$_[0]); + my $res = do { local $/; <$git> }; + close($git); + + return $res; +} + + +sub get_last_local_revision { + # Get note regarding last mediawiki revision + my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null"); + my @note_info = split(/ /, $note); + + my $lastrevision_number; + if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) { + print STDERR "No previous mediawiki revision found"; + $lastrevision_number = 0; + } else { + # Notes are formatted : mediawiki_revision: #number + $lastrevision_number = $note_info[1]; + chomp($lastrevision_number); + print STDERR "Last local mediawiki revision found is $lastrevision_number"; + } + return $lastrevision_number; +} + +# Remember the timestamp corresponding to a revision id. +my %basetimestamps; + +sub get_last_remote_revision { + mw_connect_maybe(); + + my @pages = get_mw_pages(); + + my $max_rev_num = 0; + + foreach my $page (@pages) { + my $id = $page->{pageid}; + + my $query = { + action => 'query', + prop => 'revisions', + rvprop => 'ids|timestamp', + pageids => $id, + }; + + my $result = $mediawiki->api($query); + + my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}}); + + $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp}; + + $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num); + } + + print STDERR "Last remote revision found is $max_rev_num.\n"; + return $max_rev_num; +} + +# Clean content before sending it to MediaWiki +sub mediawiki_clean { + my $string = shift; + my $page_created = shift; + # Mediawiki does not allow blank space at the end of a page and ends with a single \n. + # This function right trims a string and adds a \n at the end to follow this rule + $string =~ s/\s+$//; + if ($string eq "" && $page_created) { + # Creating empty pages is forbidden. + $string = EMPTY_CONTENT; + } + return $string."\n"; +} + +# Filter applied on MediaWiki data before adding them to Git +sub mediawiki_smudge { + my $string = shift; + if ($string eq EMPTY_CONTENT) { + $string = ""; + } + # This \n is important. This is due to mediawiki's way to handle end of files. + return $string."\n"; +} + +sub mediawiki_clean_filename { + my $filename = shift; + $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g; + # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded. + # Do a variant of URL-encoding, i.e. looks like URL-encoding, + # but with _ added to prevent MediaWiki from thinking this is + # an actual special character. + $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge; + # If we use the uri escape before + # we should unescape here, before anything + + return $filename; +} + +sub mediawiki_smudge_filename { + my $filename = shift; + $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g; + $filename =~ s/ /_/g; + # Decode forbidden characters encoded in mediawiki_clean_filename + $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge; + return $filename; +} + +sub literal_data { + my ($content) = @_; + print STDOUT "data ", bytes::length($content), "\n", $content; +} + +sub mw_capabilities { + # Revisions are imported to the private namespace + # refs/mediawiki/$remotename/ by the helper and fetched into + # refs/remotes/$remotename later by fetch. 
+ print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n"; + print STDOUT "import\n"; + print STDOUT "list\n"; + print STDOUT "push\n"; + print STDOUT "\n"; +} + +sub mw_list { + # MediaWiki do not have branches, we consider one branch arbitrarily + # called master, and HEAD pointing to it. + print STDOUT "? refs/heads/master\n"; + print STDOUT "\@refs/heads/master HEAD\n"; + print STDOUT "\n"; +} + +sub mw_option { + print STDERR "remote-helper command 'option $_[0]' not yet implemented\n"; + print STDOUT "unsupported\n"; +} + +sub fetch_mw_revisions_for_page { + my $page = shift; + my $id = shift; + my $fetch_from = shift; + my @page_revs = (); + my $query = { + action => 'query', + prop => 'revisions', + rvprop => 'ids', + rvdir => 'newer', + rvstartid => $fetch_from, + rvlimit => 500, + pageids => $id, + }; + + my $revnum = 0; + # Get 500 revisions at a time due to the mediawiki api limit + while (1) { + my $result = $mediawiki->api($query); + + # Parse each of those 500 revisions + foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) { + my $page_rev_ids; + $page_rev_ids->{pageid} = $page->{pageid}; + $page_rev_ids->{revid} = $revision->{revid}; + push(@page_revs, $page_rev_ids); + $revnum++; + } + last unless $result->{'query-continue'}; + $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid}; + } + if ($shallow_import && @page_revs) { + print STDERR " Found 1 revision (shallow import).\n"; + @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs); + return $page_revs[0]; + } + print STDERR " Found ", $revnum, " revision(s).\n"; + return @page_revs; +} + +sub fetch_mw_revisions { + my $pages = shift; my @pages = @{$pages}; + my $fetch_from = shift; + + my @revisions = (); + my $n = 1; + foreach my $page (@pages) { + my $id = $page->{pageid}; + + print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n"; + $n++; + my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from); + @revisions = (@page_revs, @revisions); + } + + return ($n, @revisions); +} + +sub import_file_revision { + my $commit = shift; + my %commit = %{$commit}; + my $full_import = shift; + my $n = shift; + + my $title = $commit{title}; + my $comment = $commit{comment}; + my $content = $commit{content}; + my $author = $commit{author}; + my $date = $commit{date}; + + print STDOUT "commit refs/mediawiki/$remotename/master\n"; + print STDOUT "mark :$n\n"; + print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n"; + literal_data($comment); + + # If it's not a clone, we need to know where to start from + if (!$full_import && $n == 1) { + print STDOUT "from refs/mediawiki/$remotename/master^0\n"; + } + if ($content ne DELETED_CONTENT) { + print STDOUT "M 644 inline $title.mw\n"; + literal_data($content); + print STDOUT "\n\n"; + } else { + print STDOUT "D $title.mw\n"; + } + + # mediawiki revision number in the git note + if ($full_import && $n == 1) { + print STDOUT "reset refs/notes/$remotename/mediawiki\n"; + } + print STDOUT "commit refs/notes/$remotename/mediawiki\n"; + print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n"; + literal_data("Note added by git-mediawiki during import"); + if (!$full_import && $n == 1) { + print STDOUT "from refs/notes/$remotename/mediawiki^0\n"; + } + print STDOUT "N inline :$n\n"; + literal_data("mediawiki_revision: " . 
$commit{mw_revision}); + print STDOUT "\n\n"; +} + +# parse a sequence of +# <cmd> <arg1> +# <cmd> <arg2> +# \n +# (like batch sequence of import and sequence of push statements) +sub get_more_refs { + my $cmd = shift; + my @refs; + while (1) { + my $line = <STDIN>; + if ($line =~ m/^$cmd (.*)$/) { + push(@refs, $1); + } elsif ($line eq "\n") { + return @refs; + } else { + die("Invalid command in a '$cmd' batch: ". $_); + } + } +} + +sub mw_import { + # multiple import commands can follow each other. + my @refs = (shift, get_more_refs("import")); + foreach my $ref (@refs) { + mw_import_ref($ref); + } + print STDOUT "done\n"; +} + +sub mw_import_ref { + my $ref = shift; + # The remote helper will call "import HEAD" and + # "import refs/heads/master". + # Since HEAD is a symbolic ref to master (by convention, + # followed by the output of the command "list" that we gave), + # we don't need to do anything in this case. + if ($ref eq "HEAD") { + return; + } + + mw_connect_maybe(); + + my @pages = get_mw_pages(); + + print STDERR "Searching revisions...\n"; + my $last_local = get_last_local_revision(); + my $fetch_from = $last_local + 1; + if ($fetch_from == 1) { + print STDERR ", fetching from beginning.\n"; + } else { + print STDERR ", fetching from here.\n"; + } + my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from); + + # Creation of the fast-import stream + print STDERR "Fetching & writing export data...\n"; + + $n = 0; + my $last_timestamp = 0; # Placeholer in case $rev->timestamp is undefined + + foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) { + # fetch the content of the pages + my $query = { + action => 'query', + prop => 'revisions', + rvprop => 'content|timestamp|comment|user|ids', + revids => $pagerevid->{revid}, + }; + + my $result = $mediawiki->api($query); + + my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}}); + + $n++; + + my %commit; + $commit{author} = $rev->{user} || 'Anonymous'; + $commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*'; + $commit{title} = mediawiki_smudge_filename( + $result->{query}->{pages}->{$pagerevid->{pageid}}->{title} + ); + $commit{mw_revision} = $pagerevid->{revid}; + $commit{content} = mediawiki_smudge($rev->{'*'}); + + if (!defined($rev->{timestamp})) { + $last_timestamp++; + } else { + $last_timestamp = $rev->{timestamp}; + } + $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp); + + print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n"; + + import_file_revision(\%commit, ($fetch_from == 1), $n); + } + + if ($fetch_from == 1 && $n == 0) { + print STDERR "You appear to have cloned an empty MediaWiki.\n"; + # Something has to be done remote-helper side. If nothing is done, an error is + # thrown saying that HEAD is refering to unknown object 0000000000000000000 + # and the clone fails. + } +} + +sub error_non_fast_forward { + my $advice = run_git("config --bool advice.pushNonFastForward"); + chomp($advice); + if ($advice ne "false") { + # Native git-push would show this after the summary. + # We can't ask it to display it cleanly, so print it + # ourselves before. + print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n"; + print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. 
See the\n"; + print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n"; + } + print STDOUT "error $_[0] \"non-fast-forward\"\n"; + return 0; +} + +sub mw_push_file { + my $diff_info = shift; + # $diff_info contains a string in this format: + # 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status> + my @diff_info_split = split(/[ \t]/, $diff_info); + + # Filename, including .mw extension + my $complete_file_name = shift; + # Commit message + my $summary = shift; + # MediaWiki revision number. Keep the previous one by default, + # in case there's no edit to perform. + my $newrevid = shift; + + my $new_sha1 = $diff_info_split[3]; + my $old_sha1 = $diff_info_split[2]; + my $page_created = ($old_sha1 eq NULL_SHA1); + my $page_deleted = ($new_sha1 eq NULL_SHA1); + $complete_file_name = mediawiki_clean_filename($complete_file_name); + + if (substr($complete_file_name,-3) eq ".mw") { + my $title = substr($complete_file_name,0,-3); + + my $file_content; + if ($page_deleted) { + # Deleting a page usually requires + # special priviledges. A common + # convention is to replace the page + # with this content instead: + $file_content = DELETED_CONTENT; + } else { + $file_content = run_git("cat-file blob $new_sha1"); + } + + mw_connect_maybe(); + + my $result = $mediawiki->edit( { + action => 'edit', + summary => $summary, + title => $title, + basetimestamp => $basetimestamps{$newrevid}, + text => mediawiki_clean($file_content, $page_created), + }, { + skip_encoding => 1 # Helps with names with accentuated characters + }); + if (!$result) { + if ($mediawiki->{error}->{code} == 3) { + # edit conflicts, considered as non-fast-forward + print STDERR 'Warning: Error ' . + $mediawiki->{error}->{code} . + ' from mediwiki: ' . $mediawiki->{error}->{details} . + ".\n"; + return ($newrevid, "non-fast-forward"); + } else { + # Other errors. Shouldn't happen => just die() + die 'Fatal: Error ' . + $mediawiki->{error}->{code} . + ' from mediwiki: ' . $mediawiki->{error}->{details}; + } + } + $newrevid = $result->{edit}->{newrevid}; + print STDERR "Pushed file: $new_sha1 - $title\n"; + } else { + print STDERR "$complete_file_name not a mediawiki file (Not pushable on this version of git-remote-mediawiki).\n" + } + return ($newrevid, "ok"); +} + +sub mw_push { + # multiple push statements can follow each other + my @refsspecs = (shift, get_more_refs("push")); + my $pushed; + for my $refspec (@refsspecs) { + my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/ + or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>"); + if ($force) { + print STDERR "Warning: forced push not allowed on a MediaWiki.\n"; + } + if ($local eq "") { + print STDERR "Cannot delete remote branch on a MediaWiki\n"; + print STDOUT "error $remote cannot delete\n"; + next; + } + if ($remote ne "refs/heads/master") { + print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n"; + print STDOUT "error $remote only master allowed\n"; + next; + } + if (mw_push_revision($local, $remote)) { + $pushed = 1; + } + } + + # Notify Git that the push is done + print STDOUT "\n"; + + if ($pushed && $dumb_push) { + print STDERR "Just pushed some revisions to MediaWiki.\n"; + print STDERR "The pushed revisions now have to be re-imported, and your current branch\n"; + print STDERR "needs to be updated with these re-imported commits. 
You can do this with\n"; + print STDERR "\n"; + print STDERR " git pull --rebase\n"; + print STDERR "\n"; + } +} + +sub mw_push_revision { + my $local = shift; + my $remote = shift; # actually, this has to be "refs/heads/master" at this point. + my $last_local_revid = get_last_local_revision(); + print STDERR ".\n"; # Finish sentence started by get_last_local_revision() + my $last_remote_revid = get_last_remote_revision(); + my $mw_revision = $last_remote_revid; + + # Get sha1 of commit pointed by local HEAD + my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1); + # Get sha1 of commit pointed by remotes/$remotename/master + my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null"); + chomp($remoteorigin_sha1); + + if ($last_local_revid > 0 && + $last_local_revid < $last_remote_revid) { + return error_non_fast_forward($remote); + } + + if ($HEAD_sha1 eq $remoteorigin_sha1) { + # nothing to push + return 0; + } + + # Get every commit in between HEAD and refs/remotes/origin/master, + # including HEAD and refs/remotes/origin/master + my @commit_pairs = (); + if ($last_local_revid > 0) { + my $parsed_sha1 = $remoteorigin_sha1; + # Find a path from last MediaWiki commit to pushed commit + while ($parsed_sha1 ne $HEAD_sha1) { + my @commit_info = grep(/^$parsed_sha1/, split(/\n/, run_git("rev-list --children $local"))); + if (!@commit_info) { + return error_non_fast_forward($remote); + } + my @commit_info_split = split(/ |\n/, $commit_info[0]); + # $commit_info_split[1] is the sha1 of the commit to export + # $commit_info_split[0] is the sha1 of its direct child + push(@commit_pairs, \@commit_info_split); + $parsed_sha1 = $commit_info_split[1]; + } + } else { + # No remote mediawiki revision. Export the whole + # history (linearized with --first-parent) + print STDERR "Warning: no common ancestor, pushing complete history\n"; + my $history = run_git("rev-list --first-parent --children $local"); + my @history = split('\n', $history); + @history = @history[1..$#history]; + foreach my $line (reverse @history) { + my @commit_info_split = split(/ |\n/, $line); + push(@commit_pairs, \@commit_info_split); + } + } + + foreach my $commit_info_split (@commit_pairs) { + my $sha1_child = @{$commit_info_split}[0]; + my $sha1_commit = @{$commit_info_split}[1]; + my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit"); + # TODO: we could detect rename, and encode them with a #redirect on the wiki. + # TODO: for now, it's just a delete+add + my @diff_info_list = split(/\0/, $diff_infos); + # Keep the first line of the commit message as mediawiki comment for the revision + my $commit_msg = (split(/\n/, run_git("show --pretty=format:\"%s\" $sha1_commit")))[0]; + chomp($commit_msg); + # Push every blob + while (@diff_info_list) { + my $status; + # git diff-tree -z gives an output like + # <metadata>\0<filename1>\0 + # <metadata>\0<filename2>\0 + # and we've split on \0. + my $info = shift(@diff_info_list); + my $file = shift(@diff_info_list); + ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision); + if ($status eq "non-fast-forward") { + # we may already have sent part of the + # commit to MediaWiki, but it's too + # late to cancel it. Stop the push in + # the middle, but still give an + # accurate error message. 
+ return error_non_fast_forward($remote); + } + if ($status ne "ok") { + die("Unknown error from mw_push_file()"); + } + } + unless ($dumb_push) { + run_git("notes --ref=$remotename/mediawiki add -m \"mediawiki_revision: $mw_revision\" $sha1_commit"); + run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child"); + } + } + + print STDOUT "ok $remote\n"; + return 1; +} diff --git a/contrib/mw-to-git/git-remote-mediawiki.txt b/contrib/mw-to-git/git-remote-mediawiki.txt new file mode 100644 index 0000000000..4d211f5b81 --- /dev/null +++ b/contrib/mw-to-git/git-remote-mediawiki.txt @@ -0,0 +1,7 @@ +Git-Mediawiki is a project which aims the creation of a gate +between git and mediawiki, allowing git users to push and pull +objects from mediawiki just as one would do with a classic git +repository thanks to remote-helpers. + +For more information, visit the wiki at +https://github.com/Bibzball/Git-Mediawiki/wiki diff --git a/contrib/rerere-train.sh b/contrib/rerere-train.sh index 2cfe1b936b..36b6feebe0 100755 --- a/contrib/rerere-train.sh +++ b/contrib/rerere-train.sh @@ -7,7 +7,7 @@ USAGE="$me rev-list-args" SUBDIRECTORY_OK=Yes OPTIONS_SPEC= -. git-sh-setup +. $(git --exec-path)/git-sh-setup require_work_tree cd_to_toplevel diff --git a/contrib/svn-fe/svn-fe.c b/contrib/svn-fe/svn-fe.c index a2677b03e0..35db24f5ea 100644 --- a/contrib/svn-fe/svn-fe.c +++ b/contrib/svn-fe/svn-fe.c @@ -8,7 +8,8 @@ int main(int argc, char **argv) { - svndump_init(NULL); + if (svndump_init(NULL)) + return 1; svndump_read((argc > 1) ? argv[1] : NULL); svndump_deinit(); svndump_reset(); diff --git a/contrib/svn-fe/svn-fe.txt b/contrib/svn-fe/svn-fe.txt index 35f84bd9e7..1128ab2ce4 100644 --- a/contrib/svn-fe/svn-fe.txt +++ b/contrib/svn-fe/svn-fe.txt @@ -7,7 +7,11 @@ svn-fe - convert an SVN "dumpfile" to a fast-import stream SYNOPSIS -------- -svnadmin dump --incremental REPO | svn-fe [url] | git fast-import +[verse] +mkfifo backchannel && +svnadmin dump --deltas REPO | + svn-fe [url] 3<backchannel | + git fast-import --cat-blob-fd=3 3>backchannel DESCRIPTION ----------- @@ -18,6 +22,9 @@ Subversion repository mirrored on the local disk. Remote Subversion repositories can be mirrored on local disk using the `svnsync` command. +Note: this tool is very young. The details of its commandline +interface may change in backward incompatible ways. + INPUT FORMAT ------------ Subversion's repository dump format is documented in full in @@ -25,9 +32,6 @@ Subversion's repository dump format is documented in full in Files in this format can be generated using the 'svnadmin dump' or 'svk admin dump' command. -Dumps produced with 'svnadmin dump --deltas' (dumpfile format v3) -are not supported. - OUTPUT FORMAT ------------- The fast-import format is documented by the git-fast-import(1) @@ -47,7 +51,7 @@ as committer, where 'user' is the value of the `svn:author` property and 'UUID' the repository's identifier. To support incremental imports, 'svn-fe' puts a `git-svn-id` line at -the end of each commit log message if passed an url on the command +the end of each commit log message if passed a URL on the command line. This line has the form `git-svn-id: URL@REVNO UUID`. 
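An incremental importer can parse that trailer to decide where to resume. A rough sketch, assuming Python; the regular expression and the last_imported_revision helper are illustrative only, not something 'svn-fe' itself provides:

    import re

    # Matches the documented trailer "git-svn-id: URL@REVNO UUID".
    GIT_SVN_ID = re.compile(r"^git-svn-id: (.+)@(\d+) (\S+)$", re.MULTILINE)

    def last_imported_revision(log_message):
        # Return the revision number from the last git-svn-id line in a
        # commit message, or None if no such trailer is present.
        matches = GIT_SVN_ID.findall(log_message)
        if not matches:
            return None
        url, revno, uuid = matches[-1]
        return int(revno)

A log message ending in "git-svn-id: svn://svn.example.com/repo@1234 some-uuid" would yield 1234 here.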
The resulting repository will generally require further processing diff --git a/contrib/thunderbird-patch-inline/appp.sh b/contrib/thunderbird-patch-inline/appp.sh index cc518f3c89..5eb4a51643 100755 --- a/contrib/thunderbird-patch-inline/appp.sh +++ b/contrib/thunderbird-patch-inline/appp.sh @@ -1,8 +1,8 @@ -#!/bin/bash +#!/bin/sh # Copyright 2008 Lukas Sandström <luksan@gmail.com> # # AppendPatch - A script to be used together with ExternalEditor -# for Mozilla Thunderbird to properly include pathes inline i e-mails. +# for Mozilla Thunderbird to properly include patches inline in e-mails. # ExternalEditor can be downloaded at http://globs.org/articles.php?lng=en&pg=2
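Returning to git-jump's `diff` mode above: the script's Perl filter turns `git diff` output into quickfix lines. A loose re-expression of the same idea, assuming Python 3.7+ and a git working tree; `diff_to_quickfix` is an invented name, not part of git-jump:

    import re
    import subprocess

    def diff_to_quickfix(diff_args=()):
        # Emit one "file:line: text" entry per hunk, mirroring mode_diff:
        # remember the current file and the new-side line number from the
        # hunk header, bump it past context lines, and report the first
        # changed line of each hunk.
        out = subprocess.run(["git", "diff", "--relative"] + list(diff_args),
                             capture_output=True, text=True, check=True).stdout
        entries, path, line = [], None, None
        for raw in out.splitlines():
            if raw.startswith("+++ b/"):
                path, line = raw[len("+++ b/"):], None
            elif path and raw.startswith("@@"):
                m = re.search(r"\+(\d+)", raw)
                line = int(m.group(1)) if m else None
            elif path and line is not None:
                if raw.startswith(" "):
                    line += 1
                elif raw.startswith(("+", "-")):
                    entries.append("%s:%d: %s" % (path, line, raw[1:].strip()))
                    line = None
        return entries

Writing those lines to a file and opening it with `vim -q <file>` gives roughly the jump-to-hunk behaviour described in the README.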