Diffstat (limited to 'contrib')
48 files changed, 7125 insertions, 506 deletions
diff --git a/contrib/ciabot/ciabot.py b/contrib/ciabot/ciabot.py index bd24395d4c..36b5665ff8 100755 --- a/contrib/ciabot/ciabot.py +++ b/contrib/ciabot/ciabot.py @@ -47,7 +47,13 @@ # we default to that. # -import os, sys, commands, socket, urllib +import sys +if sys.hexversion < 0x02000000: + # The limiter is the xml.sax module + sys.stderr.write("ciabot.py: requires Python 2.0.0 or later.\n") + sys.exit(1) + +import os, commands, socket, urllib from xml.sax.saxutils import escape # Changeset URL prefix for your repo: when the commit ID is appended diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash index 1fb896b8c5..7147d64af2 100755..100644 --- a/contrib/completion/git-completion.bash +++ b/contrib/completion/git-completion.bash @@ -20,50 +20,8 @@ # 1) Copy this file to somewhere (e.g. ~/.git-completion.sh). # 2) Add the following line to your .bashrc/.zshrc: # source ~/.git-completion.sh -# -# 3) Consider changing your PS1 to also show the current branch: -# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ ' -# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ ' -# -# The argument to __git_ps1 will be displayed only if you -# are currently in a git repository. The %s token will be -# the name of the current branch. -# -# In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty -# value, unstaged (*) and staged (+) changes will be shown next -# to the branch name. You can configure this per-repository -# with the bash.showDirtyState variable, which defaults to true -# once GIT_PS1_SHOWDIRTYSTATE is enabled. -# -# You can also see if currently something is stashed, by setting -# GIT_PS1_SHOWSTASHSTATE to a nonempty value. If something is stashed, -# then a '$' will be shown next to the branch name. -# -# If you would like to see if there're untracked files, then you can -# set GIT_PS1_SHOWUNTRACKEDFILES to a nonempty value. If there're -# untracked files, then a '%' will be shown next to the branch name. -# -# If you would like to see the difference between HEAD and its -# upstream, set GIT_PS1_SHOWUPSTREAM="auto". A "<" indicates -# you are behind, ">" indicates you are ahead, and "<>" -# indicates you have diverged. You can further control -# behaviour by setting GIT_PS1_SHOWUPSTREAM to a space-separated -# list of values: -# verbose show number of commits ahead/behind (+/-) upstream -# legacy don't use the '--count' option available in recent -# versions of git-rev-list -# git always compare HEAD to @{upstream} -# svn always compare HEAD to your SVN upstream -# By default, __git_ps1 will compare HEAD to your SVN upstream -# if it can find one, or @{upstream} otherwise. Once you have -# set GIT_PS1_SHOWUPSTREAM, you can override it on a -# per-repository basis by setting the bash.showUpstream config -# variable. -# - -if [[ -n ${ZSH_VERSION-} ]]; then - autoload -U +X bashcompinit && bashcompinit -fi +# 3) Consider changing your PS1 to also show the current branch, +# see git-prompt.sh for details. case "$COMP_WORDBREAKS" in *:*) : great ;; @@ -74,9 +32,14 @@ esac # returns location of .git repo __gitdir () { + # Note: this function is duplicated in git-prompt.sh + # When updating it, make sure you update the other one to match. 
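The trimmed header above reduces installation to copying the file and sourcing it; a minimal ~/.bashrc fragment following those two steps might look like this (file location as suggested by the header itself, everything else illustrative):

    # 1) copy git-completion.bash to ~/.git-completion.sh, then:
    source ~/.git-completion.sh
    # prompt support was split out of this file; see the new
    # git-prompt.sh added later in this diff
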
if [ -z "${1-}" ]; then if [ -n "${__git_dir-}" ]; then echo "$__git_dir" + elif [ -n "${GIT_DIR-}" ]; then + test -d "${GIT_DIR-}" || return 1 + echo "$GIT_DIR" elif [ -d .git ]; then echo .git else @@ -89,221 +52,6 @@ __gitdir () fi } -# stores the divergence from upstream in $p -# used by GIT_PS1_SHOWUPSTREAM -__git_ps1_show_upstream () -{ - local key value - local svn_remote svn_url_pattern count n - local upstream=git legacy="" verbose="" - - svn_remote=() - # get some config options from git-config - local output="$(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ')" - while read -r key value; do - case "$key" in - bash.showupstream) - GIT_PS1_SHOWUPSTREAM="$value" - if [[ -z "${GIT_PS1_SHOWUPSTREAM}" ]]; then - p="" - return - fi - ;; - svn-remote.*.url) - svn_remote[ $((${#svn_remote[@]} + 1)) ]="$value" - svn_url_pattern+="\\|$value" - upstream=svn+git # default upstream is SVN if available, else git - ;; - esac - done <<< "$output" - - # parse configuration values - for option in ${GIT_PS1_SHOWUPSTREAM}; do - case "$option" in - git|svn) upstream="$option" ;; - verbose) verbose=1 ;; - legacy) legacy=1 ;; - esac - done - - # Find our upstream - case "$upstream" in - git) upstream="@{upstream}" ;; - svn*) - # get the upstream from the "git-svn-id: ..." in a commit message - # (git-svn uses essentially the same procedure internally) - local svn_upstream=($(git log --first-parent -1 \ - --grep="^git-svn-id: \(${svn_url_pattern#??}\)" 2>/dev/null)) - if [[ 0 -ne ${#svn_upstream[@]} ]]; then - svn_upstream=${svn_upstream[ ${#svn_upstream[@]} - 2 ]} - svn_upstream=${svn_upstream%@*} - local n_stop="${#svn_remote[@]}" - for ((n=1; n <= n_stop; n++)); do - svn_upstream=${svn_upstream#${svn_remote[$n]}} - done - - if [[ -z "$svn_upstream" ]]; then - # default branch name for checkouts with no layout: - upstream=${GIT_SVN_ID:-git-svn} - else - upstream=${svn_upstream#/} - fi - elif [[ "svn+git" = "$upstream" ]]; then - upstream="@{upstream}" - fi - ;; - esac - - # Find how many commits we are ahead/behind our upstream - if [[ -z "$legacy" ]]; then - count="$(git rev-list --count --left-right \ - "$upstream"...HEAD 2>/dev/null)" - else - # produce equivalent output to --count for older versions of git - local commits - if commits="$(git rev-list --left-right "$upstream"...HEAD 2>/dev/null)" - then - local commit behind=0 ahead=0 - for commit in $commits - do - case "$commit" in - "<"*) ((behind++)) ;; - *) ((ahead++)) ;; - esac - done - count="$behind $ahead" - else - count="" - fi - fi - - # calculate the result - if [[ -z "$verbose" ]]; then - case "$count" in - "") # no upstream - p="" ;; - "0 0") # equal to upstream - p="=" ;; - "0 "*) # ahead of upstream - p=">" ;; - *" 0") # behind upstream - p="<" ;; - *) # diverged from upstream - p="<>" ;; - esac - else - case "$count" in - "") # no upstream - p="" ;; - "0 0") # equal to upstream - p=" u=" ;; - "0 "*) # ahead of upstream - p=" u+${count#0 }" ;; - *" 0") # behind upstream - p=" u-${count% 0}" ;; - *) # diverged from upstream - p=" u+${count#* }-${count% *}" ;; - esac - fi - -} - - -# __git_ps1 accepts 0 or 1 arguments (i.e., format string) -# returns text to add to bash PS1 prompt (includes branch name) -__git_ps1 () -{ - local g="$(__gitdir)" - if [ -n "$g" ]; then - local r="" - local b="" - if [ -f "$g/rebase-merge/interactive" ]; then - r="|REBASE-i" - b="$(cat "$g/rebase-merge/head-name")" - elif [ -d "$g/rebase-merge" ]; then - r="|REBASE-m" - b="$(cat 
"$g/rebase-merge/head-name")" - else - if [ -d "$g/rebase-apply" ]; then - if [ -f "$g/rebase-apply/rebasing" ]; then - r="|REBASE" - elif [ -f "$g/rebase-apply/applying" ]; then - r="|AM" - else - r="|AM/REBASE" - fi - elif [ -f "$g/MERGE_HEAD" ]; then - r="|MERGING" - elif [ -f "$g/CHERRY_PICK_HEAD" ]; then - r="|CHERRY-PICKING" - elif [ -f "$g/BISECT_LOG" ]; then - r="|BISECTING" - fi - - b="$(git symbolic-ref HEAD 2>/dev/null)" || { - - b="$( - case "${GIT_PS1_DESCRIBE_STYLE-}" in - (contains) - git describe --contains HEAD ;; - (branch) - git describe --contains --all HEAD ;; - (describe) - git describe HEAD ;; - (* | default) - git describe --tags --exact-match HEAD ;; - esac 2>/dev/null)" || - - b="$(cut -c1-7 "$g/HEAD" 2>/dev/null)..." || - b="unknown" - b="($b)" - } - fi - - local w="" - local i="" - local s="" - local u="" - local c="" - local p="" - - if [ "true" = "$(git rev-parse --is-inside-git-dir 2>/dev/null)" ]; then - if [ "true" = "$(git rev-parse --is-bare-repository 2>/dev/null)" ]; then - c="BARE:" - else - b="GIT_DIR!" - fi - elif [ "true" = "$(git rev-parse --is-inside-work-tree 2>/dev/null)" ]; then - if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ]; then - if [ "$(git config --bool bash.showDirtyState)" != "false" ]; then - git diff --no-ext-diff --quiet --exit-code || w="*" - if git rev-parse --quiet --verify HEAD >/dev/null; then - git diff-index --cached --quiet HEAD -- || i="+" - else - i="#" - fi - fi - fi - if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then - git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$" - fi - - if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ]; then - if [ -n "$(git ls-files --others --exclude-standard)" ]; then - u="%" - fi - fi - - if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then - __git_ps1_show_upstream - fi - fi - - local f="$w$i$s$u" - printf -- "${1:- (%s)}" "$c${b##refs/heads/}${f:+ $f}$r$p" - fi -} - __gitcomp_1 () { local c IFS=$' \t\n' @@ -417,7 +165,6 @@ __git_reassemble_comp_words_by_ref() } if ! type _get_comp_words_by_ref >/dev/null 2>&1; then -if [[ -z ${ZSH_VERSION:+set} ]]; then _get_comp_words_by_ref () { local exclude cur_ words_ cword_ @@ -445,32 +192,6 @@ _get_comp_words_by_ref () shift done } -else -_get_comp_words_by_ref () -{ - while [ $# -gt 0 ]; do - case "$1" in - cur) - cur=${COMP_WORDS[COMP_CWORD]} - ;; - prev) - prev=${COMP_WORDS[COMP_CWORD-1]} - ;; - words) - words=("${COMP_WORDS[@]}") - ;; - cword) - cword=$COMP_CWORD - ;; - -n) - # assume COMP_WORDBREAKS is already set sanely - shift - ;; - esac - shift - done -} -fi fi # Generates completion reply with compgen, appending a space to possible @@ -569,7 +290,7 @@ __git_refs () if [[ "$ref" == "$cur"* ]]; then echo "$ref" fi - done | uniq -u + done | sort | uniq -u fi return fi @@ -676,7 +397,7 @@ __git_complete_revlist_file () *) pfx="$ref:$pfx" ;; esac - __gitcomp_nl "$(git --git-dir="$(__gitdir)" ls-tree "$ls" \ + __gitcomp_nl "$(git --git-dir="$(__gitdir)" ls-tree "$ls" 2>/dev/null \ | sed '/^100... 
blob /{ s,^.* ,, s,$, , @@ -833,7 +554,7 @@ __git_list_porcelain_commands () { local i IFS=" "$'\n' __git_compute_all_commands - for i in "help" $__git_all_commands + for i in $__git_all_commands do case $i in *--*) : helper pattern;; @@ -1124,11 +845,15 @@ _git_branch () done case "$cur" in + --set-upstream-to=*) + __gitcomp "$(__git_refs)" "" "${cur##--set-upstream-to=}" + ;; --*) __gitcomp " --color --no-color --verbose --abbrev= --no-abbrev --track --no-track --contains --merged --no-merged - --set-upstream --edit-description --list + --set-upstream-to= --edit-description --list + --unset-upstream " ;; *) @@ -1234,6 +959,8 @@ _git_clone () --upload-pack --template= --depth + --single-branch + --branch " return ;; @@ -1245,6 +972,13 @@ _git_commit () { __git_has_doubledash && return + case "$prev" in + -c|-C) + __gitcomp_nl "$(__git_refs)" "" "${cur}" + return + ;; + esac + case "$cur" in --cleanup=*) __gitcomp "default strip verbatim whitespace @@ -1263,7 +997,8 @@ _git_commit () --*) __gitcomp " --all --author= --signoff --verify --no-verify - --edit --amend --include --only --interactive + --edit --no-edit + --amend --include --only --interactive --dry-run --reuse-message= --reedit-message= --reset-author --file= --message= --template= --cleanup= --untracked-files --untracked-files= @@ -1320,7 +1055,7 @@ _git_diff () } __git_mergetools_common="diffuse ecmerge emerge kdiff3 meld opendiff - tkdiff vimdiff gvimdiff xxdiff araxis p4merge bc3 + tkdiff vimdiff gvimdiff xxdiff araxis p4merge bc3 codecompare " _git_difftool () @@ -1360,6 +1095,14 @@ _git_fetch () __git_complete_remote_or_refspec } +__git_format_patch_options=" + --stdout --attach --no-attach --thread --thread= --output-directory + --numbered --start-number --numbered-files --keep-subject --signoff + --signature --no-signature --in-reply-to= --cc= --full-index --binary + --not --all --cover-letter --no-prefix --src-prefix= --dst-prefix= + --inline --suffix= --ignore-if-in-upstream --subject-prefix= +" + _git_format_patch () { case "$cur" in @@ -1370,21 +1113,7 @@ _git_format_patch () return ;; --*) - __gitcomp " - --stdout --attach --no-attach --thread --thread= - --output-directory - --numbered --start-number - --numbered-files - --keep-subject - --signoff --signature --no-signature - --in-reply-to= --cc= - --full-index --binary - --not --all - --cover-letter - --no-prefix --src-prefix= --dst-prefix= - --inline --suffix= --ignore-if-in-upstream - --subject-prefix= - " + __gitcomp "$__git_format_patch_options" return ;; esac @@ -1798,6 +1527,12 @@ _git_send_email () __gitcomp "ssl tls" "" "${cur##--smtp-encryption=}" return ;; + --thread=*) + __gitcomp " + deep shallow + " "" "${cur##--thread=}" + return + ;; --*) __gitcomp "--annotate --bcc --cc --cc-cmd --chain-reply-to --compose --confirm= --dry-run --envelope-sender @@ -1807,11 +1542,12 @@ _git_send_email () --signed-off-by-cc --smtp-pass --smtp-server --smtp-server-port --smtp-encryption= --smtp-user --subject --suppress-cc= --suppress-from --thread --to - --validate --no-validate" + --validate --no-validate + $__git_format_patch_options" return ;; esac - COMPREPLY=() + __git_complete_revlist } _git_stage () @@ -2281,7 +2017,7 @@ _git_config () _git_remote () { - local subcommands="add rename rm set-head set-branches set-url show prune update" + local subcommands="add rename remove set-head set-branches set-url show prune update" local subcommand="$(__git_find_on_cmdline "$subcommands")" if [ -z "$subcommand" ]; then __gitcomp "$subcommands" @@ -2289,7 +2025,7 @@ 
_git_remote () fi case "$subcommand" in - rename|rm|set-url|show|prune) + rename|remove|set-url|show|prune) __gitcomp_nl "$(__git_remotes)" ;; set-head|set-branches) @@ -2673,20 +2409,71 @@ __gitk_main () __git_complete_revlist } -__git_func_wrap () -{ - if [[ -n ${ZSH_VERSION-} ]]; then - emulate -L bash - setopt KSH_TYPESET +if [[ -n ${ZSH_VERSION-} ]]; then + echo "WARNING: this script is deprecated, please see git-completion.zsh" 1>&2 - # workaround zsh's bug that leaves 'words' as a special - # variable in versions < 4.3.12 - typeset -h words + autoload -U +X compinit && compinit - # workaround zsh's bug that quotes spaces in the COMPREPLY - # array if IFS doesn't contain spaces. - typeset -h IFS - fi + __gitcomp () + { + emulate -L zsh + + local cur_="${3-$cur}" + + case "$cur_" in + --*=) + ;; + *) + local c IFS=$' \t\n' + local -a array + for c in ${=1}; do + c="$c${4-}" + case $c in + --*=*|*.) ;; + *) c="$c " ;; + esac + array+=("$c") + done + compset -P '*[=:]' + compadd -Q -S '' -p "${2-}" -a -- array && _ret=0 + ;; + esac + } + + __gitcomp_nl () + { + emulate -L zsh + + local IFS=$'\n' + compset -P '*[=:]' + compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0 + } + + __git_zsh_helper () + { + emulate -L ksh + local cur cword prev + cur=${words[CURRENT-1]} + prev=${words[CURRENT-2]} + let cword=CURRENT-1 + __${service}_main + } + + _git () + { + emulate -L zsh + local _ret=1 + __git_zsh_helper + let _ret && _default -S '' && _ret=0 + return _ret + } + + compdef _git git gitk + return +fi + +__git_func_wrap () +{ local cur words cword prev _get_comp_words_by_ref -n =: cur words cword prev $1 diff --git a/contrib/completion/git-completion.tcsh b/contrib/completion/git-completion.tcsh new file mode 100644 index 0000000000..3e3889f2b4 --- /dev/null +++ b/contrib/completion/git-completion.tcsh @@ -0,0 +1,116 @@ +#!tcsh +# +# tcsh completion support for core Git. +# +# Copyright (C) 2012 Marc Khouzam <marc.khouzam@gmail.com> +# Distributed under the GNU General Public License, version 2.0. +# +# When sourced, this script will generate a new script that uses +# the git-completion.bash script provided by core Git. This new +# script can be used by tcsh to perform git completion. +# The current script also issues the necessary tcsh 'complete' +# commands. +# +# To use this completion script: +# +# 0) You need tcsh 6.16.00 or newer. +# 1) Copy both this file and the bash completion script to ${HOME}. +# You _must_ use the name ${HOME}/.git-completion.bash for the +# bash script. +# (e.g. ~/.git-completion.tcsh and ~/.git-completion.bash). +# 2) Add the following line to your .tcshrc/.cshrc: +# source ~/.git-completion.tcsh +# 3) For completion similar to bash, it is recommended to also +# add the following line to your .tcshrc/.cshrc: +# set autolist=ambiguous +# It will tell tcsh to list the possible completion choices. + +set __git_tcsh_completion_version = `\echo ${tcsh} | \sed 's/\./ /g'` +if ( ${__git_tcsh_completion_version[1]} < 6 || \ + ( ${__git_tcsh_completion_version[1]} == 6 && \ + ${__git_tcsh_completion_version[2]} < 16 ) ) then + echo "git-completion.tcsh: Your version of tcsh is too old, you need version 6.16.00 or newer. Git completion will not work." + exit +endif +unset __git_tcsh_completion_version + +set __git_tcsh_completion_original_script = ${HOME}/.git-completion.bash +set __git_tcsh_completion_script = ${HOME}/.git-completion.tcsh.bash + +# Check that the user put the script in the right place +if ( ! 
-e ${__git_tcsh_completion_original_script} ) then + echo "git-completion.tcsh: Cannot find: ${__git_tcsh_completion_original_script}. Git completion will not work." + exit +endif + +cat << EOF > ${__git_tcsh_completion_script} +#!bash +# +# This script is GENERATED and will be overwritten automatically. +# Do not modify it directly. Instead, modify git-completion.tcsh +# and source it again. + +source ${__git_tcsh_completion_original_script} + +# Set COMP_WORDS in a way that can be handled by the bash script. +COMP_WORDS=(\$2) + +# The cursor is at the end of parameter #1. +# We must check for a space as the last character which will +# tell us that the previous word is complete and the cursor +# is on the next word. +if [ "\${2: -1}" == " " ]; then + # The last character is a space, so our location is at the end + # of the command-line array + COMP_CWORD=\${#COMP_WORDS[@]} +else + # The last character is not a space, so our location is on the + # last word of the command-line array, so we must decrement the + # count by 1 + COMP_CWORD=\$((\${#COMP_WORDS[@]}-1)) +fi + +# Call _git() or _gitk() of the bash script, based on the first argument +_\${1} + +IFS=\$'\n' +if [ \${#COMPREPLY[*]} -eq 0 ]; then + # No completions suggested. In this case, we want tcsh to perform + # standard file completion. However, there does not seem to be way + # to tell tcsh to do that. To help the user, we try to simulate + # file completion directly in this script. + # + # Known issues: + # - Possible completions are shown with their directory prefix. + # - Completions containing shell variables are not handled. + # - Completions with ~ as the first character are not handled. + + # No file completion should be done unless we are completing beyond + # the git sub-command. An improvement on the bash completion :) + if [ \${COMP_CWORD} -gt 1 ]; then + TO_COMPLETE="\${COMP_WORDS[\${COMP_CWORD}]}" + + # We don't support ~ expansion: too tricky. + if [ "\${TO_COMPLETE:0:1}" != "~" ]; then + # Use ls so as to add the '/' at the end of directories. + COMPREPLY=(\`ls -dp \${TO_COMPLETE}* 2> /dev/null\`) + fi + fi +fi + +# tcsh does not automatically remove duplicates, so we do it ourselves +echo "\${COMPREPLY[*]}" | sort | uniq + +# If there is a single completion and it is a directory, we output it +# a second time to trick tcsh into not adding a space after it. +if [ \${#COMPREPLY[*]} -eq 1 ] && [ "\${COMPREPLY[0]: -1}" == "/" ]; then + echo "\${COMPREPLY[*]}" +fi + +EOF + +# Don't need this variable anymore, so don't pollute the users environment +unset __git_tcsh_completion_original_script + +complete git 'p,*,`bash ${__git_tcsh_completion_script} git "${COMMAND_LINE}"`,' +complete gitk 'p,*,`bash ${__git_tcsh_completion_script} gitk "${COMMAND_LINE}"`,' diff --git a/contrib/completion/git-completion.zsh b/contrib/completion/git-completion.zsh new file mode 100644 index 0000000000..45775021ff --- /dev/null +++ b/contrib/completion/git-completion.zsh @@ -0,0 +1,78 @@ +#compdef git gitk + +# zsh completion wrapper for git +# +# You need git's bash completion script installed somewhere, by default on the +# same directory as this script. 
+# +# If your script is on ~/.git-completion.sh instead, you can configure it on +# your ~/.zshrc: +# +# zstyle ':completion:*:*:git:*' script ~/.git-completion.sh +# +# The recommended way to install this script is to copy to +# '~/.zsh/completion/_git', and then add the following to your ~/.zshrc file: +# +# fpath=(~/.zsh/completion $fpath) + +complete () +{ + # do nothing + return 0 +} + +zstyle -s ":completion:*:*:git:*" script script +test -z "$script" && script="$(dirname ${funcsourcetrace[1]%:*})"/git-completion.bash +ZSH_VERSION='' . "$script" + +__gitcomp () +{ + emulate -L zsh + + local cur_="${3-$cur}" + + case "$cur_" in + --*=) + ;; + *) + local c IFS=$' \t\n' + local -a array + for c in ${=1}; do + c="$c${4-}" + case $c in + --*=*|*.) ;; + *) c="$c " ;; + esac + array+=("$c") + done + compset -P '*[=:]' + compadd -Q -S '' -p "${2-}" -a -- array && _ret=0 + ;; + esac +} + +__gitcomp_nl () +{ + emulate -L zsh + + local IFS=$'\n' + compset -P '*[=:]' + compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0 +} + +_git () +{ + local _ret=1 + () { + emulate -L ksh + local cur cword prev + cur=${words[CURRENT-1]} + prev=${words[CURRENT-2]} + let cword=CURRENT-1 + __${service}_main + } + let _ret && _default -S '' && _ret=0 + return _ret +} + +_git diff --git a/contrib/completion/git-prompt.sh b/contrib/completion/git-prompt.sh new file mode 100644 index 0000000000..9bef0531c5 --- /dev/null +++ b/contrib/completion/git-prompt.sh @@ -0,0 +1,395 @@ +# bash/zsh git prompt support +# +# Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org> +# Distributed under the GNU General Public License, version 2.0. +# +# This script allows you to see the current branch in your prompt. +# +# To enable: +# +# 1) Copy this file to somewhere (e.g. ~/.git-prompt.sh). +# 2) Add the following line to your .bashrc/.zshrc: +# source ~/.git-prompt.sh +# 3a) Change your PS1 to call __git_ps1 as +# command-substitution: +# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ ' +# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ ' +# the optional argument will be used as format string. +# 3b) Alternatively, if you are using bash, __git_ps1 can be +# used for PROMPT_COMMAND with two parameters, <pre> and +# <post>, which are strings you would put in $PS1 before +# and after the status string generated by the git-prompt +# machinery. e.g. +# PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "' +# will show username, at-sign, host, colon, cwd, then +# various status string, followed by dollar and SP, as +# your prompt. +# Optionally, you can supply a third argument with a printf +# format string to finetune the output of the branch status +# +# The argument to __git_ps1 will be displayed only if you are currently +# in a git repository. The %s token will be the name of the current +# branch. +# +# In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty value, +# unstaged (*) and staged (+) changes will be shown next to the branch +# name. You can configure this per-repository with the +# bash.showDirtyState variable, which defaults to true once +# GIT_PS1_SHOWDIRTYSTATE is enabled. +# +# You can also see if currently something is stashed, by setting +# GIT_PS1_SHOWSTASHSTATE to a nonempty value. If something is stashed, +# then a '$' will be shown next to the branch name. +# +# If you would like to see if there're untracked files, then you can set +# GIT_PS1_SHOWUNTRACKEDFILES to a nonempty value. If there're untracked +# files, then a '%' will be shown next to the branch name. 
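Taken together, the prompt options documented above can all be enabled from a shell startup file; a short sketch using the variable names from this header (the PS1 layout is the header's own bash example):

    source ~/.git-prompt.sh
    GIT_PS1_SHOWDIRTYSTATE=1       # adds * (unstaged) and + (staged)
    GIT_PS1_SHOWSTASHSTATE=1      # adds $ when something is stashed
    GIT_PS1_SHOWUNTRACKEDFILES=1  # adds % when untracked files exist
    PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
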
+# +# If you would like to see the difference between HEAD and its upstream, +# set GIT_PS1_SHOWUPSTREAM="auto". A "<" indicates you are behind, ">" +# indicates you are ahead, "<>" indicates you have diverged and "=" +# indicates that there is no difference. You can further control +# behaviour by setting GIT_PS1_SHOWUPSTREAM to a space-separated list +# of values: +# +# verbose show number of commits ahead/behind (+/-) upstream +# legacy don't use the '--count' option available in recent +# versions of git-rev-list +# git always compare HEAD to @{upstream} +# svn always compare HEAD to your SVN upstream +# +# By default, __git_ps1 will compare HEAD to your SVN upstream if it can +# find one, or @{upstream} otherwise. Once you have set +# GIT_PS1_SHOWUPSTREAM, you can override it on a per-repository basis by +# setting the bash.showUpstream config variable. +# +# If you would like to see more information about the identity of +# commits checked out as a detached HEAD, set GIT_PS1_DESCRIBE_STYLE +# to one of these values: +# +# contains relative to newer annotated tag (v1.6.3.2~35) +# branch relative to newer tag or branch (master~4) +# describe relative to older annotated tag (v1.6.3.1-13-gdd42c2f) +# default exactly matching tag +# +# If you would like a colored hint about the current dirty state, set +# GIT_PS1_SHOWCOLORHINTS to a nonempty value. The colors are based on +# the colored output of "git status -sb". + +# __gitdir accepts 0 or 1 arguments (i.e., location) +# returns location of .git repo +__gitdir () +{ + # Note: this function is duplicated in git-completion.bash + # When updating it, make sure you update the other one to match. + if [ -z "${1-}" ]; then + if [ -n "${__git_dir-}" ]; then + echo "$__git_dir" + elif [ -n "${GIT_DIR-}" ]; then + test -d "${GIT_DIR-}" || return 1 + echo "$GIT_DIR" + elif [ -d .git ]; then + echo .git + else + git rev-parse --git-dir 2>/dev/null + fi + elif [ -d "$1/.git" ]; then + echo "$1/.git" + else + echo "$1" + fi +} + +# stores the divergence from upstream in $p +# used by GIT_PS1_SHOWUPSTREAM +__git_ps1_show_upstream () +{ + local key value + local svn_remote svn_url_pattern count n + local upstream=git legacy="" verbose="" + + svn_remote=() + # get some config options from git-config + local output="$(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ')" + while read -r key value; do + case "$key" in + bash.showupstream) + GIT_PS1_SHOWUPSTREAM="$value" + if [[ -z "${GIT_PS1_SHOWUPSTREAM}" ]]; then + p="" + return + fi + ;; + svn-remote.*.url) + svn_remote[ $((${#svn_remote[@]} + 1)) ]="$value" + svn_url_pattern+="\\|$value" + upstream=svn+git # default upstream is SVN if available, else git + ;; + esac + done <<< "$output" + + # parse configuration values + for option in ${GIT_PS1_SHOWUPSTREAM}; do + case "$option" in + git|svn) upstream="$option" ;; + verbose) verbose=1 ;; + legacy) legacy=1 ;; + esac + done + + # Find our upstream + case "$upstream" in + git) upstream="@{upstream}" ;; + svn*) + # get the upstream from the "git-svn-id: ..." 
in a commit message + # (git-svn uses essentially the same procedure internally) + local svn_upstream=($(git log --first-parent -1 \ + --grep="^git-svn-id: \(${svn_url_pattern#??}\)" 2>/dev/null)) + if [[ 0 -ne ${#svn_upstream[@]} ]]; then + svn_upstream=${svn_upstream[ ${#svn_upstream[@]} - 2 ]} + svn_upstream=${svn_upstream%@*} + local n_stop="${#svn_remote[@]}" + for ((n=1; n <= n_stop; n++)); do + svn_upstream=${svn_upstream#${svn_remote[$n]}} + done + + if [[ -z "$svn_upstream" ]]; then + # default branch name for checkouts with no layout: + upstream=${GIT_SVN_ID:-git-svn} + else + upstream=${svn_upstream#/} + fi + elif [[ "svn+git" = "$upstream" ]]; then + upstream="@{upstream}" + fi + ;; + esac + + # Find how many commits we are ahead/behind our upstream + if [[ -z "$legacy" ]]; then + count="$(git rev-list --count --left-right \ + "$upstream"...HEAD 2>/dev/null)" + else + # produce equivalent output to --count for older versions of git + local commits + if commits="$(git rev-list --left-right "$upstream"...HEAD 2>/dev/null)" + then + local commit behind=0 ahead=0 + for commit in $commits + do + case "$commit" in + "<"*) ((behind++)) ;; + *) ((ahead++)) ;; + esac + done + count="$behind $ahead" + else + count="" + fi + fi + + # calculate the result + if [[ -z "$verbose" ]]; then + case "$count" in + "") # no upstream + p="" ;; + "0 0") # equal to upstream + p="=" ;; + "0 "*) # ahead of upstream + p=">" ;; + *" 0") # behind upstream + p="<" ;; + *) # diverged from upstream + p="<>" ;; + esac + else + case "$count" in + "") # no upstream + p="" ;; + "0 0") # equal to upstream + p=" u=" ;; + "0 "*) # ahead of upstream + p=" u+${count#0 }" ;; + *" 0") # behind upstream + p=" u-${count% 0}" ;; + *) # diverged from upstream + p=" u+${count#* }-${count% *}" ;; + esac + fi + +} + + +# __git_ps1 accepts 0 or 1 arguments (i.e., format string) +# when called from PS1 using command substitution +# in this mode it prints text to add to bash PS1 prompt (includes branch name) +# +# __git_ps1 requires 2 or 3 arguments when called from PROMPT_COMMAND (pc) +# in that case it _sets_ PS1. The arguments are parts of a PS1 string. +# when two arguments are given, the first is prepended and the second appended +# to the state string when assigned to PS1. +# The optional third parameter will be used as printf format string to further +# customize the output of the git-status string. 
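A sketch of the two/three-argument PROMPT_COMMAND form described above, with an illustrative printf format string as the optional third argument:

    # <pre> is \u@\h:\w, <post> is the dollar prompt; ' [%s]' formats the status
    PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ " " [%s]"'
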
+# In this mode you can request colored hints using GIT_PS1_SHOWCOLORHINTS=true +__git_ps1 () +{ + local pcmode=no + local detached=no + local ps1pc_start='\u@\h:\w ' + local ps1pc_end='\$ ' + local printf_format=' (%s)' + + case "$#" in + 2|3) pcmode=yes + ps1pc_start="$1" + ps1pc_end="$2" + printf_format="${3:-$printf_format}" + ;; + 0|1) printf_format="${1:-$printf_format}" + ;; + *) return + ;; + esac + + local g="$(__gitdir)" + if [ -z "$g" ]; then + if [ $pcmode = yes ]; then + #In PC mode PS1 always needs to be set + PS1="$ps1pc_start$ps1pc_end" + fi + else + local r="" + local b="" + if [ -f "$g/rebase-merge/interactive" ]; then + r="|REBASE-i" + b="$(cat "$g/rebase-merge/head-name")" + elif [ -d "$g/rebase-merge" ]; then + r="|REBASE-m" + b="$(cat "$g/rebase-merge/head-name")" + else + if [ -d "$g/rebase-apply" ]; then + if [ -f "$g/rebase-apply/rebasing" ]; then + r="|REBASE" + elif [ -f "$g/rebase-apply/applying" ]; then + r="|AM" + else + r="|AM/REBASE" + fi + elif [ -f "$g/MERGE_HEAD" ]; then + r="|MERGING" + elif [ -f "$g/CHERRY_PICK_HEAD" ]; then + r="|CHERRY-PICKING" + elif [ -f "$g/BISECT_LOG" ]; then + r="|BISECTING" + fi + + b="$(git symbolic-ref HEAD 2>/dev/null)" || { + detached=yes + b="$( + case "${GIT_PS1_DESCRIBE_STYLE-}" in + (contains) + git describe --contains HEAD ;; + (branch) + git describe --contains --all HEAD ;; + (describe) + git describe HEAD ;; + (* | default) + git describe --tags --exact-match HEAD ;; + esac 2>/dev/null)" || + + b="$(cut -c1-7 "$g/HEAD" 2>/dev/null)..." || + b="unknown" + b="($b)" + } + fi + + local w="" + local i="" + local s="" + local u="" + local c="" + local p="" + + if [ "true" = "$(git rev-parse --is-inside-git-dir 2>/dev/null)" ]; then + if [ "true" = "$(git rev-parse --is-bare-repository 2>/dev/null)" ]; then + c="BARE:" + else + b="GIT_DIR!" + fi + elif [ "true" = "$(git rev-parse --is-inside-work-tree 2>/dev/null)" ]; then + if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ]; then + if [ "$(git config --bool bash.showDirtyState)" != "false" ]; then + git diff --no-ext-diff --quiet --exit-code || w="*" + if git rev-parse --quiet --verify HEAD >/dev/null; then + git diff-index --cached --quiet HEAD -- || i="+" + else + i="#" + fi + fi + fi + if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then + git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$" + fi + + if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ]; then + if [ -n "$(git ls-files --others --exclude-standard)" ]; then + u="%" + fi + fi + + if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then + __git_ps1_show_upstream + fi + fi + + local f="$w$i$s$u" + if [ $pcmode = yes ]; then + local gitstring= + if [ -n "${GIT_PS1_SHOWCOLORHINTS-}" ]; then + local c_red='\e[31m' + local c_green='\e[32m' + local c_lblue='\e[1;34m' + local c_clear='\e[0m' + local bad_color=$c_red + local ok_color=$c_green + local branch_color="$c_clear" + local flags_color="$c_lblue" + local branchstring="$c${b##refs/heads/}" + + if [ $detached = no ]; then + branch_color="$ok_color" + else + branch_color="$bad_color" + fi + + # Setting gitstring directly with \[ and \] around colors + # is necessary to prevent wrapping issues! 
+ gitstring="\[$branch_color\]$branchstring\[$c_clear\]" + + if [ -n "$w$i$s$u$r$p" ]; then + gitstring="$gitstring " + fi + if [ "$w" = "*" ]; then + gitstring="$gitstring\[$bad_color\]$w" + fi + if [ -n "$i" ]; then + gitstring="$gitstring\[$ok_color\]$i" + fi + if [ -n "$s" ]; then + gitstring="$gitstring\[$flags_color\]$s" + fi + if [ -n "$u" ]; then + gitstring="$gitstring\[$bad_color\]$u" + fi + gitstring="$gitstring\[$c_clear\]$r$p" + else + gitstring="$c${b##refs/heads/}${f:+ $f}$r$p" + fi + gitstring=$(printf -- "$printf_format" "$gitstring") + PS1="$ps1pc_start$gitstring$ps1pc_end" + else + # NO color option unless in PROMPT_COMMAND mode + printf -- "$printf_format" "$c${b##refs/heads/}${f:+ $f}$r$p" + fi + fi +} diff --git a/contrib/credential/gnome-keyring/.gitignore b/contrib/credential/gnome-keyring/.gitignore new file mode 100644 index 0000000000..88d8fcdbce --- /dev/null +++ b/contrib/credential/gnome-keyring/.gitignore @@ -0,0 +1 @@ +git-credential-gnome-keyring diff --git a/contrib/credential/gnome-keyring/Makefile b/contrib/credential/gnome-keyring/Makefile new file mode 100644 index 0000000000..e6561d8db6 --- /dev/null +++ b/contrib/credential/gnome-keyring/Makefile @@ -0,0 +1,24 @@ +MAIN:=git-credential-gnome-keyring +all:: $(MAIN) + +CC = gcc +RM = rm -f +CFLAGS = -g -O2 -Wall + +-include ../../../config.mak.autogen +-include ../../../config.mak + +INCS:=$(shell pkg-config --cflags gnome-keyring-1) +LIBS:=$(shell pkg-config --libs gnome-keyring-1) + +SRCS:=$(MAIN).c +OBJS:=$(SRCS:.c=.o) + +%.o: %.c + $(CC) $(CFLAGS) $(CPPFLAGS) $(INCS) -o $@ -c $< + +$(MAIN): $(OBJS) + $(CC) -o $@ $(LDFLAGS) $^ $(LIBS) + +clean: + @$(RM) $(MAIN) $(OBJS) diff --git a/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c b/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c new file mode 100644 index 0000000000..41f61c5db3 --- /dev/null +++ b/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c @@ -0,0 +1,445 @@ +/* + * Copyright (C) 2011 John Szakmeister <john@szakmeister.net> + * 2012 Philipp A. Hartmann <pah@qo.cx> + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + */ + +/* + * Credits: + * - GNOME Keyring API handling originally written by John Szakmeister + * - ported to credential helper API by Philipp A. 
Hartmann + */ + +#include <stdio.h> +#include <string.h> +#include <stdarg.h> +#include <stdlib.h> +#include <errno.h> +#include <gnome-keyring.h> + +/* + * This credential struct and API is simplified from git's credential.{h,c} + */ +struct credential +{ + char *protocol; + char *host; + unsigned short port; + char *path; + char *username; + char *password; +}; + +#define CREDENTIAL_INIT \ + { NULL,NULL,0,NULL,NULL,NULL } + +void credential_init(struct credential *c); +void credential_clear(struct credential *c); +int credential_read(struct credential *c); +void credential_write(const struct credential *c); + +typedef int (*credential_op_cb)(struct credential*); + +struct credential_operation +{ + char *name; + credential_op_cb op; +}; + +#define CREDENTIAL_OP_END \ + { NULL,NULL } + +/* + * Table with operation callbacks is defined in concrete + * credential helper implementation and contains entries + * like { "get", function_to_get_credential } terminated + * by CREDENTIAL_OP_END. + */ +struct credential_operation const credential_helper_ops[]; + +/* ---------------- common helper functions ----------------- */ + +static inline void free_password(char *password) +{ + char *c = password; + if (!password) + return; + + while (*c) *c++ = '\0'; + free(password); +} + +static inline void warning(const char *fmt, ...) +{ + va_list ap; + + va_start(ap, fmt); + fprintf(stderr, "warning: "); + vfprintf(stderr, fmt, ap); + fprintf(stderr, "\n" ); + va_end(ap); +} + +static inline void error(const char *fmt, ...) +{ + va_list ap; + + va_start(ap, fmt); + fprintf(stderr, "error: "); + vfprintf(stderr, fmt, ap); + fprintf(stderr, "\n" ); + va_end(ap); +} + +static inline void die(const char *fmt, ...) +{ + va_list ap; + + va_start(ap,fmt); + error(fmt, ap); + va_end(ap); + exit(EXIT_FAILURE); +} + +static inline void die_errno(int err) +{ + error("%s", strerror(err)); + exit(EXIT_FAILURE); +} + +static inline char *xstrdup(const char *str) +{ + char *ret = strdup(str); + if (!ret) + die_errno(errno); + + return ret; +} + +/* ----------------- GNOME Keyring functions ----------------- */ + +/* create a special keyring option string, if path is given */ +static char* keyring_object(struct credential *c) +{ + char* object = NULL; + + if (!c->path) + return object; + + object = (char*) malloc(strlen(c->host)+strlen(c->path)+8); + if(!object) + die_errno(errno); + + if(c->port) + sprintf(object,"%s:%hd/%s",c->host,c->port,c->path); + else + sprintf(object,"%s/%s",c->host,c->path); + + return object; +} + +int keyring_get(struct credential *c) +{ + char* object = NULL; + GList *entries; + GnomeKeyringNetworkPasswordData *password_data; + GnomeKeyringResult result; + + if (!c->protocol || !(c->host || c->path)) + return EXIT_FAILURE; + + object = keyring_object(c); + + result = gnome_keyring_find_network_password_sync( + c->username, + NULL /* domain */, + c->host, + object, + c->protocol, + NULL /* authtype */, + c->port, + &entries); + + free(object); + + if (result == GNOME_KEYRING_RESULT_NO_MATCH) + return EXIT_SUCCESS; + + if (result == GNOME_KEYRING_RESULT_CANCELLED) + return EXIT_SUCCESS; + + if (result != GNOME_KEYRING_RESULT_OK) { + error("%s",gnome_keyring_result_to_message(result)); + return EXIT_FAILURE; + } + + /* pick the first one from the list */ + password_data = (GnomeKeyringNetworkPasswordData *) entries->data; + + free_password(c->password); + c->password = xstrdup(password_data->password); + + if (!c->username) + c->username = xstrdup(password_data->user); + + 
gnome_keyring_network_password_list_free(entries); + + return EXIT_SUCCESS; +} + + +int keyring_store(struct credential *c) +{ + guint32 item_id; + char *object = NULL; + + /* + * Sanity check that what we are storing is actually sensible. + * In particular, we can't make a URL without a protocol field. + * Without either a host or pathname (depending on the scheme), + * we have no primary key. And without a username and password, + * we are not actually storing a credential. + */ + if (!c->protocol || !(c->host || c->path) || + !c->username || !c->password) + return EXIT_FAILURE; + + object = keyring_object(c); + + gnome_keyring_set_network_password_sync( + GNOME_KEYRING_DEFAULT, + c->username, + NULL /* domain */, + c->host, + object, + c->protocol, + NULL /* authtype */, + c->port, + c->password, + &item_id); + + free(object); + return EXIT_SUCCESS; +} + +int keyring_erase(struct credential *c) +{ + char *object = NULL; + GList *entries; + GnomeKeyringNetworkPasswordData *password_data; + GnomeKeyringResult result; + + /* + * Sanity check that we actually have something to match + * against. The input we get is a restrictive pattern, + * so technically a blank credential means "erase everything". + * But it is too easy to accidentally send this, since it is equivalent + * to empty input. So explicitly disallow it, and require that the + * pattern have some actual content to match. + */ + if (!c->protocol && !c->host && !c->path && !c->username) + return EXIT_FAILURE; + + object = keyring_object(c); + + result = gnome_keyring_find_network_password_sync( + c->username, + NULL /* domain */, + c->host, + object, + c->protocol, + NULL /* authtype */, + c->port, + &entries); + + free(object); + + if (result == GNOME_KEYRING_RESULT_NO_MATCH) + return EXIT_SUCCESS; + + if (result == GNOME_KEYRING_RESULT_CANCELLED) + return EXIT_SUCCESS; + + if (result != GNOME_KEYRING_RESULT_OK) + { + error("%s",gnome_keyring_result_to_message(result)); + return EXIT_FAILURE; + } + + /* pick the first one from the list (delete all matches?) */ + password_data = (GnomeKeyringNetworkPasswordData *) entries->data; + + result = gnome_keyring_item_delete_sync( + password_data->keyring, password_data->item_id); + + gnome_keyring_network_password_list_free(entries); + + if (result != GNOME_KEYRING_RESULT_OK) + { + error("%s",gnome_keyring_result_to_message(result)); + return EXIT_FAILURE; + } + + return EXIT_SUCCESS; +} + +/* + * Table with helper operation callbacks, used by generic + * credential helper main function. 
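For context, a helper built from this file is selected through git's credential.helper setting; a hedged sketch, assuming the git-credential-gnome-keyring binary produced by the Makefile above is on $PATH:

    # git runs this as 'git credential-gnome-keyring <get|store|erase>'
    git config --global credential.helper gnome-keyring
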
+ */ +struct credential_operation const credential_helper_ops[] = +{ + { "get", keyring_get }, + { "store", keyring_store }, + { "erase", keyring_erase }, + CREDENTIAL_OP_END +}; + +/* ------------------ credential functions ------------------ */ + +void credential_init(struct credential *c) +{ + memset(c, 0, sizeof(*c)); +} + +void credential_clear(struct credential *c) +{ + free(c->protocol); + free(c->host); + free(c->path); + free(c->username); + free_password(c->password); + + credential_init(c); +} + +int credential_read(struct credential *c) +{ + char buf[1024]; + ssize_t line_len = 0; + char *key = buf; + char *value; + + while (fgets(buf, sizeof(buf), stdin)) + { + line_len = strlen(buf); + + if(buf[line_len-1]=='\n') + buf[--line_len]='\0'; + + if(!line_len) + break; + + value = strchr(buf,'='); + if(!value) { + warning("invalid credential line: %s", key); + return -1; + } + *value++ = '\0'; + + if (!strcmp(key, "protocol")) { + free(c->protocol); + c->protocol = xstrdup(value); + } else if (!strcmp(key, "host")) { + free(c->host); + c->host = xstrdup(value); + value = strrchr(c->host,':'); + if (value) { + *value++ = '\0'; + c->port = atoi(value); + } + } else if (!strcmp(key, "path")) { + free(c->path); + c->path = xstrdup(value); + } else if (!strcmp(key, "username")) { + free(c->username); + c->username = xstrdup(value); + } else if (!strcmp(key, "password")) { + free_password(c->password); + c->password = xstrdup(value); + while (*value) *value++ = '\0'; + } + /* + * Ignore other lines; we don't know what they mean, but + * this future-proofs us when later versions of git do + * learn new lines, and the helpers are updated to match. + */ + } + return 0; +} + +void credential_write_item(FILE *fp, const char *key, const char *value) +{ + if (!value) + return; + fprintf(fp, "%s=%s\n", key, value); +} + +void credential_write(const struct credential *c) +{ + /* only write username/password, if set */ + credential_write_item(stdout, "username", c->username); + credential_write_item(stdout, "password", c->password); +} + +static void usage(const char *name) +{ + struct credential_operation const *try_op = credential_helper_ops; + const char *basename = strrchr(name,'/'); + + basename = (basename) ? 
basename + 1 : name; + fprintf(stderr, "Usage: %s <", basename); + while(try_op->name) { + fprintf(stderr,"%s",(try_op++)->name); + if(try_op->name) + fprintf(stderr,"%s","|"); + } + fprintf(stderr,"%s",">\n"); +} + +int main(int argc, char *argv[]) +{ + int ret = EXIT_SUCCESS; + + struct credential_operation const *try_op = credential_helper_ops; + struct credential cred = CREDENTIAL_INIT; + + if (!argv[1]) { + usage(argv[0]); + goto out; + } + + /* lookup operation callback */ + while(try_op->name && strcmp(argv[1], try_op->name)) + try_op++; + + /* unsupported operation given -- ignore silently */ + if(!try_op->name || !try_op->op) + goto out; + + ret = credential_read(&cred); + if(ret) + goto out; + + /* perform credential operation */ + ret = (*try_op->op)(&cred); + + credential_write(&cred); + +out: + credential_clear(&cred); + return ret; +} diff --git a/contrib/credential/wincred/Makefile b/contrib/credential/wincred/Makefile new file mode 100644 index 0000000000..bad45ca47a --- /dev/null +++ b/contrib/credential/wincred/Makefile @@ -0,0 +1,14 @@ +all: git-credential-wincred.exe + +CC = gcc +RM = rm -f +CFLAGS = -O2 -Wall + +-include ../../../config.mak.autogen +-include ../../../config.mak + +git-credential-wincred.exe : git-credential-wincred.c + $(LINK.c) $^ $(LOADLIBES) $(LDLIBS) -o $@ + +clean: + $(RM) git-credential-wincred.exe diff --git a/contrib/credential/wincred/git-credential-wincred.c b/contrib/credential/wincred/git-credential-wincred.c new file mode 100644 index 0000000000..cbaec5f24b --- /dev/null +++ b/contrib/credential/wincred/git-credential-wincred.c @@ -0,0 +1,357 @@ +/* + * A git credential helper that interface with Windows' Credential Manager + * + */ +#include <windows.h> +#include <stdio.h> +#include <io.h> +#include <fcntl.h> + +/* common helpers */ + +static void die(const char *err, ...) 
+{ + char msg[4096]; + va_list params; + va_start(params, err); + vsnprintf(msg, sizeof(msg), err, params); + fprintf(stderr, "%s\n", msg); + va_end(params); + exit(1); +} + +static void *xmalloc(size_t size) +{ + void *ret = malloc(size); + if (!ret && !size) + ret = malloc(1); + if (!ret) + die("Out of memory"); + return ret; +} + +static char *xstrdup(const char *str) +{ + char *ret = strdup(str); + if (!ret) + die("Out of memory"); + return ret; +} + +/* MinGW doesn't have wincred.h, so we need to define stuff */ + +typedef struct _CREDENTIAL_ATTRIBUTEW { + LPWSTR Keyword; + DWORD Flags; + DWORD ValueSize; + LPBYTE Value; +} CREDENTIAL_ATTRIBUTEW, *PCREDENTIAL_ATTRIBUTEW; + +typedef struct _CREDENTIALW { + DWORD Flags; + DWORD Type; + LPWSTR TargetName; + LPWSTR Comment; + FILETIME LastWritten; + DWORD CredentialBlobSize; + LPBYTE CredentialBlob; + DWORD Persist; + DWORD AttributeCount; + PCREDENTIAL_ATTRIBUTEW Attributes; + LPWSTR TargetAlias; + LPWSTR UserName; +} CREDENTIALW, *PCREDENTIALW; + +#define CRED_TYPE_GENERIC 1 +#define CRED_PERSIST_LOCAL_MACHINE 2 +#define CRED_MAX_ATTRIBUTES 64 + +typedef BOOL (WINAPI *CredWriteWT)(PCREDENTIALW, DWORD); +typedef BOOL (WINAPI *CredUnPackAuthenticationBufferWT)(DWORD, PVOID, DWORD, + LPWSTR, DWORD *, LPWSTR, DWORD *, LPWSTR, DWORD *); +typedef BOOL (WINAPI *CredEnumerateWT)(LPCWSTR, DWORD, DWORD *, + PCREDENTIALW **); +typedef BOOL (WINAPI *CredPackAuthenticationBufferWT)(DWORD, LPWSTR, LPWSTR, + PBYTE, DWORD *); +typedef VOID (WINAPI *CredFreeT)(PVOID); +typedef BOOL (WINAPI *CredDeleteWT)(LPCWSTR, DWORD, DWORD); + +static HMODULE advapi, credui; +static CredWriteWT CredWriteW; +static CredUnPackAuthenticationBufferWT CredUnPackAuthenticationBufferW; +static CredEnumerateWT CredEnumerateW; +static CredPackAuthenticationBufferWT CredPackAuthenticationBufferW; +static CredFreeT CredFree; +static CredDeleteWT CredDeleteW; + +static void load_cred_funcs(void) +{ + /* load DLLs */ + advapi = LoadLibrary("advapi32.dll"); + credui = LoadLibrary("credui.dll"); + if (!advapi || !credui) + die("failed to load DLLs"); + + /* get function pointers */ + CredWriteW = (CredWriteWT)GetProcAddress(advapi, "CredWriteW"); + CredUnPackAuthenticationBufferW = (CredUnPackAuthenticationBufferWT) + GetProcAddress(credui, "CredUnPackAuthenticationBufferW"); + CredEnumerateW = (CredEnumerateWT)GetProcAddress(advapi, + "CredEnumerateW"); + CredPackAuthenticationBufferW = (CredPackAuthenticationBufferWT) + GetProcAddress(credui, "CredPackAuthenticationBufferW"); + CredFree = (CredFreeT)GetProcAddress(advapi, "CredFree"); + CredDeleteW = (CredDeleteWT)GetProcAddress(advapi, "CredDeleteW"); + if (!CredWriteW || !CredUnPackAuthenticationBufferW || + !CredEnumerateW || !CredPackAuthenticationBufferW || !CredFree || + !CredDeleteW) + die("failed to load functions"); +} + +static char target_buf[1024]; +static char *protocol, *host, *path, *username; +static WCHAR *wusername, *password, *target; + +static void write_item(const char *what, WCHAR *wbuf) +{ + char *buf; + int len = WideCharToMultiByte(CP_UTF8, 0, wbuf, -1, NULL, 0, NULL, + FALSE); + buf = xmalloc(len); + + if (!WideCharToMultiByte(CP_UTF8, 0, wbuf, -1, buf, len, NULL, FALSE)) + die("WideCharToMultiByte failed!"); + + printf("%s=", what); + fwrite(buf, 1, len - 1, stdout); + putchar('\n'); + free(buf); +} + +static int match_attr(const CREDENTIALW *cred, const WCHAR *keyword, + const char *want) +{ + int i; + if (!want) + return 1; + + for (i = 0; i < cred->AttributeCount; ++i) + if 
(!wcscmp(cred->Attributes[i].Keyword, keyword)) + return !strcmp((const char *)cred->Attributes[i].Value, + want); + + return 0; /* not found */ +} + +static int match_cred(const CREDENTIALW *cred) +{ + return (!wusername || !wcscmp(wusername, cred->UserName)) && + match_attr(cred, L"git_protocol", protocol) && + match_attr(cred, L"git_host", host) && + match_attr(cred, L"git_path", path); +} + +static void get_credential(void) +{ + WCHAR *user_buf, *pass_buf; + DWORD user_buf_size = 0, pass_buf_size = 0; + CREDENTIALW **creds, *cred = NULL; + DWORD num_creds; + int i; + + if (!CredEnumerateW(L"git:*", 0, &num_creds, &creds)) + return; + + /* search for the first credential that matches username */ + for (i = 0; i < num_creds; ++i) + if (match_cred(creds[i])) { + cred = creds[i]; + break; + } + if (!cred) + return; + + CredUnPackAuthenticationBufferW(0, cred->CredentialBlob, + cred->CredentialBlobSize, NULL, &user_buf_size, NULL, NULL, + NULL, &pass_buf_size); + + user_buf = xmalloc(user_buf_size * sizeof(WCHAR)); + pass_buf = xmalloc(pass_buf_size * sizeof(WCHAR)); + + if (!CredUnPackAuthenticationBufferW(0, cred->CredentialBlob, + cred->CredentialBlobSize, user_buf, &user_buf_size, NULL, NULL, + pass_buf, &pass_buf_size)) + die("CredUnPackAuthenticationBuffer failed"); + + CredFree(creds); + + /* zero-terminate (sizes include zero-termination) */ + user_buf[user_buf_size - 1] = L'\0'; + pass_buf[pass_buf_size - 1] = L'\0'; + + write_item("username", user_buf); + write_item("password", pass_buf); + + free(user_buf); + free(pass_buf); +} + +static void write_attr(CREDENTIAL_ATTRIBUTEW *attr, const WCHAR *keyword, + const char *value) +{ + attr->Keyword = (LPWSTR)keyword; + attr->Flags = 0; + attr->ValueSize = strlen(value) + 1; /* store zero-termination */ + attr->Value = (LPBYTE)value; +} + +static void store_credential(void) +{ + CREDENTIALW cred; + BYTE *auth_buf; + DWORD auth_buf_size = 0; + CREDENTIAL_ATTRIBUTEW attrs[CRED_MAX_ATTRIBUTES]; + + if (!wusername || !password) + return; + + /* query buffer size */ + CredPackAuthenticationBufferW(0, wusername, password, + NULL, &auth_buf_size); + + auth_buf = xmalloc(auth_buf_size); + + if (!CredPackAuthenticationBufferW(0, wusername, password, + auth_buf, &auth_buf_size)) + die("CredPackAuthenticationBuffer failed"); + + cred.Flags = 0; + cred.Type = CRED_TYPE_GENERIC; + cred.TargetName = target; + cred.Comment = L"saved by git-credential-wincred"; + cred.CredentialBlobSize = auth_buf_size; + cred.CredentialBlob = auth_buf; + cred.Persist = CRED_PERSIST_LOCAL_MACHINE; + cred.AttributeCount = 1; + cred.Attributes = attrs; + cred.TargetAlias = NULL; + cred.UserName = wusername; + + write_attr(attrs, L"git_protocol", protocol); + + if (host) { + write_attr(attrs + cred.AttributeCount, L"git_host", host); + cred.AttributeCount++; + } + + if (path) { + write_attr(attrs + cred.AttributeCount, L"git_path", path); + cred.AttributeCount++; + } + + if (!CredWriteW(&cred, 0)) + die("CredWrite failed"); +} + +static void erase_credential(void) +{ + CREDENTIALW **creds; + DWORD num_creds; + int i; + + if (!CredEnumerateW(L"git:*", 0, &num_creds, &creds)) + return; + + for (i = 0; i < num_creds; ++i) { + if (match_cred(creds[i])) + CredDeleteW(creds[i]->TargetName, creds[i]->Type, 0); + } + + CredFree(creds); +} + +static WCHAR *utf8_to_utf16_dup(const char *str) +{ + int wlen = MultiByteToWideChar(CP_UTF8, 0, str, -1, NULL, 0); + WCHAR *wstr = xmalloc(sizeof(WCHAR) * wlen); + MultiByteToWideChar(CP_UTF8, 0, str, -1, wstr, wlen); + return wstr; +} + 
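read_credential() below parses the same key=value stream that git writes to every credential helper; a minimal sketch of one such exchange run by hand (assuming the built .exe is on PATH; the host and the returned values are purely illustrative):

    $ printf 'protocol=https\nhost=example.com\n\n' | git credential-wincred get
    username=alice
    password=secret
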
+static void read_credential(void) +{ + char buf[1024]; + + while (fgets(buf, sizeof(buf), stdin)) { + char *v; + + if (!strcmp(buf, "\n")) + break; + buf[strlen(buf)-1] = '\0'; + + v = strchr(buf, '='); + if (!v) + die("bad input: %s", buf); + *v++ = '\0'; + + if (!strcmp(buf, "protocol")) + protocol = xstrdup(v); + else if (!strcmp(buf, "host")) + host = xstrdup(v); + else if (!strcmp(buf, "path")) + path = xstrdup(v); + else if (!strcmp(buf, "username")) { + username = xstrdup(v); + wusername = utf8_to_utf16_dup(v); + } else if (!strcmp(buf, "password")) + password = utf8_to_utf16_dup(v); + else + die("unrecognized input"); + } +} + +int main(int argc, char *argv[]) +{ + const char *usage = + "Usage: git credential-wincred <get|store|erase>\n"; + + if (!argv[1]) + die(usage); + + /* git use binary pipes to avoid CRLF-issues */ + _setmode(_fileno(stdin), _O_BINARY); + _setmode(_fileno(stdout), _O_BINARY); + + read_credential(); + + load_cred_funcs(); + + if (!protocol || !(host || path)) + return 0; + + /* prepare 'target', the unique key for the credential */ + strncat(target_buf, "git:", sizeof(target_buf)); + strncat(target_buf, protocol, sizeof(target_buf)); + strncat(target_buf, "://", sizeof(target_buf)); + if (username) { + strncat(target_buf, username, sizeof(target_buf)); + strncat(target_buf, "@", sizeof(target_buf)); + } + if (host) + strncat(target_buf, host, sizeof(target_buf)); + if (path) { + strncat(target_buf, "/", sizeof(target_buf)); + strncat(target_buf, path, sizeof(target_buf)); + } + + target = utf8_to_utf16_dup(target_buf); + + if (!strcmp(argv[1], "get")) + get_credential(); + else if (!strcmp(argv[1], "store")) + store_credential(); + else if (!strcmp(argv[1], "erase")) + erase_credential(); + /* otherwise, ignore unknown action */ + return 0; +} diff --git a/contrib/emacs/git.el b/contrib/emacs/git.el index 65c95d9d5a..5ffc506f6d 100644 --- a/contrib/emacs/git.el +++ b/contrib/emacs/git.el @@ -1671,7 +1671,7 @@ Commands: "Entry point into git-status mode." 
(interactive "DSelect directory: ") (setq dir (git-get-top-dir dir)) - (if (file-directory-p (concat (file-name-as-directory dir) ".git")) + (if (file-exists-p (concat (file-name-as-directory dir) ".git")) (let ((buffer (or (and git-reuse-status-buffer (git-find-status-buffer dir)) (create-file-buffer (expand-file-name "*git-status*" dir))))) (switch-to-buffer buffer) diff --git a/contrib/examples/builtin-fetch--tool.c b/contrib/examples/builtin-fetch--tool.c index 0d54aa7061..8bc8c7533a 100644 --- a/contrib/examples/builtin-fetch--tool.c +++ b/contrib/examples/builtin-fetch--tool.c @@ -96,7 +96,7 @@ static int update_local_ref(const char *name, strcpy(oldh, find_unique_abbrev(current->object.sha1, DEFAULT_ABBREV)); strcpy(newh, find_unique_abbrev(sha1_new, DEFAULT_ABBREV)); - if (in_merge_bases(current, &updated, 1)) { + if (in_merge_bases(current, updated)) { fprintf(stderr, "* %s: fast-forward to %s\n", name, note); fprintf(stderr, " old..new: %s..%s\n", oldh, newh); diff --git a/contrib/fast-import/import-zips.py b/contrib/fast-import/import-zips.py index 82f5ed3ddc..5cec9b0129 100755 --- a/contrib/fast-import/import-zips.py +++ b/contrib/fast-import/import-zips.py @@ -9,10 +9,15 @@ ## git log --stat import-zips from os import popen, path -from sys import argv, exit +from sys import argv, exit, hexversion, stderr from time import mktime from zipfile import ZipFile +if hexversion < 0x01060000: + # The limiter is the zipfile module + sys.stderr.write("import-zips.py: requires Python 1.6.0 or later.\n") + sys.exit(1) + if len(argv) < 2: print 'Usage:', argv[0], '<zipfile>...' exit(1) diff --git a/contrib/git-jump/git-jump b/contrib/git-jump/git-jump index a33674e47a..dc90cd6379 100755 --- a/contrib/git-jump/git-jump +++ b/contrib/git-jump/git-jump @@ -21,9 +21,9 @@ open_editor() { } mode_diff() { - git diff --relative "$@" | + git diff --no-prefix --relative "$@" | perl -ne ' - if (m{^\+\+\+ b/(.*)}) { $file = $1; next } + if (m{^\+\+\+ (.*)}) { $file = $1; next } defined($file) or next; if (m/^@@ .*\+(\d+)/) { $line = $1; next } defined($line) or next; diff --git a/contrib/hg-to-git/hg-to-git.py b/contrib/hg-to-git/hg-to-git.py index 046cb2b268..232625a7b7 100755 --- a/contrib/hg-to-git/hg-to-git.py +++ b/contrib/hg-to-git/hg-to-git.py @@ -23,6 +23,11 @@ import os, os.path, sys import tempfile, pickle, getopt import re +if sys.hexversion < 0x02030000: + # The behavior of the pickle module changed significantly in 2.3 + sys.stderr.write("hg-to-git.py: requires Python 2.3 or later.\n") + sys.exit(1) + # Maps hg version -> git version hgvers = {} # List of children for each hg revision diff --git a/contrib/hooks/post-receive-email b/contrib/hooks/post-receive-email index 01af9df15e..b2171a092e 100755 --- a/contrib/hooks/post-receive-email +++ b/contrib/hooks/post-receive-email @@ -403,7 +403,7 @@ generate_update_branch_email() echo " \\" echo " O -- O -- O ($oldrev)" echo "" - echo "The removed revisions are not necessarilly gone - if another reference" + echo "The removed revisions are not necessarily gone - if another reference" echo "still refers to them they will stay in the repository." 
diff --git a/contrib/mw-to-git/Makefile b/contrib/mw-to-git/Makefile
new file mode 100644
index 0000000000..3ed728b0ef
--- /dev/null
+++ b/contrib/mw-to-git/Makefile
@@ -0,0 +1,47 @@
+#
+# Copyright (C) 2012
+#   Charles Roussel <charles.roussel@ensimag.imag.fr>
+#   Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+#   Julien Khayat <julien.khayat@ensimag.imag.fr>
+#   Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+#   Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+## Build git-remote-mediawiki
+
+-include ../../config.mak.autogen
+-include ../../config.mak
+
+ifndef PERL_PATH
+	PERL_PATH = /usr/bin/perl
+endif
+ifndef gitexecdir
+	gitexecdir = $(shell git --exec-path)
+endif
+
+PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH))
+gitexecdir_SQ = $(subst ','\'',$(gitexecdir))
+SCRIPT = git-remote-mediawiki
+
+.PHONY: install help doc test clean
+
+help:
+	@echo 'This is the help target of the Makefile. Current configuration:'
+	@echo '  gitexecdir = $(gitexecdir_SQ)'
+	@echo '  PERL_PATH = $(PERL_PATH_SQ)'
+	@echo 'Run "$(MAKE) install" to install $(SCRIPT) in gitexecdir'
+	@echo 'Run "$(MAKE) test" to run the testsuite'
+
+install:
+	sed -e '1s|#!.*/perl|#!$(PERL_PATH_SQ)|' $(SCRIPT) \
+		> '$(gitexecdir_SQ)/$(SCRIPT)'
+	chmod +x '$(gitexecdir_SQ)/$(SCRIPT)'
+
+doc:
+	@echo 'Sorry, "make doc" is not implemented yet for $(SCRIPT)'
+
+test:
+	$(MAKE) -C t/ test
+
+clean:
+	$(RM) '$(gitexecdir_SQ)/$(SCRIPT)'
+	$(MAKE) -C t/ clean
diff --git a/contrib/mw-to-git/git-remote-mediawiki b/contrib/mw-to-git/git-remote-mediawiki
index c18bfa1f15..094129de09 100755
--- a/contrib/mw-to-git/git-remote-mediawiki
+++ b/contrib/mw-to-git/git-remote-mediawiki
@@ -9,40 +9,19 @@
 # License: GPL v2 or later
 
 # Gateway between Git and MediaWiki.
-#   https://github.com/Bibzball/Git-Mediawiki/wiki
-#
-# Known limitations:
-#
-# - Only wiki pages are managed, no support for [[File:...]]
-#   attachments.
-#
-# - Poor performance in the best case: it takes forever to check
-#   whether we're up-to-date (on fetch or push) or to fetch a few
-#   revisions from a large wiki, because we use exclusively a
-#   page-based synchronization. We could switch to a wiki-wide
-#   synchronization when the synchronization involves few revisions
-#   but the wiki is large.
-#
-# - Git renames could be turned into MediaWiki renames (see TODO
-#   below)
-#
-# - login/password support requires the user to write the password
-#   cleartext in a file (see TODO below).
-#
-# - No way to import "one page, and all pages included in it"
-#
-# - Multiple remote MediaWikis have not been very well tested.
+# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
 
 use strict;
 use MediaWiki::API;
 use DateTime::Format::ISO8601;
-use encoding 'utf8';
 
-# use encoding 'utf8' doesn't change STDERROR
-# but we're going to output UTF-8 filenames to STDERR
+# By default, use UTF-8 to communicate with Git and the user
 binmode STDERR, ":utf8";
+binmode STDOUT, ":utf8";
 
 use URI::Escape;
+use IPC::Open2;
+
 use warnings;
 
 # Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced
@@ -59,6 +38,9 @@ use constant EMPTY_CONTENT => "<!-- empty page -->\n";
 # used to reflect file creation or deletion in diff.
 use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
 
+# Used on Git's side to reflect empty edit messages on the wiki
+use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
+
 my $remotename = $ARGV[0];
 my $url = $ARGV[1];
 
@@ -71,10 +53,18 @@ chomp(@tracked_pages);
 my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
 chomp(@tracked_categories);
 
+# Import media files on pull
+my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
+chomp($import_media);
+$import_media = ($import_media eq "true");
+
+# Export media files on push
+my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
+chomp($export_media);
+$export_media = !($export_media eq "false");
+
 my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
-# TODO: ideally, this should be able to read from keyboard, but we're
-# inside a remote helper, so our stdin is connect to git, not to a
-# terminal.
+# Note: mwPassword is discouraged. Use the credential system instead.
 my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
 my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
 chomp($wiki_login);
@@ -86,6 +76,21 @@ my $shallow_import = run_git("config --get --bool remote.". $remotename .".shall
 chomp($shallow_import);
 $shallow_import = ($shallow_import eq "true");
 
+# Fetch (clone and pull) by revisions instead of by pages. This behavior
+# is more efficient when we have a wiki with lots of pages and we fetch
+# the revisions quite often, so that they concern only a few pages.
+# Possible values:
+# - by_rev: perform one query per new revision on the remote wiki
+# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
+unless ($fetch_strategy) {
+	$fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
+}
+chomp($fetch_strategy);
+unless ($fetch_strategy) {
+	$fetch_strategy = "by_page";
+}
+
 # Dumb push: don't update notes and mediawiki ref to reflect the last push.
 #
 # Configurable with mediawiki.dumbPush, or per-remote with
@@ -151,32 +156,152 @@ while (<STDIN>) {
 
 ########################## Functions ##############################
 
+## credential API management (generic functions)
+
+sub credential_read {
+	my %credential;
+	my $reader = shift;
+	my $op = shift;
+	while (<$reader>) {
+		my ($key, $value) = /([^=]*)=(.*)/;
+		if (not defined $key) {
+			die "ERROR receiving response from git credential $op:\n$_\n";
+		}
+		$credential{$key} = $value;
+	}
+	return %credential;
+}
+
+sub credential_write {
+	my $credential = shift;
+	my $writer = shift;
+	# url overwrites other fields, so it must come first
+	print $writer "url=$credential->{url}\n" if exists $credential->{url};
+	while (my ($key, $value) = each(%$credential) ) {
+		if (length $value && $key ne 'url') {
+			print $writer "$key=$value\n";
+		}
+	}
+}
+
+sub credential_run {
+	my $op = shift;
+	my $credential = shift;
+	my $pid = open2(my $reader, my $writer, "git credential $op");
+	credential_write($credential, $writer);
+	print $writer "\n";
+	close($writer);
+
+	if ($op eq "fill") {
+		%$credential = credential_read($reader, $op);
+	} else {
+		if (<$reader>) {
+			die "ERROR while running git credential $op:\n$_";
+		}
+	}
+	close($reader);
+	waitpid($pid, 0);
+	my $child_exit_status = $?
>> 8; + if ($child_exit_status != 0) { + die "'git credential $op' failed with code $child_exit_status."; + } +} + # MediaWiki API instance, created lazily. my $mediawiki; sub mw_connect_maybe { if ($mediawiki) { - return; + return; } $mediawiki = MediaWiki::API->new; $mediawiki->{config}->{api_url} = "$url/api.php"; if ($wiki_login) { - if (!$mediawiki->login({ - lgname => $wiki_login, - lgpassword => $wiki_passwd, - lgdomain => $wiki_domain, - })) { - print STDERR "Failed to log in mediawiki user \"$wiki_login\" on $url\n"; - print STDERR "(error " . - $mediawiki->{error}->{code} . ': ' . - $mediawiki->{error}->{details} . ")\n"; - exit 1; + my %credential = (url => $url); + $credential{username} = $wiki_login; + $credential{password} = $wiki_passwd; + credential_run("fill", \%credential); + my $request = {lgname => $credential{username}, + lgpassword => $credential{password}, + lgdomain => $wiki_domain}; + if ($mediawiki->login($request)) { + credential_run("approve", \%credential); + print STDERR "Logged in mediawiki user \"$credential{username}\".\n"; } else { - print STDERR "Logged in with user \"$wiki_login\".\n"; + print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n"; + print STDERR " (error " . + $mediawiki->{error}->{code} . ': ' . + $mediawiki->{error}->{details} . ")\n"; + credential_run("reject", \%credential); + exit 1; + } + } +} + +## Functions for listing pages on the remote wiki +sub get_mw_tracked_pages { + my $pages = shift; + get_mw_page_list(\@tracked_pages, $pages); +} + +sub get_mw_page_list { + my $page_list = shift; + my $pages = shift; + my @some_pages = @$page_list; + while (@some_pages) { + my $last = 50; + if ($#some_pages < $last) { + $last = $#some_pages; + } + my @slice = @some_pages[0..$last]; + get_mw_first_pages(\@slice, $pages); + @some_pages = @some_pages[51..$#some_pages]; + } +} + +sub get_mw_tracked_categories { + my $pages = shift; + foreach my $category (@tracked_categories) { + if (index($category, ':') < 0) { + # Mediawiki requires the Category + # prefix, but let's not force the user + # to specify it. + $category = "Category:" . $category; + } + my $mw_pages = $mediawiki->list( { + action => 'query', + list => 'categorymembers', + cmtitle => $category, + cmlimit => 'max' } ) + || die $mediawiki->{error}->{code} . ': ' + . $mediawiki->{error}->{details}; + foreach my $page (@{$mw_pages}) { + $pages->{$page->{title}} = $page; } } } +sub get_mw_all_pages { + my $pages = shift; + # No user-provided list, get the list of pages from the API. + my $mw_pages = $mediawiki->list({ + action => 'query', + list => 'allpages', + aplimit => 'max' + }); + if (!defined($mw_pages)) { + print STDERR "fatal: could not get the list of wiki pages.\n"; + print STDERR "fatal: '$url' does not appear to be a mediawiki\n"; + print STDERR "fatal: make sure '$url/api.php' is a valid page.\n"; + exit 1; + } + foreach my $page (@{$mw_pages}) { + $pages->{$page->{title}} = $page; + } +} + +# queries the wiki for a set of pages. Meant to be used within a loop +# querying the wiki for slices of page list. sub get_mw_first_pages { my $some_pages = shift; my @some_pages = @{$some_pages}; @@ -205,70 +330,45 @@ sub get_mw_first_pages { } } +# Get the list of pages to be fetched according to configuration. 
 sub get_mw_pages {
 	mw_connect_maybe();
 
+	print STDERR "Listing pages on remote wiki...\n";
+
 	my %pages; # hash on page titles to avoid duplicates
 	my $user_defined;
 	if (@tracked_pages) {
 		$user_defined = 1;
 		# The user provided a list of pages titles, but we
 		# still need to query the API to get the page IDs.
-
-		my @some_pages = @tracked_pages;
-		while (@some_pages) {
-			my $last = 50;
-			if ($#some_pages < $last) {
-				$last = $#some_pages;
-			}
-			my @slice = @some_pages[0..$last];
-			get_mw_first_pages(\@slice, \%pages);
-			@some_pages = @some_pages[51..$#some_pages];
-		}
+		get_mw_tracked_pages(\%pages);
 	}
 	if (@tracked_categories) {
 		$user_defined = 1;
-		foreach my $category (@tracked_categories) {
-			if (index($category, ':') < 0) {
-				# Mediawiki requires the Category
-				# prefix, but let's not force the user
-				# to specify it.
-				$category = "Category:" . $category;
-			}
-			my $mw_pages = $mediawiki->list( {
-				action => 'query',
-				list => 'categorymembers',
-				cmtitle => $category,
-				cmlimit => 'max' } )
-				|| die $mediawiki->{error}->{code} . ': ' . $mediawiki->{error}->{details};
-			foreach my $page (@{$mw_pages}) {
-				$pages{$page->{title}} = $page;
-			}
-		}
+		get_mw_tracked_categories(\%pages);
 	}
 	if (!$user_defined) {
-		# No user-provided list, get the list of pages from
-		# the API.
-		my $mw_pages = $mediawiki->list({
-			action => 'query',
-			list => 'allpages',
-			aplimit => 500,
-		});
-		if (!defined($mw_pages)) {
-			print STDERR "fatal: could not get the list of wiki pages.\n";
-			print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
-			print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
-			exit 1;
-		}
-		foreach my $page (@{$mw_pages}) {
-			$pages{$page->{title}} = $page;
+		get_mw_all_pages(\%pages);
+	}
+	if ($import_media) {
+		print STDERR "Getting media files for selected pages...\n";
+		if ($user_defined) {
+			get_linked_mediafiles(\%pages);
+		} else {
+			get_all_mediafiles(\%pages);
 		}
 	}
-	return values(%pages);
+	print STDERR (scalar keys %pages) . " pages found.\n";
+	return %pages;
 }
 
+# usage: $out = run_git("command args");
+#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
 sub run_git {
-	open(my $git, "-|:encoding(UTF-8)", "git " . $_[0]);
+	my $args = shift;
+	my $encoding = (shift || "encoding(UTF-8)");
+	open(my $git, "-|:$encoding", "git " . $args);
 	my $res = do { local $/; <$git> };
 	close($git);
 
@@ -276,6 +376,123 @@ sub run_git {
 }
 
+sub get_all_mediafiles {
+	my $pages = shift;
+	# Fetch the list of all media file pages from the API: they live
+	# in their own namespace, and only one namespace can be queried
+	# at a time.
+	my $mw_pages = $mediawiki->list({
+		action => 'query',
+		list => 'allpages',
+		apnamespace => get_mw_namespace_id("File"),
+		aplimit => 'max'
+	});
+	if (!defined($mw_pages)) {
+		print STDERR "fatal: could not get the list of pages for media files.\n";
+		print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+		exit 1;
+	}
+	foreach my $page (@{$mw_pages}) {
+		$pages->{$page->{title}} = $page;
+	}
+}
+
+sub get_linked_mediafiles {
+	my $pages = shift;
+	my @titles = map $_->{title}, values(%{$pages});
+
+	# The query is split into small batches because of the MW API limit
+	# on the number of links to be returned (500 links max).
+	my $batch = 10;
+	while (@titles) {
+		if ($#titles < $batch) {
+			$batch = $#titles;
+		}
+		my @slice = @titles[0..$batch];
+		# pattern 'page1|page2|...' required by the API
+		my $mw_titles = join('|', @slice);
+
+		# Media files can be included in or linked from a page;
+		# get all of them.
+		my $query = {
+			action => 'query',
+			prop => 'links|images',
+			titles => $mw_titles,
+			plnamespace => get_mw_namespace_id("File"),
+			pllimit => 'max'
+		};
+		my $result = $mediawiki->api($query);
+
+		while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
+			my @media_titles;
+			if (defined($page->{links})) {
+				my @link_titles = map $_->{title}, @{$page->{links}};
+				push(@media_titles, @link_titles);
+			}
+			if (defined($page->{images})) {
+				my @image_titles = map $_->{title}, @{$page->{images}};
+				push(@media_titles, @image_titles);
+			}
+			if (@media_titles) {
+				get_mw_page_list(\@media_titles, $pages);
+			}
+		}
+
+		@titles = @titles[($batch+1)..$#titles];
+	}
+}
+
+sub get_mw_mediafile_for_page_revision {
+	# Name of the file on Wiki, with the prefix.
+	my $filename = shift;
+	my $timestamp = shift;
+	my %mediafile;
+
+	# Check whether a media file with the given timestamp exists on
+	# MediaWiki. If so, download the file.
+	my $query = {
+		action => 'query',
+		prop => 'imageinfo',
+		titles => "File:" . $filename,
+		iistart => $timestamp,
+		iiend => $timestamp,
+		iiprop => 'timestamp|archivename|url',
+		iilimit => 1
+	};
+	my $result = $mediawiki->api($query);
+
+	my ($fileid, $file) = each( %{$result->{query}->{pages}} );
+	# If not defined it means there is no revision of the file for
+	# the given timestamp.
+	if (defined($file->{imageinfo})) {
+		$mediafile{title} = $filename;
+
+		my $fileinfo = pop(@{$file->{imageinfo}});
+		$mediafile{timestamp} = $fileinfo->{timestamp};
+		# Mediawiki::API's download function doesn't support https URLs
+		# and can't download old versions of files.
+		print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
+		$mediafile{content} = download_mw_mediafile($fileinfo->{url});
+	}
+	return %mediafile;
+}
+
+sub download_mw_mediafile {
+	my $url = shift;
+
+	my $response = $mediawiki->{ua}->get($url);
+	if ($response->code == 200) {
+		return $response->decoded_content;
+	} else {
+		print STDERR "Error downloading mediafile from:\n";
+		print STDERR "URL: $url\n";
+		print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
+		exit 1;
+	}
+}
+
 sub get_last_local_revision {
 	# Get note regarding last mediawiki revision
 	my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
@@ -297,13 +514,36 @@ sub get_last_local_revision {
 
 # Remember the timestamp corresponding to a revision id.
 my %basetimestamps;
 
+# Get the last remote revision without taking into account which pages
+# are tracked or not. This function makes a single request to the wiki,
+# thus avoiding a loop over all the tracked pages. This is useful for
+# the fetch-by-rev option.
+sub get_last_global_remote_rev {
+	mw_connect_maybe();
+
+	my $query = {
+		action => 'query',
+		list => 'recentchanges',
+		prop => 'revisions',
+		rclimit => '1',
+		rcdir => 'older',
+	};
+	my $result = $mediawiki->api($query);
+	return $result->{query}->{recentchanges}[0]->{revid};
+}
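+# get_last_global_remote_rev above is what makes the by_rev strategy
+# cheap. A sketch of how a user would opt in (assumes a remote named
+# "origin"; both configuration keys are read near the top of this file):
+#   git config remote.origin.fetchStrategy by_rev
+#   git config mediawiki.fetchStrategy by_rev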
+# Get the last remote revision concerning the tracked pages and the
+# tracked categories.
 sub get_last_remote_revision {
 	mw_connect_maybe();
 
-	my @pages = get_mw_pages();
+	my %pages_hash = get_mw_pages();
+	my @pages = values(%pages_hash);
 
 	my $max_rev_num = 0;
 
+	print STDERR "Getting last revision id on tracked pages...\n";
+
 	foreach my $page (@pages) {
 		my $id = $page->{pageid};
 
@@ -379,6 +619,16 @@ sub literal_data {
 	print STDOUT "data ", bytes::length($content), "\n", $content;
 }
 
+sub literal_data_raw {
+	# Output possibly binary content.
+	my ($content) = @_;
+	# Avoid confusion between size in bytes and in characters
+	utf8::downgrade($content);
+	binmode STDOUT, ":raw";
+	print STDOUT "data ", bytes::length($content), "\n", $content;
+	binmode STDOUT, ":utf8";
+}
+
 sub mw_capabilities {
 	# Revisions are imported to the private namespace
 	# refs/mediawiki/$remotename/ by the helper and fetched into
@@ -461,11 +711,24 @@ sub fetch_mw_revisions {
 	return ($n, @revisions);
 }
 
+sub fe_escape_path {
+	my $path = shift;
+	$path =~ s/\\/\\\\/g;
+	$path =~ s/"/\\"/g;
+	$path =~ s/\n/\\n/g;
+	return '"' . $path . '"';
+}
+
 sub import_file_revision {
 	my $commit = shift;
 	my %commit = %{$commit};
 	my $full_import = shift;
 	my $n = shift;
+	my $mediafile = shift;
+	my %mediafile;
+	if ($mediafile) {
+		%mediafile = %{$mediafile};
+	}
 
 	my $title = $commit{title};
 	my $comment = $commit{comment};
@@ -483,11 +746,17 @@
 		print STDOUT "from refs/mediawiki/$remotename/master^0\n";
 	}
 	if ($content ne DELETED_CONTENT) {
-		print STDOUT "M 644 inline $title.mw\n";
+		print STDOUT "M 644 inline " .
+			fe_escape_path($title . ".mw") . "\n";
 		literal_data($content);
+		if (%mediafile) {
+			print STDOUT "M 644 inline "
+				. fe_escape_path($mediafile{title}) . "\n";
+			literal_data_raw($mediafile{content});
+		}
 		print STDOUT "\n\n";
 	} else {
-		print STDOUT "D $title.mw\n";
+		print STDOUT "D " . fe_escape_path($title . ".mw") . "\n";
 	}
 
 	# mediawiki revision number in the git note
@@ -547,8 +816,6 @@ sub mw_import_ref {
 
 	mw_connect_maybe();
 
-	my @pages = get_mw_pages();
-
 	print STDERR "Searching revisions...\n";
 	my $last_local = get_last_local_revision();
 	my $fetch_from = $last_local + 1;
@@ -557,36 +824,111 @@
 	} else {
 		print STDERR ", fetching from here.\n";
 	}
+
+	my $n = 0;
+	if ($fetch_strategy eq "by_rev") {
+		print STDERR "Fetching & writing export data by revs...\n";
+		$n = mw_import_ref_by_revs($fetch_from);
+	} elsif ($fetch_strategy eq "by_page") {
+		print STDERR "Fetching & writing export data by pages...\n";
+		$n = mw_import_ref_by_pages($fetch_from);
+	} else {
+		print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
+		print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
+		exit 1;
+	}
+
+	if ($fetch_from == 1 && $n == 0) {
+		print STDERR "You appear to have cloned an empty MediaWiki.\n";
+		# Something has to be done remote-helper side. If nothing is done, an error is
+		# thrown saying that HEAD is referring to unknown object 0000000000000000000
+		# and the clone fails.
+	}
+}
+
+sub mw_import_ref_by_pages {
+
+	my $fetch_from = shift;
+	my %pages_hash = get_mw_pages();
+	my @pages = values(%pages_hash);
+
 	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
 
-	# Creation of the fast-import stream
-	print STDERR "Fetching & writing export data...\n";
+	@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
+	my @revision_ids = map $_->{revid}, @revisions;
 
-	$n = 0;
+	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
+
+sub mw_import_ref_by_revs {
+
+	my $fetch_from = shift;
+	my %pages_hash = get_mw_pages();
+
+	my $last_remote = get_last_global_remote_rev();
+	my @revision_ids = $fetch_from..$last_remote;
+	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
+
+# Import revisions given in the second argument (array of integers).
+# Only pages appearing in the third argument (hash indexed by page titles)
+# will be imported.
+sub mw_import_revids {
+	my $fetch_from = shift;
+	my $revision_ids = shift;
+	my $pages = shift;
+
+	my $n = 0;
+	my $n_actual = 0;
 	my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
 
-	foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) {
+	foreach my $pagerevid (@$revision_ids) {
+		# Count the page even if we skip it, since we display
+		# $n/$total and $total includes skipped pages.
+		$n++;
+
 		# fetch the content of the pages
 		my $query = {
 			action => 'query',
 			prop => 'revisions',
 			rvprop => 'content|timestamp|comment|user|ids',
-			revids => $pagerevid->{revid},
+			revids => $pagerevid,
 		};
 
 		my $result = $mediawiki->api($query);
 
-		my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}});
+		if (!$result) {
+			die "Failed to retrieve modified page for revision $pagerevid";
+		}
 
-		$n++;
+		if (defined($result->{query}->{badrevids}->{$pagerevid})) {
+			# The revision id does not exist on the remote wiki.
+			next;
+		}
+
+		if (!defined($result->{query}->{pages})) {
+			die "Invalid revision $pagerevid.";
+		}
+
+		my @result_pages = values(%{$result->{query}->{pages}});
+		my $result_page = $result_pages[0];
+		my $rev = $result_pages[0]->{revisions}->[0];
+
+		my $page_title = $result_page->{title};
+
+		if (!exists($pages->{$page_title})) {
+			print STDERR "$n/", scalar(@$revision_ids),
+				": Skipping revision #$rev->{revid} of $page_title\n";
+			next;
+		}
+
+		$n_actual++;
+
 		my %commit;
 		$commit{author} = $rev->{user} || 'Anonymous';
-		$commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*';
-		$commit{title} = mediawiki_smudge_filename(
-			$result->{query}->{pages}->{$pagerevid->{pageid}}->{title}
-		);
-		$commit{mw_revision} = $pagerevid->{revid};
+		$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
+		$commit{title} = mediawiki_smudge_filename($page_title);
+		$commit{mw_revision} = $rev->{revid};
 		$commit{content} = mediawiki_smudge($rev->{'*'});
 
 		if (!defined($rev->{timestamp})) {
@@ -596,17 +938,23 @@
 		}
 		$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);
 
-		print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n";
-
-		import_file_revision(\%commit, ($fetch_from == 1), $n);
+		# Differentiate between regular pages and media files.
+		my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
+		my %mediafile;
+		if ($namespace) {
+			my $id = get_mw_namespace_id($namespace);
+			if ($id && $id == get_mw_namespace_id("File")) {
+				%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
+			}
+		}
+		# If this is a revision of the media page for a new version
+		# of a file, do one common commit for both the file and the
+		# media page. Otherwise, commit only that page.
+		print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
+		import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
 	}
 
-	if ($fetch_from == 1 && $n == 0) {
-		print STDERR "You appear to have cloned an empty MediaWiki.\n";
-		# Something has to be done remote-helper side. If nothing is done, an error is
-		# thrown saying that HEAD is refering to unknown object 0000000000000000000
-		# and the clone fails.
-	}
+	return $n_actual;
 }
 
 sub error_non_fast_forward {
@@ -624,6 +972,63 @@ sub error_non_fast_forward {
 	return 0;
 }
 
+sub mw_upload_file {
+	my $complete_file_name = shift;
+	my $new_sha1 = shift;
+	my $extension = shift;
+	my $file_deleted = shift;
+	my $summary = shift;
+	my $newrevid;
+	my $path = "File:" . $complete_file_name;
+	my %hashFiles = get_allowed_file_extensions();
+	if (!exists($hashFiles{$extension})) {
+		print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
+		print STDERR "Check the configuration of file uploads in your mediawiki.\n";
+		return $newrevid;
+	}
+	# Deleting and uploading a file requires a privileged user
+	if ($file_deleted) {
+		mw_connect_maybe();
+		my $query = {
+			action => 'delete',
+			title => $path,
+			reason => $summary
+		};
+		if (!$mediawiki->edit($query)) {
+			print STDERR "Failed to delete file on remote wiki\n";
+			print STDERR "Check your permissions on the remote site. Error code:\n";
+			print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
+			exit 1;
+		}
+	} else {
+		# Don't let perl try to interpret file content as UTF-8 => use "raw"
+		my $content = run_git("cat-file blob $new_sha1", "raw");
+		if ($content ne "") {
+			mw_connect_maybe();
+			$mediawiki->{config}->{upload_url} =
+				"$url/index.php/Special:Upload";
+			$mediawiki->edit({
+				action => 'upload',
+				filename => $complete_file_name,
+				comment => $summary,
+				file => [undef,
+					 $complete_file_name,
+					 Content => $content],
+				ignorewarnings => 1,
+			}, {
+				skip_encoding => 1
+			} ) || die $mediawiki->{error}->{code} . ':'
+				 . $mediawiki->{error}->{details};
+			my $last_file_page = $mediawiki->get_page({title => $path});
+			$newrevid = $last_file_page->{revid};
+			print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
+		} else {
+			print STDERR "Empty file $complete_file_name not pushed.\n";
+		}
+	}
+	return $newrevid;
+}
+
 sub mw_push_file {
 	my $diff_info = shift;
 	# $diff_info contains a string in this format:
@@ -636,7 +1041,12 @@ sub mw_push_file {
 	my $summary = shift;
 	# MediaWiki revision number. Keep the previous one by default,
 	# in case there's no edit to perform.
- my $newrevid = shift; + my $oldrevid = shift; + my $newrevid; + + if ($summary eq EMPTY_MESSAGE) { + $summary = ''; + } my $new_sha1 = $diff_info_split[3]; my $old_sha1 = $diff_info_split[2]; @@ -644,9 +1054,16 @@ sub mw_push_file { my $page_deleted = ($new_sha1 eq NULL_SHA1); $complete_file_name = mediawiki_clean_filename($complete_file_name); - if (substr($complete_file_name,-3) eq ".mw") { - my $title = substr($complete_file_name,0,-3); - + my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/; + if (!defined($extension)) { + $extension = ""; + } + if ($extension eq "mw") { + my $ns = get_mw_namespace_id_for_page($complete_file_name); + if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) { + print STDERR "Ignoring media file related page: $complete_file_name\n"; + return ($oldrevid, "ok"); + } my $file_content; if ($page_deleted) { # Deleting a page usually requires @@ -664,7 +1081,7 @@ sub mw_push_file { action => 'edit', summary => $summary, title => $title, - basetimestamp => $basetimestamps{$newrevid}, + basetimestamp => $basetimestamps{$oldrevid}, text => mediawiki_clean($file_content, $page_created), }, { skip_encoding => 1 # Helps with names with accentuated characters @@ -676,7 +1093,7 @@ sub mw_push_file { $mediawiki->{error}->{code} . ' from mediwiki: ' . $mediawiki->{error}->{details} . ".\n"; - return ($newrevid, "non-fast-forward"); + return ($oldrevid, "non-fast-forward"); } else { # Other errors. Shouldn't happen => just die() die 'Fatal: Error ' . @@ -686,9 +1103,14 @@ sub mw_push_file { } $newrevid = $result->{edit}->{newrevid}; print STDERR "Pushed file: $new_sha1 - $title\n"; + } elsif ($export_media) { + $newrevid = mw_upload_file($complete_file_name, $new_sha1, + $extension, $page_deleted, + $summary); } else { - print STDERR "$complete_file_name not a mediawiki file (Not pushable on this version of git-remote-mediawiki).\n" + print STDERR "Ignoring media file $title\n"; } + $newrevid = ($newrevid or $oldrevid); return ($newrevid, "ok"); } @@ -760,16 +1182,26 @@ sub mw_push_revision { if ($last_local_revid > 0) { my $parsed_sha1 = $remoteorigin_sha1; # Find a path from last MediaWiki commit to pushed commit + print STDERR "Computing path from local to remote ...\n"; + my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1")); + my %local_ancestry; + foreach my $line (@local_ancestry) { + if (my ($child, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) { + foreach my $parent (split(' ', $parents)) { + $local_ancestry{$parent} = $child; + } + } elsif (!$line =~ m/^([a-f0-9]+)/) { + die "Unexpected output from git rev-list: $line"; + } + } while ($parsed_sha1 ne $HEAD_sha1) { - my @commit_info = grep(/^$parsed_sha1/, split(/\n/, run_git("rev-list --children $local"))); - if (!@commit_info) { + my $child = $local_ancestry{$parsed_sha1}; + if (!$child) { + printf STDERR "Cannot find a path in history from remote commit to last commit\n"; return error_non_fast_forward($remote); } - my @commit_info_split = split(/ |\n/, $commit_info[0]); - # $commit_info_split[1] is the sha1 of the commit to export - # $commit_info_split[0] is the sha1 of its direct child - push(@commit_pairs, \@commit_info_split); - $parsed_sha1 = $commit_info_split[1]; + push(@commit_pairs, [$parsed_sha1, $child]); + $parsed_sha1 = $child; } } else { # No remote mediawiki revision. Export the whole @@ -791,8 +1223,8 @@ sub mw_push_revision { # TODO: we could detect rename, and encode them with a #redirect on the wiki. 
 	# TODO: for now, it's just a delete+add
 	my @diff_info_list = split(/\0/, $diff_infos);
-	# Keep the first line of the commit message as mediawiki comment for the revision
-	my $commit_msg = (split(/\n/, run_git("show --pretty=format:\"%s\" $sha1_commit")))[0];
+	# Keep the subject line of the commit message as mediawiki comment for the revision
+	my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
 	chomp($commit_msg);
 	# Push every blob
 	while (@diff_info_list) {
@@ -817,7 +1249,7 @@
 		}
 	}
 	unless ($dumb_push) {
-		run_git("notes --ref=$remotename/mediawiki add -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
+		run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
 		run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
 	}
 }
@@ -825,3 +1257,104 @@ sub mw_push_revision {
 	print STDOUT "ok $remote\n";
 	return 1;
 }
+
+sub get_allowed_file_extensions {
+	mw_connect_maybe();
+
+	my $query = {
+		action => 'query',
+		meta => 'siteinfo',
+		siprop => 'fileextensions'
+	};
+	my $result = $mediawiki->api($query);
+	my @file_extensions = map $_->{ext},@{$result->{query}->{fileextensions}};
+	my %hashFile = map {$_ => 1}@file_extensions;
+
+	return %hashFile;
+}
+
+# In-memory cache for MediaWiki namespace ids.
+my %namespace_id;
+
+# Namespaces whose id is cached in the configuration file
+# (to avoid duplicates)
+my %cached_mw_namespace_id;
+
+# Return MediaWiki id for a canonical namespace name.
+# Ex.: "File", "Project".
+sub get_mw_namespace_id {
+	mw_connect_maybe();
+	my $name = shift;
+
+	if (!exists $namespace_id{$name}) {
+		# Look at the configuration file to see if the record for that
+		# namespace is already cached. Namespaces are stored in the form
+		# "Name_of_namespace:Id_namespace", e.g. "File:6".
+		my @temp = split(/[\n]/, run_git("config --get-all remote."
+						. $remotename .".namespaceCache"));
+		chomp(@temp);
+		foreach my $ns (@temp) {
+			my ($n, $id) = split(/:/, $ns);
+			if ($id eq 'notANameSpace') {
+				$namespace_id{$n} = {is_namespace => 0};
+			} else {
+				$namespace_id{$n} = {is_namespace => 1, id => $id};
+			}
+			$cached_mw_namespace_id{$n} = 1;
+		}
+	}
+
+	if (!exists $namespace_id{$name}) {
+		print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
+		# NS not found => get namespace id from MW and store it in the
+		# configuration file.
+		my $query = {
+			action => 'query',
+			meta => 'siteinfo',
+			siprop => 'namespaces'
+		};
+		my $result = $mediawiki->api($query);
+
+		while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
+			if (defined($ns->{id}) && defined($ns->{canonical})) {
+				$namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
+				if ($ns->{'*'}) {
+					# alias (e.g. french Fichier: as alias for canonical File:)
+					$namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
+				}
+			}
+		}
+	}
+
+	my $ns = $namespace_id{$name};
+	my $id;
+
+	unless (defined $ns) {
+		print STDERR "No such namespace $name on MediaWiki.\n";
+		$ns = {is_namespace => 0};
+		$namespace_id{$name} = $ns;
+	}
+
+	if ($ns->{is_namespace}) {
+		$id = $ns->{id};
+	}
+
+	# Store "notANameSpace" as special value for nonexistent namespaces
+	my $store_id = ($id || 'notANameSpace');
+
+	# Store explicitly requested namespaces on disk
+	if (!exists $cached_mw_namespace_id{$name}) {
+		run_git("config --add remote.". $remotename
+			.".namespaceCache \"". $name .":". $store_id ."\"");
+		$cached_mw_namespace_id{$name} = 1;
+	}
+	return $id;
+}
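+# For instance, after cloning a wiki that uses media files, the cache in
+# .git/config may contain entries such as the following (a sketch; the
+# actual ids depend on the wiki, though "File" is namespace 6 on a stock
+# MediaWiki install):
+#   namespaceCache = File:6
+#   namespaceCache = Someothername:notANameSpace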
+
+sub get_mw_namespace_id_for_page {
+	if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
+		return get_mw_namespace_id($namespace);
+	} else {
+		return;
+	}
+}
diff --git a/contrib/mw-to-git/t/.gitignore b/contrib/mw-to-git/t/.gitignore
new file mode 100644
index 0000000000..a7a40b4964
--- /dev/null
+++ b/contrib/mw-to-git/t/.gitignore
@@ -0,0 +1,4 @@
+WEB/
+wiki/
+trash directory.t*/
+test-results/
diff --git a/contrib/mw-to-git/t/Makefile b/contrib/mw-to-git/t/Makefile
new file mode 100644
index 0000000000..f422203fa0
--- /dev/null
+++ b/contrib/mw-to-git/t/Makefile
@@ -0,0 +1,31 @@
+#
+# Copyright (C) 2012
+#   Charles Roussel <charles.roussel@ensimag.imag.fr>
+#   Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+#   Julien Khayat <julien.khayat@ensimag.imag.fr>
+#   Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+#   Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+## Test git-remote-mediawiki
+
+all: test
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+
+.PHONY: help test clean all
+
+help:
+	@echo 'Run "$(MAKE) test" to launch test scripts'
+	@echo 'Run "$(MAKE) clean" to remove trash folders'
+
+test:
+	@for t in $(T); do \
+		echo "$$t"; \
+		"./$$t" || exit 1; \
+	done
+
+clean:
+	$(RM) -r 'trash directory'.*
diff --git a/contrib/mw-to-git/t/README b/contrib/mw-to-git/t/README
new file mode 100644
index 0000000000..96e97390cf
--- /dev/null
+++ b/contrib/mw-to-git/t/README
@@ -0,0 +1,124 @@
+Tests for Mediawiki-to-Git
+==========================
+
+Introduction
+------------
+This manual describes how to install the git-remote-mediawiki test
+environment on a machine with git installed on it.
+
+Prerequisites
+-------------
+
+In order to run this test environment correctly, you will need to
+install the following packages (Debian/Ubuntu names, may need to be
+adapted for another distribution):
+
+* lighttpd
+* php5
+* php5-cgi
+* php5-cli
+* php5-curl
+* php5-sqlite
+
+Principles and Technical Choices
+--------------------------------
+
+The test environment makes it easy to install and manipulate one or
+several MediaWiki instances. To allow developers to run the testsuite
+easily, the environment does not require root privileges (except to
+install the required packages if needed). It starts a webserver
+instance on the user's account (using lighttpd greatly helps for
+that), and does not need a separate database daemon (thanks to the use
+of sqlite).
+
+Run the test environment
+------------------------
+
+Install a new wiki
+~~~~~~~~~~~~~~~~~~
+
+Once you have all the prerequisites, you need to install a MediaWiki
+instance on your machine. If you already have one, it is still
+strongly recommended to install one with the script provided. Here's
+how to use it:
+
+a. change directory to contrib/mw-to-git/t/
+b. if needed, edit test.config to choose your installation parameters
+c. run `./install-wiki.sh install`
+d. check in your favourite web browser that your wiki is correctly
+   installed.
+
+Remove an existing wiki
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Edit the file test.config to fit the wiki you want to delete, and then
+execute the command `./install-wiki.sh delete` from the
+contrib/mw-to-git/t directory.
+
+Run the existing tests
+~~~~~~~~~~~~~~~~~~~~~~
+
+The provided tests are currently in the `contrib/mw-to-git/t` directory.
+The files are all the t936[0-9]-*.sh shell scripts.
+a. Run all tests:
+   To do so, run "make test" from the contrib/mw-to-git/ directory.
+
+b. Run a specific test:
+   To run a given test <test_name>, run ./<test_name> from the
+   contrib/mw-to-git/t directory.
+
+How to create new tests
+-----------------------
+
+Available functions
+~~~~~~~~~~~~~~~~~~~
+
+The test environment of git-remote-mediawiki provides some functions
+useful to test its behaviour. For more details about the functions'
+parameters, please refer to the `test-gitmw-lib.sh` and
+`test-gitmw.pl` files.
+
+** `test_check_wiki_precond`:
+Check if the tests must be skipped or not. Please use this function
+at the beginning of each new test file.
+
+** `wiki_getpage`:
+Fetch a given page from the wiki and put its content into the
+directory given as a parameter.
+
+** `wiki_delete_page`:
+Delete a given page from the wiki.
+
+** `wiki_edit_page`:
+Create or modify a given page in the wiki. You can specify several
+parameters, such as a summary for the edit, or add the page to a
+given category.
+See test-gitmw.pl for more details.
+
+** `wiki_getallpage`:
+Fetch all pages from the wiki into a given directory. The directory
+is created if it does not exist.
+
+** `test_diff_directories`:
+Compare the content of two directories. The content must be the same.
+Use this function to compare the content of a git directory and a wiki
+one created by wiki_getallpage.
+
+** `test_contains_N_files`:
+Check if the given directory contains a given number of files.
+
+** `wiki_page_exists`:
+Tests if a given page exists on the wiki.
+
+** `wiki_reset`:
+Reset the wiki, i.e. flush the database. Use this function at the
+beginning of each new test, except if the test re-uses the same wiki
+(and history) as the previous test.
+
+How to write a new test
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Please follow the standards given by git; see git/t/README.
+New files should be named t936[0-9]-*.sh.
+Be sure to reset your wiki regularly with the function `wiki_reset`.
diff --git a/contrib/mw-to-git/t/install-wiki.sh b/contrib/mw-to-git/t/install-wiki.sh
new file mode 100755
index 0000000000..c6d6fa3aef
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki.sh
@@ -0,0 +1,45 @@
+#!/bin/sh
+
+# This script installs or deletes a MediaWiki on your computer.
+# It requires a web server with PHP and SQLite running. In addition, if you
+# do not have MediaWiki sources on your computer, the option 'install'
+# downloads them for you.
+# Please set the CONFIGURATION VARIABLES in ./test-gitmw-lib.sh
+
+WIKI_TEST_DIR=$(cd "$(dirname "$0")" && pwd)
+
+if test -z "$WIKI_TEST_DIR"
+then
+	WIKI_TEST_DIR=.
+fi
+
+. "$WIKI_TEST_DIR"/test-gitmw-lib.sh
+usage () {
+	echo "Usage: "
+	echo "	./install-wiki.sh <install | delete | --help>"
+	echo "		install | -i :	Install a wiki on your computer."
+	echo "		delete | -d : Delete the wiki and all its pages and "
+	echo "			content."
+} + + +# Argument: install, delete, --help | -h +case "$1" in + "install" | "-i") + wiki_install + exit 0 + ;; + "delete" | "-d") + wiki_delete + exit 0 + ;; + "--help" | "-h") + usage + exit 0 + ;; + *) + echo "Invalid argument: $1" + usage + exit 1 + ;; +esac diff --git a/contrib/mw-to-git/t/install-wiki/.gitignore b/contrib/mw-to-git/t/install-wiki/.gitignore new file mode 100644 index 0000000000..b5a2a4408c --- /dev/null +++ b/contrib/mw-to-git/t/install-wiki/.gitignore @@ -0,0 +1 @@ +wikidb.sqlite diff --git a/contrib/mw-to-git/t/install-wiki/LocalSettings.php b/contrib/mw-to-git/t/install-wiki/LocalSettings.php new file mode 100644 index 0000000000..29f125116b --- /dev/null +++ b/contrib/mw-to-git/t/install-wiki/LocalSettings.php @@ -0,0 +1,129 @@ +<?php +# This file was automatically generated by the MediaWiki 1.19.0 +# installer. If you make manual changes, please keep track in case you +# need to recreate them later. +# +# See includes/DefaultSettings.php for all configurable settings +# and their default values, but don't forget to make changes in _this_ +# file, not there. +# +# Further documentation for configuration settings may be found at: +# http://www.mediawiki.org/wiki/Manual:Configuration_settings + +# Protect against web entry +if ( !defined( 'MEDIAWIKI' ) ) { + exit; +} + +## Uncomment this to disable output compression +# $wgDisableOutputCompression = true; + +$wgSitename = "Git-MediaWiki-Test"; +$wgMetaNamespace = "Git-MediaWiki-Test"; + +## The URL base path to the directory containing the wiki; +## defaults for all runtime URL paths are based off of this. +## For more information on customizing the URLs please see: +## http://www.mediawiki.org/wiki/Manual:Short_URL +$wgScriptPath = "@WG_SCRIPT_PATH@"; +$wgScriptExtension = ".php"; + +## The protocol and server name to use in fully-qualified URLs +$wgServer = "@WG_SERVER@"; + +## The relative URL path to the skins directory +$wgStylePath = "$wgScriptPath/skins"; + +## The relative URL path to the logo. Make sure you change this from the default, +## or else you'll overwrite your logo when you upgrade! +$wgLogo = "$wgStylePath/common/images/wiki.png"; + +## UPO means: this is also a user preference option + +$wgEnableEmail = true; +$wgEnableUserEmail = true; # UPO + +$wgEmergencyContact = "apache@localhost"; +$wgPasswordSender = "apache@localhost"; + +$wgEnotifUserTalk = false; # UPO +$wgEnotifWatchlist = false; # UPO +$wgEmailAuthentication = true; + +## Database settings +$wgDBtype = "sqlite"; +$wgDBserver = ""; +$wgDBname = "@WG_SQLITE_DATAFILE@"; +$wgDBuser = ""; +$wgDBpassword = ""; + +# SQLite-specific settings +$wgSQLiteDataDir = "@WG_SQLITE_DATADIR@"; + + +## Shared memory settings +$wgMainCacheType = CACHE_NONE; +$wgMemCachedServers = array(); + +## To enable image uploads, make sure the 'images' directory +## is writable, then set this to true: +$wgEnableUploads = true; +$wgUseImageMagick = true; +$wgImageMagickConvertCommand ="@CONVERT@"; +$wgFileExtensions[] = 'txt'; + +# InstantCommons allows wiki to use images from http://commons.wikimedia.org +$wgUseInstantCommons = false; + +## If you use ImageMagick (or any other shell command) on a +## Linux server, this will need to be set to the name of an +## available UTF-8 locale +$wgShellLocale = "en_US.utf8"; + +## If you want to use image uploads under safe mode, +## create the directories images/archive, images/thumb and +## images/temp, and make them all writable. 
Then uncomment
+## this, if it's not already uncommented:
+#$wgHashedUploadDirectory = false;
+
+## Set $wgCacheDirectory to a writable directory on the web server
+## to make your wiki go slightly faster. The directory should not
+## be publicly accessible from the web.
+#$wgCacheDirectory = "$IP/cache";
+
+# Site language code, should be one of the list in ./languages/Names.php
+$wgLanguageCode = "en";
+
+$wgSecretKey = "1c912bfe3519fb70f5dc523ecc698111cd43d81a11c585b3eefb28f29c2699b7";
+#$wgSecretKey = "@SECRETKEY@";
+
+
+# Site upgrade key. Must be set to a string (default provided) to turn on the
+# web installer while LocalSettings.php is in place
+$wgUpgradeKey = "ddae7dc87cd0a645";
+
+## Default skin: you can change the default skin. Use the internal symbolic
+## names, ie 'standard', 'nostalgia', 'cologneblue', 'monobook', 'vector':
+$wgDefaultSkin = "vector";
+
+## For attaching licensing metadata to pages, and displaying an
+## appropriate copyright notice / icon. GNU Free Documentation
+## License and Creative Commons licenses are supported so far.
+$wgRightsPage = ""; # Set to the title of a wiki page that describes your license/copyright
+$wgRightsUrl = "";
+$wgRightsText = "";
+$wgRightsIcon = "";
+
+# Path to the GNU diff3 utility. Used for conflict resolution.
+$wgDiff3 = "/usr/bin/diff3";
+
+# Query string length limit for ResourceLoader. You should only set this if
+# your web server has a query string length limit (then set it to that limit),
+# or if you have suhosin.get.max_value_length set in php.ini (then set it to
+# that value)
+$wgResourceLoaderMaxQueryLength = -1;
+
+
+
+# End of automatically generated settings.
+# Add more configuration options below.
diff --git a/contrib/mw-to-git/t/install-wiki/db_install.php b/contrib/mw-to-git/t/install-wiki/db_install.php
new file mode 100644
index 0000000000..0f3f4e018a
--- /dev/null
+++ b/contrib/mw-to-git/t/install-wiki/db_install.php
@@ -0,0 +1,120 @@
+<?php
+/**
+ * This script generates a SQLite database for a MediaWiki version 1.19.0.
+ * You must specify the name of the database (argument 1), the login of
+ * the admin (argument 2), its password (argument 3), the folder where
+ * the database file is located (absolute path, argument 4) and the port
+ * of the web server (argument 5).
+ * It is used by the script install-wiki.sh to ease the installation
+ * of a MediaWiki.
+ *
+ * In order to generate a SQLite database file, MediaWiki asks the user
+ * to submit some forms in its web browser. This script simulates this
+ * behavior through the functions <get> and <submit>
+ *
+ */
+$argc = $_SERVER['argc'];
+$argv = $_SERVER['argv'];
+
+$login = $argv[2];
+$pass = $argv[3];
+$tmp = $argv[4];
+$port = $argv[5];
+
+$url = 'http://localhost:'.$port.'/wiki/mw-config/index.php';
+$db_dir = urlencode($tmp);
+$tmp_cookie = tempnam($tmp, "COOKIE_");
+/*
+ * Fetches a page with cURL.
+ */
+function get($page_name = "") {
+	$curl = curl_init();
+	$page_name_add = "";
+	if ($page_name != "") {
+		$page_name_add = '?page='.$page_name;
+	}
+	$url = $GLOBALS['url'].$page_name_add;
+	$tmp_cookie = $GLOBALS['tmp_cookie'];
+	curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+	curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+	curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+	curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+	curl_setopt($curl, CURLOPT_HEADER, true);
+	curl_setopt($curl, CURLOPT_URL, $url);
+
+	$page = curl_exec($curl);
+	if (!$page) {
+		die("Could not get page: $url\n");
+	}
+	curl_close($curl);
+	return $page;
+}
+
+/*
+ * Submits a form with cURL.
+ */
+function submit($page_name, $option = "") {
+	$curl = curl_init();
+	$datapost = 'submit-continue=Continue+%E2%86%92';
+	if ($option != "") {
+		$datapost = $option.'&'.$datapost;
+	}
+	$url = $GLOBALS['url'].'?page='.$page_name;
+	$tmp_cookie = $GLOBALS['tmp_cookie'];
+	curl_setopt($curl, CURLOPT_URL, $url);
+	curl_setopt($curl, CURLOPT_POST, true);
+	curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+	curl_setopt($curl, CURLOPT_POSTFIELDS, $datapost);
+	curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+	curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+	curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+
+	$page = curl_exec($curl);
+	if (!$page) {
+		die("Could not get page: $url\n");
+	}
+	curl_close($curl);
+	return "$page";
+}
+
+/*
+ * Here starts this script: it simulates the behavior of the user
+ * submitting forms to generate the database file.
+ * Note this simulation was made for the MediaWiki version 1.19.0;
+ * we can't assume it works with other versions.
+ *
+ */
+
+$page = get();
+if (!preg_match('/input type="hidden" value="([0-9]+)" name="LanguageRequestTime"/',
+		$page, $matches)) {
+	echo "Unexpected content for page downloaded:\n";
+	echo "$page";
+	die;
+};
+$timestamp = $matches[1];
+$language = "LanguageRequestTime=$timestamp&uselang=en&ContLang=en";
+$page = submit('Language', $language);
+
+submit('Welcome');
+
+$db_config = 'DBType=sqlite';
+$db_config = $db_config.'&sqlite_wgSQLiteDataDir='.$db_dir;
+$db_config = $db_config.'&sqlite_wgDBname='.$argv[1];
+submit('DBConnect', $db_config);
+
+$wiki_config = 'config_wgSitename=TEST';
+$wiki_config = $wiki_config.'&config__NamespaceType=site-name';
+$wiki_config = $wiki_config.'&config_wgMetaNamespace=MyWiki';
+$wiki_config = $wiki_config.'&config__AdminName='.$login;
+
+$wiki_config = $wiki_config.'&config__AdminPassword='.$pass;
+$wiki_config = $wiki_config.'&config__AdminPassword2='.$pass;
+
+$wiki_config = $wiki_config.'&wiki__configEmail=email%40email.org';
+$wiki_config = $wiki_config.'&config__SkipOptional=skip';
+submit('Name', $wiki_config);
+submit('Install');
+submit('Install');
+
+unlink($tmp_cookie);
+?>
diff --git a/contrib/mw-to-git/t/push-pull-tests.sh b/contrib/mw-to-git/t/push-pull-tests.sh
new file mode 100644
index 0000000000..9da2dc5ff0
--- /dev/null
+++ b/contrib/mw-to-git/t/push-pull-tests.sh
@@ -0,0 +1,144 @@
+test_push_pull () {
+
+	test_expect_success 'Git pull works after adding a new wiki page' '
+		wiki_reset &&
+
+		git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+		wiki_editpage Foo "page created after the git clone" false &&
+
+		(
+			cd mw_dir_1 &&
+			git pull
+		) &&
+
+		wiki_getallpage ref_page_1 &&
+		test_diff_directories mw_dir_1 ref_page_1
+	'
+
+	test_expect_success 'Git pull works after editing a wiki page' '
+		wiki_reset &&
+
+		wiki_editpage Foo "page created before the git clone" false &&
+		git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+		wiki_editpage Foo "new line added on the wiki" true &&
+
+		(
+			cd mw_dir_2 &&
+			git pull
+		) &&
+
+		wiki_getallpage ref_page_2 &&
+		test_diff_directories mw_dir_2 ref_page_2
+	'
+
+	test_expect_success 'git pull works on conflict handled by auto-merge' '
+		wiki_reset &&
+
+		wiki_editpage Foo "1 init
+3
+5
+	" false &&
+		git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+
+		wiki_editpage Foo "1 init
+2 content added on wiki after clone
+3
+5
+" false &&
+
+		(
+			cd mw_dir_3 &&
+		echo "1 init
+3
+4 content added on git after clone
+5
+" >Foo.mw &&
+			git commit -am "conflicting change on foo" &&
+			git pull &&
+			git push
+		)
+	'
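+	# The tests below all follow the same pattern: wiki_reset puts the
+	# wiki back in a known state, content is created on the wiki and/or
+	# in a clone, the push or pull under test runs, and most tests then
+	# compare the clone against a reference checkout obtained with
+	# wiki_getallpage, using test_diff_directories.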
test_expect_success 'Git push works after adding a file .mw' ' + wiki_reset && + git clone mediawiki::'"$WIKI_URL"' mw_dir_4 && + wiki_getallpage ref_page_4 && + ( + cd mw_dir_4 && + test_path_is_missing Foo.mw && + touch Foo.mw && + echo "hello world" >>Foo.mw && + git add Foo.mw && + git commit -m "Foo" && + git push + ) && + wiki_getallpage ref_page_4 && + test_diff_directories mw_dir_4 ref_page_4 + ' + + test_expect_success 'Git push works after editing a file .mw' ' + wiki_reset && + wiki_editpage "Foo" "page created before the git clone" false && + git clone mediawiki::'"$WIKI_URL"' mw_dir_5 && + + ( + cd mw_dir_5 && + echo "new line added in the file Foo.mw" >>Foo.mw && + git commit -am "edit file Foo.mw" && + git push + ) && + + wiki_getallpage ref_page_5 && + test_diff_directories mw_dir_5 ref_page_5 + ' + + test_expect_failure 'Git push works after deleting a file' ' + wiki_reset && + wiki_editpage Foo "wiki page added before git clone" false && + git clone mediawiki::'"$WIKI_URL"' mw_dir_6 && + + ( + cd mw_dir_6 && + git rm Foo.mw && + git commit -am "page Foo.mw deleted" && + git push + ) && + + test_must_fail wiki_page_exist Foo + ' + + test_expect_success 'Merge conflict expected and solving it' ' + wiki_reset && + + git clone mediawiki::'"$WIKI_URL"' mw_dir_7 && + wiki_editpage Foo "1 conflict +3 wiki +4" false && + + ( + cd mw_dir_7 && + echo "1 conflict +2 git +4" >Foo.mw && + git add Foo.mw && + git commit -m "conflict created" && + test_must_fail git pull && + "$PERL_PATH" -pi -e "s/[<=>].*//g" Foo.mw && + git commit -am "merge conflict solved" && + git push + ) + ' + + test_expect_failure 'git pull works after deleting a wiki page' ' + wiki_reset && + wiki_editpage Foo "wiki page added before the git clone" false && + git clone mediawiki::'"$WIKI_URL"' mw_dir_8 && + + wiki_delete_page Foo && + ( + cd mw_dir_8 && + git pull && + test_path_is_missing Foo.mw + ) + ' +} diff --git a/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh new file mode 100755 index 0000000000..811a90c9ae --- /dev/null +++ b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh @@ -0,0 +1,257 @@ +#!/bin/sh +# +# Copyright (C) 2012 +# Charles Roussel <charles.roussel@ensimag.imag.fr> +# Simon Cathebras <simon.cathebras@ensimag.imag.fr> +# Julien Khayat <julien.khayat@ensimag.imag.fr> +# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr> +# Simon Perrat <simon.perrat@ensimag.imag.fr> +# +# License: GPL v2 or later + + +test_description='Test the Git Mediawiki remote helper: git clone' + +. ./test-gitmw-lib.sh +. 
$TEST_DIRECTORY/test-lib.sh + + +test_check_precond + + +test_expect_success 'Git clone creates the expected git log with one file' ' + wiki_reset && + wiki_editpage foo "this is not important" false -c cat -s "this must be the same" && + git clone mediawiki::'"$WIKI_URL"' mw_dir_1 && + ( + cd mw_dir_1 && + git log --format=%s HEAD^..HEAD >log.tmp + ) && + echo "this must be the same" >msg.tmp && + diff -b mw_dir_1/log.tmp msg.tmp +' + + +test_expect_success 'Git clone creates the expected git log with multiple files' ' + wiki_reset && + wiki_editpage daddy "this is not important" false -s="this must be the same" && + wiki_editpage daddy "neither is this" true -s="this must also be the same" && + wiki_editpage daddy "neither is this" true -s="same same same" && + wiki_editpage dj "dont care" false -s="identical" && + wiki_editpage dj "dont care either" true -s="identical too" && + git clone mediawiki::'"$WIKI_URL"' mw_dir_2 && + ( + cd mw_dir_2 && + git log --format=%s Daddy.mw >logDaddy.tmp && + git log --format=%s Dj.mw >logDj.tmp + ) && + echo "same same same" >msgDaddy.tmp && + echo "this must also be the same" >>msgDaddy.tmp && + echo "this must be the same" >>msgDaddy.tmp && + echo "identical too" >msgDj.tmp && + echo "identical" >>msgDj.tmp && + diff -b mw_dir_2/logDaddy.tmp msgDaddy.tmp && + diff -b mw_dir_2/logDj.tmp msgDj.tmp +' + + +test_expect_success 'Git clone creates only Main_Page.mw with an empty wiki' ' + wiki_reset && + git clone mediawiki::'"$WIKI_URL"' mw_dir_3 && + test_contains_N_files mw_dir_3 1 && + test_path_is_file mw_dir_3/Main_Page.mw +' + +test_expect_success 'Git clone does not fetch a deleted page' ' + wiki_reset && + wiki_editpage foo "this page must be deleted before the clone" false && + wiki_delete_page foo && + git clone mediawiki::'"$WIKI_URL"' mw_dir_4 && + test_contains_N_files mw_dir_4 1 && + test_path_is_file mw_dir_4/Main_Page.mw && + test_path_is_missing mw_dir_4/Foo.mw +' + +test_expect_success 'Git clone works with page added' ' + wiki_reset && + wiki_editpage foo " I will be cloned" false && + wiki_editpage bar "I will be cloned" false && + git clone mediawiki::'"$WIKI_URL"' mw_dir_5 && + wiki_getallpage ref_page_5 && + test_diff_directories mw_dir_5 ref_page_5 && + wiki_delete_page foo && + wiki_delete_page bar +' + +test_expect_success 'Git clone works with an edited page ' ' + wiki_reset && + wiki_editpage foo "this page will be edited" \ + false -s "first edition of page foo"&& + wiki_editpage foo "this page has been edited and must be on the clone " true && + git clone mediawiki::'"$WIKI_URL"' mw_dir_6 && + test_path_is_file mw_dir_6/Foo.mw && + test_path_is_file mw_dir_6/Main_Page.mw && + wiki_getallpage mw_dir_6/page_ref_6 && + test_diff_directories mw_dir_6 mw_dir_6/page_ref_6 && + ( + cd mw_dir_6 && + git log --format=%s HEAD^ Foo.mw > ../Foo.log + ) && + echo "first edition of page foo" > FooExpect.log && + diff FooExpect.log Foo.log +' + + +test_expect_success 'Git clone works with several pages and some deleted ' ' + wiki_reset && + wiki_editpage foo "this page will not be deleted" false && + wiki_editpage bar "I must not be erased" false && + wiki_editpage namnam "I will not be there at the end" false && + wiki_editpage nyancat "nyan nyan nyan delete me" false && + wiki_delete_page namnam && + wiki_delete_page nyancat && + git clone mediawiki::'"$WIKI_URL"' mw_dir_7 && + test_path_is_file mw_dir_7/Foo.mw && + test_path_is_file mw_dir_7/Bar.mw && + test_path_is_missing mw_dir_7/Namnam.mw && + test_path_is_missing 
mw_dir_7/Nyancat.mw &&
+	wiki_getallpage mw_dir_7/page_ref_7 &&
+	test_diff_directories mw_dir_7 mw_dir_7/page_ref_7
+'
+
+
+test_expect_success 'Git clone works with one specific page cloned ' '
+	wiki_reset &&
+	wiki_editpage foo "I will not be cloned" false &&
+	wiki_editpage bar "Do not clone me" false &&
+	wiki_editpage namnam "I will be cloned :)" false -s="this log must stay" &&
+	wiki_editpage nyancat "nyan nyan nyan you cant clone me" false &&
+	git clone -c remote.origin.pages=namnam \
+		mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+	test_contains_N_files mw_dir_8 1 &&
+	test_path_is_file mw_dir_8/Namnam.mw &&
+	test_path_is_missing mw_dir_8/Main_Page.mw &&
+	(
+		cd mw_dir_8 &&
+		echo "this log must stay" >msg.tmp &&
+		git log --format=%s >log.tmp &&
+		diff -b msg.tmp log.tmp
+	) &&
+	wiki_check_content mw_dir_8/Namnam.mw Namnam
+'
+
+test_expect_success 'Git clone works with multiple specific pages cloned ' '
+	wiki_reset &&
+	wiki_editpage foo "I will be there" false &&
+	wiki_editpage bar "I will not disappear" false &&
+	wiki_editpage namnam "I will be erased" false &&
+	wiki_editpage nyancat "nyan nyan nyan you will not erase me" false &&
+	wiki_delete_page namnam &&
+	git clone -c remote.origin.pages="foo bar nyancat namnam" \
+		mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+	test_contains_N_files mw_dir_9 3 &&
+	test_path_is_missing mw_dir_9/Namnam.mw &&
+	test_path_is_file mw_dir_9/Foo.mw &&
+	test_path_is_file mw_dir_9/Nyancat.mw &&
+	test_path_is_file mw_dir_9/Bar.mw &&
+	wiki_check_content mw_dir_9/Foo.mw Foo &&
+	wiki_check_content mw_dir_9/Bar.mw Bar &&
+	wiki_check_content mw_dir_9/Nyancat.mw Nyancat
+'
+
+test_expect_success 'Mediawiki-clone of several specific pages on wiki' '
+	wiki_reset &&
+	wiki_editpage foo "foo 1" false &&
+	wiki_editpage bar "bar 1" false &&
+	wiki_editpage dummy "dummy 1" false &&
+	wiki_editpage cloned_1 "cloned_1 1" false &&
+	wiki_editpage cloned_2 "cloned_2 2" false &&
+	wiki_editpage cloned_3 "cloned_3 3" false &&
+	mkdir -p ref_page_10 &&
+	wiki_getpage cloned_1 ref_page_10 &&
+	wiki_getpage cloned_2 ref_page_10 &&
+	wiki_getpage cloned_3 ref_page_10 &&
+	git clone -c remote.origin.pages="cloned_1 cloned_2 cloned_3" \
+		mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+	test_diff_directories mw_dir_10 ref_page_10
+'
+
+test_expect_success 'Git clone works with the shallow option' '
+	wiki_reset &&
+	wiki_editpage foo "1st revision, should be cloned" false &&
+	wiki_editpage bar "1st revision, should be cloned" false &&
+	wiki_editpage nyan "1st revision, should not be cloned" false &&
+	wiki_editpage nyan "2nd revision, should be cloned" false &&
+	git -c remote.origin.shallow=true clone \
+		mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+	test_contains_N_files mw_dir_11 4 &&
+	test_path_is_file mw_dir_11/Nyan.mw &&
+	test_path_is_file mw_dir_11/Foo.mw &&
+	test_path_is_file mw_dir_11/Bar.mw &&
+	test_path_is_file mw_dir_11/Main_Page.mw &&
+	(
+		cd mw_dir_11 &&
+		test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
+		test `git log --oneline Foo.mw | wc -l` -eq 1 &&
+		test `git log --oneline Bar.mw | wc -l` -eq 1 &&
+		test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+	) &&
+	wiki_check_content mw_dir_11/Nyan.mw Nyan &&
+	wiki_check_content mw_dir_11/Foo.mw Foo &&
+	wiki_check_content mw_dir_11/Bar.mw Bar &&
+	wiki_check_content mw_dir_11/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Git clone works with the shallow option with a deleted page' '
+	wiki_reset &&
+	wiki_editpage foo "1st revision, will be deleted" false &&
+	wiki_editpage bar "1st revision, should be cloned"
+
+test_expect_success 'Git clone works with the shallow option with a deleted page' '
+	wiki_reset &&
+	wiki_editpage foo "1st revision, will be deleted" false &&
+	wiki_editpage bar "1st revision, should be cloned" false &&
+	wiki_editpage nyan "1st revision, should not be cloned" false &&
+	wiki_editpage nyan "2nd revision, should be cloned" false &&
+	wiki_delete_page foo &&
+	git -c remote.origin.shallow=true clone \
+		mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+	test_contains_N_files mw_dir_12 3 &&
+	test_path_is_file mw_dir_12/Nyan.mw &&
+	test_path_is_missing mw_dir_12/Foo.mw &&
+	test_path_is_file mw_dir_12/Bar.mw &&
+	test_path_is_file mw_dir_12/Main_Page.mw &&
+	(
+		cd mw_dir_12 &&
+		test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
+		test `git log --oneline Bar.mw | wc -l` -eq 1 &&
+		test `git log --oneline Main_Page.mw | wc -l` -eq 1
+	) &&
+	wiki_check_content mw_dir_12/Nyan.mw Nyan &&
+	wiki_check_content mw_dir_12/Bar.mw Bar &&
+	wiki_check_content mw_dir_12/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Test of fetching a category' '
+	wiki_reset &&
+	wiki_editpage Foo "I will be cloned" false -c=Category &&
+	wiki_editpage Bar "Meet me on the repository" false -c=Category &&
+	wiki_editpage Dummy "I will not come" false &&
+	wiki_editpage BarWrong "I will stay online only" false -c=NotCategory &&
+	git clone -c remote.origin.categories="Category" \
+		mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+	wiki_getallpage ref_page_13 Category &&
+	test_diff_directories mw_dir_13 ref_page_13
+'
+
+test_expect_success 'Test of resistance to modification of category on wiki for clone' '
+	wiki_reset &&
+	wiki_editpage Tobedeleted "this page will be deleted" false -c=Catone &&
+	wiki_editpage Tobeedited "this page will be modified" false -c=Catone &&
+	wiki_editpage Normalone "this page will not be modified and will be on git" false -c=Catone &&
+	wiki_editpage Notconsidered "this page will not appear on local" false &&
+	wiki_editpage Othercategory "this page will not appear on local" false -c=Cattwo &&
+	wiki_editpage Tobeedited "this page has been modified" true -c=Catone &&
+	wiki_delete_page Tobedeleted &&
+	git clone -c remote.origin.categories="Catone" \
+		mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+	wiki_getallpage ref_page_14 Catone &&
+	test_diff_directories mw_dir_14 ref_page_14
+'
+
+test_done
diff --git a/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh b/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
new file mode 100755
index 0000000000..9ea201459b
--- /dev/null
+++ b/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. 
$TEST_DIRECTORY/test-lib.sh + +test_check_precond + +test_push_pull + +test_done diff --git a/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh new file mode 100755 index 0000000000..b6405ce262 --- /dev/null +++ b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh @@ -0,0 +1,347 @@ +#!/bin/sh +# +# Copyright (C) 2012 +# Charles Roussel <charles.roussel@ensimag.imag.fr> +# Simon Cathebras <simon.cathebras@ensimag.imag.fr> +# Julien Khayat <julien.khayat@ensimag.imag.fr> +# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr> +# Simon Perrat <simon.perrat@ensimag.imag.fr> +# +# License: GPL v2 or later + +# tests for git-remote-mediawiki + +test_description='Test git-mediawiki with special characters in filenames' + +. ./test-gitmw-lib.sh +. $TEST_DIRECTORY/test-lib.sh + + +test_check_precond + + +test_expect_success 'Git clone works for a wiki with accents in the page names' ' + wiki_reset && + wiki_editpage féé "This page must be délétéd before clone" false && + wiki_editpage kèè "This page must be deleted before clone" false && + wiki_editpage hàà "This page must be deleted before clone" false && + wiki_editpage kîî "This page must be deleted before clone" false && + wiki_editpage foo "This page must be deleted before clone" false && + git clone mediawiki::'"$WIKI_URL"' mw_dir_1 && + wiki_getallpage ref_page_1 && + test_diff_directories mw_dir_1 ref_page_1 +' + + +test_expect_success 'Git pull works with a wiki with accents in the pages names' ' + wiki_reset && + wiki_editpage kîî "this page must be cloned" false && + wiki_editpage foo "this page must be cloned" false && + git clone mediawiki::'"$WIKI_URL"' mw_dir_2 && + wiki_editpage éàîôû "This page must be pulled" false && + ( + cd mw_dir_2 && + git pull + ) && + wiki_getallpage ref_page_2 && + test_diff_directories mw_dir_2 ref_page_2 +' + + +test_expect_success 'Cloning a chosen page works with accents' ' + wiki_reset && + wiki_editpage kîî "this page must be cloned" false && + git clone -c remote.origin.pages=kîî \ + mediawiki::'"$WIKI_URL"' mw_dir_3 && + wiki_check_content mw_dir_3/Kîî.mw Kîî && + test_path_is_file mw_dir_3/Kîî.mw && + rm -rf mw_dir_3 +' + + +test_expect_success 'The shallow option works with accents' ' + wiki_reset && + wiki_editpage néoà "1st revision, should not be cloned" false && + wiki_editpage néoà "2nd revision, should be cloned" false && + git -c remote.origin.shallow=true clone \ + mediawiki::'"$WIKI_URL"' mw_dir_4 && + test_contains_N_files mw_dir_4 2 && + test_path_is_file mw_dir_4/Néoà.mw && + test_path_is_file mw_dir_4/Main_Page.mw && + ( + cd mw_dir_4 && + test `git log --oneline Néoà.mw | wc -l` -eq 1 && + test `git log --oneline Main_Page.mw | wc -l ` -eq 1 + ) && + wiki_check_content mw_dir_4/Néoà.mw Néoà && + wiki_check_content mw_dir_4/Main_Page.mw Main_Page +' + + +test_expect_success 'Cloning works when page name first letter has an accent' ' + wiki_reset && + wiki_editpage îî "this page must be cloned" false && + git clone -c remote.origin.pages=îî \ + mediawiki::'"$WIKI_URL"' mw_dir_5 && + test_path_is_file mw_dir_5/Îî.mw && + wiki_check_content mw_dir_5/Îî.mw Îî +' + + +test_expect_success 'Git push works with a wiki with accents' ' + wiki_reset && + wiki_editpage féé "lots of accents : éèàÖ" false && + wiki_editpage foo "this page must be cloned" false && + git clone mediawiki::'"$WIKI_URL"' mw_dir_6 && + ( + cd mw_dir_6 && + echo "A wild Pîkächû appears on the wiki" >Pîkächû.mw && + git add Pîkächû.mw && + git commit -m "A new page appears" && + git push + ) 
&&
+	wiki_getallpage ref_page_6 &&
+	test_diff_directories mw_dir_6 ref_page_6
+'
+
+test_expect_success 'Git clone works with accents and spaces' '
+	wiki_reset &&
+	wiki_editpage "é à î" "this page must be délété before the clone" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+	wiki_getallpage ref_page_7 &&
+	test_diff_directories mw_dir_7 ref_page_7
+'
+
+test_expect_success 'character $ in page name (mw -> git)' '
+	wiki_reset &&
+	wiki_editpage file_\$_foo "expect to be called file_$_foo" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+	test_path_is_file mw_dir_8/File_\$_foo.mw &&
+	wiki_getallpage ref_page_8 &&
+	test_diff_directories mw_dir_8 ref_page_8
+'
+
+
+test_expect_success 'character $ in file name (git -> mw)' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+	(
+		cd mw_dir_9 &&
+		echo "this file is called File_\$_foo.mw" >File_\$_foo.mw &&
+		git add . &&
+		git commit -am "file File_\$_foo.mw" &&
+		git pull &&
+		git push
+	) &&
+	wiki_getallpage ref_page_9 &&
+	test_diff_directories mw_dir_9 ref_page_9
+'
+
+
+test_expect_failure 'capital at the beginning of file names' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+	(
+		cd mw_dir_10 &&
+		echo "my new file foo" >foo.mw &&
+		echo "my new file Foo... Fingers crossed" >Foo.mw &&
+		git add . &&
+		git commit -am "file foo.mw" &&
+		git pull &&
+		git push
+	) &&
+	wiki_getallpage ref_page_10 &&
+	test_diff_directories mw_dir_10 ref_page_10
+'
+
+
+test_expect_failure 'special character at the beginning of file name from mw to git' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+	wiki_editpage {char_1 "expect to be renamed {char_1" false &&
+	wiki_editpage [char_2 "expect to be renamed [char_2" false &&
+	(
+		cd mw_dir_11 &&
+		git pull
+	) &&
+	test_path_is_file mw_dir_11/{char_1 &&
+	test_path_is_file mw_dir_11/[char_2
+'
+
+test_expect_success 'Pull page with title containing ":" other than namespace separator' '
+	wiki_editpage Foo:Bar content false &&
+	(
+		cd mw_dir_11 &&
+		git pull
+	) &&
+	test_path_is_file mw_dir_11/Foo:Bar.mw
+'
+
+test_expect_success 'Push page with title containing ":" other than namespace separator' '
+	(
+		cd mw_dir_11 &&
+		echo content >NotANameSpace:Page.mw &&
+		git add NotANameSpace:Page.mw &&
+		git commit -m "add page with colon" &&
+		git push
+	) &&
+	wiki_page_exist NotANameSpace:Page
+'
+
+test_expect_success 'test of correct formatting for file name from mw to git' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+	wiki_editpage char_%_7b_1 "expect to be renamed char{_1" false &&
+	wiki_editpage char_%_5b_2 "expect to be renamed char[_2" false &&
+	(
+		cd mw_dir_12 &&
+		git pull
+	) &&
+	test_path_is_file mw_dir_12/Char\{_1.mw &&
+	test_path_is_file mw_dir_12/Char\[_2.mw &&
+	wiki_getallpage ref_page_12 &&
+	mv ref_page_12/Char_%_7b_1.mw ref_page_12/Char\{_1.mw &&
+	mv ref_page_12/Char_%_5b_2.mw ref_page_12/Char\[_2.mw &&
+	test_diff_directories mw_dir_12 ref_page_12
+'
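+
+# Note (added for clarity; not part of the original suite): as the
+# tests above and below exercise, a page name containing the escape
+# sequence %_XX (hexadecimal byte value) on the wiki maps to the
+# literal character on the Git side, e.g. Char_%_7b_1 <-> Char{_1.mw
+# and Char_%_5b_2 <-> Char[_2.mw.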
+
+test_expect_failure 'test of correct formatting for file name beginning with special character' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+	(
+		cd mw_dir_13 &&
+		echo "my new file {char_1" >\{char_1.mw &&
+		echo "my new file [char_2" >\[char_2.mw &&
+		git add . &&
+		git commit -am "committing some exotic file name..." &&
+		git push &&
+		git pull
+	) &&
+	wiki_getallpage ref_page_13 &&
+	test_path_is_file ref_page_13/{char_1.mw &&
+	test_path_is_file ref_page_13/[char_2.mw &&
+	test_diff_directories mw_dir_13 ref_page_13
+'
+
+
+test_expect_success 'test of correct formatting for file name from git to mw' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+	(
+		cd mw_dir_14 &&
+		echo "my new file char{_1" >Char\{_1.mw &&
+		echo "my new file char[_2" >Char\[_2.mw &&
+		git add . &&
+		git commit -m "committing some exotic file name..." &&
+		git push
+	) &&
+	wiki_getallpage ref_page_14 &&
+	mv mw_dir_14/Char\{_1.mw mw_dir_14/Char_%_7b_1.mw &&
+	mv mw_dir_14/Char\[_2.mw mw_dir_14/Char_%_5b_2.mw &&
+	test_diff_directories mw_dir_14 ref_page_14
+'
+
+
+test_expect_success 'git clone with /' '
+	wiki_reset &&
+	wiki_editpage \/fo\/o "this is not important" false -c=Deleted &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+	test_path_is_file mw_dir_15/%2Ffo%2Fo.mw &&
+	wiki_check_content mw_dir_15/%2Ffo%2Fo.mw \/fo\/o
+'
+
+
+test_expect_success 'git push with /' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+	echo "I will be on the wiki" >mw_dir_16/%2Ffo%2Fo.mw &&
+	(
+		cd mw_dir_16 &&
+		git add %2Ffo%2Fo.mw &&
+		git commit -m "%2Ffo%2Fo added" &&
+		git push
+	) &&
+	wiki_page_exist \/fo\/o &&
+	wiki_check_content mw_dir_16/%2Ffo%2Fo.mw \/fo\/o
+'
+
+
+test_expect_success 'git clone with \' '
+	wiki_reset &&
+	wiki_editpage \\ko\\o "this is not important" false -c=Deleted &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+	test_path_is_file mw_dir_17/\\ko\\o.mw &&
+	wiki_check_content mw_dir_17/\\ko\\o.mw \\ko\\o
+'
+
+
+test_expect_success 'git push with \' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_18 &&
+	echo "I will be on the wiki" >mw_dir_18/\\ko\\o.mw &&
+	(
+		cd mw_dir_18 &&
+		git add \\ko\\o.mw &&
+		git commit -m "\\ko\\o added" &&
+		git push
+	) &&
+	wiki_page_exist \\ko\\o &&
+	wiki_check_content mw_dir_18/\\ko\\o.mw \\ko\\o
+'
+
+test_expect_success 'git clone with \ in format control' '
+	wiki_reset &&
+	wiki_editpage \\no\\o "this is not important" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_19 &&
+	test_path_is_file mw_dir_19/\\no\\o.mw &&
+	wiki_check_content mw_dir_19/\\no\\o.mw \\no\\o
+'
+
+
+test_expect_success 'git push with \ in format control' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_20 &&
+	echo "I will be on the wiki" >mw_dir_20/\\fo\\o.mw &&
+	(
+		cd mw_dir_20 &&
+		git add \\fo\\o.mw &&
+		git commit -m "\\fo\\o added" &&
+		git push
+	) &&
+	wiki_page_exist \\fo\\o &&
+	wiki_check_content mw_dir_20/\\fo\\o.mw \\fo\\o
+'
+
+
+test_expect_success 'fast-import meta-characters in page name (mw -> git)' '
+	wiki_reset &&
+	wiki_editpage \"file\"_\\_foo "expect to be called \"file\"_\\_foo" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_21 &&
+	test_path_is_file mw_dir_21/\"file\"_\\_foo.mw &&
+	wiki_getallpage ref_page_21 &&
+	test_diff_directories mw_dir_21 ref_page_21
+'
+
+
+test_expect_success 'fast-import meta-characters in page name (git -> mw)' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_22 &&
+	(
+		cd mw_dir_22 &&
+		echo "this file is called \"file\"_\\_foo.mw" >\"file\"_\\_foo &&
+		git add .
&& + git commit -am "file \"file\"_\\_foo" && + git pull && + git push + ) && + wiki_getallpage ref_page_22 && + test_diff_directories mw_dir_22 ref_page_22 +' + + +test_done diff --git a/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh new file mode 100755 index 0000000000..5a0373935f --- /dev/null +++ b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh @@ -0,0 +1,198 @@ +#!/bin/sh +# +# Copyright (C) 2012 +# Charles Roussel <charles.roussel@ensimag.imag.fr> +# Simon Cathebras <simon.cathebras@ensimag.imag.fr> +# Julien Khayat <julien.khayat@ensimag.imag.fr> +# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr> +# Simon Perrat <simon.perrat@ensimag.imag.fr> +# +# License: GPL v2 or later + +# tests for git-remote-mediawiki + +test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases' + +. ./test-gitmw-lib.sh +. $TEST_DIRECTORY/test-lib.sh + + +test_check_precond + + +test_git_reimport () { + git -c remote.origin.dumbPush=true push && + git -c remote.origin.mediaImport=true pull --rebase +} + +# Don't bother with permissions, be administrator by default +test_expect_success 'setup config' ' + git config --global remote.origin.mwLogin WikiAdmin && + git config --global remote.origin.mwPassword AdminPass && + test_might_fail git config --global --unset remote.origin.mediaImport +' + +test_expect_success 'git push can upload media (File:) files' ' + wiki_reset && + git clone mediawiki::'"$WIKI_URL"' mw_dir && + ( + cd mw_dir && + echo "hello world" >Foo.txt && + git add Foo.txt && + git commit -m "add a text file" && + git push && + "$PERL_PATH" -e "print STDOUT \"binary content: \".chr(255);" >Foo.txt && + git add Foo.txt && + git commit -m "add a text file with binary content" && + git push + ) +' + +test_expect_success 'git clone works on previously created wiki with media files' ' + test_when_finished "rm -rf mw_dir mw_dir_clone" && + git clone -c remote.origin.mediaimport=true \ + mediawiki::'"$WIKI_URL"' mw_dir_clone && + test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt && + (cd mw_dir_clone && git checkout HEAD^) && + (cd mw_dir && git checkout HEAD^) && + test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt +' + +test_expect_success 'git push & pull work with locally renamed media files' ' + wiki_reset && + git clone mediawiki::'"$WIKI_URL"' mw_dir && + test_when_finished "rm -fr mw_dir" && + ( + cd mw_dir && + echo "A File" >Foo.txt && + git add Foo.txt && + git commit -m "add a file" && + git mv Foo.txt Bar.txt && + git commit -m "Rename a file" && + test_git_reimport && + echo "A File" >expect && + test_cmp expect Bar.txt && + test_path_is_missing Foo.txt + ) +' + +test_expect_success 'git push can propagate local page deletion' ' + wiki_reset && + git clone mediawiki::'"$WIKI_URL"' mw_dir && + test_when_finished "rm -fr mw_dir" && + ( + cd mw_dir && + test_path_is_missing Foo.mw && + echo "hello world" >Foo.mw && + git add Foo.mw && + git commit -m "Add the page Foo" && + git push && + rm -f Foo.mw && + git commit -am "Delete the page Foo" && + test_git_reimport && + test_path_is_missing Foo.mw + ) +' + +test_expect_success 'git push can propagate local media file deletion' ' + wiki_reset && + git clone mediawiki::'"$WIKI_URL"' mw_dir && + test_when_finished "rm -fr mw_dir" && + ( + cd mw_dir && + echo "hello world" >Foo.txt && + git add Foo.txt && + git commit -m "Add the text file Foo" && + git rm Foo.txt && + git commit -m "Delete the file Foo" && + test_git_reimport && + test_path_is_missing 
Foo.txt
+	)
+'
+
+# test failure: the file is correctly uploaded and then deleted, but
+# as no page links to it, the import (which looks at page revisions)
+# doesn't notice the file deletion on the wiki. We fetch the list of
+# files from the wiki, but as the file is deleted, it doesn't appear.
+test_expect_failure 'git pull correctly imports media file deletion when no page links to it' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+	test_when_finished "rm -fr mw_dir" &&
+	(
+		cd mw_dir &&
+		echo "hello world" >Foo.txt &&
+		git add Foo.txt &&
+		git commit -m "Add the text file Foo" &&
+		git push &&
+		git rm Foo.txt &&
+		git commit -m "Delete the file Foo" &&
+		test_git_reimport &&
+		test_path_is_missing Foo.txt
+	)
+'
+
+test_expect_success 'git push properly warns about insufficient permissions' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+	test_when_finished "rm -fr mw_dir" &&
+	(
+		cd mw_dir &&
+		echo "A File" >foo.forbidden &&
+		git add foo.forbidden &&
+		git commit -m "add a file" &&
+		git push 2>actual &&
+		test_i18ngrep "foo.forbidden is not a permitted file" actual
+	)
+'
+
+test_expect_success 'setup a repository with media files' '
+	wiki_reset &&
+	wiki_editpage testpage "I am linking a file [[File:File.txt]]" false &&
+	echo "File content" >File.txt &&
+	wiki_upload_file File.txt &&
+	echo "Another file content" >AnotherFile.txt &&
+	wiki_upload_file AnotherFile.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=true' '
+	git clone -c remote.origin.pages=testpage \
+		-c remote.origin.mediaimport=true \
+		mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+	test_when_finished "rm -rf mw_dir_15" &&
+	test_contains_N_files mw_dir_15 3 &&
+	test_path_is_file mw_dir_15/Testpage.mw &&
+	test_path_is_file mw_dir_15/File:File.txt.mw &&
+	test_path_is_file mw_dir_15/File.txt &&
+	test_path_is_missing mw_dir_15/Main_Page.mw &&
+	test_path_is_missing mw_dir_15/File:AnotherFile.txt.mw &&
+	test_path_is_missing mw_dir_15/AnotherFile.txt &&
+	wiki_check_content mw_dir_15/Testpage.mw Testpage &&
+	test_cmp mw_dir_15/File.txt File.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=false' '
+	test_when_finished "rm -rf mw_dir_16" &&
+	git clone -c remote.origin.pages=testpage \
+		mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+	test_contains_N_files mw_dir_16 1 &&
+	test_path_is_file mw_dir_16/Testpage.mw &&
+	test_path_is_missing mw_dir_16/File:File.txt.mw &&
+	test_path_is_missing mw_dir_16/File.txt &&
+	test_path_is_missing mw_dir_16/Main_Page.mw &&
+	wiki_check_content mw_dir_16/Testpage.mw Testpage
+'
+
+# should behave like mediaimport=false
+test_expect_success 'git clone works with one specific page cloned and mediaimport unset' '
+	test_when_finished "rm -fr mw_dir_17" &&
+	git clone -c remote.origin.pages=testpage \
+		mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+	test_contains_N_files mw_dir_17 1 &&
+	test_path_is_file mw_dir_17/Testpage.mw &&
+	test_path_is_missing mw_dir_17/File:File.txt.mw &&
+	test_path_is_missing mw_dir_17/File.txt &&
+	test_path_is_missing mw_dir_17/Main_Page.mw &&
+	wiki_check_content mw_dir_17/Testpage.mw Testpage
+'
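+
+# Note (added for clarity; not part of the original suite): the
+# dumbPush and mediaImport options exercised by test_git_reimport
+# above can also be set persistently instead of per command, e.g.:
+#
+#	git config remote.origin.dumbPush true
+#	git config remote.origin.mediaImport true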
+
+test_done
diff --git a/contrib/mw-to-git/t/t9364-pull-by-rev.sh b/contrib/mw-to-git/t/t9364-pull-by-rev.sh
new file mode 100755
index 0000000000..5c22457a0b
--- /dev/null
+++ b/contrib/mw-to-git/t/t9364-pull-by-rev.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+test_description='Test the Git Mediawiki remote helper: git pull by revision'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_expect_success 'configuration' '
+	git config --global mediawiki.fetchStrategy by_rev
+'
+
+test_push_pull
+
+test_done
diff --git a/contrib/mw-to-git/t/test-gitmw-lib.sh b/contrib/mw-to-git/t/test-gitmw-lib.sh
new file mode 100755
index 0000000000..3b2cfacf51
--- /dev/null
+++ b/contrib/mw-to-git/t/test-gitmw-lib.sh
@@ -0,0 +1,435 @@
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+# License: GPL v2 or later
+
+#
+# CONFIGURATION VARIABLES
+# You might want to change these
+#
+
+. ./test.config
+
+WIKI_URL=http://"$SERVER_ADDR:$PORT/$WIKI_DIR_NAME"
+CURR_DIR=$(pwd)
+TEST_OUTPUT_DIRECTORY=$(pwd)
+TEST_DIRECTORY="$CURR_DIR"/../../../t
+
+export TEST_OUTPUT_DIRECTORY TEST_DIRECTORY CURR_DIR
+
+if test "$LIGHTTPD" = "false" ; then
+	PORT=80
+else
+	WIKI_DIR_INST="$CURR_DIR/$WEB_WWW"
+fi
+
+wiki_upload_file () {
+	"$CURR_DIR"/test-gitmw.pl upload_file "$@"
+}
+
+wiki_getpage () {
+	"$CURR_DIR"/test-gitmw.pl get_page "$@"
+}
+
+wiki_delete_page () {
+	"$CURR_DIR"/test-gitmw.pl delete_page "$@"
+}
+
+wiki_editpage () {
+	"$CURR_DIR"/test-gitmw.pl edit_page "$@"
+}
+
+die () {
+	die_with_status 1 "$@"
+}
+
+die_with_status () {
+	status=$1
+	shift
+	echo >&2 "$*"
+	exit "$status"
+}
+
+
+# Check the preconditions to run git-remote-mediawiki's tests
+test_check_precond () {
+	if ! test_have_prereq PERL
+	then
+		skip_all='skipping gateway git-mw tests, perl not available'
+		test_done
+	fi
+
+	if [ ! -f "$GIT_BUILD_DIR"/git-remote-mediawiki ];
+	then
+		echo "No git-remote-mediawiki found in $GIT_BUILD_DIR. Linking the one from contrib:"
+		echo "ln -s $GIT_BUILD_DIR/contrib/mw-to-git/git-remote-mediawiki $GIT_BUILD_DIR/"
+		ln -s "$GIT_BUILD_DIR"/contrib/mw-to-git/git-remote-mediawiki "$GIT_BUILD_DIR"
+	fi
+
+	if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ];
+	then
+		skip_all='skipping gateway git-mw tests, no mediawiki found'
+		test_done
+	fi
+}
+
+# test_diff_directories <dir_git> <dir_wiki>
+#
+# Compare the contents of directories <dir_git> and <dir_wiki> with
+# diff, and error out if they do not match. The comparison does not
+# look into .git.
+# Warning: the first argument MUST be the directory containing the git data
test_diff_directories () {
+	rm -rf "$1_tmp"
+	mkdir -p "$1_tmp"
+	cp "$1"/*.mw "$1_tmp"
+	diff -r -b "$1_tmp" "$2"
+}
+
+# $1=<dir>
+# $2=<N>
+#
+# Check that <dir> contains exactly <N> files
+test_contains_N_files () {
+	if test `ls -- "$1" | wc -l` -ne "$2"; then
+		echo "directory $1 should contain $2 files"
+		echo "it contains these files:"
+		ls "$1"
+		false
+	fi
+}
+
+
+# wiki_check_content <file_name> <page_name>
+#
+# Compare the contents of the file <file_name> and the wiki page
+# <page_name> and exit with error 1 if they do not match.
+wiki_check_content () {
+	mkdir -p wiki_tmp
+	wiki_getpage "$2" wiki_tmp
+	# replacement of forbidden character in file name
+	page_name=$(printf "%s\n" "$2" | sed -e "s/\//%2F/g")
+
+	diff -b "$1" wiki_tmp/"$page_name".mw
+	if test $? -ne 0
+	then
+		rm -rf wiki_tmp
+		error "ERROR: wiki page $2 does not match $1"
+	fi
+	rm -rf wiki_tmp
+}
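+
+# Example (added for illustration; not part of the original library):
+# a minimal test composed from the helpers above, using a hypothetical
+# page "foo" and clone directory "mw_dir", could look like:
+#
+#	test_expect_success 'page round-trip' '
+#		wiki_reset &&
+#		wiki_editpage foo "some content" false &&
+#		git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+#		wiki_check_content mw_dir/Foo.mw Foo
+#	'
+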
+# wiki_page_exist <page_name>
+#
+# Check the existence of the page <page_name> on the wiki and exit
+# with an error if it is absent.
+wiki_page_exist () {
+	mkdir -p wiki_tmp
+	wiki_getpage "$1" wiki_tmp
+	page_name=$(printf "%s\n" "$1" | sed "s/\//%2F/g")
+	if test -f wiki_tmp/"$page_name".mw; then
+		rm -rf wiki_tmp
+	else
+		rm -rf wiki_tmp
+		error "test failed: page $1 not found on wiki"
+	fi
+}
+
+# wiki_getallpagename
+#
+# Fetch the name of each page on the wiki.
+wiki_getallpagename () {
+	"$CURR_DIR"/test-gitmw.pl getallpagename
+}
+
+# wiki_getallpagecategory <category>
+#
+# Fetch the name of each page belonging to <category> on the wiki.
+wiki_getallpagecategory () {
+	"$CURR_DIR"/test-gitmw.pl getallpagename "$@"
+}
+
+# wiki_getallpage <dest_dir> [<category>]
+#
+# Fetch all the pages from the wiki and place them in the directory
+# <dest_dir>.
+# If <category> is defined, wiki_getallpage fetches only the pages
+# belonging to <category>.
+wiki_getallpage () {
+	if test -z "$2";
+	then
+		wiki_getallpagename
+	else
+		wiki_getallpagecategory "$2"
+	fi
+	mkdir -p "$1"
+	while read -r line; do
+		wiki_getpage "$line" "$1"
+	done < all.txt
+}
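+
+# Example (added for illustration; not part of the original library):
+# fetching every page of a hypothetical category "Foo" into a
+# reference directory and comparing it against a clone, as several
+# tests in t936*.sh do:
+#
+#	wiki_getallpage ref_dir Foo &&
+#	test_diff_directories mw_dir ref_dir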
+
+# ================= Install part =================
+
+error () {
+	echo "$@" >&2
+	exit 1
+}
+
+# config_lighttpd
+#
+# Create the configuration files and the folders necessary to start lighttpd.
+# Overwrite any existing file.
+config_lighttpd () {
+	mkdir -p $WEB
+	mkdir -p $WEB_TMP
+	mkdir -p $WEB_WWW
+	cat > $WEB/lighttpd.conf <<EOF
+	server.document-root = "$CURR_DIR/$WEB_WWW"
+	server.port = $PORT
+	server.pid-file = "$CURR_DIR/$WEB_TMP/pid"
+
+	server.modules = (
+	"mod_rewrite",
+	"mod_redirect",
+	"mod_access",
+	"mod_accesslog",
+	"mod_fastcgi"
+	)
+
+	index-file.names = ("index.php" , "index.html")
+
+	mimetype.assign = (
+	".pdf" => "application/pdf",
+	".sig" => "application/pgp-signature",
+	".spl" => "application/futuresplash",
+	".class" => "application/octet-stream",
+	".ps" => "application/postscript",
+	".torrent" => "application/x-bittorrent",
+	".dvi" => "application/x-dvi",
+	".gz" => "application/x-gzip",
+	".pac" => "application/x-ns-proxy-autoconfig",
+	".swf" => "application/x-shockwave-flash",
+	".tar.gz" => "application/x-tgz",
+	".tgz" => "application/x-tgz",
+	".tar" => "application/x-tar",
+	".zip" => "application/zip",
+	".mp3" => "audio/mpeg",
+	".m3u" => "audio/x-mpegurl",
+	".wma" => "audio/x-ms-wma",
+	".wax" => "audio/x-ms-wax",
+	".ogg" => "application/ogg",
+	".wav" => "audio/x-wav",
+	".gif" => "image/gif",
+	".jpg" => "image/jpeg",
+	".jpeg" => "image/jpeg",
+	".png" => "image/png",
+	".xbm" => "image/x-xbitmap",
+	".xpm" => "image/x-xpixmap",
+	".xwd" => "image/x-xwindowdump",
+	".css" => "text/css",
+	".html" => "text/html",
+	".htm" => "text/html",
+	".js" => "text/javascript",
+	".asc" => "text/plain",
+	".c" => "text/plain",
+	".cpp" => "text/plain",
+	".log" => "text/plain",
+	".conf" => "text/plain",
+	".text" => "text/plain",
+	".txt" => "text/plain",
+	".dtd" => "text/xml",
+	".xml" => "text/xml",
+	".mpeg" => "video/mpeg",
+	".mpg" => "video/mpeg",
+	".mov" => "video/quicktime",
+	".qt" => "video/quicktime",
+	".avi" => "video/x-msvideo",
+	".asf" => "video/x-ms-asf",
+	".asx" => "video/x-ms-asf",
+	".wmv" => "video/x-ms-wmv",
+	".bz2" => "application/x-bzip",
+	".tbz" => "application/x-bzip-compressed-tar",
+	".tar.bz2" => "application/x-bzip-compressed-tar",
+	"" => "text/plain"
+	)
+
+	fastcgi.server = ( ".php" =>
+	("localhost" =>
+	( "socket" => "$CURR_DIR/$WEB_TMP/php.socket",
+	"bin-path" => "$PHP_DIR/php-cgi -c $CURR_DIR/$WEB/php.ini"
+
+	)
+	)
+	)
+EOF
+
+	cat > $WEB/php.ini <<EOF
+	session.save_path = '$CURR_DIR/$WEB_TMP'
+EOF
+}
+
+# start_lighttpd
+#
+# Start or restart the lighttpd daemon.
+# On restart, the configuration files are rewritten.
+start_lighttpd () {
+	if test -f "$WEB_TMP/pid"; then
+		echo "Instance already running. Restarting..."
+		stop_lighttpd
+	fi
+	config_lighttpd
+	"$LIGHTTPD_DIR"/lighttpd -f "$WEB"/lighttpd.conf
+
+	if test $? -ne 0 ; then
+		echo "Could not start the HTTP daemon lighttpd"
+		exit 1
+	fi
+}
+
+# stop_lighttpd
+#
+# Kill the lighttpd daemon and remove the associated files and folders.
+stop_lighttpd () {
+	test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid")
+	rm -rf "$WEB"
+}
+
+# Create the SQLite database of the MediaWiki. If the database file already
+# exists, it will be deleted.
+# This should be run from the directory where $FILES_FOLDER is located.
+create_db () {
+	rm -f "$TMP/$DB_FILE"
+
+	echo "Generating the SQLite database file. It can take some time ..."
+	# Run the php script to generate the SQLite database file
+	# with cURL calls.
+	php "$FILES_FOLDER/$DB_INSTALL_SCRIPT" $(basename "$DB_FILE" .sqlite) \
+		"$WIKI_ADMIN" "$WIKI_PASSW" "$TMP" "$PORT"
+
+	if [ ! -f "$TMP/$DB_FILE" ] ; then
+		error "Can't create database file $TMP/$DB_FILE. Try to run ./install-wiki.sh delete first."
+	fi
+
+	# Copy the generated database file into the directory the
+	# user indicated.
+	cp "$TMP/$DB_FILE" "$FILES_FOLDER" ||
+		error "Unable to copy $TMP/$DB_FILE to $FILES_FOLDER"
+}
+
+# Install a wiki in your web server directory.
+wiki_install () {
+	if test "$LIGHTTPD" = "true" ; then
+		start_lighttpd
+	fi
+
+	SERVER_ADDR=$SERVER_ADDR:$PORT
+	# In this part, we change directory to $TMP in order to download,
+	# unpack and copy the files of MediaWiki
+	(
+	mkdir -p "$WIKI_DIR_INST/$WIKI_DIR_NAME"
+	if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ] ; then
+		error "Folder $WIKI_DIR_INST/$WIKI_DIR_NAME doesn't exist.
+		Please create it and launch the script again."
+	fi
+
+	# Fetch MediaWiki's archive if not already present in the TMP directory
+	cd "$TMP"
+	if [ ! -f "$MW_VERSION.tar.gz" ] ; then
+		echo "Downloading $MW_VERSION sources ..."
+		wget "http://download.wikimedia.org/mediawiki/1.19/$MW_VERSION.tar.gz" ||
+			error "Unable to download "\
+				"http://download.wikimedia.org/mediawiki/1.19/"\
+				"$MW_VERSION.tar.gz. "\
+				"Please fix your connection and launch the script again."
+		echo "$MW_VERSION.tar.gz downloaded in `pwd`. "\
+			"You can delete it later if you want."
+	else
+		echo "Reusing existing $MW_VERSION.tar.gz downloaded in `pwd`."
+	fi
+	archive_abs_path=$(pwd)/"$MW_VERSION.tar.gz"
+	cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
+		error "can't cd to $WIKI_DIR_INST/$WIKI_DIR_NAME/"
+	tar xzf "$archive_abs_path" --strip-components=1 ||
+		error "Unable to extract MediaWiki's files from $archive_abs_path to "\
+			"$WIKI_DIR_INST/$WIKI_DIR_NAME"
+	) || exit 1
+
+	create_db
+
+	# Copy the generic LocalSettings.php in the web server's directory
+	# And modify parameters according to the ones set at the top
+	# of this script.
+	# Note that LocalSettings.php is never modified.
+	if [ ! -f "$FILES_FOLDER/LocalSettings.php" ] ; then
+		error "Can't find $FILES_FOLDER/LocalSettings.php " \
+			"in the current folder. "\
+			"Please run the script inside its folder."
+ fi + cp "$FILES_FOLDER/LocalSettings.php" \ + "$FILES_FOLDER/LocalSettings-tmp.php" || + error "Unable to copy $FILES_FOLDER/LocalSettings.php " \ + "to $FILES_FOLDER/LocalSettings-tmp.php" + + # Parse and set the LocalSettings file of the user according to the + # CONFIGURATION VARIABLES section at the beginning of this script + file_swap="$FILES_FOLDER/LocalSettings-swap.php" + sed "s,@WG_SCRIPT_PATH@,/$WIKI_DIR_NAME," \ + "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap" + mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php" + sed "s,@WG_SERVER@,http://$SERVER_ADDR," \ + "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap" + mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php" + sed "s,@WG_SQLITE_DATADIR@,$TMP," \ + "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap" + mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php" + sed "s,@WG_SQLITE_DATAFILE@,$( basename $DB_FILE .sqlite)," \ + "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap" + mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php" + + mv "$FILES_FOLDER/LocalSettings-tmp.php" \ + "$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php" || + error "Unable to move $FILES_FOLDER/LocalSettings-tmp.php" \ + "in $WIKI_DIR_INST/$WIKI_DIR_NAME" + echo "File $FILES_FOLDER/LocalSettings.php is set in" \ + " $WIKI_DIR_INST/$WIKI_DIR_NAME" + + echo "Your wiki has been installed. You can check it at + http://$SERVER_ADDR/$WIKI_DIR_NAME" +} + +# Reset the database of the wiki and the password of the admin +# +# Warning: This function must be called only in a subdirectory of t/ directory +wiki_reset () { + # Copy initial database of the wiki + if [ ! -f "../$FILES_FOLDER/$DB_FILE" ] ; then + error "Can't find ../$FILES_FOLDER/$DB_FILE in the current folder." + fi + cp "../$FILES_FOLDER/$DB_FILE" "$TMP" || + error "Can't copy ../$FILES_FOLDER/$DB_FILE in $TMP" + echo "File $FILES_FOLDER/$DB_FILE is set in $TMP" +} + +# Delete the wiki created in the web server's directory and all its content +# saved in the database. +wiki_delete () { + if test $LIGHTTPD = "true"; then + stop_lighttpd + else + # Delete the wiki's directory. + rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" || + error "Wiki's directory $WIKI_DIR_INST/" \ + "$WIKI_DIR_NAME could not be deleted" + # Delete the wiki's SQLite database. + rm -f "$TMP/$DB_FILE" || + error "Database $TMP/$DB_FILE could not be deleted." + fi + + # Delete the wiki's SQLite database + rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted." + rm -f "$FILES_FOLDER/$DB_FILE" + rm -rf "$TMP/$MW_VERSION" +} diff --git a/contrib/mw-to-git/t/test-gitmw.pl b/contrib/mw-to-git/t/test-gitmw.pl new file mode 100755 index 0000000000..0ff76259fa --- /dev/null +++ b/contrib/mw-to-git/t/test-gitmw.pl @@ -0,0 +1,225 @@ +#!/usr/bin/perl -w -s +# Copyright (C) 2012 +# Charles Roussel <charles.roussel@ensimag.imag.fr> +# Simon Cathebras <simon.cathebras@ensimag.imag.fr> +# Julien Khayat <julien.khayat@ensimag.imag.fr> +# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr> +# Simon Perrat <simon.perrat@ensimag.imag.fr> +# License: GPL v2 or later + +# Usage: +# ./test-gitmw.pl <command> [argument]* +# Execute in terminal using the name of the function to call as first +# parameter, and the function's arguments as following parameters +# +# Example: +# ./test-gitmw.pl "get_page" foo . 
+# will call <wiki_getpage> with arguments <foo> and <.>
+#
+# Available functions are:
+# "get_page"
+# "delete_page"
+# "edit_page"
+# "getallpagename"
+
+use MediaWiki::API;
+use Getopt::Long;
+use encoding 'utf8';
+use DateTime::Format::ISO8601;
+use open ':encoding(utf8)';
+use constant SLASH_REPLACEMENT => "%2F";
+
+# Parsing of the config file
+
+my $configfile = "$ENV{'CURR_DIR'}/test.config";
+my %config;
+open my $CONFIG, "<", $configfile or die "can't open $configfile: $!";
+while (<$CONFIG>)
+{
+	chomp;
+	s/#.*//;
+	s/^\s+//;
+	s/\s+$//;
+	next unless length;
+	my ($key, $value) = split (/\s*=\s*/,$_, 2);
+	$config{$key} = $value;
+	last if ($key eq 'LIGHTTPD' and $value eq 'false');
+	last if ($key eq 'PORT');
+}
+close $CONFIG or die "can't close $configfile: $!";
+
+my $wiki_address = "http://$config{'SERVER_ADDR'}".":"."$config{'PORT'}";
+my $wiki_url = "$wiki_address/$config{'WIKI_DIR_NAME'}/api.php";
+my $wiki_admin = "$config{'WIKI_ADMIN'}";
+my $wiki_admin_pass = "$config{'WIKI_PASSW'}";
+my $mw = MediaWiki::API->new;
+$mw->{config}->{api_url} = $wiki_url;
+
+
+# wiki_login <name> <password>
+#
+# Log in as user <name> with password <password> on the wiki
+# referenced by the global variable $mw.
+sub wiki_login {
+	$mw->login( { lgname => "$_[0]",lgpassword => "$_[1]" } )
+	|| die "wiki_login: login failed";
+}
+
+# wiki_getpage <wiki_page> <dest_path>
+#
+# Fetch the page <wiki_page> from the wiki referenced by the global
+# variable $mw and copy its content into the directory <dest_path>.
+sub wiki_getpage {
+	my $pagename = $_[0];
+	my $destdir = $_[1];
+
+	my $page = $mw->get_page( { title => $pagename } );
+	if (!defined($page)) {
+		die "getpage: wiki does not exist";
+	}
+
+	my $content = $page->{'*'};
+	if (!defined($content)) {
+		die "getpage: page does not exist";
+	}
+
+	$pagename=$page->{'title'};
+	# Replace spaces by underscores in the page name
+	$pagename =~ s/ /_/g;
+	$pagename =~ s/\//%2F/g;
+	open(my $file, ">$destdir/$pagename.mw");
+	print $file "$content";
+	close ($file);
+
+}
+
+# wiki_delete_page <page_name>
+#
+# Delete the page with name <page_name> from the wiki referenced
+# by the global variable $mw.
+sub wiki_delete_page {
+	my $pagename = $_[0];
+
+	my $exist=$mw->get_page({title => $pagename});
+
+	if (defined($exist->{'*'})){
+		$mw->edit({ action => 'delete',
+				title => $pagename})
+		|| die $mw->{error}->{code} . ": " . $mw->{error}->{details};
+	} else {
+		die "no page with such name found: $pagename\n";
+	}
+}
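+
+# Examples (added for illustration; not part of the original script),
+# mirroring the wrappers defined in test-gitmw-lib.sh:
+#
+#	./test-gitmw.pl edit_page foo "some content" false -s="a summary"
+#	./test-gitmw.pl delete_page foo
+#	./test-gitmw.pl getallpagename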
+
+# wiki_editpage <wiki_page> <wiki_content> <wiki_append> [-c=<category>] [-s=<summary>]
+#
+# Edit a page named <wiki_page> with content <wiki_content> on the wiki
+# referenced by the global variable $mw.
+# If <wiki_append> is true, append <wiki_content> to the current
+# content of the page <wiki_page>.
+# If <wiki_page> doesn't exist, the page is created with <wiki_content>.
+sub wiki_editpage {
+	my $wiki_page = $_[0];
+	my $wiki_content = $_[1];
+	my $wiki_append = $_[2];
+	my $summary = "";
+	my ($summ, $cat) = ();
+	GetOptions('s=s' => \$summ, 'c=s' => \$cat);
+
+	my $append = 0;
+	if (defined($wiki_append) && $wiki_append eq 'true') {
+		$append=1;
+	}
+
+	my $previous_text ="";
+
+	if ($append) {
+		my $ref = $mw->get_page( { title => $wiki_page } );
+		$previous_text = $ref->{'*'};
+	}
+
+	my $text = $wiki_content;
+	if (defined($previous_text)) {
+		$text="$previous_text$text";
+	}
+
+	# Optionally, add this page to a category.
+	if (defined($cat)) {
+		my $category_name="[[Category:$cat]]";
+		$text="$text\n $category_name";
+	}
+	if (defined($summ)) {
+		$summary=$summ;
+	}
+
+	$mw->edit( { action => 'edit', title => $wiki_page, summary => $summary, text => "$text"} );
+}
+
+# wiki_getallpagename [<category>]
+#
+# Fetch the name of every page on the wiki referenced by the global
+# variable $mw and write the names to the file all.txt, one per line.
+# If the argument <category> is defined, only the pages belonging to
+# <category> are fetched.
+sub wiki_getallpagename {
+	# fetch the pages of the wiki
+	if (defined($_[0])) {
+		my $mw_pages = $mw->list ( { action => 'query',
+			list => 'categorymembers',
+			cmtitle => "Category:$_[0]",
+			cmnamespace => 0,
+			cmlimit => 500 },
+		)
+		|| die $mw->{error}->{code}.": ".$mw->{error}->{details};
+		open(my $file, ">all.txt");
+		foreach my $page (@{$mw_pages}) {
+			print $file "$page->{title}\n";
+		}
+		close ($file);
+
+	} else {
+		my $mw_pages = $mw->list({
+			action => 'query',
+			list => 'allpages',
+			aplimit => 500,
+		})
+		|| die $mw->{error}->{code}.": ".$mw->{error}->{details};
+		open(my $file, ">all.txt");
+		foreach my $page (@{$mw_pages}) {
+			print $file "$page->{title}\n";
+		}
+		close ($file);
+	}
+}
+
+sub wiki_upload_file {
+	my $file_name = $_[0];
+	my $result = $mw->edit ( {
+		action => 'upload',
+		filename => $file_name,
+		comment => 'upload a file',
+		file => [ $file_name ],
+		ignorewarnings=>1,
+	}, {
+		skip_encoding => 1
+	} ) || die $mw->{error}->{code} . ' : ' . $mw->{error}->{details};
+}
+
+
+
+# Main part of this script: parse the command line arguments
+# and select which function to execute
+my $fct_to_call = shift;
+
+wiki_login($wiki_admin, $wiki_admin_pass);
+
+my %functions_to_call = qw(
+	upload_file	wiki_upload_file
+	get_page	wiki_getpage
+	delete_page	wiki_delete_page
+	edit_page	wiki_editpage
+	getallpagename	wiki_getallpagename
+);
+die "$0 ERROR: wrong argument" unless exists $functions_to_call{$fct_to_call};
+&{$functions_to_call{$fct_to_call}}(@ARGV);
diff --git a/contrib/mw-to-git/t/test.config b/contrib/mw-to-git/t/test.config
new file mode 100644
index 0000000000..958b37b4a7
--- /dev/null
+++ b/contrib/mw-to-git/t/test.config
@@ -0,0 +1,35 @@
+# Name of the web server's directory dedicated to the wiki is WIKI_DIR_NAME
+WIKI_DIR_NAME=wiki
+
+# Login and password of the wiki's admin
+WIKI_ADMIN=WikiAdmin
+WIKI_PASSW=AdminPass
+
+# Address of the web server
+SERVER_ADDR=localhost
+
+# SQLite database of the wiki, named DB_FILE, is located in TMP
+TMP=/tmp
+DB_FILE=wikidb.sqlite
+
+# If LIGHTTPD is not set to true, the script will use the default
+# web server running in WIKI_DIR_INST.
+WIKI_DIR_INST=/var/www
+
+# If LIGHTTPD is set to true, the script will use Lighttpd to run
+# the wiki.
+LIGHTTPD=true
+
+# The variables below are useful only if LIGHTTPD is set to true.
+PORT=1234
+PHP_DIR=/usr/bin
+LIGHTTPD_DIR=/usr/sbin
+WEB=WEB
+WEB_TMP=$WEB/tmp
+WEB_WWW=$WEB/www
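+
+# Example (added for illustration; not part of the original file): to
+# reuse an already-running web server instead of lighttpd, one might
+# set:
+#	LIGHTTPD=false
+#	WIKI_DIR_INST=/var/www
+# in which case test-gitmw-lib.sh forces PORT=80.
+
+# The variables below are used by the script to install a wiki.
+# You should not modify these unless you are modifying the script itself.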
+MW_VERSION=mediawiki-1.19.0 +FILES_FOLDER=install-wiki +DB_INSTALL_SCRIPT=db_install.php diff --git a/contrib/p4import/git-p4import.py b/contrib/p4import/git-p4import.py index b6e534b65b..593d6a0682 100644 --- a/contrib/p4import/git-p4import.py +++ b/contrib/p4import/git-p4import.py @@ -14,6 +14,11 @@ import sys import time import getopt +if sys.hexversion < 0x02020000: + # The behavior of the marshal module changed significantly in 2.2 + sys.stderr.write("git-p4import.py: requires Python 2.2 or later.\n") + sys.exit(1) + from signal import signal, \ SIGPIPE, SIGINT, SIG_DFL, \ default_int_handler diff --git a/contrib/remote-helpers/Makefile b/contrib/remote-helpers/Makefile new file mode 100644 index 0000000000..9a76575f78 --- /dev/null +++ b/contrib/remote-helpers/Makefile @@ -0,0 +1,13 @@ +TESTS := $(wildcard test*.sh) + +export T := $(addprefix $(CURDIR)/,$(TESTS)) +export MAKE := $(MAKE) -e +export PATH := $(CURDIR):$(PATH) + +test: + $(MAKE) -C ../../t $@ + +$(TESTS): + $(MAKE) -C ../../t $(CURDIR)/$@ + +.PHONY: $(TESTS) diff --git a/contrib/remote-helpers/git-remote-bzr b/contrib/remote-helpers/git-remote-bzr new file mode 100755 index 0000000000..c5822e4ac9 --- /dev/null +++ b/contrib/remote-helpers/git-remote-bzr @@ -0,0 +1,725 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 Felipe Contreras +# + +# +# Just copy to your ~/bin, or anywhere in your $PATH. +# Then you can clone with: +# % git clone bzr::/path/to/bzr/repo/or/url +# +# For example: +# % git clone bzr::$HOME/myrepo +# or +# % git clone bzr::lp:myrepo +# + +import sys + +import bzrlib +if hasattr(bzrlib, "initialize"): + bzrlib.initialize() + +import bzrlib.plugin +bzrlib.plugin.load_plugins() + +import bzrlib.generate_ids +import bzrlib.transport + +import sys +import os +import json +import re +import StringIO + +NAME_RE = re.compile('^([^<>]+)') +AUTHOR_RE = re.compile('^([^<>]+?)? ?<([^<>]*)>$') +RAW_AUTHOR_RE = re.compile('^(\w+) (.+)? 
<(.*)> (\d+) ([+-]\d+)') + +def die(msg, *args): + sys.stderr.write('ERROR: %s\n' % (msg % args)) + sys.exit(1) + +def warn(msg, *args): + sys.stderr.write('WARNING: %s\n' % (msg % args)) + +def gittz(tz): + return '%+03d%02d' % (tz / 3600, tz % 3600 / 60) + +class Marks: + + def __init__(self, path): + self.path = path + self.tips = {} + self.marks = {} + self.rev_marks = {} + self.last_mark = 0 + self.load() + + def load(self): + if not os.path.exists(self.path): + return + + tmp = json.load(open(self.path)) + self.tips = tmp['tips'] + self.marks = tmp['marks'] + self.last_mark = tmp['last-mark'] + + for rev, mark in self.marks.iteritems(): + self.rev_marks[mark] = rev + + def dict(self): + return { 'tips': self.tips, 'marks': self.marks, 'last-mark' : self.last_mark } + + def store(self): + json.dump(self.dict(), open(self.path, 'w')) + + def __str__(self): + return str(self.dict()) + + def from_rev(self, rev): + return self.marks[rev] + + def to_rev(self, mark): + return self.rev_marks[mark] + + def next_mark(self): + self.last_mark += 1 + return self.last_mark + + def get_mark(self, rev): + self.last_mark += 1 + self.marks[rev] = self.last_mark + return self.last_mark + + def is_marked(self, rev): + return self.marks.has_key(rev) + + def new_mark(self, rev, mark): + self.marks[rev] = mark + self.rev_marks[mark] = rev + self.last_mark = mark + + def get_tip(self, branch): + return self.tips.get(branch, None) + + def set_tip(self, branch, tip): + self.tips[branch] = tip + +class Parser: + + def __init__(self, repo): + self.repo = repo + self.line = self.get_line() + + def get_line(self): + return sys.stdin.readline().strip() + + def __getitem__(self, i): + return self.line.split()[i] + + def check(self, word): + return self.line.startswith(word) + + def each_block(self, separator): + while self.line != separator: + yield self.line + self.line = self.get_line() + + def __iter__(self): + return self.each_block('') + + def next(self): + self.line = self.get_line() + if self.line == 'done': + self.line = None + + def get_mark(self): + i = self.line.index(':') + 1 + return int(self.line[i:]) + + def get_data(self): + if not self.check('data'): + return None + i = self.line.index(' ') + 1 + size = int(self.line[i:]) + return sys.stdin.read(size) + + def get_author(self): + m = RAW_AUTHOR_RE.match(self.line) + if not m: + return None + _, name, email, date, tz = m.groups() + committer = '%s <%s>' % (name, email) + tz = int(tz) + tz = ((tz / 100) * 3600) + ((tz % 100) * 60) + return (committer, int(date), tz) + +def rev_to_mark(rev): + global marks + return marks.from_rev(rev) + +def mark_to_rev(mark): + global marks + return marks.to_rev(mark) + +def fixup_user(user): + name = mail = None + user = user.replace('"', '') + m = AUTHOR_RE.match(user) + if m: + name = m.group(1) + mail = m.group(2).strip() + else: + m = NAME_RE.match(user) + if m: + name = m.group(1).strip() + + return '%s <%s>' % (name, mail) + +def get_filechanges(cur, prev): + modified = {} + removed = {} + + changes = cur.changes_from(prev) + + for path, fid, kind in changes.added: + modified[path] = fid + for path, fid, kind in changes.removed: + removed[path] = None + for path, fid, kind, mod, _ in changes.modified: + modified[path] = fid + for oldpath, newpath, fid, kind, mod, _ in changes.renamed: + removed[oldpath] = None + modified[newpath] = fid + + return modified, removed + +def export_files(tree, files): + global marks, filenodes + + final = [] + for path, fid in files.iteritems(): + kind = tree.kind(fid) + + h = 
tree.get_file_sha1(fid)
+
+        if kind == 'symlink':
+            d = tree.get_symlink_target(fid)
+            mode = '120000'
+        elif kind == 'file':
+
+            if tree.is_executable(fid):
+                mode = '100755'
+            else:
+                mode = '100644'
+
+            # is the blob already exported?
+            if h in filenodes:
+                mark = filenodes[h]
+                final.append((mode, mark, path))
+                continue
+
+            d = tree.get_file_text(fid)
+        elif kind == 'directory':
+            continue
+        else:
+            die("Unhandled kind '%s' for path '%s'" % (kind, path))
+
+        mark = marks.next_mark()
+        filenodes[h] = mark
+
+        print "blob"
+        print "mark :%u" % mark
+        print "data %d" % len(d)
+        print d
+
+        final.append((mode, mark, path))
+
+    return final
+
+def export_branch(branch, name):
+    global prefix, dirname
+
+    ref = '%s/heads/%s' % (prefix, name)
+    tip = marks.get_tip(name)
+
+    repo = branch.repository
+    repo.lock_read()
+    revs = branch.iter_merge_sorted_revisions(None, tip, 'exclude', 'forward')
+    count = 0
+
+    revs = [revid for revid, _, _, _ in revs if not marks.is_marked(revid)]
+
+    for revid in revs:
+
+        rev = repo.get_revision(revid)
+
+        parents = rev.parent_ids
+        time = rev.timestamp
+        tz = rev.timezone
+        committer = rev.committer.encode('utf-8')
+        committer = "%s %u %s" % (fixup_user(committer), time, gittz(tz))
+        author = committer
+        msg = rev.message.encode('utf-8')
+
+        msg += '\n'
+
+        if len(parents) == 0:
+            parent = bzrlib.revision.NULL_REVISION
+        else:
+            parent = parents[0]
+
+        cur_tree = repo.revision_tree(revid)
+        prev = repo.revision_tree(parent)
+        modified, removed = get_filechanges(cur_tree, prev)
+
+        modified_final = export_files(cur_tree, modified)
+
+        if len(parents) == 0:
+            print 'reset %s' % ref
+
+        print "commit %s" % ref
+        print "mark :%d" % (marks.get_mark(revid))
+        print "author %s" % (author)
+        print "committer %s" % (committer)
+        print "data %d" % (len(msg))
+        print msg
+
+        for i, p in enumerate(parents):
+            try:
+                m = rev_to_mark(p)
+            except KeyError:
+                # ghost?
+ continue + if i == 0: + print "from :%s" % m + else: + print "merge :%s" % m + + for f in modified_final: + print "M %s :%u %s" % f + for f in removed: + print "D %s" % (f) + print + + count += 1 + if (count % 100 == 0): + print "progress revision %s (%d/%d)" % (revid, count, len(revs)) + print "#############################################################" + + repo.unlock() + + revid = branch.last_revision() + + # make sure the ref is updated + print "reset %s" % ref + print "from :%u" % rev_to_mark(revid) + print + + marks.set_tip(name, revid) + +def export_tag(repo, name): + global tags + try: + print "reset refs/tags/%s" % name + print "from :%u" % rev_to_mark(tags[name]) + print + except KeyError: + warn("TODO: fetch tag '%s'" % name) + +def do_import(parser): + global dirname + + branch = parser.repo + path = os.path.join(dirname, 'marks-git') + + print "feature done" + if os.path.exists(path): + print "feature import-marks=%s" % path + print "feature export-marks=%s" % path + sys.stdout.flush() + + while parser.check('import'): + ref = parser[1] + if ref.startswith('refs/heads/'): + name = ref[len('refs/heads/'):] + export_branch(branch, name) + if ref.startswith('refs/tags/'): + name = ref[len('refs/tags/'):] + export_tag(branch, name) + parser.next() + + print 'done' + + sys.stdout.flush() + +def parse_blob(parser): + global blob_marks + + parser.next() + mark = parser.get_mark() + parser.next() + data = parser.get_data() + blob_marks[mark] = data + parser.next() + +class CustomTree(): + + def __init__(self, repo, revid, parents, files): + global files_cache + + self.repo = repo + self.revid = revid + self.parents = parents + self.updates = {} + + def copy_tree(revid): + files = files_cache[revid] = {} + tree = repo.repository.revision_tree(revid) + repo.lock_read() + try: + for path, entry in tree.iter_entries_by_dir(): + files[path] = entry.file_id + finally: + repo.unlock() + return files + + if len(parents) == 0: + self.base_id = bzrlib.revision.NULL_REVISION + self.base_files = {} + else: + self.base_id = parents[0] + self.base_files = files_cache.get(self.base_id, None) + if not self.base_files: + self.base_files = copy_tree(self.base_id) + + self.files = files_cache[revid] = self.base_files.copy() + + for path, f in files.iteritems(): + fid = self.files.get(path, None) + if not fid: + fid = bzrlib.generate_ids.gen_file_id(path) + f['path'] = path + self.updates[fid] = f + + def last_revision(self): + return self.base_id + + def iter_changes(self): + changes = [] + + def get_parent(dirname, basename): + parent_fid = self.base_files.get(dirname, None) + if parent_fid: + return parent_fid + parent_fid = self.files.get(dirname, None) + if parent_fid: + return parent_fid + if basename == '': + return None + fid = bzrlib.generate_ids.gen_file_id(path) + d = add_entry(fid, dirname, 'directory') + return fid + + def add_entry(fid, path, kind, mode = None): + dirname, basename = os.path.split(path) + parent_fid = get_parent(dirname, basename) + + executable = False + if mode == '100755': + executable = True + elif mode == '120000': + kind = 'symlink' + + change = (fid, + (None, path), + True, + (False, True), + (None, parent_fid), + (None, basename), + (None, kind), + (None, executable)) + self.files[path] = change[0] + changes.append(change) + return change + + def update_entry(fid, path, kind, mode = None): + dirname, basename = os.path.split(path) + parent_fid = get_parent(dirname, basename) + + executable = False + if mode == '100755': + executable = True + elif mode == 
'120000': + kind = 'symlink' + + change = (fid, + (path, path), + True, + (True, True), + (None, parent_fid), + (None, basename), + (None, kind), + (None, executable)) + self.files[path] = change[0] + changes.append(change) + return change + + def remove_entry(fid, path, kind): + dirname, basename = os.path.split(path) + parent_fid = get_parent(dirname, basename) + change = (fid, + (path, None), + True, + (True, False), + (parent_fid, None), + (None, None), + (None, None), + (None, None)) + del self.files[path] + changes.append(change) + return change + + for fid, f in self.updates.iteritems(): + path = f['path'] + + if 'deleted' in f: + remove_entry(fid, path, 'file') + continue + + if path in self.base_files: + update_entry(fid, path, 'file', f['mode']) + else: + add_entry(fid, path, 'file', f['mode']) + + return changes + + def get_file_with_stat(self, file_id, path=None): + return (StringIO.StringIO(self.updates[file_id]['data']), None) + + def get_symlink_target(self, file_id): + return self.updates[file_id]['data'] + +def parse_commit(parser): + global marks, blob_marks, bmarks, parsed_refs + global mode + + parents = [] + + ref = parser[1] + parser.next() + + if ref != 'refs/heads/master': + die("bzr doesn't support multiple branches; use 'master'") + + commit_mark = parser.get_mark() + parser.next() + author = parser.get_author() + parser.next() + committer = parser.get_author() + parser.next() + data = parser.get_data() + parser.next() + if parser.check('from'): + parents.append(parser.get_mark()) + parser.next() + while parser.check('merge'): + parents.append(parser.get_mark()) + parser.next() + + files = {} + + for line in parser: + if parser.check('M'): + t, m, mark_ref, path = line.split(' ', 3) + mark = int(mark_ref[1:]) + f = { 'mode' : m, 'data' : blob_marks[mark] } + elif parser.check('D'): + t, path = line.split(' ') + f = { 'deleted' : True } + else: + die('Unknown file command: %s' % line) + files[path] = f + + repo = parser.repo + + committer, date, tz = committer + parents = [str(mark_to_rev(p)) for p in parents] + revid = bzrlib.generate_ids.gen_revision_id(committer, date) + props = {} + props['branch-nick'] = repo.nick + + mtree = CustomTree(repo, revid, parents, files) + changes = mtree.iter_changes() + + repo.lock_write() + try: + builder = repo.get_commit_builder(parents, None, date, tz, committer, props, revid) + try: + list(builder.record_iter_changes(mtree, mtree.last_revision(), changes)) + builder.finish_inventory() + builder.commit(data.decode('utf-8', 'replace')) + except Exception, e: + builder.abort() + raise + finally: + repo.unlock() + + parsed_refs[ref] = revid + marks.new_mark(revid, commit_mark) + +def parse_reset(parser): + global parsed_refs + + ref = parser[1] + parser.next() + + if ref != 'refs/heads/master': + die("bzr doesn't support multiple branches; use 'master'") + + # ugh + if parser.check('commit'): + parse_commit(parser) + return + if not parser.check('from'): + return + from_mark = parser.get_mark() + parser.next() + + parsed_refs[ref] = mark_to_rev(from_mark) + +def do_export(parser): + global parsed_refs, dirname, peer + + parser.next() + + for line in parser.each_block('done'): + if parser.check('blob'): + parse_blob(parser) + elif parser.check('commit'): + parse_commit(parser) + elif parser.check('reset'): + parse_reset(parser) + elif parser.check('tag'): + pass + elif parser.check('feature'): + pass + else: + die('unhandled export command: %s' % line) + + repo = parser.repo + + for ref, revid in parsed_refs.iteritems(): + if ref 
== 'refs/heads/master': + repo.generate_revision_history(revid, marks.get_tip('master')) + revno, revid = repo.last_revision_info() + if peer: + if hasattr(peer, "import_last_revision_info_and_tags"): + peer.import_last_revision_info_and_tags(repo, revno, revid) + else: + peer.import_last_revision_info(repo.repository, revno, revid) + wt = peer.bzrdir.open_workingtree() + else: + wt = repo.bzrdir.open_workingtree() + wt.update() + print "ok %s" % ref + print + +def do_capabilities(parser): + global dirname + + print "import" + print "export" + print "refspec refs/heads/*:%s/heads/*" % prefix + + path = os.path.join(dirname, 'marks-git') + + if os.path.exists(path): + print "*import-marks %s" % path + print "*export-marks %s" % path + + print + +def do_list(parser): + global tags + print "? refs/heads/%s" % 'master' + for tag, revid in parser.repo.tags.get_tag_dict().items(): + print "? refs/tags/%s" % tag + tags[tag] = revid + print "@refs/heads/%s HEAD" % 'master' + print + +def get_repo(url, alias): + global dirname, peer + + origin = bzrlib.bzrdir.BzrDir.open(url) + branch = origin.open_branch() + + if not isinstance(origin.transport, bzrlib.transport.local.LocalTransport): + clone_path = os.path.join(dirname, 'clone') + remote_branch = branch + if os.path.exists(clone_path): + # pull + d = bzrlib.bzrdir.BzrDir.open(clone_path) + branch = d.open_branch() + result = branch.pull(remote_branch, [], None, False) + else: + # clone + d = origin.sprout(clone_path, None, + hardlink=True, create_tree_if_local=False, + source_branch=remote_branch) + branch = d.open_branch() + branch.bind(remote_branch) + + peer = remote_branch + else: + peer = None + + return branch + +def main(args): + global marks, prefix, dirname + global tags, filenodes + global blob_marks + global parsed_refs + global files_cache + + alias = args[1] + url = args[2] + + prefix = 'refs/bzr/%s' % alias + tags = {} + filenodes = {} + blob_marks = {} + parsed_refs = {} + files_cache = {} + + gitdir = os.environ['GIT_DIR'] + dirname = os.path.join(gitdir, 'bzr', alias) + + if not os.path.exists(dirname): + os.makedirs(dirname) + + repo = get_repo(url, alias) + + marks_path = os.path.join(dirname, 'marks-int') + marks = Marks(marks_path) + + parser = Parser(repo) + for line in parser: + if parser.check('capabilities'): + do_capabilities(parser) + elif parser.check('list'): + do_list(parser) + elif parser.check('import'): + do_import(parser) + elif parser.check('export'): + do_export(parser) + else: + die('unhandled command: %s' % line) + sys.stdout.flush() + + marks.store() + +sys.exit(main(sys.argv)) diff --git a/contrib/remote-helpers/git-remote-hg b/contrib/remote-helpers/git-remote-hg new file mode 100755 index 0000000000..328c2dc76d --- /dev/null +++ b/contrib/remote-helpers/git-remote-hg @@ -0,0 +1,805 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 Felipe Contreras +# + +# Inspired by Rocco Rutte's hg-fast-export + +# Just copy to your ~/bin, or anywhere in your $PATH. +# Then you can clone with: +# git clone hg::/path/to/mercurial/repo/ + +from mercurial import hg, ui, bookmarks, context, util, encoding + +import re +import sys +import os +import json +import shutil +import subprocess +import urllib + +# +# If you want to switch to hg-git compatibility mode: +# git config --global remote-hg.hg-git-compat true +# +# git: +# Sensible defaults for git. +# hg bookmarks are exported as git branches, hg branches are prefixed +# with 'branches/', HEAD is a special case. +# +# hg: +# Emulate hg-git. 
+# Only hg bookmarks are exported as git branches. +# Commits are modified to preserve hg information and allow bidirectionality. +# + +NAME_RE = re.compile('^([^<>]+)') +AUTHOR_RE = re.compile('^([^<>]+?)? ?<([^<>]*)>$') +AUTHOR_HG_RE = re.compile('^(.*?) ?<(.*?)(?:>(.+)?)?$') +RAW_AUTHOR_RE = re.compile('^(\w+) (?:(.+)? )?<(.*)> (\d+) ([+-]\d+)') + +def die(msg, *args): + sys.stderr.write('ERROR: %s\n' % (msg % args)) + sys.exit(1) + +def warn(msg, *args): + sys.stderr.write('WARNING: %s\n' % (msg % args)) + +def gitmode(flags): + return 'l' in flags and '120000' or 'x' in flags and '100755' or '100644' + +def gittz(tz): + return '%+03d%02d' % (-tz / 3600, -tz % 3600 / 60) + +def hgmode(mode): + m = { '100755': 'x', '120000': 'l' } + return m.get(mode, '') + +def get_config(config): + cmd = ['git', 'config', '--get', config] + process = subprocess.Popen(cmd, stdout=subprocess.PIPE) + output, _ = process.communicate() + return output + +class Marks: + + def __init__(self, path): + self.path = path + self.tips = {} + self.marks = {} + self.rev_marks = {} + self.last_mark = 0 + + self.load() + + def load(self): + if not os.path.exists(self.path): + return + + tmp = json.load(open(self.path)) + + self.tips = tmp['tips'] + self.marks = tmp['marks'] + self.last_mark = tmp['last-mark'] + + for rev, mark in self.marks.iteritems(): + self.rev_marks[mark] = int(rev) + + def dict(self): + return { 'tips': self.tips, 'marks': self.marks, 'last-mark' : self.last_mark } + + def store(self): + json.dump(self.dict(), open(self.path, 'w')) + + def __str__(self): + return str(self.dict()) + + def from_rev(self, rev): + return self.marks[str(rev)] + + def to_rev(self, mark): + return self.rev_marks[mark] + + def get_mark(self, rev): + self.last_mark += 1 + self.marks[str(rev)] = self.last_mark + return self.last_mark + + def new_mark(self, rev, mark): + self.marks[str(rev)] = mark + self.rev_marks[mark] = rev + self.last_mark = mark + + def is_marked(self, rev): + return self.marks.has_key(str(rev)) + + def get_tip(self, branch): + return self.tips.get(branch, 0) + + def set_tip(self, branch, tip): + self.tips[branch] = tip + +class Parser: + + def __init__(self, repo): + self.repo = repo + self.line = self.get_line() + + def get_line(self): + return sys.stdin.readline().strip() + + def __getitem__(self, i): + return self.line.split()[i] + + def check(self, word): + return self.line.startswith(word) + + def each_block(self, separator): + while self.line != separator: + yield self.line + self.line = self.get_line() + + def __iter__(self): + return self.each_block('') + + def next(self): + self.line = self.get_line() + if self.line == 'done': + self.line = None + + def get_mark(self): + i = self.line.index(':') + 1 + return int(self.line[i:]) + + def get_data(self): + if not self.check('data'): + return None + i = self.line.index(' ') + 1 + size = int(self.line[i:]) + return sys.stdin.read(size) + + def get_author(self): + global bad_mail + + ex = None + m = RAW_AUTHOR_RE.match(self.line) + if not m: + return None + _, name, email, date, tz = m.groups() + if name and 'ext:' in name: + m = re.match('^(.+?) 
ext:\((.+)\)$', name) + if m: + name = m.group(1) + ex = urllib.unquote(m.group(2)) + + if email != bad_mail: + if name: + user = '%s <%s>' % (name, email) + else: + user = '<%s>' % (email) + else: + user = name + + if ex: + user += ex + + tz = int(tz) + tz = ((tz / 100) * 3600) + ((tz % 100) * 60) + return (user, int(date), -tz) + +def export_file(fc): + d = fc.data() + print "M %s inline %s" % (gitmode(fc.flags()), fc.path()) + print "data %d" % len(d) + print d + +def get_filechanges(repo, ctx, parent): + modified = set() + added = set() + removed = set() + + cur = ctx.manifest() + prev = repo[parent].manifest().copy() + + for fn in cur: + if fn in prev: + if (cur.flags(fn) != prev.flags(fn) or cur[fn] != prev[fn]): + modified.add(fn) + del prev[fn] + else: + added.add(fn) + removed |= set(prev.keys()) + + return added | modified, removed + +def fixup_user_git(user): + name = mail = None + user = user.replace('"', '') + m = AUTHOR_RE.match(user) + if m: + name = m.group(1) + mail = m.group(2).strip() + else: + m = NAME_RE.match(user) + if m: + name = m.group(1).strip() + return (name, mail) + +def fixup_user_hg(user): + def sanitize(name): + # stole this from hg-git + return re.sub('[<>\n]', '?', name.lstrip('< ').rstrip('> ')) + + m = AUTHOR_HG_RE.match(user) + if m: + name = sanitize(m.group(1)) + mail = sanitize(m.group(2)) + ex = m.group(3) + if ex: + name += ' ext:(' + urllib.quote(ex) + ')' + else: + name = sanitize(user) + if '@' in user: + mail = name + else: + mail = None + + return (name, mail) + +def fixup_user(user): + global mode, bad_mail + + if mode == 'git': + name, mail = fixup_user_git(user) + else: + name, mail = fixup_user_hg(user) + + if not name: + name = bad_name + if not mail: + mail = bad_mail + + return '%s <%s>' % (name, mail) + +def get_repo(url, alias): + global dirname, peer + + myui = ui.ui() + myui.setconfig('ui', 'interactive', 'off') + + if hg.islocal(url): + repo = hg.repository(myui, url) + else: + local_path = os.path.join(dirname, 'clone') + if not os.path.exists(local_path): + peer, dstpeer = hg.clone(myui, {}, url, local_path, update=False, pull=True) + repo = dstpeer.local() + else: + repo = hg.repository(myui, local_path) + peer = hg.peer(myui, {}, url) + repo.pull(peer, heads=None, force=True) + + return repo + +def rev_to_mark(rev): + global marks + return marks.from_rev(rev) + +def mark_to_rev(mark): + global marks + return marks.to_rev(mark) + +def export_ref(repo, name, kind, head): + global prefix, marks, mode + + ename = '%s/%s' % (kind, name) + tip = marks.get_tip(ename) + + # mercurial takes too much time checking this + if tip and tip == head.rev(): + # nothing to do + return + revs = xrange(tip, head.rev() + 1) + count = 0 + + revs = [rev for rev in revs if not marks.is_marked(rev)] + + for rev in revs: + + c = repo[rev] + (manifest, user, (time, tz), files, desc, extra) = repo.changelog.read(c.node()) + rev_branch = extra['branch'] + + author = "%s %d %s" % (fixup_user(user), time, gittz(tz)) + if 'committer' in extra: + user, time, tz = extra['committer'].rsplit(' ', 2) + committer = "%s %s %s" % (user, time, gittz(int(tz))) + else: + committer = author + + parents = [p for p in repo.changelog.parentrevs(rev) if p >= 0] + + if len(parents) == 0: + modified = c.manifest().keys() + removed = [] + else: + modified, removed = get_filechanges(repo, c, parents[0]) + + if mode == 'hg': + extra_msg = '' + + if rev_branch != 'default': + extra_msg += 'branch : %s\n' % rev_branch + + renames = [] + for f in c.files(): + if f not in 
c.manifest(): + continue + rename = c.filectx(f).renamed() + if rename: + renames.append((rename[0], f)) + + for e in renames: + extra_msg += "rename : %s => %s\n" % e + + for key, value in extra.iteritems(): + if key in ('author', 'committer', 'encoding', 'message', 'branch', 'hg-git'): + continue + else: + extra_msg += "extra : %s : %s\n" % (key, urllib.quote(value)) + + desc += '\n' + if extra_msg: + desc += '\n--HG--\n' + extra_msg + + if len(parents) == 0 and rev: + print 'reset %s/%s' % (prefix, ename) + + print "commit %s/%s" % (prefix, ename) + print "mark :%d" % (marks.get_mark(rev)) + print "author %s" % (author) + print "committer %s" % (committer) + print "data %d" % (len(desc)) + print desc + + if len(parents) > 0: + print "from :%s" % (rev_to_mark(parents[0])) + if len(parents) > 1: + print "merge :%s" % (rev_to_mark(parents[1])) + + for f in modified: + export_file(c.filectx(f)) + for f in removed: + print "D %s" % (f) + print + + count += 1 + if (count % 100 == 0): + print "progress revision %d '%s' (%d/%d)" % (rev, name, count, len(revs)) + print "#############################################################" + + # make sure the ref is updated + print "reset %s/%s" % (prefix, ename) + print "from :%u" % rev_to_mark(rev) + print + + marks.set_tip(ename, rev) + +def export_tag(repo, tag): + export_ref(repo, tag, 'tags', repo[tag]) + +def export_bookmark(repo, bmark): + head = bmarks[bmark] + export_ref(repo, bmark, 'bookmarks', head) + +def export_branch(repo, branch): + tip = get_branch_tip(repo, branch) + head = repo[tip] + export_ref(repo, branch, 'branches', head) + +def export_head(repo): + global g_head + export_ref(repo, g_head[0], 'bookmarks', g_head[1]) + +def do_capabilities(parser): + global prefix, dirname + + print "import" + print "export" + print "refspec refs/heads/branches/*:%s/branches/*" % prefix + print "refspec refs/heads/*:%s/bookmarks/*" % prefix + print "refspec refs/tags/*:%s/tags/*" % prefix + + path = os.path.join(dirname, 'marks-git') + + if os.path.exists(path): + print "*import-marks %s" % path + print "*export-marks %s" % path + + print + +def get_branch_tip(repo, branch): + global branches + + heads = branches.get(branch, None) + if not heads: + return None + + # verify there's only one head + if (len(heads) > 1): + warn("Branch '%s' has more than one head, consider merging" % branch) + # older versions of mercurial don't have this + if hasattr(repo, "branchtip"): + return repo.branchtip(branch) + + return heads[0] + +def list_head(repo, cur): + global g_head, bmarks + + head = bookmarks.readcurrent(repo) + if head: + node = repo[head] + else: + # fake bookmark from current branch + head = cur + node = repo['.'] + if not node: + node = repo['tip'] + if not node: + return + if head == 'default': + head = 'master' + bmarks[head] = node + + print "@refs/heads/%s HEAD" % head + g_head = (head, node) + +def do_list(parser): + global branches, bmarks, mode, track_branches + + repo = parser.repo + for bmark, node in bookmarks.listbookmarks(repo).iteritems(): + bmarks[bmark] = repo[node] + + cur = repo.dirstate.branch() + + list_head(repo, cur) + + if track_branches: + for branch in repo.branchmap(): + heads = repo.branchheads(branch) + if len(heads): + branches[branch] = heads + + for branch in branches: + print "? refs/heads/branches/%s" % branch + + for bmark in bmarks: + print "? refs/heads/%s" % bmark + + for tag, node in repo.tagslist(): + if tag == 'tip': + continue + print "? 
refs/tags/%s" % tag + + print + +def do_import(parser): + repo = parser.repo + + path = os.path.join(dirname, 'marks-git') + + print "feature done" + if os.path.exists(path): + print "feature import-marks=%s" % path + print "feature export-marks=%s" % path + sys.stdout.flush() + + tmp = encoding.encoding + encoding.encoding = 'utf-8' + + # lets get all the import lines + while parser.check('import'): + ref = parser[1] + + if (ref == 'HEAD'): + export_head(repo) + elif ref.startswith('refs/heads/branches/'): + branch = ref[len('refs/heads/branches/'):] + export_branch(repo, branch) + elif ref.startswith('refs/heads/'): + bmark = ref[len('refs/heads/'):] + export_bookmark(repo, bmark) + elif ref.startswith('refs/tags/'): + tag = ref[len('refs/tags/'):] + export_tag(repo, tag) + + parser.next() + + encoding.encoding = tmp + + print 'done' + +def parse_blob(parser): + global blob_marks + + parser.next() + mark = parser.get_mark() + parser.next() + data = parser.get_data() + blob_marks[mark] = data + parser.next() + return + +def get_merge_files(repo, p1, p2, files): + for e in repo[p1].files(): + if e not in files: + if e not in repo[p1].manifest(): + continue + f = { 'ctx' : repo[p1][e] } + files[e] = f + +def parse_commit(parser): + global marks, blob_marks, bmarks, parsed_refs + global mode + + from_mark = merge_mark = None + + ref = parser[1] + parser.next() + + commit_mark = parser.get_mark() + parser.next() + author = parser.get_author() + parser.next() + committer = parser.get_author() + parser.next() + data = parser.get_data() + parser.next() + if parser.check('from'): + from_mark = parser.get_mark() + parser.next() + if parser.check('merge'): + merge_mark = parser.get_mark() + parser.next() + if parser.check('merge'): + die('octopus merges are not supported yet') + + files = {} + + for line in parser: + if parser.check('M'): + t, m, mark_ref, path = line.split(' ', 3) + mark = int(mark_ref[1:]) + f = { 'mode' : hgmode(m), 'data' : blob_marks[mark] } + elif parser.check('D'): + t, path = line.split(' ') + f = { 'deleted' : True } + else: + die('Unknown file command: %s' % line) + files[path] = f + + def getfilectx(repo, memctx, f): + of = files[f] + if 'deleted' in of: + raise IOError + if 'ctx' in of: + return of['ctx'] + is_exec = of['mode'] == 'x' + is_link = of['mode'] == 'l' + rename = of.get('rename', None) + return context.memfilectx(f, of['data'], + is_link, is_exec, rename) + + repo = parser.repo + + user, date, tz = author + extra = {} + + if committer != author: + extra['committer'] = "%s %u %u" % committer + + if from_mark: + p1 = repo.changelog.node(mark_to_rev(from_mark)) + else: + p1 = '\0' * 20 + + if merge_mark: + p2 = repo.changelog.node(mark_to_rev(merge_mark)) + else: + p2 = '\0' * 20 + + # + # If files changed from any of the parents, hg wants to know, but in git if + # nothing changed from the first parent, nothing changed. 
+ # + if merge_mark: + get_merge_files(repo, p1, p2, files) + + if mode == 'hg': + i = data.find('\n--HG--\n') + if i >= 0: + tmp = data[i + len('\n--HG--\n'):].strip() + for k, v in [e.split(' : ') for e in tmp.split('\n')]: + if k == 'rename': + old, new = v.split(' => ', 1) + files[new]['rename'] = old + elif k == 'branch': + extra[k] = v + elif k == 'extra': + ek, ev = v.split(' : ', 1) + extra[ek] = urllib.unquote(ev) + data = data[:i] + + ctx = context.memctx(repo, (p1, p2), data, + files.keys(), getfilectx, + user, (date, tz), extra) + + tmp = encoding.encoding + encoding.encoding = 'utf-8' + + node = repo.commitctx(ctx) + + encoding.encoding = tmp + + rev = repo[node].rev() + + parsed_refs[ref] = node + + marks.new_mark(rev, commit_mark) + +def parse_reset(parser): + ref = parser[1] + parser.next() + # ugh + if parser.check('commit'): + parse_commit(parser) + return + if not parser.check('from'): + return + from_mark = parser.get_mark() + parser.next() + + node = parser.repo.changelog.node(mark_to_rev(from_mark)) + parsed_refs[ref] = node + +def parse_tag(parser): + name = parser[1] + parser.next() + from_mark = parser.get_mark() + parser.next() + tagger = parser.get_author() + parser.next() + data = parser.get_data() + parser.next() + + # nothing to do + +def do_export(parser): + global parsed_refs, bmarks, peer + + parser.next() + + for line in parser.each_block('done'): + if parser.check('blob'): + parse_blob(parser) + elif parser.check('commit'): + parse_commit(parser) + elif parser.check('reset'): + parse_reset(parser) + elif parser.check('tag'): + parse_tag(parser) + elif parser.check('feature'): + pass + else: + die('unhandled export command: %s' % line) + + for ref, node in parsed_refs.iteritems(): + if ref.startswith('refs/heads/branches'): + pass + elif ref.startswith('refs/heads/'): + bmark = ref[len('refs/heads/'):] + if bmark in bmarks: + old = bmarks[bmark].hex() + else: + old = '' + if not bookmarks.pushbookmark(parser.repo, bmark, old, node): + continue + elif ref.startswith('refs/tags/'): + tag = ref[len('refs/tags/'):] + parser.repo.tag([tag], node, None, True, None, {}) + else: + # transport-helper/fast-export bugs + continue + print "ok %s" % ref + + print + + if peer: + parser.repo.push(peer, force=False) + +def fix_path(alias, repo, orig_url): + repo_url = util.url(repo.url()) + url = util.url(orig_url) + if str(url) == str(repo_url): + return + cmd = ['git', 'config', 'remote.%s.url' % alias, "hg::%s" % repo_url] + subprocess.call(cmd) + +def main(args): + global prefix, dirname, branches, bmarks + global marks, blob_marks, parsed_refs + global peer, mode, bad_mail, bad_name + global track_branches + + alias = args[1] + url = args[2] + peer = None + + hg_git_compat = False + track_branches = True + try: + if get_config('remote-hg.hg-git-compat') == 'true\n': + hg_git_compat = True + track_branches = False + if get_config('remote-hg.track-branches') == 'false\n': + track_branches = False + except subprocess.CalledProcessError: + pass + + if hg_git_compat: + mode = 'hg' + bad_mail = 'none@none' + bad_name = '' + else: + mode = 'git' + bad_mail = 'unknown' + bad_name = 'Unknown' + + if alias[4:] == url: + is_tmp = True + alias = util.sha1(alias).hexdigest() + else: + is_tmp = False + + gitdir = os.environ['GIT_DIR'] + dirname = os.path.join(gitdir, 'hg', alias) + branches = {} + bmarks = {} + blob_marks = {} + parsed_refs = {} + + repo = get_repo(url, alias) + prefix = 'refs/hg/%s' % alias + + if not is_tmp: + fix_path(alias, peer or repo, url) + + if not 
os.path.exists(dirname): + os.makedirs(dirname) + + marks_path = os.path.join(dirname, 'marks-hg') + marks = Marks(marks_path) + + parser = Parser(repo) + for line in parser: + if parser.check('capabilities'): + do_capabilities(parser) + elif parser.check('list'): + do_list(parser) + elif parser.check('import'): + do_import(parser) + elif parser.check('export'): + do_export(parser) + else: + die('unhandled command: %s' % line) + sys.stdout.flush() + + if not is_tmp: + marks.store() + else: + shutil.rmtree(dirname) + +sys.exit(main(sys.argv)) diff --git a/contrib/remote-helpers/test-bzr.sh b/contrib/remote-helpers/test-bzr.sh new file mode 100755 index 0000000000..70aa8a010a --- /dev/null +++ b/contrib/remote-helpers/test-bzr.sh @@ -0,0 +1,143 @@ +#!/bin/sh +# +# Copyright (c) 2012 Felipe Contreras +# + +test_description='Test remote-bzr' + +. ./test-lib.sh + +if ! test_have_prereq PYTHON; then + skip_all='skipping remote-bzr tests; python not available' + test_done +fi + +if ! "$PYTHON_PATH" -c 'import bzrlib'; then + skip_all='skipping remote-bzr tests; bzr not available' + test_done +fi + +cmd=' +import bzrlib +bzrlib.initialize() +import bzrlib.plugin +bzrlib.plugin.load_plugins() +import bzrlib.plugins.fastimport +' + +if ! "$PYTHON_PATH" -c "$cmd"; then + echo "consider setting BZR_PLUGIN_PATH=$HOME/.bazaar/plugins" 1>&2 + skip_all='skipping remote-bzr tests; bzr-fastimport not available' + test_done +fi + +check () { + (cd $1 && + git log --format='%s' -1 && + git symbolic-ref HEAD) > actual && + (echo $2 && + echo "refs/heads/$3") > expected && + test_cmp expected actual +} + +bzr whoami "A U Thor <author@example.com>" + +test_expect_success 'cloning' ' + (bzr init bzrrepo && + cd bzrrepo && + echo one > content && + bzr add content && + bzr commit -m one + ) && + + git clone "bzr::$PWD/bzrrepo" gitrepo && + check gitrepo one master +' + +test_expect_success 'pulling' ' + (cd bzrrepo && + echo two > content && + bzr commit -m two + ) && + + (cd gitrepo && git pull) && + + check gitrepo two master +' + +test_expect_success 'pushing' ' + (cd gitrepo && + echo three > content && + git commit -a -m three && + git push + ) && + + echo three > expected && + cat bzrrepo/content > actual && + test_cmp expected actual +' + +test_expect_success 'roundtrip' ' + (cd gitrepo && + git pull && + git log --format="%s" -1 origin/master > actual) && + echo three > expected && + test_cmp expected actual && + + (cd gitrepo && git push && git pull) && + + (cd bzrrepo && + echo four > content && + bzr commit -m four + ) && + + (cd gitrepo && git pull && git push) && + + check gitrepo four master && + + (cd gitrepo && + echo five > content && + git commit -a -m five && + git push && git pull + ) && + + (cd bzrrepo && bzr revert) && + + echo five > expected && + cat bzrrepo/content > actual && + test_cmp expected actual +' + +cat > expected <<EOF +100644 blob 54f9d6da5c91d556e6b54340b1327573073030af content +100755 blob 68769579c3eaadbe555379b9c3538e6628bae1eb executable +120000 blob 6b584e8ece562ebffc15d38808cd6b98fc3d97ea link +EOF + +test_expect_success 'special modes' ' + (cd bzrrepo && + echo exec > executable + chmod +x executable && + bzr add executable + bzr commit -m exec && + ln -s content link + bzr add link + bzr commit -m link && + mkdir dir && + bzr add dir && + bzr commit -m dir) && + + (cd gitrepo && + git pull + git ls-tree HEAD > ../actual) && + + test_cmp expected actual && + + (cd gitrepo && + git cat-file -p HEAD:link > ../actual) && + + echo -n content > expected && + test_cmp 
expected actual +' + +test_done diff --git a/contrib/remote-helpers/test-hg-bidi.sh b/contrib/remote-helpers/test-hg-bidi.sh new file mode 100755 index 0000000000..1d61982436 --- /dev/null +++ b/contrib/remote-helpers/test-hg-bidi.sh @@ -0,0 +1,243 @@ +#!/bin/sh +# +# Copyright (c) 2012 Felipe Contreras +# +# Base commands from hg-git tests: +# https://bitbucket.org/durin42/hg-git/src +# + +test_description='Test bidirectionality of remote-hg' + +. ./test-lib.sh + +if ! test_have_prereq PYTHON; then + skip_all='skipping remote-hg tests; python not available' + test_done +fi + +if ! "$PYTHON_PATH" -c 'import mercurial'; then + skip_all='skipping remote-hg tests; mercurial not available' + test_done +fi + +# clone to a git repo +git_clone () { + hg -R $1 bookmark -f -r tip master && + git clone -q "hg::$PWD/$1" $2 +} + +# clone to an hg repo +hg_clone () { + ( + hg init $2 && + cd $1 && + git push -q "hg::$PWD/../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' + ) && + + (cd $2 && hg -q update) +} + +# push an hg repo +hg_push () { + ( + cd $2 + old=$(git symbolic-ref --short HEAD) + git checkout -q -b tmp && + git fetch -q "hg::$PWD/../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' && + git checkout -q $old && + git branch -q -D tmp 2> /dev/null || true + ) +} + +hg_log () { + hg -R $1 log --graph --debug | grep -v 'tag: *default/' +} + +setup () { + ( + echo "[ui]" + echo "username = A U Thor <author@example.com>" + echo "[defaults]" + echo "backout = -d \"0 0\"" + echo "commit = -d \"0 0\"" + echo "debugrawcommit = -d \"0 0\"" + echo "tag = -d \"0 0\"" + ) >> "$HOME"/.hgrc && + git config --global remote-hg.hg-git-compat true + + export HGEDITOR=/usr/bin/true + + export GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0230" + export GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE" +} + +setup + +test_expect_success 'encoding' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + ( + git init -q gitrepo && + cd gitrepo && + + echo alpha > alpha && + git add alpha && + git commit -m "add älphà" && + + export GIT_AUTHOR_NAME="tést èncödîng" && + echo beta > beta && + git add beta && + git commit -m "add beta" && + + echo gamma > gamma && + git add gamma && + git commit -m "add gämmâ" && + + : TODO git config i18n.commitencoding latin-1 && + echo delta > delta && + git add delta && + git commit -m "add déltà" + ) && + + hg_clone gitrepo hgrepo && + git_clone hgrepo gitrepo2 && + hg_clone gitrepo2 hgrepo2 && + + HGENCODING=utf-8 hg_log hgrepo > expected && + HGENCODING=utf-8 hg_log hgrepo2 > actual && + + test_cmp expected actual +' + +test_expect_success 'file removal' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + ( + git init -q gitrepo && + cd gitrepo && + echo alpha > alpha && + git add alpha && + git commit -m "add alpha" && + echo beta > beta && + git add beta && + git commit -m "add beta" + mkdir foo && + echo blah > foo/bar && + git add foo && + git commit -m "add foo" && + git rm alpha && + git commit -m "remove alpha" && + git rm foo/bar && + git commit -m "remove foo/bar" + ) && + + hg_clone gitrepo hgrepo && + git_clone hgrepo gitrepo2 && + hg_clone gitrepo2 hgrepo2 && + + hg_log hgrepo > expected && + hg_log hgrepo2 > actual && + + test_cmp expected actual +' + +test_expect_success 'git tags' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. 
&& rm -rf tmp" && + + ( + git init -q gitrepo && + cd gitrepo && + git config receive.denyCurrentBranch ignore && + echo alpha > alpha && + git add alpha && + git commit -m "add alpha" && + git tag alpha && + + echo beta > beta && + git add beta && + git commit -m "add beta" && + git tag -a -m "added tag beta" beta + ) && + + hg_clone gitrepo hgrepo && + git_clone hgrepo gitrepo2 && + hg_clone gitrepo2 hgrepo2 && + + hg_log hgrepo > expected && + hg_log hgrepo2 > actual && + + test_cmp expected actual +' + +test_expect_success 'hg branch' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + ( + git init -q gitrepo && + cd gitrepo && + + echo alpha > alpha && + git add alpha && + git commit -q -m "add alpha" && + git checkout -q -b not-master + ) && + + ( + hg_clone gitrepo hgrepo && + + cd hgrepo && + hg -q co master && + hg mv alpha beta && + hg -q commit -m "rename alpha to beta" && + hg branch gamma | grep -v "permanent and global" && + hg -q commit -m "started branch gamma" + ) && + + hg_push hgrepo gitrepo && + hg_clone gitrepo hgrepo2 && + + : TODO, avoid "master" bookmark && + (cd hgrepo2 && hg checkout gamma) && + + hg_log hgrepo > expected && + hg_log hgrepo2 > actual && + + test_cmp expected actual +' + +test_expect_success 'hg tags' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + ( + git init -q gitrepo && + cd gitrepo && + + echo alpha > alpha && + git add alpha && + git commit -m "add alpha" && + git checkout -q -b not-master + ) && + + ( + hg_clone gitrepo hgrepo && + + cd hgrepo && + hg co master && + hg tag alpha + ) && + + hg_push hgrepo gitrepo && + hg_clone gitrepo hgrepo2 && + + hg_log hgrepo > expected && + hg_log hgrepo2 > actual && + + test_cmp expected actual +' + +test_done diff --git a/contrib/remote-helpers/test-hg-hg-git.sh b/contrib/remote-helpers/test-hg-hg-git.sh new file mode 100755 index 0000000000..7e3967f5b6 --- /dev/null +++ b/contrib/remote-helpers/test-hg-hg-git.sh @@ -0,0 +1,534 @@ +#!/bin/sh +# +# Copyright (c) 2012 Felipe Contreras +# +# Base commands from hg-git tests: +# https://bitbucket.org/durin42/hg-git/src +# + +test_description='Test remote-hg output compared to hg-git' + +. ./test-lib.sh + +if ! test_have_prereq PYTHON; then + skip_all='skipping remote-hg tests; python not available' + test_done +fi + +if ! "$PYTHON_PATH" -c 'import mercurial'; then + skip_all='skipping remote-hg tests; mercurial not available' + test_done +fi + +if ! 
"$PYTHON_PATH" -c 'import hggit'; then + skip_all='skipping remote-hg tests; hg-git not available' + test_done +fi + +# clone to a git repo with git +git_clone_git () { + hg -R $1 bookmark -f -r tip master && + git clone -q "hg::$PWD/$1" $2 +} + +# clone to an hg repo with git +hg_clone_git () { + ( + hg init $2 && + cd $1 && + git push -q "hg::$PWD/../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' + ) && + + (cd $2 && hg -q update) +} + +# clone to a git repo with hg +git_clone_hg () { + ( + git init -q $2 && + cd $1 && + hg bookmark -f -r tip master && + hg -q push -r master ../$2 || true + ) +} + +# clone to an hg repo with hg +hg_clone_hg () { + hg -q clone $1 $2 +} + +# push an hg repo with git +hg_push_git () { + ( + cd $2 + old=$(git symbolic-ref --short HEAD) + git checkout -q -b tmp && + git fetch -q "hg::$PWD/../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' && + git checkout -q $old && + git branch -q -D tmp 2> /dev/null || true + ) +} + +# push an hg git repo with hg +hg_push_hg () { + ( + cd $1 && + hg -q push ../$2 || true + ) +} + +hg_log () { + hg -R $1 log --graph --debug | grep -v 'tag: *default/' +} + +git_log () { + git --git-dir=$1/.git fast-export --branches +} + +setup () { + ( + echo "[ui]" + echo "username = A U Thor <author@example.com>" + echo "[defaults]" + echo "backout = -d \"0 0\"" + echo "commit = -d \"0 0\"" + echo "debugrawcommit = -d \"0 0\"" + echo "tag = -d \"0 0\"" + echo "[extensions]" + echo "hgext.bookmarks =" + echo "hggit =" + ) >> "$HOME"/.hgrc && + git config --global receive.denycurrentbranch warn + git config --global remote-hg.hg-git-compat true + + export HGEDITOR=/usr/bin/true + + export GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0230" + export GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE" +} + +setup + +test_expect_success 'executable bit' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + ( + git init -q gitrepo && + cd gitrepo && + echo alpha > alpha && + chmod 0644 alpha && + git add alpha && + git commit -m "add alpha" && + chmod 0755 alpha && + git add alpha && + git commit -m "set executable bit" && + chmod 0644 alpha && + git add alpha && + git commit -m "clear executable bit" + ) && + + for x in hg git; do + ( + hg_clone_$x gitrepo hgrepo-$x && + cd hgrepo-$x && + hg_log . && + hg manifest -r 1 -v && + hg manifest -v + ) > output-$x && + + git_clone_$x hgrepo-$x gitrepo2-$x && + git_log gitrepo2-$x > log-$x + done && + cp -r log-* output-* /tmp/foo/ && + + test_cmp output-hg output-git && + test_cmp log-hg log-git +' + +test_expect_success 'symlink' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + ( + git init -q gitrepo && + cd gitrepo && + echo alpha > alpha && + git add alpha && + git commit -m "add alpha" && + ln -s alpha beta && + git add beta && + git commit -m "add beta" + ) && + + for x in hg git; do + ( + hg_clone_$x gitrepo hgrepo-$x && + cd hgrepo-$x && + hg_log . && + hg manifest -v + ) > output-$x && + + git_clone_$x hgrepo-$x gitrepo2-$x && + git_log gitrepo2-$x > log-$x + done && + + test_cmp output-hg output-git && + test_cmp log-hg log-git +' + +test_expect_success 'merge conflict 1' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. 
&& rm -rf tmp" && + + ( + hg init hgrepo1 && + cd hgrepo1 && + echo A > afile && + hg add afile && + hg ci -m "origin" && + + echo B > afile && + hg ci -m "A->B" && + + hg up -r0 && + echo C > afile && + hg ci -m "A->C" && + + hg merge -r1 || true && + echo C > afile && + hg resolve -m afile && + hg ci -m "merge to C" + ) && + + for x in hg git; do + git_clone_$x hgrepo1 gitrepo-$x && + hg_clone_$x gitrepo-$x hgrepo2-$x && + hg_log hgrepo2-$x > hg-log-$x && + git_log gitrepo-$x > git-log-$x + done && + + test_cmp hg-log-hg hg-log-git && + test_cmp git-log-hg git-log-git +' + +test_expect_success 'merge conflict 2' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + ( + hg init hgrepo1 && + cd hgrepo1 && + echo A > afile && + hg add afile && + hg ci -m "origin" && + + echo B > afile && + hg ci -m "A->B" && + + hg up -r0 && + echo C > afile && + hg ci -m "A->C" && + + hg merge -r1 || true && + echo B > afile && + hg resolve -m afile && + hg ci -m "merge to B" + ) && + + for x in hg git; do + git_clone_$x hgrepo1 gitrepo-$x && + hg_clone_$x gitrepo-$x hgrepo2-$x && + hg_log hgrepo2-$x > hg-log-$x && + git_log gitrepo-$x > git-log-$x + done && + + test_cmp hg-log-hg hg-log-git && + test_cmp git-log-hg git-log-git +' + +test_expect_success 'converged merge' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + ( + hg init hgrepo1 && + cd hgrepo1 && + echo A > afile && + hg add afile && + hg ci -m "origin" && + + echo B > afile && + hg ci -m "A->B" && + + echo C > afile && + hg ci -m "B->C" && + + hg up -r0 && + echo C > afile && + hg ci -m "A->C" && + + hg merge -r2 || true && + hg ci -m "merge" + ) && + + for x in hg git; do + git_clone_$x hgrepo1 gitrepo-$x && + hg_clone_$x gitrepo-$x hgrepo2-$x && + hg_log hgrepo2-$x > hg-log-$x && + git_log gitrepo-$x > git-log-$x + done && + + test_cmp hg-log-hg hg-log-git && + test_cmp git-log-hg git-log-git +' + +test_expect_success 'encoding' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + ( + git init -q gitrepo && + cd gitrepo && + + echo alpha > alpha && + git add alpha && + git commit -m "add älphà" && + + export GIT_AUTHOR_NAME="tést èncödîng" && + echo beta > beta && + git add beta && + git commit -m "add beta" && + + echo gamma > gamma && + git add gamma && + git commit -m "add gämmâ" && + + : TODO git config i18n.commitencoding latin-1 && + echo delta > delta && + git add delta && + git commit -m "add déltà" + ) && + + for x in hg git; do + hg_clone_$x gitrepo hgrepo-$x && + git_clone_$x hgrepo-$x gitrepo2-$x && + + HGENCODING=utf-8 hg_log hgrepo-$x > hg-log-$x && + git_log gitrepo2-$x > git-log-$x + done && + + test_cmp hg-log-hg hg-log-git && + test_cmp git-log-hg git-log-git +' + +test_expect_success 'file removal' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + ( + git init -q gitrepo && + cd gitrepo && + echo alpha > alpha && + git add alpha && + git commit -m "add alpha" && + echo beta > beta && + git add beta && + git commit -m "add beta" + mkdir foo && + echo blah > foo/bar && + git add foo && + git commit -m "add foo" && + git rm alpha && + git commit -m "remove alpha" && + git rm foo/bar && + git commit -m "remove foo/bar" + ) && + + for x in hg git; do + ( + hg_clone_$x gitrepo hgrepo-$x && + cd hgrepo-$x && + hg_log . 
&& + hg manifest -r 3 && + hg manifest + ) > output-$x && + + git_clone_$x hgrepo-$x gitrepo2-$x && + git_log gitrepo2-$x > log-$x + done && + + test_cmp output-hg output-git && + test_cmp log-hg log-git +' + +test_expect_success 'git tags' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + ( + git init -q gitrepo && + cd gitrepo && + git config receive.denyCurrentBranch ignore && + echo alpha > alpha && + git add alpha && + git commit -m "add alpha" && + git tag alpha && + + echo beta > beta && + git add beta && + git commit -m "add beta" && + git tag -a -m "added tag beta" beta + ) && + + for x in hg git; do + hg_clone_$x gitrepo hgrepo-$x && + hg_log hgrepo-$x > log-$x + done && + + test_cmp log-hg log-git +' + +test_expect_success 'hg author' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + for x in hg git; do + ( + git init -q gitrepo-$x && + cd gitrepo-$x && + + echo alpha > alpha && + git add alpha && + git commit -m "add alpha" && + git checkout -q -b not-master + ) && + + ( + hg_clone_$x gitrepo-$x hgrepo-$x && + cd hgrepo-$x && + + hg co master && + echo beta > beta && + hg add beta && + hg commit -u "test" -m "add beta" && + + echo gamma >> beta && + hg commit -u "test <test@example.com> (comment)" -m "modify beta" && + + echo gamma > gamma && + hg add gamma && + hg commit -u "<test@example.com>" -m "add gamma" && + + echo delta > delta && + hg add delta && + hg commit -u "name<test@example.com>" -m "add delta" && + + echo epsilon > epsilon && + hg add epsilon && + hg commit -u "name <test@example.com" -m "add epsilon" && + + echo zeta > zeta && + hg add zeta && + hg commit -u " test " -m "add zeta" && + + echo eta > eta && + hg add eta && + hg commit -u "test < test@example.com >" -m "add eta" && + + echo theta > theta && + hg add theta && + hg commit -u "test >test@example.com>" -m "add theta" && + + echo iota > iota && + hg add iota && + hg commit -u "test <test <at> example <dot> com>" -m "add iota" + ) && + + hg_push_$x hgrepo-$x gitrepo-$x && + hg_clone_$x gitrepo-$x hgrepo2-$x && + + hg_log hgrepo2-$x > hg-log-$x && + git_log gitrepo-$x > git-log-$x + done && + + test_cmp git-log-hg git-log-git && + + test_cmp hg-log-hg hg-log-git && + test_cmp git-log-hg git-log-git +' + +test_expect_success 'hg branch' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. && rm -rf tmp" && + + for x in hg git; do + ( + git init -q gitrepo-$x && + cd gitrepo-$x && + + echo alpha > alpha && + git add alpha && + git commit -q -m "add alpha" && + git checkout -q -b not-master + ) && + + ( + hg_clone_$x gitrepo-$x hgrepo-$x && + + cd hgrepo-$x && + hg -q co master && + hg mv alpha beta && + hg -q commit -m "rename alpha to beta" && + hg branch gamma | grep -v "permanent and global" && + hg -q commit -m "started branch gamma" + ) && + + hg_push_$x hgrepo-$x gitrepo-$x && + hg_clone_$x gitrepo-$x hgrepo2-$x && + + hg_log hgrepo2-$x > hg-log-$x && + git_log gitrepo-$x > git-log-$x + done && + + test_cmp hg-log-hg hg-log-git && + test_cmp git-log-hg git-log-git +' + +test_expect_success 'hg tags' ' + mkdir -p tmp && cd tmp && + test_when_finished "cd .. 
&& rm -rf tmp" && + + for x in hg git; do + ( + git init -q gitrepo-$x && + cd gitrepo-$x && + + echo alpha > alpha && + git add alpha && + git commit -m "add alpha" && + git checkout -q -b not-master + ) && + + ( + hg_clone_$x gitrepo-$x hgrepo-$x && + + cd hgrepo-$x && + hg co master && + hg tag alpha + ) && + + hg_push_$x hgrepo-$x gitrepo-$x && + hg_clone_$x gitrepo-$x hgrepo2-$x && + + ( + git --git-dir=gitrepo-$x/.git tag -l && + hg_log hgrepo2-$x && + cat hgrepo2-$x/.hgtags + ) > output-$x + done && + + test_cmp output-hg output-git +' + +test_done diff --git a/contrib/remote-helpers/test-hg.sh b/contrib/remote-helpers/test-hg.sh new file mode 100755 index 0000000000..5f81dfae6c --- /dev/null +++ b/contrib/remote-helpers/test-hg.sh @@ -0,0 +1,121 @@ +#!/bin/sh +# +# Copyright (c) 2012 Felipe Contreras +# +# Base commands from hg-git tests: +# https://bitbucket.org/durin42/hg-git/src +# + +test_description='Test remote-hg' + +. ./test-lib.sh + +if ! test_have_prereq PYTHON; then + skip_all='skipping remote-hg tests; python not available' + test_done +fi + +if ! "$PYTHON_PATH" -c 'import mercurial'; then + skip_all='skipping remote-hg tests; mercurial not available' + test_done +fi + +check () { + (cd $1 && + git log --format='%s' -1 && + git symbolic-ref HEAD) > actual && + (echo $2 && + echo "refs/heads/$3") > expected && + test_cmp expected actual +} + +setup () { + ( + echo "[ui]" + echo "username = H G Wells <wells@example.com>" + ) >> "$HOME"/.hgrc +} + +setup + +test_expect_success 'cloning' ' + test_when_finished "rm -rf gitrepo*" && + + ( + hg init hgrepo && + cd hgrepo && + echo zero > content && + hg add content && + hg commit -m zero + ) && + + git clone "hg::$PWD/hgrepo" gitrepo && + check gitrepo zero master +' + +test_expect_success 'cloning with branches' ' + test_when_finished "rm -rf gitrepo*" && + + ( + cd hgrepo && + hg branch next && + echo next > content && + hg commit -m next + ) && + + git clone "hg::$PWD/hgrepo" gitrepo && + check gitrepo next next && + + (cd hgrepo && hg checkout default) && + + git clone "hg::$PWD/hgrepo" gitrepo2 && + check gitrepo2 zero master +' + +test_expect_success 'cloning with bookmarks' ' + test_when_finished "rm -rf gitrepo*" && + + ( + cd hgrepo && + hg bookmark feature-a && + echo feature-a > content && + hg commit -m feature-a + ) && + + git clone "hg::$PWD/hgrepo" gitrepo && + check gitrepo feature-a feature-a +' + +test_expect_success 'cloning with detached head' ' + test_when_finished "rm -rf gitrepo*" && + + ( + cd hgrepo && + hg update -r 0 + ) && + + git clone "hg::$PWD/hgrepo" gitrepo && + check gitrepo zero master +' + +test_expect_success 'update bookmark' ' + test_when_finished "rm -rf gitrepo*" && + + ( + cd hgrepo && + hg bookmark devel + ) && + + ( + git clone "hg::$PWD/hgrepo" gitrepo && + cd gitrepo && + git checkout devel && + echo devel > content && + git commit -a -m devel && + git push + ) && + + hg -R hgrepo bookmarks | grep "devel\s\+3:" +' + +test_done diff --git a/contrib/stats/mailmap.pl b/contrib/stats/mailmap.pl index 4b852e2455..9513f5e35b 100755 --- a/contrib/stats/mailmap.pl +++ b/contrib/stats/mailmap.pl @@ -1,38 +1,70 @@ -#!/usr/bin/perl -w -my %mailmap = (); -open I, "<", ".mailmap"; -while (<I>) { - chomp; - next if /^#/; - if (my ($author, $mail) = /^(.*?)\s+<(.+)>$/) { - $mailmap{$mail} = $author; - } +#!/usr/bin/perl + +use warnings 'all'; +use strict; +use Getopt::Long; + +my $match_emails; +my $match_names; +my $order_by = 'count'; +Getopt::Long::Configure(qw(bundling)); +GetOptions( + 
'emails|e!' => \$match_emails, + 'names|n!' => \$match_names, + 'count|c' => sub { $order_by = 'count' }, + 'time|t' => sub { $order_by = 'stamp' }, +) or exit 1; +$match_emails = 1 unless $match_names; + +my $email = {}; +my $name = {}; + +open(my $fh, '-|', "git log --format='%at <%aE> %aN'"); +while(<$fh>) { + my ($t, $e, $n) = /(\S+) <(\S+)> (.*)/; + mark($email, $e, $n, $t); + mark($name, $n, $e, $t); } -close I; - -my %mail2author = (); -open I, "git log --pretty='format:%ae %an' |"; -while (<I>) { - chomp; - my ($mail, $author) = split(/\t/, $_); - next if exists $mailmap{$mail}; - $mail2author{$mail} ||= {}; - $mail2author{$mail}{$author} ||= 0; - $mail2author{$mail}{$author}++; +close($fh); + +if ($match_emails) { + foreach my $e (dups($email)) { + foreach my $n (vals($email->{$e})) { + show($n, $e, $email->{$e}->{$n}); + } + print "\n"; + } } -close I; - -while (my ($mail, $authorcount) = each %mail2author) { - # %$authorcount is ($author => $count); - # sort and show the names from the most frequent ones. - my @names = (map { $_->[0] } - sort { $b->[1] <=> $a->[1] } - map { [$_, $authorcount->{$_}] } - keys %$authorcount); - if (1 < @names) { - for (@names) { - print "$_ <$mail>\n"; +if ($match_names) { + foreach my $n (dups($name)) { + foreach my $e (vals($name->{$n})) { + show($n, $e, $name->{$n}->{$e}); } + print "\n"; } } +exit 0; +sub mark { + my ($h, $k, $v, $t) = @_; + my $e = $h->{$k}->{$v} ||= { count => 0, stamp => 0 }; + $e->{count}++; + $e->{stamp} = $t unless $t < $e->{stamp}; +} + +sub dups { + my $h = shift; + return grep { keys(%{$h->{$_}}) > 1 } keys(%$h); +} + +sub vals { + my $h = shift; + return sort { + $h->{$b}->{$order_by} <=> $h->{$a}->{$order_by} + } keys(%$h); +} + +sub show { + my ($n, $e, $h) = @_; + print "$n <$e> ($h->{$order_by})\n"; +} diff --git a/contrib/subtree/.gitignore b/contrib/subtree/.gitignore index 7e77c9d022..91360a3d7f 100644 --- a/contrib/subtree/.gitignore +++ b/contrib/subtree/.gitignore @@ -1,4 +1,5 @@ *~ +git-subtree git-subtree.xml git-subtree.1 mainline diff --git a/contrib/subtree/git-subtree.txt b/contrib/subtree/git-subtree.txt index 0c44fda011..c5bce41ac7 100644 --- a/contrib/subtree/git-subtree.txt +++ b/contrib/subtree/git-subtree.txt @@ -93,7 +93,7 @@ pull:: repository. push:: - Does a 'split' (see above) using the <prefix> supplied + Does a 'split' (see below) using the <prefix> supplied and then does a 'git push' to push the result to the repository and refspec. This can be used to push your subtree to different branches of the remote repository. diff --git a/contrib/svn-fe/svn-fe.c b/contrib/svn-fe/svn-fe.c index 35db24f5ea..f363505abb 100644 --- a/contrib/svn-fe/svn-fe.c +++ b/contrib/svn-fe/svn-fe.c @@ -10,7 +10,8 @@ int main(int argc, char **argv) { if (svndump_init(NULL)) return 1; - svndump_read((argc > 1) ? argv[1] : NULL); + svndump_read((argc > 1) ? argv[1] : NULL, "refs/heads/master", + "refs/notes/svn/revs"); svndump_deinit(); svndump_reset(); return 0; } diff --git a/contrib/svn-fe/svnrdump_sim.py b/contrib/svn-fe/svnrdump_sim.py new file mode 100755 index 0000000000..17cf6f961f --- /dev/null +++ b/contrib/svn-fe/svnrdump_sim.py @@ -0,0 +1,57 @@ +#!/usr/bin/python +""" +Simulates svnrdump by replaying an existing dump from a file, taking care +of the specified revision range. +To simulate incremental imports the environment variable SVNRMAX can be set +to the highest revision that should be available. +""" +import sys, os + +if sys.hexversion < 0x02040000: + # The limiter is the ValueError() calls. 
This may be too conservative + sys.stderr.write("svnrdump-sim.py: requires Python 2.4 or later.\n") + sys.exit(1) + +def getrevlimit(): + var = 'SVNRMAX' + if os.environ.has_key(var): + return os.environ[var] + return None + +def writedump(url, lower, upper): + if url.startswith('sim://'): + filename = url[6:] + if filename[-1] == '/': filename = filename[:-1] # remove terminating slash + else: + raise ValueError('sim:// url required') + f = open(filename, 'r') + state = 'header' + wroterev = False + while True: + l = f.readline() + if l == '': break + if state == 'header' and l.startswith('Revision-number: '): + state = 'prefix' + if state == 'prefix' and l == 'Revision-number: %s\n' % lower: + state = 'selection' + if upper != 'HEAD' and state == 'selection' and l == 'Revision-number: %s\n' % upper: + break + + if state == 'header' or state == 'selection': + if state == 'selection': wroterev = True + sys.stdout.write(l) + return wroterev + +if __name__ == "__main__": + if not (len(sys.argv) in (3, 4, 5)): + print "usage: %s dump URL -rLOWER:UPPER" % sys.argv[0] + sys.exit(1) + if sys.argv[1] != 'dump': raise NotImplementedError('only "dump" is supported.') + url = sys.argv[2] + r = ['0', 'HEAD'] + if len(sys.argv) == 4 and sys.argv[3][0:2] == '-r': + r = sys.argv[3][2:].lstrip().split(':') + if getrevlimit() is not None: r[1] = getrevlimit() + if writedump(url, r[0], r[1]): ret = 0 + else: ret = 1 + sys.exit(ret) diff --git a/contrib/vim/README b/contrib/vim/README index fca1e17251..8f16d06972 100644 --- a/contrib/vim/README +++ b/contrib/vim/README @@ -17,16 +17,6 @@ To install: 1. Copy these files to vim's syntax directory $HOME/.vim/syntax 2. To auto-detect the editing of various git-related filetypes: - $ cat >>$HOME/.vim/filetype.vim <<'EOF' - autocmd BufNewFile,BufRead *.git/COMMIT_EDITMSG setf gitcommit - autocmd BufNewFile,BufRead *.git/config,.gitconfig setf gitconfig - autocmd BufNewFile,BufRead git-rebase-todo setf gitrebase - autocmd BufNewFile,BufRead .msg.[0-9]* - \ if getline(1) =~ '^From.*# This line is ignored.$' | - \ setf gitsendemail | - \ endif - autocmd BufNewFile,BufRead *.git/** - \ if getline(1) =~ '^\x\{40\}\>\|^ref: ' | - \ setf git | - \ endif - EOF + + $ curl http://ftp.vim.org/pub/vim/runtime/filetype.vim | + sed -ne '/^" Git$/, /^$/ p' >>$HOME/.vim/filetype.vim
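
The revision-window logic in writedump() above is compact but easy to misread: the 'header' state replays the dump preamble unconditionally, 'prefix' silently swallows every revision below LOWER, and 'selection' replays until the "Revision-number:" line of UPPER is seen, which ends replay before UPPER itself is emitted. The following is a minimal, self-contained sketch of that same state machine; the select_revisions helper and the fabricated dump lines are assumptions made for this illustration, not code from the patch.

    def select_revisions(lines, lower, upper):
        # Mirrors writedump()'s states: 'header' replays the dump preamble,
        # 'prefix' skips revisions below the lower bound, 'selection' replays
        # until the upper bound's "Revision-number:" line is reached.
        state = 'header'
        out = []
        for l in lines:
            if state == 'header' and l.startswith('Revision-number: '):
                state = 'prefix'
            if state == 'prefix' and l == 'Revision-number: %s\n' % lower:
                state = 'selection'
            if upper != 'HEAD' and state == 'selection' and \
                    l == 'Revision-number: %s\n' % upper:
                break
            if state in ('header', 'selection'):
                out.append(l)
        return out

    # Fabricated two-line-per-revision dump, just enough for the example.
    dump = ['SVN-fs-dump-format-version: 2\n', '\n',
            'Revision-number: 0\n', 'node for r0\n',
            'Revision-number: 1\n', 'node for r1\n',
            'Revision-number: 2\n', 'node for r2\n']

    # -r1:2 replays the header and revision 1, then stops at revision 2.
    assert ''.join(select_revisions(dump, '1', '2')) == \
            'SVN-fs-dump-format-version: 2\n\nRevision-number: 1\nnode for r1\n'

Note the exclusive upper bound in this sketch: asking for -r1:2 yields only revision 1, since the UPPER revision's own header terminates replay before it is emitted.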
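Both remote helpers above persist their fast-import mark tables through a Marks class serialized as JSON: 'marks' maps source revision numbers to fast-import mark numbers, and 'tips' records the last revision exported per ref, which is what lets export_ref() skip already-exported history on the next run. The round trip looks roughly like the sketch below; the temporary path, revision numbers, and tip name are fabricated for illustration.

    import json, os, tempfile

    # Fabricated marks state in the same shape Marks.dict() stores: JSON
    # object keys are strings, so revision numbers round-trip as strings.
    state = {'tips': {'branches/default': 2},
             'marks': {'0': 1, '1': 2, '2': 3},
             'last-mark': 3}

    path = os.path.join(tempfile.mkdtemp(), 'marks-hg')
    json.dump(state, open(path, 'w'))

    # Marks.load() rebuilds the reverse (mark -> revision) mapping like this:
    loaded = json.load(open(path))
    rev_marks = dict((mark, int(rev)) for rev, mark in loaded['marks'].items())

    assert rev_marks[3] == 2                        # mark :3 is revision 2
    assert loaded['tips']['branches/default'] == 2  # resume export after r2

Keeping this file intact between runs is what makes incremental fetch and push cheap; if it is lost, the helper has to re-export history from revision 0.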