Diffstat (limited to 'contrib'): 29 files changed, 1469 insertions(+), 728 deletions(-)
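
The bulk of the series adds a CMake build system under contrib/buildsystems/. As a quick orientation before reading the new CMakeLists.txt below, an out-of-source configure/build/test cycle follows the instructions embedded at the top of that file, roughly like this (illustration only, not part of the patch; the ctest step assumes the default BUILD_TESTING=ON that include(CTest) provides):

    mkdir -p contrib/buildsystems/out
    cd contrib/buildsystems/out
    cmake ../ -DCMAKE_BUILD_TYPE=Release   # add -G Ninja to use ninja instead of make
    make                                   # on Windows, open the generated git.sln instead
    ctest                                  # runs the t/t[0-9]*.sh tests registered via add_test()
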
diff --git a/contrib/buildsystems/CMakeLists.txt b/contrib/buildsystems/CMakeLists.txt new file mode 100644 index 0000000000..df539a44fa --- /dev/null +++ b/contrib/buildsystems/CMakeLists.txt @@ -0,0 +1,988 @@ +# +# Copyright (c) 2020 Sibi Siddharthan +# + +#[[ + +Instructions how to use this in Visual Studio: + +Open the worktree as a folder. Visual Studio 2019 and later will detect +the CMake configuration automatically and set everything up for you, +ready to build. You can then run the tests in `t/` via a regular Git Bash. + +Note: Visual Studio also has the option of opening `CMakeLists.txt` +directly; Using this option, Visual Studio will not find the source code, +though, therefore the `File>Open>Folder...` option is preferred. + +Instructions to run CMake manually: + + mkdir -p contrib/buildsystems/out + cd contrib/buildsystems/out + cmake ../ -DCMAKE_BUILD_TYPE=Release + +This will build the git binaries in contrib/buildsystems/out +directory (our top-level .gitignore file knows to ignore contents of +this directory). + +Possible build configurations(-DCMAKE_BUILD_TYPE) with corresponding +compiler flags +Debug : -g +Release: -O3 +RelWithDebInfo : -O2 -g +MinSizeRel : -Os +empty(default) : + +NOTE: -DCMAKE_BUILD_TYPE is optional. For multi-config generators like Visual Studio +this option is ignored + +This process generates a Makefile(Linux/*BSD/MacOS) , Visual Studio solution(Windows) by default. +Run `make` to build Git on Linux/*BSD/MacOS. +Open git.sln on Windows and build Git. + +NOTE: By default CMake uses Makefile as the build tool on Linux and Visual Studio in Windows, +to use another tool say `ninja` add this to the command line when configuring. +`-G Ninja` + +]] +cmake_minimum_required(VERSION 3.14) + +#set the source directory to root of git +set(CMAKE_SOURCE_DIR ${CMAKE_CURRENT_LIST_DIR}/../..) +if(WIN32) + set(VCPKG_DIR "${CMAKE_SOURCE_DIR}/compat/vcbuild/vcpkg") + if(MSVC AND NOT EXISTS ${VCPKG_DIR}) + message("Initializing vcpkg and building the Git's dependencies (this will take a while...)") + execute_process(COMMAND ${CMAKE_SOURCE_DIR}/compat/vcbuild/vcpkg_install.bat) + endif() + list(APPEND CMAKE_PREFIX_PATH "${VCPKG_DIR}/installed/x64-windows") + + # In the vcpkg edition, we need this to be able to link to libcurl + set(CURL_NO_CURL_CMAKE ON) +endif() + +find_program(SH_EXE sh PATHS "C:/Program Files/Git/bin") +if(NOT SH_EXE) + message(FATAL_ERROR "sh: shell interpreter was not found in your path, please install one." 
+ "On Windows, you can get it as part of 'Git for Windows' install at https://gitforwindows.org/") +endif() + +#Create GIT-VERSION-FILE using GIT-VERSION-GEN +if(NOT EXISTS ${CMAKE_SOURCE_DIR}/GIT-VERSION-FILE) + message("Generating GIT-VERSION-FILE") + execute_process(COMMAND ${SH_EXE} ${CMAKE_SOURCE_DIR}/GIT-VERSION-GEN + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}) +endif() + +#Parse GIT-VERSION-FILE to get the version +file(STRINGS ${CMAKE_SOURCE_DIR}/GIT-VERSION-FILE git_version REGEX "GIT_VERSION = (.*)") +string(REPLACE "GIT_VERSION = " "" git_version ${git_version}) +string(FIND ${git_version} "GIT" location) +if(location EQUAL -1) + string(REGEX MATCH "[0-9]*\\.[0-9]*\\.[0-9]*" git_version ${git_version}) +else() + string(REGEX MATCH "[0-9]*\\.[0-9]*" git_version ${git_version}) + string(APPEND git_version ".0") #for building from a snapshot +endif() + +project(git + VERSION ${git_version} + LANGUAGES C) + + +#TODO gitk git-gui gitweb +#TODO Enable NLS on windows natively +#TODO Add pcre support + +#macros for parsing the Makefile for sources and scripts +macro(parse_makefile_for_sources list_var regex) + file(STRINGS ${CMAKE_SOURCE_DIR}/Makefile ${list_var} REGEX "^${regex} \\+=(.*)") + string(REPLACE "${regex} +=" "" ${list_var} ${${list_var}}) + string(REPLACE "$(COMPAT_OBJS)" "" ${list_var} ${${list_var}}) #remove "$(COMPAT_OBJS)" This is only for libgit. + string(STRIP ${${list_var}} ${list_var}) #remove trailing/leading whitespaces + string(REPLACE ".o" ".c;" ${list_var} ${${list_var}}) #change .o to .c, ; is for converting the string into a list + list(TRANSFORM ${list_var} STRIP) #remove trailing/leading whitespaces for each element in list + list(REMOVE_ITEM ${list_var} "") #remove empty list elements +endmacro() + +macro(parse_makefile_for_scripts list_var regex lang) + file(STRINGS ${CMAKE_SOURCE_DIR}/Makefile ${list_var} REGEX "^${regex} \\+=(.*)") + string(REPLACE "${regex} +=" "" ${list_var} ${${list_var}}) + string(STRIP ${${list_var}} ${list_var}) #remove trailing/leading whitespaces + string(REPLACE " " ";" ${list_var} ${${list_var}}) #convert string to a list + if(NOT ${lang}) #exclude for SCRIPT_LIB + list(TRANSFORM ${list_var} REPLACE "${lang}" "") #do the replacement + endif() +endmacro() + +include(CheckTypeSize) +include(CheckCSourceRuns) +include(CheckCSourceCompiles) +include(CheckIncludeFile) +include(CheckFunctionExists) +include(CheckSymbolExists) +include(CheckStructHasMember) +include(CTest) + +find_package(ZLIB REQUIRED) +find_package(CURL) +find_package(EXPAT) +find_package(Iconv) + +#Don't use libintl on Windows Visual Studio and Clang builds +if(NOT (WIN32 AND (CMAKE_C_COMPILER_ID STREQUAL "MSVC" OR CMAKE_C_COMPILER_ID STREQUAL "Clang"))) + find_package(Intl) +endif() + +if(NOT Intl_FOUND) + add_compile_definitions(NO_GETTEXT) + if(NOT Iconv_FOUND) + add_compile_definitions(NO_ICONV) + endif() +endif() + +include_directories(SYSTEM ${ZLIB_INCLUDE_DIRS}) +if(CURL_FOUND) + include_directories(SYSTEM ${CURL_INCLUDE_DIRS}) +endif() +if(EXPAT_FOUND) + include_directories(SYSTEM ${EXPAT_INCLUDE_DIRS}) +endif() +if(Iconv_FOUND) + include_directories(SYSTEM ${Iconv_INCLUDE_DIRS}) +endif() +if(Intl_FOUND) + include_directories(SYSTEM ${Intl_INCLUDE_DIRS}) +endif() + + +if(WIN32 AND NOT MSVC)#not required for visual studio builds + find_program(WINDRES_EXE windres) + if(NOT WINDRES_EXE) + message(FATAL_ERROR "Install windres on Windows for resource files") + endif() +endif() + +find_program(MSGFMT_EXE msgfmt) +if(NOT MSGFMT_EXE) + set(MSGFMT_EXE 
${CMAKE_SOURCE_DIR}/compat/vcbuild/vcpkg/downloads/tools/msys2/msys64/usr/bin/msgfmt.exe) + if(NOT EXISTS ${MSGFMT_EXE}) + message(WARNING "Text Translations won't be built") + unset(MSGFMT_EXE) + endif() +endif() + +#Force all visual studio outputs to CMAKE_BINARY_DIR +if(CMAKE_C_COMPILER_ID STREQUAL "MSVC") + set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_DEBUG ${CMAKE_BINARY_DIR}) + set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_RELEASE ${CMAKE_BINARY_DIR}) + add_compile_options(/MP) +endif() + +#default behaviour +include_directories(${CMAKE_SOURCE_DIR}) +add_compile_definitions(GIT_HOST_CPU="${CMAKE_SYSTEM_PROCESSOR}") +add_compile_definitions(SHA256_BLK INTERNAL_QSORT RUNTIME_PREFIX) +add_compile_definitions(NO_OPENSSL SHA1_DC SHA1DC_NO_STANDARD_INCLUDES + SHA1DC_INIT_SAFE_HASH_DEFAULT=0 + SHA1DC_CUSTOM_INCLUDE_SHA1_C="cache.h" + SHA1DC_CUSTOM_INCLUDE_UBC_CHECK_C="git-compat-util.h" ) +list(APPEND compat_SOURCES sha1dc_git.c sha1dc/sha1.c sha1dc/ubc_check.c block-sha1/sha1.c sha256/block/sha256.c compat/qsort_s.c) + + +add_compile_definitions(PAGER_ENV="LESS=FRX LV=-c" + ETC_GITATTRIBUTES="etc/gitattributes" + ETC_GITCONFIG="etc/gitconfig" + GIT_EXEC_PATH="libexec/git-core" + GIT_LOCALE_PATH="share/locale" + GIT_MAN_PATH="share/man" + GIT_INFO_PATH="share/info" + GIT_HTML_PATH="share/doc/git-doc" + DEFAULT_HELP_FORMAT="html" + DEFAULT_GIT_TEMPLATE_DIR="share/git-core/templates" + GIT_VERSION="${PROJECT_VERSION}.GIT" + GIT_USER_AGENT="git/${PROJECT_VERSION}.GIT" + BINDIR="bin" + GIT_BUILT_FROM_COMMIT="") + +if(WIN32) + set(FALLBACK_RUNTIME_PREFIX /mingw64) + add_compile_definitions(FALLBACK_RUNTIME_PREFIX="${FALLBACK_RUNTIME_PREFIX}") +else() + set(FALLBACK_RUNTIME_PREFIX /home/$ENV{USER}) + add_compile_definitions(FALLBACK_RUNTIME_PREFIX="${FALLBACK_RUNTIME_PREFIX}") +endif() + + +#Platform Specific +if(CMAKE_SYSTEM_NAME STREQUAL "Windows") + if(CMAKE_C_COMPILER_ID STREQUAL "MSVC" OR CMAKE_C_COMPILER_ID STREQUAL "Clang") + include_directories(${CMAKE_SOURCE_DIR}/compat/vcbuild/include) + add_compile_definitions(_CRT_SECURE_NO_WARNINGS _CRT_NONSTDC_NO_DEPRECATE) + endif() + include_directories(${CMAKE_SOURCE_DIR}/compat/win32) + add_compile_definitions(HAVE_ALLOCA_H NO_POSIX_GOODIES NATIVE_CRLF NO_UNIX_SOCKETS WIN32 + _CONSOLE DETECT_MSYS_TTY STRIP_EXTENSION=".exe" NO_SYMLINK_HEAD UNRELIABLE_FSTAT + NOGDI OBJECT_CREATION_MODE=1 __USE_MINGW_ANSI_STDIO=0 + USE_NED_ALLOCATOR OVERRIDE_STRDUP MMAP_PREVENTS_DELETE USE_WIN32_MMAP + UNICODE _UNICODE HAVE_WPGMPTR ENSURE_MSYSTEM_IS_SET) + list(APPEND compat_SOURCES compat/mingw.c compat/winansi.c compat/win32/path-utils.c + compat/win32/pthread.c compat/win32mmap.c compat/win32/syslog.c + compat/win32/trace2_win32_process_info.c compat/win32/dirent.c + compat/nedmalloc/nedmalloc.c compat/strdup.c) + set(NO_UNIX_SOCKETS 1) + +elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux") + add_compile_definitions(PROCFS_EXECUTABLE_PATH="/proc/self/exe" HAVE_DEV_TTY ) + list(APPEND compat_SOURCES unix-socket.c) +endif() + +set(EXE_EXTENSION ${CMAKE_EXECUTABLE_SUFFIX}) + +#header checks +check_include_file(libgen.h HAVE_LIBGEN_H) +if(NOT HAVE_LIBGEN_H) + add_compile_definitions(NO_LIBGEN_H) + list(APPEND compat_SOURCES compat/basename.c) +endif() + +check_include_file(sys/sysinfo.h HAVE_SYSINFO) +if(HAVE_SYSINFO) + add_compile_definitions(HAVE_SYSINFO) +endif() + +check_c_source_compiles(" +#include <alloca.h> + +int main(void) +{ + char *p = (char *) alloca(2 * sizeof(int)); + + if (p) + return 0; + return 0; +}" +HAVE_ALLOCA_H) +if(HAVE_ALLOCA_H) + add_compile_definitions(HAVE_ALLOCA_H) 
+endif() + +check_include_file(strings.h HAVE_STRINGS_H) +if(HAVE_STRINGS_H) + add_compile_definitions(HAVE_STRINGS_H) +endif() + +check_include_file(sys/select.h HAVE_SYS_SELECT_H) +if(NOT HAVE_SYS_SELECT_H) + add_compile_definitions(NO_SYS_SELECT_H) +endif() + +check_include_file(sys/poll.h HAVE_SYS_POLL_H) +if(NOT HAVE_SYS_POLL_H) + add_compile_definitions(NO_SYS_POLL_H) +endif() + +check_include_file(poll.h HAVE_POLL_H) +if(NOT HAVE_POLL_H) + add_compile_definitions(NO_POLL_H) +endif() + +check_include_file(inttypes.h HAVE_INTTYPES_H) +if(NOT HAVE_INTTYPES_H) + add_compile_definitions(NO_INTTYPES_H) +endif() + +check_include_file(paths.h HAVE_PATHS_H) +if(HAVE_PATHS_H) + add_compile_definitions(HAVE_PATHS_H) +endif() + +#function checks +set(function_checks + strcasestr memmem strlcpy strtoimax strtoumax strtoull + setenv mkdtemp poll pread memmem) + +#unsetenv,hstrerror are incompatible with windows build +if(NOT WIN32) + list(APPEND function_checks unsetenv hstrerror) +endif() + +foreach(f ${function_checks}) + string(TOUPPER ${f} uf) + check_function_exists(${f} HAVE_${uf}) + if(NOT HAVE_${uf}) + add_compile_definitions(NO_${uf}) + endif() +endforeach() + +if(NOT HAVE_POLL_H OR NOT HAVE_SYS_POLL_H OR NOT HAVE_POLL) + include_directories(${CMAKE_SOURCE_DIR}/compat/poll) + add_compile_definitions(NO_POLL) + list(APPEND compat_SOURCES compat/poll/poll.c) +endif() + +if(NOT HAVE_STRCASESTR) + list(APPEND compat_SOURCES compat/strcasestr.c) +endif() + +if(NOT HAVE_STRLCPY) + list(APPEND compat_SOURCES compat/strlcpy.c) +endif() + +if(NOT HAVE_STRTOUMAX) + list(APPEND compat_SOURCES compat/strtoumax.c compat/strtoimax.c) +endif() + +if(NOT HAVE_SETENV) + list(APPEND compat_SOURCES compat/setenv.c) +endif() + +if(NOT HAVE_MKDTEMP) + list(APPEND compat_SOURCES compat/mkdtemp.c) +endif() + +if(NOT HAVE_PREAD) + list(APPEND compat_SOURCES compat/pread.c) +endif() + +if(NOT HAVE_MEMMEM) + list(APPEND compat_SOURCES compat/memmem.c) +endif() + +if(NOT WIN32) + if(NOT HAVE_UNSETENV) + list(APPEND compat_SOURCES compat/unsetenv.c) + endif() + + if(NOT HAVE_HSTRERROR) + list(APPEND compat_SOURCES compat/hstrerror.c) + endif() +endif() + +check_function_exists(getdelim HAVE_GETDELIM) +if(HAVE_GETDELIM) + add_compile_definitions(HAVE_GETDELIM) +endif() + +check_function_exists(clock_gettime HAVE_CLOCK_GETTIME) +check_symbol_exists(CLOCK_MONOTONIC "time.h" HAVE_CLOCK_MONOTONIC) +if(HAVE_CLOCK_GETTIME) + add_compile_definitions(HAVE_CLOCK_GETTIME) +endif() +if(HAVE_CLOCK_MONOTONIC) + add_compile_definitions(HAVE_CLOCK_MONOTONIC) +endif() + +#check for st_blocks in struct stat +check_struct_has_member("struct stat" st_blocks "sys/stat.h" STRUCT_STAT_HAS_ST_BLOCKS) +if(NOT STRUCT_STAT_HAS_ST_BLOCKS) + add_compile_definitions(NO_ST_BLOCKS_IN_STRUCT_STAT) +endif() + +#compile checks +check_c_source_runs(" +#include<stdio.h> +#include<stdarg.h> +#include<string.h> +#include<stdlib.h> + +int test_vsnprintf(char *str, size_t maxsize, const char *format, ...) 
+{ + int ret; + va_list ap; + + va_start(ap, format); + ret = vsnprintf(str, maxsize, format, ap); + va_end(ap); + return ret; +} + +int main(void) +{ + char buf[6]; + + if (test_vsnprintf(buf, 3, \"%s\", \"12345\") != 5 + || strcmp(buf, \"12\")) + return 1; + if (snprintf(buf, 3, \"%s\", \"12345\") != 5 + || strcmp(buf, \"12\")) + return 1; + return 0; +}" +SNPRINTF_OK) +if(NOT SNPRINTF_OK) + add_compile_definitions(SNPRINTF_RETURNS_BOGUS) + list(APPEND compat_SOURCES compat/snprintf.c) +endif() + +check_c_source_runs(" +#include<stdio.h> + +int main(void) +{ + FILE *f = fopen(\".\", \"r\"); + + return f != NULL; +}" +FREAD_READS_DIRECTORIES_NO) +if(NOT FREAD_READS_DIRECTORIES_NO) + add_compile_definitions(FREAD_READS_DIRECTORIES) + list(APPEND compat_SOURCES compat/fopen.c) +endif() + +check_c_source_compiles(" +#include <regex.h> +#ifndef REG_STARTEND +#error oops we dont have it +#endif + +int main(void) +{ + return 0; +}" +HAVE_REGEX) +if(NOT HAVE_REGEX) + include_directories(${CMAKE_SOURCE_DIR}/compat/regex) + list(APPEND compat_SOURCES compat/regex/regex.c ) + add_compile_definitions(NO_REGEX NO_MBSUPPORT GAWK) +endif() + + +check_c_source_compiles(" +#include <stddef.h> +#include <sys/types.h> +#include <sys/sysctl.h> + +int main(void) +{ + int val, mib[2]; + size_t len; + + mib[0] = CTL_HW; + mib[1] = 1; + len = sizeof(val); + return sysctl(mib, 2, &val, &len, NULL, 0) ? 1 : 0; +}" +HAVE_BSD_SYSCTL) +if(HAVE_BSD_SYSCTL) + add_compile_definitions(HAVE_BSD_SYSCTL) +endif() + +set(CMAKE_REQUIRED_LIBRARIES ${Iconv_LIBRARIES}) +set(CMAKE_REQUIRED_INCLUDES ${Iconv_INCLUDE_DIRS}) + +check_c_source_compiles(" +#include <iconv.h> + +extern size_t iconv(iconv_t cd, + char **inbuf, size_t *inbytesleft, + char **outbuf, size_t *outbytesleft); + +int main(void) +{ + return 0; +}" +HAVE_NEW_ICONV) +if(HAVE_NEW_ICONV) + set(HAVE_OLD_ICONV 0) +else() + set(HAVE_OLD_ICONV 1) +endif() + +check_c_source_runs(" +#include <iconv.h> +#if ${HAVE_OLD_ICONV} +typedef const char *iconv_ibp; +#else +typedef char *iconv_ibp; +#endif + +int main(void) +{ + int v; + iconv_t conv; + char in[] = \"a\"; + iconv_ibp pin = in; + char out[20] = \"\"; + char *pout = out; + size_t isz = sizeof(in); + size_t osz = sizeof(out); + + conv = iconv_open(\"UTF-16\", \"UTF-8\"); + iconv(conv, &pin, &isz, &pout, &osz); + iconv_close(conv); + v = (unsigned char)(out[0]) + (unsigned char)(out[1]); + return v != 0xfe + 0xff; +}" +ICONV_DOESNOT_OMIT_BOM) +if(NOT ICONV_DOESNOT_OMIT_BOM) + add_compile_definitions(ICONV_OMITS_BOM) +endif() + +unset(CMAKE_REQUIRED_LIBRARIES) +unset(CMAKE_REQUIRED_INCLUDES) + + +#programs +set(PROGRAMS_BUILT + git git-daemon git-http-backend git-sh-i18n--envsubst + git-shell) + +if(NOT CURL_FOUND) + list(APPEND excluded_progs git-http-fetch git-http-push) + add_compile_definitions(NO_CURL) + message(WARNING "git-http-push and git-http-fetch will not be built") +else() + list(APPEND PROGRAMS_BUILT git-http-fetch git-http-push git-imap-send git-remote-http) + if(CURL_VERSION_STRING VERSION_GREATER_EQUAL 7.34.0) + add_compile_definitions(USE_CURL_FOR_IMAP_SEND) + endif() +endif() + +if(NOT EXPAT_FOUND) + list(APPEND excluded_progs git-http-push) + add_compile_definitions(NO_EXPAT) +else() + list(APPEND PROGRAMS_BUILT git-http-push) + if(EXPAT_VERSION_STRING VERSION_LESS_EQUAL 1.2) + add_compile_definitions(EXPAT_NEEDS_XMLPARSE_H) + endif() +endif() + +list(REMOVE_DUPLICATES excluded_progs) +list(REMOVE_DUPLICATES PROGRAMS_BUILT) + + +foreach(p ${excluded_progs}) + list(APPEND EXCLUSION_PROGS 
--exclude-program ${p} ) +endforeach() + +#for comparing null values +list(APPEND EXCLUSION_PROGS empty) +set(EXCLUSION_PROGS_CACHE ${EXCLUSION_PROGS} CACHE STRING "Programs not built" FORCE) + +if(NOT EXISTS ${CMAKE_BINARY_DIR}/command-list.h OR NOT EXCLUSION_PROGS_CACHE STREQUAL EXCLUSION_PROGS) + list(REMOVE_ITEM EXCLUSION_PROGS empty) + message("Generating command-list.h") + execute_process(COMMAND ${SH_EXE} ${CMAKE_SOURCE_DIR}/generate-cmdlist.sh ${EXCLUSION_PROGS} command-list.txt + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + OUTPUT_FILE ${CMAKE_BINARY_DIR}/command-list.h) +endif() + +if(NOT EXISTS ${CMAKE_BINARY_DIR}/config-list.h) + message("Generating config-list.h") + execute_process(COMMAND ${SH_EXE} ${CMAKE_SOURCE_DIR}/generate-configlist.sh + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + OUTPUT_FILE ${CMAKE_BINARY_DIR}/config-list.h) +endif() + +include_directories(${CMAKE_BINARY_DIR}) + +#build +#libgit +parse_makefile_for_sources(libgit_SOURCES "LIB_OBJS") + +list(TRANSFORM libgit_SOURCES PREPEND "${CMAKE_SOURCE_DIR}/") +list(TRANSFORM compat_SOURCES PREPEND "${CMAKE_SOURCE_DIR}/") +add_library(libgit ${libgit_SOURCES} ${compat_SOURCES}) + +#libxdiff +parse_makefile_for_sources(libxdiff_SOURCES "XDIFF_OBJS") + +list(TRANSFORM libxdiff_SOURCES PREPEND "${CMAKE_SOURCE_DIR}/") +add_library(xdiff STATIC ${libxdiff_SOURCES}) + +if(WIN32) + if(NOT MSVC)#use windres when compiling with gcc and clang + add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/git.res + COMMAND ${WINDRES_EXE} -O coff -DMAJOR=${PROJECT_VERSION_MAJOR} -DMINOR=${PROJECT_VERSION_MINOR} + -DMICRO=${PROJECT_VERSION_PATCH} -DPATCHLEVEL=0 -DGIT_VERSION="\\\"${PROJECT_VERSION}.GIT\\\"" + -i ${CMAKE_SOURCE_DIR}/git.rc -o ${CMAKE_BINARY_DIR}/git.res + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + VERBATIM) + else()#MSVC use rc + add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/git.res + COMMAND ${CMAKE_RC_COMPILER} /d MAJOR=${PROJECT_VERSION_MAJOR} /d MINOR=${PROJECT_VERSION_MINOR} + /d MICRO=${PROJECT_VERSION_PATCH} /d PATCHLEVEL=0 /d GIT_VERSION="${PROJECT_VERSION}.GIT" + /fo ${CMAKE_BINARY_DIR}/git.res ${CMAKE_SOURCE_DIR}/git.rc + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + VERBATIM) + endif() + add_custom_target(git-rc DEPENDS ${CMAKE_BINARY_DIR}/git.res) +endif() + +#link all required libraries to common-main +add_library(common-main OBJECT ${CMAKE_SOURCE_DIR}/common-main.c) + +target_link_libraries(common-main libgit xdiff ${ZLIB_LIBRARIES}) +if(Intl_FOUND) + target_link_libraries(common-main ${Intl_LIBRARIES}) +endif() +if(Iconv_FOUND) + target_link_libraries(common-main ${Iconv_LIBRARIES}) +endif() +if(WIN32) + target_link_libraries(common-main ws2_32 ntdll ${CMAKE_BINARY_DIR}/git.res) + add_dependencies(common-main git-rc) + if(CMAKE_C_COMPILER_ID STREQUAL "GNU") + target_link_options(common-main PUBLIC -municode -Wl,--nxcompat -Wl,--dynamicbase -Wl,--pic-executable,-e,mainCRTStartup) + elseif(CMAKE_C_COMPILER_ID STREQUAL "Clang") + target_link_options(common-main PUBLIC -municode -Wl,-nxcompat -Wl,-dynamicbase -Wl,-entry:wmainCRTStartup -Wl,invalidcontinue.obj) + elseif(CMAKE_C_COMPILER_ID STREQUAL "MSVC") + target_link_options(common-main PUBLIC /IGNORE:4217 /IGNORE:4049 /NOLOGO /ENTRY:wmainCRTStartup /SUBSYSTEM:CONSOLE invalidcontinue.obj) + else() + message(FATAL_ERROR "Unhandled compiler: ${CMAKE_C_COMPILER_ID}") + endif() +elseif(UNIX) + target_link_libraries(common-main pthread rt) +endif() + +#git +parse_makefile_for_sources(git_SOURCES "BUILTIN_OBJS") + +list(TRANSFORM git_SOURCES PREPEND "${CMAKE_SOURCE_DIR}/") 
+add_executable(git ${CMAKE_SOURCE_DIR}/git.c ${git_SOURCES}) +target_link_libraries(git common-main) + +add_executable(git-daemon ${CMAKE_SOURCE_DIR}/daemon.c) +target_link_libraries(git-daemon common-main) + +add_executable(git-http-backend ${CMAKE_SOURCE_DIR}/http-backend.c) +target_link_libraries(git-http-backend common-main) + +add_executable(git-sh-i18n--envsubst ${CMAKE_SOURCE_DIR}/sh-i18n--envsubst.c) +target_link_libraries(git-sh-i18n--envsubst common-main) + +add_executable(git-shell ${CMAKE_SOURCE_DIR}/shell.c) +target_link_libraries(git-shell common-main) + +if(CURL_FOUND) + add_library(http_obj OBJECT ${CMAKE_SOURCE_DIR}/http.c) + + add_executable(git-imap-send ${CMAKE_SOURCE_DIR}/imap-send.c) + target_link_libraries(git-imap-send http_obj common-main ${CURL_LIBRARIES}) + + add_executable(git-http-fetch ${CMAKE_SOURCE_DIR}/http-walker.c ${CMAKE_SOURCE_DIR}/http-fetch.c) + target_link_libraries(git-http-fetch http_obj common-main ${CURL_LIBRARIES}) + + add_executable(git-remote-http ${CMAKE_SOURCE_DIR}/http-walker.c ${CMAKE_SOURCE_DIR}/remote-curl.c) + target_link_libraries(git-remote-http http_obj common-main ${CURL_LIBRARIES} ) + + if(EXPAT_FOUND) + add_executable(git-http-push ${CMAKE_SOURCE_DIR}/http-push.c) + target_link_libraries(git-http-push http_obj common-main ${CURL_LIBRARIES} ${EXPAT_LIBRARIES}) + endif() +endif() + +set(git_builtin_extra + cherry cherry-pick format-patch fsck-objects + init merge-subtree restore show + stage status switch whatchanged) + +#Creating hardlinks +foreach(s ${git_SOURCES} ${git_builtin_extra}) + string(REPLACE "${CMAKE_SOURCE_DIR}/builtin/" "" s ${s}) + string(REPLACE ".c" "" s ${s}) + file(APPEND ${CMAKE_BINARY_DIR}/CreateLinks.cmake "file(CREATE_LINK git${EXE_EXTENSION} git-${s}${EXE_EXTENSION})\n") + list(APPEND git_links ${CMAKE_BINARY_DIR}/git-${s}${EXE_EXTENSION}) +endforeach() + +if(CURL_FOUND) + set(remote_exes + git-remote-https git-remote-ftp git-remote-ftps) + foreach(s ${remote_exes}) + file(APPEND ${CMAKE_BINARY_DIR}/CreateLinks.cmake "file(CREATE_LINK git-remote-http${EXE_EXTENSION} ${s}${EXE_EXTENSION})\n") + list(APPEND git_http_links ${CMAKE_BINARY_DIR}/${s}${EXE_EXTENSION}) + endforeach() +endif() + +add_custom_command(OUTPUT ${git_links} ${git_http_links} + COMMAND ${CMAKE_COMMAND} -P ${CMAKE_BINARY_DIR}/CreateLinks.cmake + DEPENDS git git-remote-http) +add_custom_target(git-links ALL DEPENDS ${git_links} ${git_http_links}) + + +#creating required scripts +set(SHELL_PATH /bin/sh) +set(PERL_PATH /usr/bin/perl) +set(LOCALEDIR ${FALLBACK_RUNTIME_PREFIX}/share/locale) +set(GITWEBDIR ${FALLBACK_RUNTIME_PREFIX}/share/locale) +set(INSTLIBDIR ${FALLBACK_RUNTIME_PREFIX}/share/perl5) + +#shell scripts +parse_makefile_for_scripts(git_sh_scripts "SCRIPT_SH" ".sh") +parse_makefile_for_scripts(git_shlib_scripts "SCRIPT_LIB" "") +set(git_shell_scripts + ${git_sh_scripts} ${git_shlib_scripts} git-instaweb) + +foreach(script ${git_shell_scripts}) + file(STRINGS ${CMAKE_SOURCE_DIR}/${script}.sh content NEWLINE_CONSUME) + string(REPLACE "@SHELL_PATH@" "${SHELL_PATH}" content "${content}") + string(REPLACE "@@DIFF@@" "diff" content "${content}") + string(REPLACE "@LOCALEDIR@" "${LOCALEDIR}" content "${content}") + string(REPLACE "@GITWEBDIR@" "${GITWEBDIR}" content "${content}") + string(REPLACE "@@NO_CURL@@" "" content "${content}") + string(REPLACE "@@USE_GETTEXT_SCHEME@@" "" content "${content}") + string(REPLACE "# @@BROKEN_PATH_FIX@@" "" content "${content}") + string(REPLACE "@@PERL@@" "${PERL_PATH}" content "${content}") + 
string(REPLACE "@@SANE_TEXT_GREP@@" "-a" content "${content}") + string(REPLACE "@@PAGER_ENV@@" "LESS=FRX LV=-c" content "${content}") + file(WRITE ${CMAKE_BINARY_DIR}/${script} ${content}) +endforeach() + +#perl scripts +parse_makefile_for_scripts(git_perl_scripts "SCRIPT_PERL" ".perl") + +#create perl header +file(STRINGS ${CMAKE_SOURCE_DIR}/perl/header_templates/fixed_prefix.template.pl perl_header ) +string(REPLACE "@@PATHSEP@@" ":" perl_header "${perl_header}") +string(REPLACE "@@INSTLIBDIR@@" "${INSTLIBDIR}" perl_header "${perl_header}") + +foreach(script ${git_perl_scripts}) + file(STRINGS ${CMAKE_SOURCE_DIR}/${script}.perl content NEWLINE_CONSUME) + string(REPLACE "#!/usr/bin/perl" "#!/usr/bin/perl\n${perl_header}\n" content "${content}") + string(REPLACE "@@GIT_VERSION@@" "${PROJECT_VERSION}" content "${content}") + file(WRITE ${CMAKE_BINARY_DIR}/${script} ${content}) +endforeach() + +#python script +file(STRINGS ${CMAKE_SOURCE_DIR}/git-p4.py content NEWLINE_CONSUME) +string(REPLACE "#!/usr/bin/env python" "#!/usr/bin/python" content "${content}") +file(WRITE ${CMAKE_BINARY_DIR}/git-p4 ${content}) + +#perl modules +file(GLOB_RECURSE perl_modules "${CMAKE_SOURCE_DIR}/perl/*.pm") + +foreach(pm ${perl_modules}) + string(REPLACE "${CMAKE_SOURCE_DIR}/perl/" "" file_path ${pm}) + file(STRINGS ${pm} content NEWLINE_CONSUME) + string(REPLACE "@@LOCALEDIR@@" "${LOCALEDIR}" content "${content}") + string(REPLACE "@@NO_PERL_CPAN_FALLBACKS@@" "" content "${content}") + file(WRITE ${CMAKE_BINARY_DIR}/perl/build/lib/${file_path} ${content}) +#test-lib.sh requires perl/build/lib to be the build directory of perl modules +endforeach() + + +#templates +file(GLOB templates "${CMAKE_SOURCE_DIR}/templates/*") +list(TRANSFORM templates REPLACE "${CMAKE_SOURCE_DIR}/templates/" "") +list(REMOVE_ITEM templates ".gitignore") +list(REMOVE_ITEM templates "Makefile") +list(REMOVE_ITEM templates "blt")# Prevents an error when reconfiguring for in source builds + +list(REMOVE_ITEM templates "branches--") +file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/templates/blt/branches) #create branches + +#templates have @.*@ replacement so use configure_file instead +foreach(tm ${templates}) + string(REPLACE "--" "/" blt_tm ${tm}) + string(REPLACE "this" "" blt_tm ${blt_tm})# for this-- + configure_file(${CMAKE_SOURCE_DIR}/templates/${tm} ${CMAKE_BINARY_DIR}/templates/blt/${blt_tm} @ONLY) +endforeach() + + +#translations +if(MSGFMT_EXE) + file(GLOB po_files "${CMAKE_SOURCE_DIR}/po/*.po") + list(TRANSFORM po_files REPLACE "${CMAKE_SOURCE_DIR}/po/" "") + list(TRANSFORM po_files REPLACE ".po" "") + foreach(po ${po_files}) + file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/po/build/locale/${po}/LC_MESSAGES) + add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/po/build/locale/${po}/LC_MESSAGES/git.mo + COMMAND ${MSGFMT_EXE} --check --statistics -o ${CMAKE_BINARY_DIR}/po/build/locale/${po}/LC_MESSAGES/git.mo ${CMAKE_SOURCE_DIR}/po/${po}.po) + list(APPEND po_gen ${CMAKE_BINARY_DIR}/po/build/locale/${po}/LC_MESSAGES/git.mo) + endforeach() + add_custom_target(po-gen ALL DEPENDS ${po_gen}) +endif() + + +#to help with the install +list(TRANSFORM git_shell_scripts PREPEND "${CMAKE_BINARY_DIR}/") +list(TRANSFORM git_perl_scripts PREPEND "${CMAKE_BINARY_DIR}/") + +#install +install(TARGETS git git-shell + RUNTIME DESTINATION bin) +install(PROGRAMS ${CMAKE_BINARY_DIR}/git-cvsserver + DESTINATION bin) + +list(REMOVE_ITEM PROGRAMS_BUILT git git-shell) +install(TARGETS ${PROGRAMS_BUILT} + RUNTIME DESTINATION libexec/git-core) + +set(bin_links + 
git-receive-pack git-upload-archive git-upload-pack) + +foreach(b ${bin_links}) +install(CODE "file(CREATE_LINK ${CMAKE_INSTALL_PREFIX}/bin/git${EXE_EXTENSION} ${CMAKE_INSTALL_PREFIX}/bin/${b}${EXE_EXTENSION})") +endforeach() + +install(CODE "file(CREATE_LINK ${CMAKE_INSTALL_PREFIX}/bin/git${EXE_EXTENSION} ${CMAKE_INSTALL_PREFIX}/libexec/git-core/git${EXE_EXTENSION})") +install(CODE "file(CREATE_LINK ${CMAKE_INSTALL_PREFIX}/bin/git-shell${EXE_EXTENSION} ${CMAKE_INSTALL_PREFIX}/libexec/git-core/git-shell${EXE_EXTENSION})") + +foreach(b ${git_links}) + string(REPLACE "${CMAKE_BINARY_DIR}" "" b ${b}) + install(CODE "file(CREATE_LINK ${CMAKE_INSTALL_PREFIX}/bin/git${EXE_EXTENSION} ${CMAKE_INSTALL_PREFIX}/libexec/git-core/${b}${EXE_EXTENSION})") +endforeach() + +foreach(b ${git_http_links}) + string(REPLACE "${CMAKE_BINARY_DIR}" "" b ${b}) + install(CODE "file(CREATE_LINK ${CMAKE_INSTALL_PREFIX}/libexec/git-core/git-remote-http${EXE_EXTENSION} ${CMAKE_INSTALL_PREFIX}/libexec/git-core/${b}${EXE_EXTENSION})") +endforeach() + +install(PROGRAMS ${git_shell_scripts} ${git_perl_scripts} ${CMAKE_BINARY_DIR}/git-p4 + DESTINATION libexec/git-core) + +install(DIRECTORY ${CMAKE_SOURCE_DIR}/mergetools DESTINATION libexec/git-core) +install(DIRECTORY ${CMAKE_BINARY_DIR}/perl/build/lib/ DESTINATION share/perl5 + FILES_MATCHING PATTERN "*.pm") +install(DIRECTORY ${CMAKE_BINARY_DIR}/templates/blt/ DESTINATION share/git-core/templates) + +if(MSGFMT_EXE) + install(DIRECTORY ${CMAKE_BINARY_DIR}/po/build/locale DESTINATION share) +endif() + + +if(BUILD_TESTING) + +#tests-helpers +add_executable(test-fake-ssh ${CMAKE_SOURCE_DIR}/t/helper/test-fake-ssh.c) +target_link_libraries(test-fake-ssh common-main) + +#test-tool +parse_makefile_for_sources(test-tool_SOURCES "TEST_BUILTINS_OBJS") + +list(TRANSFORM test-tool_SOURCES PREPEND "${CMAKE_SOURCE_DIR}/t/helper/") +add_executable(test-tool ${CMAKE_SOURCE_DIR}/t/helper/test-tool.c ${test-tool_SOURCES}) +target_link_libraries(test-tool common-main) + +set_target_properties(test-fake-ssh test-tool + PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/t/helper) + +if(MSVC) + set_target_properties(test-fake-ssh test-tool + PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG ${CMAKE_BINARY_DIR}/t/helper) + set_target_properties(test-fake-ssh test-tool + PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE ${CMAKE_BINARY_DIR}/t/helper) +endif() + +#wrapper scripts +set(wrapper_scripts + git git-upload-pack git-receive-pack git-upload-archive git-shell git-remote-ext) + +set(wrapper_test_scripts + test-fake-ssh test-tool) + + +foreach(script ${wrapper_scripts}) + file(STRINGS ${CMAKE_SOURCE_DIR}/wrap-for-bin.sh content NEWLINE_CONSUME) + string(REPLACE "@@BUILD_DIR@@" "${CMAKE_BINARY_DIR}" content "${content}") + string(REPLACE "@@PROG@@" "${script}${EXE_EXTENSION}" content "${content}") + file(WRITE ${CMAKE_BINARY_DIR}/bin-wrappers/${script} ${content}) +endforeach() + +foreach(script ${wrapper_test_scripts}) + file(STRINGS ${CMAKE_SOURCE_DIR}/wrap-for-bin.sh content NEWLINE_CONSUME) + string(REPLACE "@@BUILD_DIR@@" "${CMAKE_BINARY_DIR}" content "${content}") + string(REPLACE "@@PROG@@" "t/helper/${script}${EXE_EXTENSION}" content "${content}") + file(WRITE ${CMAKE_BINARY_DIR}/bin-wrappers/${script} ${content}) +endforeach() + +file(STRINGS ${CMAKE_SOURCE_DIR}/wrap-for-bin.sh content NEWLINE_CONSUME) +string(REPLACE "@@BUILD_DIR@@" "${CMAKE_BINARY_DIR}" content "${content}") +string(REPLACE "@@PROG@@" "git-cvsserver" content "${content}") +file(WRITE 
${CMAKE_BINARY_DIR}/bin-wrappers/git-cvsserver ${content}) + +#options for configuring test options +option(PERL_TESTS "Perform tests that use perl" ON) +option(PYTHON_TESTS "Perform tests that use python" ON) + +#GIT-BUILD-OPTIONS +set(TEST_SHELL_PATH ${SHELL_PATH}) +set(DIFF diff) +set(PYTHON_PATH /usr/bin/python) +set(TAR tar) +set(NO_CURL ) +set(NO_EXPAT ) +set(USE_LIBPCRE1 ) +set(USE_LIBPCRE2 ) +set(NO_LIBPCRE1_JIT ) +set(NO_PERL ) +set(NO_PTHREADS ) +set(NO_PYTHON ) +set(PAGER_ENV "LESS=FRX LV=-c") +set(DC_SHA1 YesPlease) +set(RUNTIME_PREFIX true) +set(NO_GETTEXT ) + +if(NOT CURL_FOUND) + set(NO_CURL 1) +endif() + +if(NOT EXPAT_FOUND) + set(NO_EXPAT 1) +endif() + +if(NOT Intl_FOUND) + set(NO_GETTEXT 1) +endif() + +if(NOT PERL_TESTS) + set(NO_PERL 1) +endif() + +if(NOT PYTHON_TESTS) + set(NO_PYTHON 1) +endif() + +file(WRITE ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "SHELL_PATH='${SHELL_PATH}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "TEST_SHELL_PATH='${TEST_SHELL_PATH}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "PERL_PATH='${PERL_PATH}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "DIFF='${DIFF}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "PYTHON_PATH='${PYTHON_PATH}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "TAR='${TAR}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_CURL='${NO_CURL}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_EXPAT='${NO_EXPAT}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "USE_LIBPCRE1='${USE_LIBPCRE1}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_LIBPCRE1_JIT='${NO_LIBPCRE1_JIT}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_PERL='${NO_PERL}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_PTHREADS='${NO_PTHREADS}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_UNIX_SOCKETS='${NO_UNIX_SOCKETS}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "PAGER_ENV='${PAGER_ENV}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "DC_SHA1='${DC_SHA1}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "X='${EXE_EXTENSION}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_GETTEXT='${NO_GETTEXT}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "RUNTIME_PREFIX='${RUNTIME_PREFIX}'\n") +file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_PYTHON='${NO_PYTHON}'\n") +if(WIN32) + file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "PATH=\"$PATH:$TEST_DIRECTORY/../compat/vcbuild/vcpkg/installed/x64-windows/bin\"\n") +endif() + +#Make the tests work when building out of the source tree +get_filename_component(CACHE_PATH ${CMAKE_CURRENT_LIST_DIR}/../../CMakeCache.txt ABSOLUTE) +if(NOT ${CMAKE_BINARY_DIR}/CMakeCache.txt STREQUAL ${CACHE_PATH}) + file(RELATIVE_PATH BUILD_DIR_RELATIVE ${CMAKE_SOURCE_DIR} ${CMAKE_BINARY_DIR}/CMakeCache.txt) + string(REPLACE "/CMakeCache.txt" "" BUILD_DIR_RELATIVE ${BUILD_DIR_RELATIVE}) + #Setting the build directory in test-lib.sh before running tests + file(WRITE ${CMAKE_BINARY_DIR}/CTestCustom.cmake + "file(STRINGS ${CMAKE_SOURCE_DIR}/t/test-lib.sh GIT_BUILD_DIR_REPL REGEX \"GIT_BUILD_DIR=(.*)\")\n" + "file(STRINGS ${CMAKE_SOURCE_DIR}/t/test-lib.sh content NEWLINE_CONSUME)\n" + "string(REPLACE \"\${GIT_BUILD_DIR_REPL}\" \"GIT_BUILD_DIR=\\\"$TEST_DIRECTORY/../${BUILD_DIR_RELATIVE}\\\"\" content \"\${content}\")\n" + "file(WRITE ${CMAKE_SOURCE_DIR}/t/test-lib.sh \${content})") + #misc copies + file(COPY ${CMAKE_SOURCE_DIR}/t/chainlint.sed DESTINATION ${CMAKE_BINARY_DIR}/t/) + 
file(COPY ${CMAKE_SOURCE_DIR}/po/is.po DESTINATION ${CMAKE_BINARY_DIR}/po/) + file(COPY ${CMAKE_SOURCE_DIR}/mergetools/tkdiff DESTINATION ${CMAKE_BINARY_DIR}/mergetools/) + file(COPY ${CMAKE_SOURCE_DIR}/contrib/completion/git-prompt.sh DESTINATION ${CMAKE_BINARY_DIR}/contrib/completion/) + file(COPY ${CMAKE_SOURCE_DIR}/contrib/completion/git-completion.bash DESTINATION ${CMAKE_BINARY_DIR}/contrib/completion/) +endif() + +file(GLOB test_scipts "${CMAKE_SOURCE_DIR}/t/t[0-9]*.sh") + +#test +foreach(tsh ${test_scipts}) + add_test(NAME ${tsh} + COMMAND ${SH_EXE} ${tsh} + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/t) +endforeach() + +endif()#BUILD_TESTING diff --git a/contrib/buildsystems/Generators/Vcxproj.pm b/contrib/buildsystems/Generators/Vcxproj.pm index 5c666f9ac0..d2584450ba 100644 --- a/contrib/buildsystems/Generators/Vcxproj.pm +++ b/contrib/buildsystems/Generators/Vcxproj.pm @@ -80,6 +80,7 @@ sub createProject { $libs_release = join(";", sort(grep /^(?!libgit\.lib|xdiff\/lib\.lib|vcs-svn\/lib\.lib)/, @{$$build_structure{"$prefix${name}_LIBS"}})); $libs_debug = $libs_release; $libs_debug =~ s/zlib\.lib/zlibd\.lib/g; + $libs_debug =~ s/libexpat\.lib/libexpatd\.lib/g; $libs_debug =~ s/libcurl\.lib/libcurl-d\.lib/g; } diff --git a/contrib/buildsystems/engine.pl b/contrib/buildsystems/engine.pl index 070978506a..2ff9620459 100755 --- a/contrib/buildsystems/engine.pl +++ b/contrib/buildsystems/engine.pl @@ -349,7 +349,7 @@ sub handleLinkLine } elsif ("$part" eq "-lcurl") { push(@libs, "libcurl.lib"); } elsif ("$part" eq "-lexpat") { - push(@libs, "expat.lib"); + push(@libs, "libexpat.lib"); } elsif ("$part" eq "-liconv") { push(@libs, "libiconv.lib"); } elsif ($part =~ /^[-\/]/) { diff --git a/contrib/coccinelle/commit.cocci b/contrib/coccinelle/commit.cocci index 778e4704f6..af6dd4c20c 100644 --- a/contrib/coccinelle/commit.cocci +++ b/contrib/coccinelle/commit.cocci @@ -32,3 +32,21 @@ expression c; - c->maybe_tree + repo_get_commit_tree(specify_the_right_repo_here, c) ...>} + +@@ +struct commit *c; +expression E; +@@ +( +- c->generation = E; ++ commit_graph_data_at(c)->generation = E; +| +- c->graph_pos = E; ++ commit_graph_data_at(c)->graph_pos = E; +| +- c->generation ++ commit_graph_generation(c) +| +- c->graph_pos ++ commit_graph_position(c) +) diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash index c21786f2fd..0a96ad87e7 100644 --- a/contrib/completion/git-completion.bash +++ b/contrib/completion/git-completion.bash @@ -39,6 +39,11 @@ # When set to "1", do not include "DWIM" suggestions in git-checkout # and git-switch completion (e.g., completing "foo" when "origin/foo" # exists). +# +# GIT_COMPLETION_SHOW_ALL +# +# When set to "1" suggest all options, including options which are +# typically hidden (e.g. '--allow-empty' for 'git commit'). case "$COMP_WORDBREAKS" in *:*) : great ;; @@ -50,7 +55,7 @@ esac # variable. __git_find_repo_path () { - if [ -n "$__git_repo_path" ]; then + if [ -n "${__git_repo_path-}" ]; then # we already know where it is return fi @@ -301,6 +306,19 @@ __gitcomp_direct () COMPREPLY=($1) } +# Similar to __gitcomp_direct, but appends to COMPREPLY instead. +# Callers must take care of providing only words that match the current word +# to be completed and adding any prefix and/or suffix (trailing space!), if +# necessary. +# 1: List of newline-separated matching completion words, complete with +# prefix and suffix. 
+__gitcomp_direct_append () +{ + local IFS=$'\n' + + COMPREPLY+=($1) +} + __gitcompappend () { local x i=${#COMPREPLY[@]} @@ -373,7 +391,7 @@ __gitcomp () # Clear the variables caching builtins' options when (re-)sourcing # the completion script. if [[ -n ${ZSH_VERSION-} ]]; then - unset $(set |sed -ne 's/^\(__gitcomp_builtin_[a-zA-Z0-9_][a-zA-Z0-9_]*\)=.*/\1/p') 2>/dev/null + unset ${(M)${(k)parameters[@]}:#__gitcomp_builtin_*} 2>/dev/null else unset $(compgen -v __gitcomp_builtin_) fi @@ -391,17 +409,24 @@ __gitcomp_builtin () # spaces must be replaced with underscore for multi-word # commands, e.g. "git remote add" becomes remote_add. local cmd="$1" - local incl="$2" - local excl="$3" + local incl="${2-}" + local excl="${3-}" local var=__gitcomp_builtin_"${cmd/-/_}" local options - eval "options=\$$var" + eval "options=\${$var-}" + + local completion_helper + if [ "$GIT_COMPLETION_SHOW_ALL" = "1" ]; then + completion_helper="--git-completion-helper-all" + else + completion_helper="--git-completion-helper" + fi if [ -z "$options" ]; then # leading and trailing spaces are significant to make # option removal work correctly. - options=" $incl $(__git ${cmd/_/ } --git-completion-helper) " || return + options=" $incl $(__git ${cmd/_/ } $completion_helper) " || return for i in $excl; do options="${options/ $i / }" @@ -504,7 +529,7 @@ __git_index_files () { local root="$2" match="$3" - __git_ls_files_helper "$root" "$1" "$match" | + __git_ls_files_helper "$root" "$1" "${match:-?}" | awk -F / -v pfx="${2//\\/\\\\}" '{ paths[$1] = 1 } @@ -611,6 +636,19 @@ __git_heads () "refs/heads/$cur_*" "refs/heads/$cur_*/**" } +# Lists branches from remote repositories. +# 1: A prefix to be added to each listed branch (optional). +# 2: List only branches matching this word (optional; list all branches if +# unset or empty). +# 3: A suffix to be appended to each listed branch (optional). +__git_remote_heads () +{ + local pfx="${1-}" cur_="${2-}" sfx="${3-}" + + __git for-each-ref --format="${pfx//\%/%%}%(refname:strip=2)$sfx" \ + "refs/remotes/$cur_*" "refs/remotes/$cur_*/**" +} + # Lists tags from the local repository. # Accepts the same positional parameters as __git_heads() above. __git_tags () @@ -621,6 +659,26 @@ __git_tags () "refs/tags/$cur_*" "refs/tags/$cur_*/**" } +# List unique branches from refs/remotes used for 'git checkout' and 'git +# switch' tracking DWIMery. +# 1: A prefix to be added to each listed branch (optional) +# 2: List only branches matching this word (optional; list all branches if +# unset or empty). +# 3: A suffix to be appended to each listed branch (optional). +__git_dwim_remote_heads () +{ + local pfx="${1-}" cur_="${2-}" sfx="${3-}" + local fer_pfx="${pfx//\%/%%}" # "escape" for-each-ref format specifiers + + # employ the heuristic used by git checkout and git switch + # Try to find a remote branch that cur_es the completion word + # but only output if the branch name is unique + __git for-each-ref --format="$fer_pfx%(refname:strip=3)$sfx" \ + --sort="refname:strip=3" \ + "refs/remotes/*/$cur_*" "refs/remotes/*/$cur_*/**" | \ + uniq -u +} + # Lists refs from the local (by default) or from a remote repository. # It accepts 0, 1 or 2 arguments: # 1: The remote to list refs from (optional; ignored, if set but empty). 
@@ -696,13 +754,7 @@ __git_refs () __git_dir="$dir" __git for-each-ref --format="$fer_pfx%($format)$sfx" \ "${refs[@]}" if [ -n "$track" ]; then - # employ the heuristic used by git checkout - # Try to find a remote branch that matches the completion word - # but only output if the branch name is unique - __git for-each-ref --format="$fer_pfx%(refname:strip=3)$sfx" \ - --sort="refname:strip=3" \ - "refs/remotes/*/$match*" "refs/remotes/*/$match*/**" | \ - uniq -u + __git_dwim_remote_heads "$pfx" "$match" "$sfx" fi return fi @@ -749,29 +801,51 @@ __git_refs () # Usage: __git_complete_refs [<option>]... # --remote=<remote>: The remote to list refs from, can be the name of a # configured remote, a path, or a URL. -# --track: List unique remote branches for 'git checkout's tracking DWIMery. +# --dwim: List unique remote branches for 'git switch's tracking DWIMery. # --pfx=<prefix>: A prefix to be added to each ref. # --cur=<word>: The current ref to be completed. Defaults to the current # word to be completed. # --sfx=<suffix>: A suffix to be appended to each ref instead of the default # space. +# --mode=<mode>: What set of refs to complete, one of 'refs' (the default) to +# complete all refs, 'heads' to complete only branches, or +# 'remote-heads' to complete only remote branches. Note that +# --remote is only compatible with --mode=refs. __git_complete_refs () { - local remote track pfx cur_="$cur" sfx=" " + local remote= dwim= pfx= cur_="$cur" sfx=" " mode="refs" while test $# != 0; do case "$1" in --remote=*) remote="${1##--remote=}" ;; - --track) track="yes" ;; + --dwim) dwim="yes" ;; + # --track is an old spelling of --dwim + --track) dwim="yes" ;; --pfx=*) pfx="${1##--pfx=}" ;; --cur=*) cur_="${1##--cur=}" ;; --sfx=*) sfx="${1##--sfx=}" ;; + --mode=*) mode="${1##--mode=}" ;; *) return 1 ;; esac shift done - __gitcomp_direct "$(__git_refs "$remote" "$track" "$pfx" "$cur_" "$sfx")" + # complete references based on the specified mode + case "$mode" in + refs) + __gitcomp_direct "$(__git_refs "$remote" "" "$pfx" "$cur_" "$sfx")" ;; + heads) + __gitcomp_direct "$(__git_heads "$pfx" "$cur_" "$sfx")" ;; + remote-heads) + __gitcomp_direct "$(__git_remote_heads "$pfx" "$cur_" "$sfx")" ;; + *) + return 1 ;; + esac + + # Append DWIM remote branch names if requested + if [ "$dwim" = "yes" ]; then + __gitcomp_direct_append "$(__git_dwim_remote_heads "$pfx" "$cur_" "$sfx")" + fi } # __git_refs2 requires 1 argument (to pass to __git_refs) @@ -1090,7 +1164,7 @@ __git_find_on_cmdline () while [ $c -lt $cword ]; do for word in $wordlist; do if [ "$word" = "${words[c]}" ]; then - if [ -n "$show_idx" ]; then + if [ -n "${show_idx-}" ]; then echo "$c $word" else echo "$word" @@ -1102,6 +1176,40 @@ __git_find_on_cmdline () done } +# Similar to __git_find_on_cmdline, except that it loops backwards and thus +# prints the *last* word found. Useful for finding which of two options that +# supersede each other came last, such as "--guess" and "--no-guess". +# +# Usage: __git_find_last_on_cmdline [<option>]... "<wordlist>" +# --show-idx: Optionally show the index of the found word in the $words array. 
+__git_find_last_on_cmdline () +{ + local word c=$cword show_idx + + while test $# -gt 1; do + case "$1" in + --show-idx) show_idx=y ;; + *) return 1 ;; + esac + shift + done + local wordlist="$1" + + while [ $c -gt 1 ]; do + ((c--)) + for word in $wordlist; do + if [ "$word" = "${words[c]}" ]; then + if [ -n "$show_idx" ]; then + echo "$c $word" + else + echo "$word" + fi + return + fi + done + done +} + # Echo the value of an option set on the command line or config # # $1: short option name @@ -1356,10 +1464,66 @@ _git_bundle () esac } +# Helper function to decide whether or not we should enable DWIM logic for +# git-switch and git-checkout. +# +# To decide between the following rules in priority order +# 1) the last provided of "--guess" or "--no-guess" explicitly enable or +# disable completion of DWIM logic respectively. +# 2) If the --no-track option is provided, take this as a hint to disable the +# DWIM completion logic +# 3) If GIT_COMPLETION_CHECKOUT_NO_GUESS is set, disable the DWIM completion +# logic, as requested by the user. +# 4) Enable DWIM logic otherwise. +# +__git_checkout_default_dwim_mode () +{ + local last_option dwim_opt="--dwim" + + if [ "${GIT_COMPLETION_CHECKOUT_NO_GUESS-}" = "1" ]; then + dwim_opt="" + fi + + # --no-track disables DWIM, but with lower priority than + # --guess/--no-guess + if [ -n "$(__git_find_on_cmdline "--no-track")" ]; then + dwim_opt="" + fi + + # Find the last provided --guess or --no-guess + last_option="$(__git_find_last_on_cmdline "--guess --no-guess")" + case "$last_option" in + --guess) + dwim_opt="--dwim" + ;; + --no-guess) + dwim_opt="" + ;; + esac + + echo "$dwim_opt" +} + _git_checkout () { __git_has_doubledash && return + local dwim_opt="$(__git_checkout_default_dwim_mode)" + + case "$prev" in + -b|-B|--orphan) + # Complete local branches (and DWIM branch + # remote branch names) for an option argument + # specifying a new branch name. This is for + # convenience, assuming new branches are + # possibly based on pre-existing branch names. + __git_complete_refs $dwim_opt --mode="heads" + return + ;; + *) + ;; + esac + case "$cur" in --conflict=*) __gitcomp "diff3 merge" "" "${cur##--conflict=}" @@ -1368,14 +1532,21 @@ _git_checkout () __gitcomp_builtin checkout ;; *) - # check if --track, --no-track, or --no-guess was specified - # if so, disable DWIM mode - local flags="--track --no-track --no-guess" track_opt="--track" - if [ "$GIT_COMPLETION_CHECKOUT_NO_GUESS" = "1" ] || - [ -n "$(__git_find_on_cmdline "$flags")" ]; then - track_opt='' + # At this point, we've already handled special completion for + # the arguments to -b/-B, and --orphan. 
There are 3 main + # things left we can possibly complete: + # 1) a start-point for -b/-B, -d/--detach, or --orphan + # 2) a remote head, for --track + # 3) an arbitrary reference, possibly including DWIM names + # + + if [ -n "$(__git_find_on_cmdline "-b -B -d --detach --orphan")" ]; then + __git_complete_refs --mode="refs" + elif [ -n "$(__git_find_on_cmdline "--track")" ]; then + __git_complete_refs --mode="remote-heads" + else + __git_complete_refs $dwim_opt --mode="refs" fi - __git_complete_refs $track_opt ;; esac } @@ -1552,8 +1723,8 @@ _git_diff () } __git_mergetools_common="diffuse diffmerge ecmerge emerge kdiff3 meld opendiff - tkdiff vimdiff gvimdiff xxdiff araxis p4merge bc - codecompare smerge + tkdiff vimdiff nvimdiff gvimdiff xxdiff araxis p4merge + bc codecompare smerge " _git_difftool () @@ -1612,6 +1783,10 @@ _git_format_patch () " "" "${cur##--thread=}" return ;; + --base=*|--interdiff=*|--range-diff=*) + __git_complete_refs --cur="${cur#--*=}" + return + ;; --*) __gitcomp_builtin format-patch "$__git_format_patch_extra_options" return @@ -1860,6 +2035,7 @@ _git_log () $merge $__git_diff_common_options --pickaxe-all --pickaxe-regex + --patch --no-patch " return ;; @@ -2215,6 +2391,22 @@ _git_status () _git_switch () { + local dwim_opt="$(__git_checkout_default_dwim_mode)" + + case "$prev" in + -c|-C|--orphan) + # Complete local branches (and DWIM branch + # remote branch names) for an option argument + # specifying a new branch name. This is for + # convenience, assuming new branches are + # possibly based on pre-existing branch names. + __git_complete_refs $dwim_opt --mode="heads" + return + ;; + *) + ;; + esac + case "$cur" in --conflict=*) __gitcomp "diff3 merge" "" "${cur##--conflict=}" @@ -2223,29 +2415,26 @@ _git_switch () __gitcomp_builtin switch ;; *) - # check if --track, --no-track, or --no-guess was specified - # if so, disable DWIM mode - local track_opt="--track" only_local_ref=n - if [ "$GIT_COMPLETION_CHECKOUT_NO_GUESS" = "1" ] || - [ -n "$(__git_find_on_cmdline "--track --no-track --no-guess")" ]; then - track_opt='' - fi - # explicit --guess enables DWIM mode regardless of - # $GIT_COMPLETION_CHECKOUT_NO_GUESS - if [ -n "$(__git_find_on_cmdline "--guess")" ]; then - track_opt='--track' - fi - if [ -z "$(__git_find_on_cmdline "-d --detach")" ]; then - only_local_ref=y - else - # --guess --detach is invalid combination, no - # dwim will be done when --detach is specified - track_opt= + # Unlike in git checkout, git switch --orphan does not take + # a start point. Thus we really have nothing to complete after + # the branch name. + if [ -n "$(__git_find_on_cmdline "--orphan")" ]; then + return fi - if [ $only_local_ref = y -a -z "$track_opt" ]; then - __gitcomp_direct "$(__git_heads "" "$cur" " ")" + + # At this point, we've already handled special completion for + # -c/-C, and --orphan. 
There are 3 main things left to + # complete: + # 1) a start-point for -c/-C or -d/--detach + # 2) a remote head, for --track + # 3) a branch name, possibly including DWIM remote branches + + if [ -n "$(__git_find_on_cmdline "-c -C -d --detach")" ]; then + __git_complete_refs --mode="refs" + elif [ -n "$(__git_find_on_cmdline "--track")" ]; then + __git_complete_refs --mode="remote-heads" else - __git_complete_refs $track_opt + __git_complete_refs $dwim_opt --mode="heads" fi ;; esac @@ -2652,6 +2841,13 @@ _git_reset () _git_restore () { + case "$prev" in + -s) + __git_complete_refs + return + ;; + esac + case "$cur" in --conflict=*) __gitcomp "diff3 merge" "" "${cur##--conflict=}" @@ -2732,6 +2928,14 @@ _git_show () __gitcomp "$__git_diff_submodule_formats" "" "${cur##--submodule=}" return ;; + --color-moved=*) + __gitcomp "$__git_color_moved_opts" "" "${cur##--color-moved=}" + return + ;; + --color-moved-ws=*) + __gitcomp "$__git_color_moved_ws_opts" "" "${cur##--color-moved-ws=}" + return + ;; --*) __gitcomp "--pretty= --format= --abbrev-commit --no-abbrev-commit --oneline --show-signature --patch @@ -2781,7 +2985,7 @@ _git_stash () local save_opts='--all --keep-index --no-keep-index --quiet --patch --include-untracked' local subcommands='push list show apply clear drop pop create branch' local subcommand="$(__git_find_on_cmdline "$subcommands save")" - if [ -n "$(__git_find_on_cmdline "-p")" ]; then + if [ -z "$subcommand" -a -n "$(__git_find_on_cmdline "-p")" ]; then subcommand="push" fi if [ -z "$subcommand" ]; then @@ -3175,7 +3379,7 @@ __git_main () ((c++)) done - if [ -z "$command" ]; then + if [ -z "${command-}" ]; then case "$prev" in --git-dir|-C|--work-tree) # these need a path argument, let's fall back to @@ -3210,7 +3414,7 @@ __git_main () " ;; *) - if test -n "$GIT_TESTING_PORCELAIN_COMMAND_LIST" + if test -n "${GIT_TESTING_PORCELAIN_COMMAND_LIST-}" then __gitcomp "$GIT_TESTING_PORCELAIN_COMMAND_LIST" else diff --git a/contrib/completion/git-completion.zsh b/contrib/completion/git-completion.zsh index eef4eff53d..ce47e86b60 100644 --- a/contrib/completion/git-completion.zsh +++ b/contrib/completion/git-completion.zsh @@ -150,9 +150,11 @@ __git_zsh_cmd_common () push:'update remote refs along with associated objects' rebase:'forward-port local commits to the updated upstream head' reset:'reset current HEAD to the specified state' + restore:'restore working tree files' rm:'remove files from the working tree and from the index' show:'show various types of objects' status:'show the working tree status' + switch:'switch branches' tag:'create, list, delete or verify a tag object signed with GPG') _describe -t common-commands 'common commands' list && _ret=0 } diff --git a/contrib/completion/git-prompt.sh b/contrib/completion/git-prompt.sh index 014cd7c3cf..16260bab73 100644 --- a/contrib/completion/git-prompt.sh +++ b/contrib/completion/git-prompt.sh @@ -70,6 +70,15 @@ # state symbols by setting GIT_PS1_STATESEPARATOR. The default separator # is SP. # +# When there is an in-progress operation such as a merge, rebase, +# revert, cherry-pick, or bisect, the prompt will include information +# related to the operation, often in the form "|<OPERATION-NAME>". +# +# When the repository has a sparse-checkout, a notification of the form +# "|SPARSE" will be included in the prompt. This can be shortened to a +# single '?' character by setting GIT_PS1_COMPRESSSPARSESTATE, or omitted +# by setting GIT_PS1_OMITSPARSESTATE. 
+# # By default, __git_ps1 will compare HEAD to your SVN upstream if it can # find one, or @{upstream} otherwise. Once you have set # GIT_PS1_SHOWUPSTREAM, you can override it on a per-repository basis by @@ -421,6 +430,13 @@ __git_ps1 () return $exit fi + local sparse="" + if [ -z "${GIT_PS1_COMPRESSSPARSESTATE}" ] && + [ -z "${GIT_PS1_OMITSPARSESTATE}" ] && + [ "$(git config --bool core.sparseCheckout)" = "true" ]; then + sparse="|SPARSE" + fi + local r="" local b="" local step="" @@ -492,6 +508,7 @@ __git_ps1 () local i="" local s="" local u="" + local h="" local c="" local p="" @@ -524,6 +541,11 @@ __git_ps1 () u="%${ZSH_VERSION+%}" fi + if [ -n "${GIT_PS1_COMPRESSSPARSESTATE}" ] && + [ "$(git config --bool core.sparseCheckout)" = "true" ]; then + h="?" + fi + if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then __git_ps1_show_upstream fi @@ -542,8 +564,8 @@ __git_ps1 () b="\${__git_ps1_branch_name}" fi - local f="$w$i$s$u" - local gitstring="$c$b${f:+$z$f}$r$p" + local f="$h$w$i$s$u" + local gitstring="$c$b${f:+$z$f}${sparse}$r$p" if [ $pcmode = yes ]; then if [ "${__git_printf_supports_v-}" != yes ]; then diff --git a/contrib/diff-highlight/DiffHighlight.pm b/contrib/diff-highlight/DiffHighlight.pm index e2589922a6..376f577737 100644 --- a/contrib/diff-highlight/DiffHighlight.pm +++ b/contrib/diff-highlight/DiffHighlight.pm @@ -112,7 +112,7 @@ sub handle_line { # Since we can receive arbitrary input, there's no optimal # place to flush. Flushing on a blank line is a heuristic that # happens to match git-log output. - if (!length) { + if (/^$/) { $flush_cb->(); } } diff --git a/contrib/fast-import/import-tars.perl b/contrib/fast-import/import-tars.perl index e800d9f5c9..d50ce26d5d 100755 --- a/contrib/fast-import/import-tars.perl +++ b/contrib/fast-import/import-tars.perl @@ -139,6 +139,8 @@ foreach my $tar_file (@ARGV) print FI "\n"; } + next if ($typeflag eq 'g'); # ignore global header + my $path; if ($prefix) { $path = "$prefix/$name"; diff --git a/contrib/mw-to-git/git-mw.perl b/contrib/mw-to-git/git-mw.perl index 28df3ee321..eb52a53d32 100755 --- a/contrib/mw-to-git/git-mw.perl +++ b/contrib/mw-to-git/git-mw.perl @@ -6,7 +6,7 @@ # License: GPL v2 or later # Set of tools for git repo with a mediawiki remote. -# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/ +# Documentation & bugtracker: https://github.com/Git-Mediawiki/Git-Mediawiki use strict; use warnings; diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl index d8ff2e69c4..a5624413dc 100755 --- a/contrib/mw-to-git/git-remote-mediawiki.perl +++ b/contrib/mw-to-git/git-remote-mediawiki.perl @@ -9,7 +9,7 @@ # License: GPL v2 or later # Gateway between Git and MediaWiki. -# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/ +# Documentation & bugtracker: https://github.com/Git-Mediawiki/Git-Mediawiki use strict; use MediaWiki::API; @@ -56,38 +56,38 @@ my $url = $ARGV[1]; # Accept both space-separated and multiple keys in config file. # Spaces should be written as _ anyway because we'll use chomp. -my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages")); +my @tracked_pages = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.pages"])); chomp(@tracked_pages); # Just like @tracked_pages, but for MediaWiki categories. 
-my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories")); +my @tracked_categories = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.categories"])); chomp(@tracked_categories); # Just like @tracked_categories, but for MediaWiki namespaces. -my @tracked_namespaces = split(/[ \n]/, run_git("config --get-all remote.${remotename}.namespaces")); +my @tracked_namespaces = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.namespaces"])); for (@tracked_namespaces) { s/_/ /g; } chomp(@tracked_namespaces); # Import media files on pull -my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport"); +my $import_media = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.mediaimport"]); chomp($import_media); $import_media = ($import_media eq 'true'); # Export media files on push -my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport"); +my $export_media = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.mediaexport"]); chomp($export_media); $export_media = !($export_media eq 'false'); -my $wiki_login = run_git("config --get remote.${remotename}.mwLogin"); +my $wiki_login = run_git_quoted(["config", "--get", "remote.${remotename}.mwLogin"]); # Note: mwPassword is discouraged. Use the credential system instead. -my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword"); -my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain"); +my $wiki_passwd = run_git_quoted(["config", "--get", "remote.${remotename}.mwPassword"]); +my $wiki_domain = run_git_quoted(["config", "--get", "remote.${remotename}.mwDomain"]); chomp($wiki_login); chomp($wiki_passwd); chomp($wiki_domain); # Import only last revisions (both for clone and fetch) -my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow"); +my $shallow_import = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.shallow"]); chomp($shallow_import); $shallow_import = ($shallow_import eq 'true'); @@ -97,9 +97,9 @@ $shallow_import = ($shallow_import eq 'true'); # Possible values: # - by_rev: perform one query per new revision on the remote wiki # - by_page: query each tracked page for new revision -my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy"); +my $fetch_strategy = run_git_quoted(["config", "--get", "remote.${remotename}.fetchStrategy"]); if (!$fetch_strategy) { - $fetch_strategy = run_git('config --get mediawiki.fetchStrategy'); + $fetch_strategy = run_git_quoted(["config", "--get", "mediawiki.fetchStrategy"]); } chomp($fetch_strategy); if (!$fetch_strategy) { @@ -123,9 +123,9 @@ my %basetimestamps; # will get the history with information lost). If the import is # deterministic, this means everybody gets the same sha1 for each # MediaWiki revision. -my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush"); +my $dumb_push = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.dumbPush"]); if (!$dumb_push) { - $dumb_push = run_git('config --get --bool mediawiki.dumbPush'); + $dumb_push = run_git_quoted(["config", "--get", "--bool", "mediawiki.dumbPush"]); } chomp($dumb_push); $dumb_push = ($dumb_push eq 'true'); @@ -369,12 +369,14 @@ sub get_mw_pages { return %pages; } -# usage: $out = run_git("command args"); -# $out = run_git("command args", "raw"); # don't interpret output as UTF-8. 
-sub run_git { +# usage: $out = run_git_quoted(["command", "args", ...]); +# $out = run_git_quoted(["command", "args", ...], "raw"); # don't interpret output as UTF-8. +# $out = run_git_quoted_nostderr(["command", "args", ...]); # discard stderr +# $out = run_git_quoted_nostderr(["command", "args", ...], "raw"); # ditto but raw instead of UTF-8 as above +sub _run_git { my $args = shift; my $encoding = (shift || 'encoding(UTF-8)'); - open(my $git, "-|:${encoding}", "git ${args}") + open(my $git, "-|:${encoding}", @$args) or die "Unable to fork: $!\n"; my $res = do { local $/ = undef; @@ -385,6 +387,13 @@ sub run_git { return $res; } +sub run_git_quoted { + _run_git(["git", @{$_[0]}], $_[1]); +} + +sub run_git_quoted_nostderr { + _run_git(['sh', '-c', 'git "$@" 2>/dev/null', '--', @{$_[0]}], $_[1]); +} sub get_all_mediafiles { my $pages = shift; @@ -511,8 +520,9 @@ sub download_mw_mediafile { } sub get_last_local_revision { - # Get note regarding last mediawiki revision - my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null"); + # Get note regarding last mediawiki revision. + my $note = run_git_quoted_nostderr(["notes", "--ref=${remotename}/mediawiki", + "show", "refs/mediawiki/${remotename}/master"]); my @note_info = split(/ /, $note); my $lastrevision_number; @@ -807,7 +817,10 @@ sub get_more_refs { sub mw_import { # multiple import commands can follow each other. my @refs = (shift, get_more_refs('import')); + my $processedRefs; foreach my $ref (@refs) { + next if $processedRefs->{$ref}; # skip duplicates: "import refs/heads/master" being issued twice; TODO: why? + $processedRefs->{$ref} = 1; mw_import_ref($ref); } print {*STDOUT} "done\n"; @@ -970,7 +983,7 @@ sub mw_import_revids { } sub error_non_fast_forward { - my $advice = run_git('config --bool advice.pushNonFastForward'); + my $advice = run_git_quoted(["config", "--bool", "advice.pushNonFastForward"]); chomp($advice); if ($advice ne 'false') { # Native git-push would show this after the summary. 
@@ -1014,7 +1027,7 @@ sub mw_upload_file { } } else { # Don't let perl try to interpret file content as UTF-8 => use "raw" - my $content = run_git("cat-file blob ${new_sha1}", 'raw'); + my $content = run_git_quoted(["cat-file", "blob", $new_sha1], 'raw'); if ($content ne EMPTY) { $mediawiki = connect_maybe($mediawiki, $remotename, $url); $mediawiki->{config}->{upload_url} = @@ -1084,7 +1097,7 @@ sub mw_push_file { # with this content instead: $file_content = DELETED_CONTENT; } else { - $file_content = run_git("cat-file blob ${new_sha1}"); + $file_content = run_git_quoted(["cat-file", "blob", $new_sha1]); } $mediawiki = connect_maybe($mediawiki, $remotename, $url); @@ -1174,10 +1187,10 @@ sub mw_push_revision { my $mw_revision = $last_remote_revid; # Get sha1 of commit pointed by local HEAD - my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null"); + my $HEAD_sha1 = run_git_quoted_nostderr(["rev-parse", $local]); chomp($HEAD_sha1); # Get sha1 of commit pointed by remotes/$remotename/master - my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null"); + my $remoteorigin_sha1 = run_git_quoted_nostderr(["rev-parse", "refs/remotes/${remotename}/master"]); chomp($remoteorigin_sha1); if ($last_local_revid > 0 && @@ -1197,7 +1210,7 @@ sub mw_push_revision { my $parsed_sha1 = $remoteorigin_sha1; # Find a path from last MediaWiki commit to pushed commit print {*STDERR} "Computing path from local to remote ...\n"; - my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}")); + my @local_ancestry = split(/\n/, run_git_quoted(["rev-list", "--boundary", "--parents", $local, "^${parsed_sha1}"])); my %local_ancestry; foreach my $line (@local_ancestry) { if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) { @@ -1221,7 +1234,7 @@ sub mw_push_revision { # No remote mediawiki revision. Export the whole # history (linearized with --first-parent) print {*STDERR} "Warning: no common ancestor, pushing complete history\n"; - my $history = run_git("rev-list --first-parent --children ${local}"); + my $history = run_git_quoted(["rev-list", "--first-parent", "--children", $local]); my @history = split(/\n/, $history); @history = @history[1..$#history]; foreach my $line (reverse @history) { @@ -1233,12 +1246,12 @@ sub mw_push_revision { foreach my $commit_info_split (@commit_pairs) { my $sha1_child = @{$commit_info_split}[0]; my $sha1_commit = @{$commit_info_split}[1]; - my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}"); + my $diff_infos = run_git_quoted(["diff-tree", "-r", "--raw", "-z", $sha1_child, $sha1_commit]); # TODO: we could detect rename, and encode them with a #redirect on the wiki. # TODO: for now, it's just a delete+add my @diff_info_list = split(/\0/, $diff_infos); # Keep the subject line of the commit message as mediawiki comment for the revision - my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit})); + my $commit_msg = run_git_quoted(["log", "--no-walk", '--format="%s"', $sha1_commit]); chomp($commit_msg); # Push every blob while (@diff_info_list) { @@ -1263,7 +1276,10 @@ sub mw_push_revision { } } if (!$dumb_push) { - run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit})); + run_git_quoted(["notes", "--ref=${remotename}/mediawiki", + "add", "-f", "-m", + "mediawiki_revision: ${mw_revision}", + $sha1_commit]); } } @@ -1304,7 +1320,7 @@ sub get_mw_namespace_id { # already cached. 
Namespaces are stored in form: # "Name_of_namespace:Id_namespace", ex.: "File:6". my @temp = split(/\n/, - run_git("config --get-all remote.${remotename}.namespaceCache")); + run_git_quoted(["config", "--get-all", "remote.${remotename}.namespaceCache"])); chomp(@temp); foreach my $ns (@temp) { my ($n, $id) = split(/:/, $ns); @@ -1358,7 +1374,7 @@ sub get_mw_namespace_id { # Store explicitly requested namespaces on disk if (!exists $cached_mw_namespace_id{$name}) { - run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}")); + run_git_quoted(["config", "--add", "remote.${remotename}.namespaceCache", "${name}:${store_id}"]); $cached_mw_namespace_id{$name} = 1; } return $id; diff --git a/contrib/mw-to-git/git-remote-mediawiki.txt b/contrib/mw-to-git/git-remote-mediawiki.txt index 23b7ef9f62..5da825f61e 100644 --- a/contrib/mw-to-git/git-remote-mediawiki.txt +++ b/contrib/mw-to-git/git-remote-mediawiki.txt @@ -4,4 +4,4 @@ objects from mediawiki just as one would do with a classic git repository thanks to remote-helpers. For more information, visit the wiki at -https://github.com/moy/Git-Mediawiki/wiki +https://github.com/Git-Mediawiki/Git-Mediawiki diff --git a/contrib/mw-to-git/t/.gitignore b/contrib/mw-to-git/t/.gitignore index a7a40b4964..2b8dc30c6d 100644 --- a/contrib/mw-to-git/t/.gitignore +++ b/contrib/mw-to-git/t/.gitignore @@ -1,4 +1,4 @@ WEB/ -wiki/ +mediawiki/ trash directory.t*/ test-results/ diff --git a/contrib/mw-to-git/t/README b/contrib/mw-to-git/t/README index 2ee34be7e4..72c4889db7 100644 --- a/contrib/mw-to-git/t/README +++ b/contrib/mw-to-git/t/README @@ -14,11 +14,11 @@ install the following packages (Debian/Ubuntu names, may need to be adapted for another distribution): * lighttpd -* php5 -* php5-cgi -* php5-cli -* php5-curl -* php5-sqlite +* php +* php-cgi +* php-cli +* php-curl +* php-sqlite Principles and Technical Choices -------------------------------- diff --git a/contrib/mw-to-git/t/install-wiki/.gitignore b/contrib/mw-to-git/t/install-wiki/.gitignore deleted file mode 100644 index b5a2a4408c..0000000000 --- a/contrib/mw-to-git/t/install-wiki/.gitignore +++ /dev/null @@ -1 +0,0 @@ -wikidb.sqlite diff --git a/contrib/mw-to-git/t/install-wiki/LocalSettings.php b/contrib/mw-to-git/t/install-wiki/LocalSettings.php deleted file mode 100644 index 745e47e881..0000000000 --- a/contrib/mw-to-git/t/install-wiki/LocalSettings.php +++ /dev/null @@ -1,129 +0,0 @@ -<?php -# This file was automatically generated by the MediaWiki 1.19.0 -# installer. If you make manual changes, please keep track in case you -# need to recreate them later. -# -# See includes/DefaultSettings.php for all configurable settings -# and their default values, but don't forget to make changes in _this_ -# file, not there. -# -# Further documentation for configuration settings may be found at: -# http://www.mediawiki.org/wiki/Manual:Configuration_settings - -# Protect against web entry -if ( !defined( 'MEDIAWIKI' ) ) { - exit; -} - -## Uncomment this to disable output compression -# $wgDisableOutputCompression = true; - -$wgSitename = "Git-MediaWiki-Test"; -$wgMetaNamespace = "Git-MediaWiki-Test"; - -## The URL base path to the directory containing the wiki; -## defaults for all runtime URL paths are based off of this. 
-## For more information on customizing the URLs please see: -## http://www.mediawiki.org/wiki/Manual:Short_URL -$wgScriptPath = "@WG_SCRIPT_PATH@"; -$wgScriptExtension = ".php"; - -## The protocol and server name to use in fully-qualified URLs -$wgServer = "@WG_SERVER@"; - -## The relative URL path to the skins directory -$wgStylePath = "$wgScriptPath/skins"; - -## The relative URL path to the logo. Make sure you change this from the default, -## or else you'll overwrite your logo when you upgrade! -$wgLogo = "$wgStylePath/common/images/wiki.png"; - -## UPO means: this is also a user preference option - -$wgEnableEmail = true; -$wgEnableUserEmail = true; # UPO - -$wgEmergencyContact = "apache@localhost"; -$wgPasswordSender = "apache@localhost"; - -$wgEnotifUserTalk = false; # UPO -$wgEnotifWatchlist = false; # UPO -$wgEmailAuthentication = true; - -## Database settings -$wgDBtype = "sqlite"; -$wgDBserver = ""; -$wgDBname = "@WG_SQLITE_DATAFILE@"; -$wgDBuser = ""; -$wgDBpassword = ""; - -# SQLite-specific settings -$wgSQLiteDataDir = "@WG_SQLITE_DATADIR@"; - - -## Shared memory settings -$wgMainCacheType = CACHE_NONE; -$wgMemCachedServers = array(); - -## To enable image uploads, make sure the 'images' directory -## is writable, then set this to true: -$wgEnableUploads = true; -$wgUseImageMagick = true; -$wgImageMagickConvertCommand ="@CONVERT@"; -$wgFileExtensions[] = 'txt'; - -# InstantCommons allows wiki to use images from http://commons.wikimedia.org -$wgUseInstantCommons = false; - -## If you use ImageMagick (or any other shell command) on a -## Linux server, this will need to be set to the name of an -## available UTF-8 locale -$wgShellLocale = "en_US.utf8"; - -## If you want to use image uploads under safe mode, -## create the directories images/archive, images/thumb and -## images/temp, and make them all writable. Then uncomment -## this, if it's not already uncommented: -#$wgHashedUploadDirectory = false; - -## Set $wgCacheDirectory to a writable directory on the web server -## to make your wiki go slightly faster. The directory should not -## be publicly accessible from the web. -#$wgCacheDirectory = "$IP/cache"; - -# Site language code, should be one of the list in ./languages/Names.php -$wgLanguageCode = "en"; - -$wgSecretKey = "1c912bfe3519fb70f5dc523ecc698111cd43d81a11c585b3eefb28f29c2699b7"; -#$wgSecretKey = "@SECRETKEY@"; - - -# Site upgrade key. Must be set to a string (default provided) to turn on the -# web installer while LocalSettings.php is in place -$wgUpgradeKey = "ddae7dc87cd0a645"; - -## Default skin: you can change the default skin. Use the internal symbolic -## names, ie 'standard', 'nostalgia', 'cologneblue', 'monobook', 'vector': -$wgDefaultSkin = "vector"; - -## For attaching licensing metadata to pages, and displaying an -## appropriate copyright notice / icon. GNU Free Documentation -## License and Creative Commons licenses are supported so far. -$wgRightsPage = ""; # Set to the title of a wiki page that describes your license/copyright -$wgRightsUrl = ""; -$wgRightsText = ""; -$wgRightsIcon = ""; - -# Path to the GNU diff3 utility. Used for conflict resolution. -$wgDiff3 = "/usr/bin/diff3"; - -# Query string length limit for ResourceLoader. You should only set this if -# your web server has a query string length limit (then set it to that limit), -# or if you have suhosin.get.max_value_length set in php.ini (then set it to -# that value) -$wgResourceLoaderMaxQueryLength = -1; - - - -# End of automatically generated settings. 
-# Add more configuration options below. diff --git a/contrib/mw-to-git/t/install-wiki/db_install.php b/contrib/mw-to-git/t/install-wiki/db_install.php deleted file mode 100644 index b033849800..0000000000 --- a/contrib/mw-to-git/t/install-wiki/db_install.php +++ /dev/null @@ -1,120 +0,0 @@ -<?php -/** - * This script generates a SQLite database for a MediaWiki version 1.19.0 - * You must specify the login of the admin (argument 1) and its - * password (argument 2) and the folder where the database file - * is located (absolute path in argument 3). - * It is used by the script install-wiki.sh in order to make easy the - * installation of a MediaWiki. - * - * In order to generate a SQLite database file, MediaWiki ask the user - * to submit some forms in its web browser. This script simulates this - * behavior though the functions <get> and <submit> - * - */ -$argc = $_SERVER['argc']; -$argv = $_SERVER['argv']; - -$login = $argv[2]; -$pass = $argv[3]; -$tmp = $argv[4]; -$port = $argv[5]; - -$url = 'http://localhost:'.$port.'/wiki/mw-config/index.php'; -$db_dir = urlencode($tmp); -$tmp_cookie = tempnam($tmp, "COOKIE_"); -/* - * Fetches a page with cURL. - */ -function get($page_name = "") { - $curl = curl_init(); - $page_name_add = ""; - if ($page_name != "") { - $page_name_add = '?page='.$page_name; - } - $url = $GLOBALS['url'].$page_name_add; - $tmp_cookie = $GLOBALS['tmp_cookie']; - curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie); - curl_setopt($curl, CURLOPT_RETURNTRANSFER, true); - curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true); - curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie); - curl_setopt($curl, CURLOPT_HEADER, true); - curl_setopt($curl, CURLOPT_URL, $url); - - $page = curl_exec($curl); - if (!$page) { - die("Could not get page: $url\n"); - } - curl_close($curl); - return $page; -} - -/* - * Submits a form with cURL. - */ -function submit($page_name, $option = "") { - $curl = curl_init(); - $datapost = 'submit-continue=Continue+%E2%86%92'; - if ($option != "") { - $datapost = $option.'&'.$datapost; - } - $url = $GLOBALS['url'].'?page='.$page_name; - $tmp_cookie = $GLOBALS['tmp_cookie']; - curl_setopt($curl, CURLOPT_URL, $url); - curl_setopt($curl, CURLOPT_POST, true); - curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true); - curl_setopt($curl, CURLOPT_POSTFIELDS, $datapost); - curl_setopt($curl, CURLOPT_RETURNTRANSFER, true); - curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie); - curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie); - - $page = curl_exec($curl); - if (!$page) { - die("Could not get page: $url\n"); - } - curl_close($curl); - return "$page"; -} - -/* - * Here starts this script: simulates the behavior of the user - * submitting forms to generates the database file. - * Note this simulation was made for the MediaWiki version 1.19.0, - * we can't assume it works with other versions. 
- * - */ - -$page = get(); -if (!preg_match('/input type="hidden" value="([0-9]+)" name="LanguageRequestTime"/', - $page, $matches)) { - echo "Unexpected content for page downloaded:\n"; - echo "$page"; - die; -}; -$timestamp = $matches[1]; -$language = "LanguageRequestTime=$timestamp&uselang=en&ContLang=en"; -$page = submit('Language', $language); - -submit('Welcome'); - -$db_config = 'DBType=sqlite'; -$db_config = $db_config.'&sqlite_wgSQLiteDataDir='.$db_dir; -$db_config = $db_config.'&sqlite_wgDBname='.$argv[1]; -submit('DBConnect', $db_config); - -$wiki_config = 'config_wgSitename=TEST'; -$wiki_config = $wiki_config.'&config__NamespaceType=site-name'; -$wiki_config = $wiki_config.'&config_wgMetaNamespace=MyWiki'; -$wiki_config = $wiki_config.'&config__AdminName='.$login; - -$wiki_config = $wiki_config.'&config__AdminPassword='.$pass; -$wiki_config = $wiki_config.'&config__AdminPassword2='.$pass; - -$wiki_config = $wiki_config.'&wiki__configEmail=email%40email.org'; -$wiki_config = $wiki_config.'&config__SkipOptional=skip'; -submit('Name', $wiki_config); -submit('Install'); -submit('Install'); - -unlink($tmp_cookie); -?> diff --git a/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh index 9106833578..4c39bda7bf 100755 --- a/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh +++ b/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh @@ -28,7 +28,7 @@ test_expect_success 'Git clone creates the expected git log with one file' ' git log --format=%s HEAD^..HEAD >log.tmp ) && echo "this must be the same" >msg.tmp && - diff -b mw_dir_1/log.tmp msg.tmp + test_cmp msg.tmp mw_dir_1/log.tmp ' @@ -50,8 +50,8 @@ test_expect_success 'Git clone creates the expected git log with multiple files' echo "this must be the same" >>msgDaddy.tmp && echo "identical too" >msgDj.tmp && echo "identical" >>msgDj.tmp && - diff -b mw_dir_2/logDaddy.tmp msgDaddy.tmp && - diff -b mw_dir_2/logDj.tmp msgDj.tmp + test_cmp msgDaddy.tmp mw_dir_2/logDaddy.tmp && + test_cmp msgDj.tmp mw_dir_2/logDj.tmp ' @@ -135,7 +135,7 @@ test_expect_success 'Git clone works with one specific page cloned ' ' cd mw_dir_8 && echo "this log must stay" >msg.tmp && git log --format=%s >log.tmp && - diff -b msg.tmp log.tmp + test_cmp msg.tmp log.tmp ) && wiki_check_content mw_dir_8/Namnam.mw Namnam ' diff --git a/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh index 3ff3a09567..6187ec67fa 100755 --- a/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh +++ b/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh @@ -27,12 +27,12 @@ test_git_reimport () { # Don't bother with permissions, be administrator by default test_expect_success 'setup config' ' - git config --global remote.origin.mwLogin WikiAdmin && - git config --global remote.origin.mwPassword AdminPass && + git config --global remote.origin.mwLogin "$WIKI_ADMIN" && + git config --global remote.origin.mwPassword "$WIKI_PASSW" && test_might_fail git config --global --unset remote.origin.mediaImport ' -test_expect_success 'git push can upload media (File:) files' ' +test_expect_failure 'git push can upload media (File:) files' ' wiki_reset && git clone mediawiki::'"$WIKI_URL"' mw_dir && ( @@ -48,13 +48,14 @@ test_expect_success 'git push can upload media (File:) files' ' ) ' -test_expect_success 'git clone works on previously created wiki with media files' ' +test_expect_failure 'git clone works on previously created wiki with media files' ' test_when_finished "rm -rf mw_dir mw_dir_clone" && git clone 
-c remote.origin.mediaimport=true \ mediawiki::'"$WIKI_URL"' mw_dir_clone && test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt && (cd mw_dir_clone && git checkout HEAD^) && (cd mw_dir && git checkout HEAD^) && + test_path_is_file mw_dir_clone/Foo.txt && test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt ' diff --git a/contrib/mw-to-git/t/test-gitmw-lib.sh b/contrib/mw-to-git/t/test-gitmw-lib.sh index 3948a00282..64e46c1671 100755 --- a/contrib/mw-to-git/t/test-gitmw-lib.sh +++ b/contrib/mw-to-git/t/test-gitmw-lib.sh @@ -13,7 +13,8 @@ . ./test.config -WIKI_URL=http://"$SERVER_ADDR:$PORT/$WIKI_DIR_NAME" +WIKI_BASE_URL=http://$SERVER_ADDR:$PORT +WIKI_URL=$WIKI_BASE_URL/$WIKI_DIR_NAME CURR_DIR=$(pwd) TEST_OUTPUT_DIRECTORY=$(pwd) TEST_DIRECTORY="$CURR_DIR"/../../../t @@ -65,7 +66,7 @@ test_check_precond () { GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd "../.." && pwd) PATH="$GIT_EXEC_PATH"'/bin-wrapper:'"$PATH" - if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ]; + if ! test -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" then skip_all='skipping gateway git-mw tests, no mediawiki found' test_done @@ -291,27 +292,59 @@ stop_lighttpd () { test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid") } -# Create the SQLite database of the MediaWiki. If the database file already -# exists, it will be deleted. -# This script should be runned from the directory where $FILES_FOLDER is -# located. -create_db () { - rm -f "$TMP/$DB_FILE" - - echo "Generating the SQLite database file. It can take some time ..." - # Run the php script to generate the SQLite database file - # with cURL calls. - php "$FILES_FOLDER/$DB_INSTALL_SCRIPT" $(basename "$DB_FILE" .sqlite) \ - "$WIKI_ADMIN" "$WIKI_PASSW" "$TMP" "$PORT" - - if [ ! -f "$TMP/$DB_FILE" ] ; then - error "Can't create database file $TMP/$DB_FILE. Try to run ./install-wiki.sh delete first." +wiki_delete_db () { + rm -rf \ + "$FILES_FOLDER_DB"/* || error "Couldn't delete $FILES_FOLDER_DB/" +} + +wiki_delete_db_backup () { + rm -rf \ + "$FILES_FOLDER_POST_INSTALL_DB"/* || error "Couldn't delete $FILES_FOLDER_POST_INSTALL_DB/" +} + +# Install MediaWiki using its install.php script. If the database file +# already exists, it will be deleted. +install_mediawiki () { + + localsettings="$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php" + if test -f "$localsettings" + then + error "We already installed the wiki, since $localsettings exists" \ + "perhaps you wanted to run 'delete' first?" fi - # Copy the generated database file into the directory the - # user indicated. - cp "$TMP/$DB_FILE" "$FILES_FOLDER" || - error "Unable to copy $TMP/$DB_FILE to $FILES_FOLDER" + wiki_delete_db + wiki_delete_db_backup + mkdir \ + "$FILES_FOLDER_DB/" \ + "$FILES_FOLDER_POST_INSTALL_DB/" + + install_script="$WIKI_DIR_INST/$WIKI_DIR_NAME/maintenance/install.php" + echo "Installing MediaWiki using $install_script. This may take some time ..." + + php "$WIKI_DIR_INST/$WIKI_DIR_NAME/maintenance/install.php" \ + --server $WIKI_BASE_URL \ + --scriptpath /wiki \ + --lang en \ + --dbtype sqlite \ + --dbpath $PWD/$FILES_FOLDER_DB/ \ + --pass "$WIKI_PASSW" \ + Git-MediaWiki-Test \ + "$WIKI_ADMIN" || + error "Couldn't run $install_script, see errors above. Try to run ./install-wiki.sh delete first." 
+ cat <<-'EOF' >>$localsettings +# Custom settings added by test-gitmw-lib.sh +# +# Uploading text files is needed for +# t9363-mw-to-git-export-import.sh +$wgEnableUploads = true; +$wgFileExtensions[] = 'txt'; +EOF + + # Copy the initially generated database file into our backup + # folder + cp -R "$FILES_FOLDER_DB/"* "$FILES_FOLDER_POST_INSTALL_DB/" || + error "Unable to copy $FILES_FOLDER_DB/* to $FILES_FOLDER_POST_INSTALL_DB/*" } # Install a wiki in your web server directory. @@ -320,30 +353,33 @@ wiki_install () { start_lighttpd fi - SERVER_ADDR=$SERVER_ADDR:$PORT # In this part, we change directory to $TMP in order to download, # unpack and copy the files of MediaWiki ( mkdir -p "$WIKI_DIR_INST/$WIKI_DIR_NAME" - if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ] ; then + if ! test -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" + then error "Folder $WIKI_DIR_INST/$WIKI_DIR_NAME doesn't exist. Please create it and launch the script again." fi - # Fetch MediaWiki's archive if not already present in the TMP directory + # Fetch MediaWiki's archive if not already present in the + # download directory + mkdir -p "$FILES_FOLDER_DOWNLOAD" MW_FILENAME="mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz" - cd "$TMP" - if [ ! -f $MW_FILENAME ] ; then + cd "$FILES_FOLDER_DOWNLOAD" + if ! test -f $MW_FILENAME + then echo "Downloading $MW_VERSION_MAJOR.$MW_VERSION_MINOR sources ..." wget "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/$MW_FILENAME" || error "Unable to download "\ "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/"\ "$MW_FILENAME. "\ "Please fix your connection and launch the script again." - echo "$MW_FILENAME downloaded in $(pwd). "\ - "You can delete it later if you want." + echo "$MW_FILENAME downloaded in $(pwd)/;" \ + "you can delete it later if you want." else - echo "Reusing existing $MW_FILENAME downloaded in $(pwd)." + echo "Reusing existing $MW_FILENAME downloaded in $(pwd)/" fi archive_abs_path=$(pwd)/$MW_FILENAME cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" || @@ -352,48 +388,12 @@ wiki_install () { error "Unable to extract WikiMedia's files from $archive_abs_path to "\ "$WIKI_DIR_INST/$WIKI_DIR_NAME" ) || exit 1 + echo Extracted in "$WIKI_DIR_INST/$WIKI_DIR_NAME" - create_db - - # Copy the generic LocalSettings.php in the web server's directory - # And modify parameters according to the ones set at the top - # of this script. - # Note that LocalSettings.php is never modified. - if [ ! -f "$FILES_FOLDER/LocalSettings.php" ] ; then - error "Can't find $FILES_FOLDER/LocalSettings.php " \ - "in the current folder. "\ - "Please run the script inside its folder." 
- fi - cp "$FILES_FOLDER/LocalSettings.php" \ - "$FILES_FOLDER/LocalSettings-tmp.php" || - error "Unable to copy $FILES_FOLDER/LocalSettings.php " \ - "to $FILES_FOLDER/LocalSettings-tmp.php" - - # Parse and set the LocalSettings file of the user according to the - # CONFIGURATION VARIABLES section at the beginning of this script - file_swap="$FILES_FOLDER/LocalSettings-swap.php" - sed "s,@WG_SCRIPT_PATH@,/$WIKI_DIR_NAME," \ - "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap" - mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php" - sed "s,@WG_SERVER@,http://$SERVER_ADDR," \ - "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap" - mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php" - sed "s,@WG_SQLITE_DATADIR@,$TMP," \ - "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap" - mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php" - sed "s,@WG_SQLITE_DATAFILE@,$( basename $DB_FILE .sqlite)," \ - "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap" - mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php" - - mv "$FILES_FOLDER/LocalSettings-tmp.php" \ - "$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php" || - error "Unable to move $FILES_FOLDER/LocalSettings-tmp.php" \ - "in $WIKI_DIR_INST/$WIKI_DIR_NAME" - echo "File $FILES_FOLDER/LocalSettings.php is set in" \ - " $WIKI_DIR_INST/$WIKI_DIR_NAME" + install_mediawiki echo "Your wiki has been installed. You can check it at - http://$SERVER_ADDR/$WIKI_DIR_NAME" + $WIKI_URL" } # Reset the database of the wiki and the password of the admin @@ -401,12 +401,18 @@ wiki_install () { # Warning: This function must be called only in a subdirectory of t/ directory wiki_reset () { # Copy initial database of the wiki - if [ ! -f "../$FILES_FOLDER/$DB_FILE" ] ; then - error "Can't find ../$FILES_FOLDER/$DB_FILE in the current folder." + if ! test -d "../$FILES_FOLDER_DB" + then + error "No wiki database at ../$FILES_FOLDER_DB, not installed yet?" + fi + if ! test -d "../$FILES_FOLDER_POST_INSTALL_DB" + then + error "No wiki backup database at ../$FILES_FOLDER_POST_INSTALL_DB, failed installation?" fi - cp "../$FILES_FOLDER/$DB_FILE" "$TMP" || - error "Can't copy ../$FILES_FOLDER/$DB_FILE in $TMP" - echo "File $FILES_FOLDER/$DB_FILE is set in $TMP" + wiki_delete_db + cp -R "../$FILES_FOLDER_POST_INSTALL_DB/"* "../$FILES_FOLDER_DB/" || + error "Can't copy ../$FILES_FOLDER_POST_INSTALL_DB/* to ../$FILES_FOLDER_DB/*" + echo "File $FILES_FOLDER_DB/* has been reset" } # Delete the wiki created in the web server's directory and all its content @@ -420,13 +426,7 @@ wiki_delete () { rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" || error "Wiki's directory $WIKI_DIR_INST/" \ "$WIKI_DIR_NAME could not be deleted" - # Delete the wiki's SQLite database. - rm -f "$TMP/$DB_FILE" || - error "Database $TMP/$DB_FILE could not be deleted." fi - - # Delete the wiki's SQLite database - rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted." 
- rm -f "$FILES_FOLDER/$DB_FILE" - rm -rf "$TMP/mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz" + wiki_delete_db + wiki_delete_db_backup } diff --git a/contrib/mw-to-git/t/test-gitmw.pl b/contrib/mw-to-git/t/test-gitmw.pl index 0ff76259fa..c5d687f078 100755 --- a/contrib/mw-to-git/t/test-gitmw.pl +++ b/contrib/mw-to-git/t/test-gitmw.pl @@ -24,9 +24,7 @@ use MediaWiki::API; use Getopt::Long; -use encoding 'utf8'; use DateTime::Format::ISO8601; -use open ':encoding(utf8)'; use constant SLASH_REPLACEMENT => "%2F"; #Parsing of the config file @@ -87,7 +85,7 @@ sub wiki_getpage { # Replace spaces by underscore in the page name $pagename =~ s/ /_/g; $pagename =~ s/\//%2F/g; - open(my $file, ">$destdir/$pagename.mw"); + open(my $file, ">:encoding(UTF-8)", "$destdir/$pagename.mw"); print $file "$content"; close ($file); @@ -172,7 +170,7 @@ sub wiki_getallpagename { cmlimit => 500 }, ) || die $mw->{error}->{code}.": ".$mw->{error}->{details}; - open(my $file, ">all.txt"); + open(my $file, ">:encoding(UTF-8)", "all.txt"); foreach my $page (@{$mw_pages}) { print $file "$page->{title}\n"; } @@ -185,7 +183,7 @@ sub wiki_getallpagename { aplimit => 500, }) || die $mw->{error}->{code}.": ".$mw->{error}->{details}; - open(my $file, ">all.txt"); + open(my $file, ">:encoding(UTF-8)", "all.txt"); foreach my $page (@{$mw_pages}) { print $file "$page->{title}\n"; } @@ -214,12 +212,12 @@ my $fct_to_call = shift; wiki_login($wiki_admin, $wiki_admin_pass); -my %functions_to_call = qw( - upload_file wiki_upload_file - get_page wiki_getpage - delete_page wiki_delete_page - edit_page wiki_editpage - getallpagename wiki_getallpagename +my %functions_to_call = ( + upload_file => \&wiki_upload_file, + get_page => \&wiki_getpage, + delete_page => \&wiki_delete_page, + edit_page => \&wiki_editpage, + getallpagename => \&wiki_getallpagename, ); die "$0 ERROR: wrong argument" unless exists $functions_to_call{$fct_to_call}; -&{$functions_to_call{$fct_to_call}}(@ARGV); +$functions_to_call{$fct_to_call}->(map { utf8::decode($_); $_ } @ARGV); diff --git a/contrib/mw-to-git/t/test.config b/contrib/mw-to-git/t/test.config index 5ba0684162..ed10b3e4a4 100644 --- a/contrib/mw-to-git/t/test.config +++ b/contrib/mw-to-git/t/test.config @@ -3,15 +3,11 @@ WIKI_DIR_NAME=wiki # Login and password of the wiki's admin WIKI_ADMIN=WikiAdmin -WIKI_PASSW=AdminPass +WIKI_PASSW=AdminPass1 # Address of the web server SERVER_ADDR=localhost -# SQLite database of the wiki, named DB_FILE, is located in TMP -TMP=/tmp -DB_FILE=wikidb.sqlite - # If LIGHTTPD is not set to true, the script will use the default # web server running in WIKI_DIR_INST. WIKI_DIR_INST=/var/www @@ -28,10 +24,17 @@ WEB=WEB WEB_TMP=$WEB/tmp WEB_WWW=$WEB/www +# Where our configuration for the wiki is located +FILES_FOLDER=mediawiki +FILES_FOLDER_DOWNLOAD=$FILES_FOLDER/download +FILES_FOLDER_DB=$FILES_FOLDER/db +FILES_FOLDER_POST_INSTALL_DB=$FILES_FOLDER/post-install-db + # The variables below are used by the script to install a wiki. # You should not modify these unless you are modifying the script itself. -# tested versions: 1.19.X -> 1.21.1 -MW_VERSION_MAJOR=1.21 -MW_VERSION_MINOR=1 -FILES_FOLDER=install-wiki -DB_INSTALL_SCRIPT=db_install.php +# tested versions: 1.19.X -> 1.21.1 -> 1.34.2 +# +# See https://www.mediawiki.org/wiki/Download for what the latest +# version is. 
+MW_VERSION_MAJOR=1.34 +MW_VERSION_MINOR=2 diff --git a/contrib/subtree/Makefile b/contrib/subtree/Makefile index 6906aae441..6fa7496bfd 100644 --- a/contrib/subtree/Makefile +++ b/contrib/subtree/Makefile @@ -25,14 +25,16 @@ ASCIIDOC_HTML = xhtml11 ASCIIDOC_DOCBOOK = docbook ASCIIDOC_EXTRA = XMLTO = xmlto +XMLTO_EXTRA = ifdef USE_ASCIIDOCTOR ASCIIDOC = asciidoctor ASCIIDOC_CONF = ASCIIDOC_HTML = xhtml5 -ASCIIDOC_DOCBOOK = docbook45 +ASCIIDOC_DOCBOOK = docbook ASCIIDOC_EXTRA += -I../../Documentation -rasciidoctor-extensions ASCIIDOC_EXTRA += -alitdd='&\#x2d;&\#x2d;' +XMLTO_EXTRA += --skip-validation endif ifndef SHELL_PATH @@ -78,7 +80,7 @@ install-html: $(GIT_SUBTREE_HTML) $(INSTALL) -m 644 $^ $(DESTDIR)$(htmldir) $(GIT_SUBTREE_DOC): $(GIT_SUBTREE_XML) - $(XMLTO) -m $(MANPAGE_XSL) man $^ + $(XMLTO) -m $(MANPAGE_XSL) $(XMLTO_EXTRA) man $^ $(GIT_SUBTREE_XML): $(GIT_SUBTREE_TXT) $(ASCIIDOC) -b $(ASCIIDOC_DOCBOOK) -d manpage $(ASCIIDOC_CONF) \ diff --git a/contrib/subtree/git-subtree.txt b/contrib/subtree/git-subtree.txt index 352deda69d..0db02fe3c0 100644 --- a/contrib/subtree/git-subtree.txt +++ b/contrib/subtree/git-subtree.txt @@ -139,12 +139,12 @@ OPTIONS -m <message>:: --message=<message>:: - This option is only valid for add, merge and pull (unsure). + This option is only valid for add, merge, pull, and split --rejoin. Specify <message> as the commit message for the merge commit. -OPTIONS FOR add, merge, push, pull ----------------------------------- +OPTIONS FOR add, merge, and pull +-------------------------------- --squash:: This option is only valid for add, merge, and pull commands. diff --git a/contrib/svn-fe/.gitignore b/contrib/svn-fe/.gitignore deleted file mode 100644 index 02a7791585..0000000000 --- a/contrib/svn-fe/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -/*.xml -/*.1 -/*.html -/svn-fe diff --git a/contrib/svn-fe/Makefile b/contrib/svn-fe/Makefile deleted file mode 100644 index e8651aaf4b..0000000000 --- a/contrib/svn-fe/Makefile +++ /dev/null @@ -1,105 +0,0 @@ -all:: svn-fe$X - -CC = cc -RM = rm -f -MV = mv - -CFLAGS = -g -O2 -Wall -LDFLAGS = -EXTLIBS = -lz - -include ../../config.mak.uname --include ../../config.mak.autogen --include ../../config.mak - -ifeq ($(uname_S),Darwin) - ifndef NO_FINK - ifeq ($(shell test -d /sw/lib && echo y),y) - CFLAGS += -I/sw/include - LDFLAGS += -L/sw/lib - endif - endif - ifndef NO_DARWIN_PORTS - ifeq ($(shell test -d /opt/local/lib && echo y),y) - CFLAGS += -I/opt/local/include - LDFLAGS += -L/opt/local/lib - endif - endif -endif - -ifndef NO_OPENSSL - EXTLIBS += -lssl - ifdef NEEDS_CRYPTO_WITH_SSL - EXTLIBS += -lcrypto - endif -endif - -ifndef NO_PTHREADS - CFLAGS += $(PTHREADS_CFLAGS) - EXTLIBS += $(PTHREAD_LIBS) -endif - -ifdef HAVE_CLOCK_GETTIME - CFLAGS += -DHAVE_CLOCK_GETTIME - EXTLIBS += -lrt -endif - -ifdef NEEDS_LIBICONV - EXTLIBS += -liconv -endif - -GIT_LIB = ../../libgit.a -VCSSVN_LIB = ../../vcs-svn/lib.a -XDIFF_LIB = ../../xdiff/lib.a - -LIBS = $(VCSSVN_LIB) $(GIT_LIB) $(XDIFF_LIB) - -QUIET_SUBDIR0 = +$(MAKE) -C # space to separate -C and subdir -QUIET_SUBDIR1 = - -ifneq ($(findstring $(MAKEFLAGS),w),w) -PRINT_DIR = --no-print-directory -else # "make -w" -NO_SUBDIR = : -endif - -ifneq ($(findstring $(MAKEFLAGS),s),s) -ifndef V - QUIET_CC = @echo ' ' CC $@; - QUIET_LINK = @echo ' ' LINK $@; - QUIET_SUBDIR0 = +@subdir= - QUIET_SUBDIR1 = ;$(NO_SUBDIR) echo ' ' SUBDIR $$subdir; \ - $(MAKE) $(PRINT_DIR) -C $$subdir -endif -endif - -svn-fe$X: svn-fe.o $(VCSSVN_LIB) $(XDIFF_LIB) $(GIT_LIB) - $(QUIET_LINK)$(CC) $(CFLAGS) 
$(LDFLAGS) $(EXTLIBS) -o $@ svn-fe.o $(LIBS) - -svn-fe.o: svn-fe.c ../../vcs-svn/svndump.h - $(QUIET_CC)$(CC) $(CFLAGS) -I../../vcs-svn -o $*.o -c $< - -svn-fe.html: svn-fe.txt - $(QUIET_SUBDIR0)../../Documentation $(QUIET_SUBDIR1) \ - MAN_TXT=../contrib/svn-fe/svn-fe.txt \ - ../contrib/svn-fe/$@ - -svn-fe.1: svn-fe.txt - $(QUIET_SUBDIR0)../../Documentation $(QUIET_SUBDIR1) \ - MAN_TXT=../contrib/svn-fe/svn-fe.txt \ - ../contrib/svn-fe/$@ - $(MV) ../../Documentation/svn-fe.1 . - -../../vcs-svn/lib.a: FORCE - $(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) vcs-svn/lib.a - -../../xdiff/lib.a: FORCE - $(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) xdiff/lib.a - -../../libgit.a: FORCE - $(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) libgit.a - -clean: - $(RM) svn-fe$X svn-fe.o svn-fe.html svn-fe.xml svn-fe.1 - -.PHONY: all clean FORCE diff --git a/contrib/svn-fe/svn-fe.c b/contrib/svn-fe/svn-fe.c deleted file mode 100644 index f363505abb..0000000000 --- a/contrib/svn-fe/svn-fe.c +++ /dev/null @@ -1,18 +0,0 @@ -/* - * This file is in the public domain. - * You may freely use, modify, distribute, and relicense it. - */ - -#include <stdlib.h> -#include "svndump.h" - -int main(int argc, char **argv) -{ - if (svndump_init(NULL)) - return 1; - svndump_read((argc > 1) ? argv[1] : NULL, "refs/heads/master", - "refs/notes/svn/revs"); - svndump_deinit(); - svndump_reset(); - return 0; -} diff --git a/contrib/svn-fe/svn-fe.txt b/contrib/svn-fe/svn-fe.txt deleted file mode 100644 index 19333fc8df..0000000000 --- a/contrib/svn-fe/svn-fe.txt +++ /dev/null @@ -1,71 +0,0 @@ -svn-fe(1) -========= - -NAME ----- -svn-fe - convert an SVN "dumpfile" to a fast-import stream - -SYNOPSIS --------- -[verse] -mkfifo backchannel && -svnadmin dump --deltas REPO | - svn-fe [url] 3<backchannel | - git fast-import --cat-blob-fd=3 3>backchannel - -DESCRIPTION ------------ - -Converts a Subversion dumpfile into input suitable for -git-fast-import(1) and similar importers. REPO is a path to a -Subversion repository mirrored on the local disk. Remote Subversion -repositories can be mirrored on local disk using the `svnsync` -command. - -Note: this tool is very young. The details of its commandline -interface may change in backward incompatible ways. - -INPUT FORMAT ------------- -Subversion's repository dump format is documented in full in -`notes/dump-load-format.txt` from the Subversion source tree. -Files in this format can be generated using the 'svnadmin dump' or -'svk admin dump' command. - -OUTPUT FORMAT -------------- -The fast-import format is documented by the git-fast-import(1) -manual page. - -NOTES ------ -Subversion dumps do not record a separate author and committer for -each revision, nor do they record a separate display name and email -address for each author. Like git-svn(1), 'svn-fe' will use the name - ---------- -user <user@UUID> ---------- - -as committer, where 'user' is the value of the `svn:author` property -and 'UUID' the repository's identifier. - -To support incremental imports, 'svn-fe' puts a `git-svn-id` line at -the end of each commit log message if passed a URL on the command -line. This line has the form `git-svn-id: URL@REVNO UUID`. - -The resulting repository will generally require further processing -to put each project in its own repository and to separate the history -of each branch. The 'git filter-repo --subdirectory-filter' command -may be useful for this purpose. - -BUGS ----- -Empty directories and unknown properties are silently discarded. - -The exit status does not reflect whether an error was detected. 
- -SEE ALSO --------- -git-svn(1), svn2git(1), svk(1), git-filter-repo(1), git-fast-import(1), -https://svn.apache.org/repos/asf/subversion/trunk/notes/dump-load-format.txt diff --git a/contrib/svn-fe/svnrdump_sim.py b/contrib/svn-fe/svnrdump_sim.py deleted file mode 100755 index 8a3cee6175..0000000000 --- a/contrib/svn-fe/svnrdump_sim.py +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env python -""" -Simulates svnrdump by replaying an existing dump from a file, taking care -of the specified revision range. -To simulate incremental imports the environment variable SVNRMAX can be set -to the highest revision that should be available. -""" -import sys -import os - -if sys.hexversion < 0x02040000: - # The limiter is the ValueError() calls. This may be too conservative - sys.stderr.write("svnrdump-sim.py: requires Python 2.4 or later.\n") - sys.exit(1) - - -def getrevlimit(): - var = 'SVNRMAX' - if var in os.environ: - return os.environ[var] - return None - - -def writedump(url, lower, upper): - if url.startswith('sim://'): - filename = url[6:] - if filename[-1] == '/': - filename = filename[:-1] # remove terminating slash - else: - raise ValueError('sim:// url required') - f = open(filename, 'r') - state = 'header' - wroterev = False - while(True): - l = f.readline() - if l == '': - break - if state == 'header' and l.startswith('Revision-number: '): - state = 'prefix' - if state == 'prefix' and l == 'Revision-number: %s\n' % lower: - state = 'selection' - if not upper == 'HEAD' and state == 'selection' and \ - l == 'Revision-number: %s\n' % upper: - break - - if state == 'header' or state == 'selection': - if state == 'selection': - wroterev = True - sys.stdout.write(l) - return wroterev - -if __name__ == "__main__": - if not (len(sys.argv) in (3, 4, 5)): - print("usage: %s dump URL -rLOWER:UPPER") - sys.exit(1) - if not sys.argv[1] == 'dump': - raise NotImplementedError('only "dump" is supported.') - url = sys.argv[2] - r = ('0', 'HEAD') - if len(sys.argv) == 4 and sys.argv[3][0:2] == '-r': - r = sys.argv[3][2:].lstrip().split(':') - if not getrevlimit() is None: - r[1] = getrevlimit() - if writedump(url, r[0], r[1]): - ret = 0 - else: - ret = 1 - sys.exit(ret) |
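
The import-tars.perl change above is a single guard, `next if ($typeflag eq 'g')`, that keeps pax global extended headers from being imported as file content. Below is a small standalone Perl sketch (an editor-supplied illustration, not part of the patch and not the importer itself) that walks a tar archive's 512-byte headers so the role of the 'g' and 'x' typeflags is visible; the field offsets follow the ustar layout, and the archive name is whatever is passed on the command line.

#!/usr/bin/perl
# Editor-supplied illustration of the typeflag check added to
# import-tars.perl. Offsets follow the ustar header layout: name at
# byte 0 (100 bytes), size at 124 (12 octal bytes), typeflag at 156.
use strict;
use warnings;
use Fcntl qw(SEEK_CUR);

die "usage: $0 <archive.tar>\n" unless @ARGV;
open(my $tar, '<:raw', $ARGV[0]) or die "open $ARGV[0]: $!\n";

while ((read($tar, my $header, 512) || 0) == 512) {
	last if $header !~ /[^\0]/;	# all-zero block: end of archive
	my $name     = unpack('Z100', $header);
	my $size     = oct((substr($header, 124, 12) =~ /([0-7]+)/) ? $1 : 0);
	my $typeflag = substr($header, 156, 1);
	$typeflag = '0' if $typeflag eq "\0";	# old-style regular file

	# Skip the payload, rounded up to the 512-byte block size.
	seek($tar, int(($size + 511) / 512) * 512, SEEK_CUR) or die "seek: $!\n";

	next if $typeflag eq 'g';	# pax global header: metadata, not a file
	next if $typeflag eq 'x';	# pax extended header for the next entry
	print "$name ($size bytes, typeflag $typeflag)\n";
}
close($tar);

Tarballs written by `git archive`, for example, begin with such a 'g' record (pax_global_header carrying the commit id), which is exactly the kind of entry the importer now skips instead of turning into a blob.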
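
The bulk of the git-remote-mediawiki.perl diff is a mechanical conversion from run_git("one string of arguments") to run_git_quoted([...]), so that arguments reach git as a list and are never re-parsed by a shell; stderr suppression, previously done with an in-string `2>/dev/null`, moves into an explicit `sh -c 'git "$@" 2>/dev/null'` wrapper (run_git_quoted_nostderr). The sketch below shows the pattern in isolation; the helper name `run_list` and the particular git invocations are illustrative only, but the two open() forms mirror the ones the patch introduces.

#!/usr/bin/perl
# Minimal sketch of the list-form invocation pattern behind the new
# run_git_quoted()/run_git_quoted_nostderr() helpers: no shell sees the
# arguments unless stderr redirection is explicitly requested.
use strict;
use warnings;

sub run_list {
	my ($args, $encoding) = @_;
	$encoding ||= 'encoding(UTF-8)';
	# List form of open(): arguments are passed verbatim, so spaces and
	# shell metacharacters in @$args need no quoting.
	open(my $fh, "-|:${encoding}", @$args) or die "Unable to fork: $!\n";
	my $out = do { local $/ = undef; <$fh> };
	close($fh);
	return defined $out ? $out : '';
}

# Plain call (mirrors run_git_quoted).
my $head = run_list([ 'git', 'rev-parse', '--abbrev-ref', 'HEAD' ]);
chomp($head);
print "HEAD is $head\n";

# Call with stderr discarded (mirrors run_git_quoted_nostderr): only here
# is a shell involved, and it expands "$@" itself, so the git arguments
# are still never re-parsed.
my $missing = run_list([ 'sh', '-c', 'git "$@" 2>/dev/null', '--',
			 'rev-parse', '--verify', 'refs/heads/no-such-branch' ]);
print "lookup returned: '$missing'\n";

With the list form, a value containing spaces or shell metacharacters (a page name, a ref, a commit message) is passed through verbatim instead of being split or interpreted by a shell.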
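
test-gitmw.pl likewise moves from a qw() table of function names (called through the table's string values) to a hash of code references invoked with ->(), decodes its arguments as UTF-8, and replaces the removed `use encoding 'utf8'` pragma with explicit :encoding(UTF-8) layers on open(). A compact sketch of those idioms follows; the command names and handlers here are hypothetical, while the `all.txt` open mirrors the test script.

#!/usr/bin/perl
# Sketch of the two idioms test-gitmw.pl adopts: a dispatch table of
# code references, and explicit encoding layers on open(). The command
# names and handlers are hypothetical.
use strict;
use warnings;

sub cmd_get    { print "get(@_)\n" }
sub cmd_delete { print "delete(@_)\n" }

my %dispatch = (
	get_page    => \&cmd_get,
	delete_page => \&cmd_delete,
);

my $cmd = shift @ARGV;
die "usage: $0 <get_page|delete_page> [args...]\n"
	unless defined $cmd && exists $dispatch{$cmd};

# Call through the code ref; decode each argument in place, as the new
# call site in test-gitmw.pl does.
$dispatch{$cmd}->(map { utf8::decode($_); $_ } @ARGV);

# Explicit I/O layer instead of the deprecated 'use encoding' pragma.
open(my $out, '>:encoding(UTF-8)', 'all.txt') or die "open all.txt: $!\n";
print {$out} "written as UTF-8\n";
close($out);

Storing code references rather than bare names pins the dispatch target when the table is built and avoids resolving a function from a string at call time.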