git/submodule.c

#include "cache.h"
#include "submodule-config.h"
#include "submodule.h"
#include "dir.h"
#include "diff.h"
#include "commit.h"
#include "revision.h"
#include "run-command.h"
#include "diffcore.h"
#include "refs.h"
#include "string-list.h"
#include "sha1-array.h"
#include "argv-array.h"
#include "blob.h"
#include "thread-utils.h"
static int config_fetch_recurse_submodules = RECURSE_SUBMODULES_ON_DEMAND;
static struct string_list changed_submodule_paths;
static int initialized_fetch_ref_tips;
static struct sha1_array ref_tips_before_fetch;
static struct sha1_array ref_tips_after_fetch;

/*
 * The following flag is set if the .gitmodules file is unmerged. We then
 * disable recursion for all submodules where .git/config doesn't have a
 * matching config entry because we can't guess what might be configured in
 * .gitmodules unless the user resolves the conflict. When a command line
 * option is given (which always overrides configuration) this flag will be
 * ignored.
 */
static int gitmodules_is_unmerged;

/*
 * This flag is set if the .gitmodules file had unstaged modifications on
 * startup. This must be checked before allowing modifications to the
 * .gitmodules file with the intention to stage them later, because when
 * continuing we would stage the modifications the user didn't stage herself
 * too. That might change in a future version when we learn to stage the
 * changes we do ourselves without staging any previous modifications.
 */
static int gitmodules_is_modified;
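
/*
 * Staging .gitmodules is only allowed when the file had no unstaged
 * modifications when we started up.
 */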
int is_staging_gitmodules_ok(void)
{
	return !gitmodules_is_modified;
}

/*
 * Try to update the "path" entry in the "submodule.<name>" section of the
 * .gitmodules file. Return 0 only if a .gitmodules file was found, a section
 * with the correct path=<oldpath> setting was found and we could update it.
 */
int update_path_in_gitmodules(const char *oldpath, const char *newpath)
{
	struct strbuf entry = STRBUF_INIT;
	const struct submodule *submodule;

	if (!file_exists(".gitmodules")) /* Do nothing without .gitmodules */
		return -1;

	if (gitmodules_is_unmerged)
		die(_("Cannot change unmerged .gitmodules, resolve merge conflicts first"));

	submodule = submodule_from_path(null_sha1, oldpath);
	if (!submodule || !submodule->name) {
		warning(_("Could not find section in .gitmodules where path=%s"), oldpath);
		return -1;
	}
	strbuf_addstr(&entry, "submodule.");
	strbuf_addstr(&entry, submodule->name);
	strbuf_addstr(&entry, ".path");
	if (git_config_set_in_file(".gitmodules", entry.buf, newpath) < 0) {
		/* Maybe the user already did that, don't error out here */
		warning(_("Could not update .gitmodules entry %s"), entry.buf);
		strbuf_release(&entry);
		return -1;
	}
	strbuf_release(&entry);
	return 0;
}

/*
 * Try to remove the "submodule.<name>" section from .gitmodules where the given
 * path is configured. Return 0 only if a .gitmodules file was found, a section
 * with the correct path=<path> setting was found and we could remove it.
 */
int remove_path_from_gitmodules(const char *path)
{
	struct strbuf sect = STRBUF_INIT;
	const struct submodule *submodule;
	if (!file_exists(".gitmodules")) /* Do nothing without .gitmodules */
		return -1;

	if (gitmodules_is_unmerged)
		die(_("Cannot change unmerged .gitmodules, resolve merge conflicts first"));

	submodule = submodule_from_path(null_sha1, path);
	if (!submodule || !submodule->name) {
		warning(_("Could not find section in .gitmodules where path=%s"), path);
		return -1;
	}
	strbuf_addstr(&sect, "submodule.");
	strbuf_addstr(&sect, submodule->name);
	if (git_config_rename_section_in_file(".gitmodules", sect.buf, NULL) < 0) {
		/* Maybe the user already did that, don't error out here */
		warning(_("Could not remove .gitmodules entry for %s"), path);
		strbuf_release(&sect);
		return -1;
	}
	strbuf_release(&sect);
	return 0;
}

void stage_updated_gitmodules(void)
{
	if (add_file_to_cache(".gitmodules", 0))
		die(_("staging updated .gitmodules failed"));
}
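
/*
 * Add the object database of the submodule at "path" as an alternate so
 * its objects can be read from the superproject. Returns 0 on success,
 * -1 if the submodule's object directory is missing.
 */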
static int add_submodule_odb(const char *path)
{
	struct strbuf objects_directory = STRBUF_INIT;
	struct alternate_object_database *alt_odb;
	int ret = 0;
	int alloc;

	strbuf_git_path_submodule(&objects_directory, path, "objects/");
	if (!is_directory(objects_directory.buf)) {
		ret = -1;
		goto done;
	}
	/* avoid adding it twice */
	prepare_alt_odb();
	for (alt_odb = alt_odb_list; alt_odb; alt_odb = alt_odb->next)
		if (alt_odb->name - alt_odb->base == objects_directory.len &&
		    !strncmp(alt_odb->base, objects_directory.buf,
			     objects_directory.len))
			goto done;

	alloc = objects_directory.len + 42; /* for "12/345..." sha1 */
	alt_odb = xmalloc(sizeof(*alt_odb) + alloc);
	alt_odb->next = alt_odb_list;
	xsnprintf(alt_odb->base, alloc, "%s", objects_directory.buf);
	alt_odb->name = alt_odb->base + objects_directory.len;
	alt_odb->name[2] = '/';
	alt_odb->name[40] = '\0';
	alt_odb->name[41] = '\0';
	alt_odb_list = alt_odb;

	/* add possible alternates from the submodule */
	read_info_alternates(objects_directory.buf, 0);
done:
	strbuf_release(&objects_directory);
	return ret;
}
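
/*
 * Initialize the ignore-submodule flags of "diffopt" for "path" from its
 * submodule.<name>.ignore setting; when .gitmodules is unmerged the
 * submodule is ignored entirely.
 */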
void set_diffopt_flags_from_submodule_config(struct diff_options *diffopt,
					     const char *path)
{
	const struct submodule *submodule = submodule_from_path(null_sha1, path);
	if (submodule) {
		if (submodule->ignore)
			handle_ignore_submodules_arg(diffopt, submodule->ignore);
		else if (gitmodules_is_unmerged)
			DIFF_OPT_SET(diffopt, IGNORE_SUBMODULES);
	}
}
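
/*
 * Config callback: feed "submodule.*" entries to the submodule-config
 * parser and pick up the fetch.recurseSubmodules default.
 */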
int submodule_config(const char *var, const char *value, void *cb)
{
	if (starts_with(var, "submodule."))
		return parse_submodule_config_option(var, value);
	else if (!strcmp(var, "fetch.recursesubmodules")) {
		config_fetch_recurse_submodules = parse_fetch_recurse_submodules_arg(var, value);
		return 0;
	}
	return 0;
}
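
/*
 * Read .gitmodules from the work tree, but first consult the index to
 * learn whether the file is unmerged or carries unstaged modifications.
 */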
void gitmodules_config(void)
{
	const char *work_tree = get_git_work_tree();
	if (work_tree) {
		struct strbuf gitmodules_path = STRBUF_INIT;
		int pos;
		strbuf_addstr(&gitmodules_path, work_tree);
		strbuf_addstr(&gitmodules_path, "/.gitmodules");
		if (read_cache() < 0)
			die("index file corrupt");
		pos = cache_name_pos(".gitmodules", 11);
		if (pos < 0) { /* .gitmodules not found or isn't merged */
			pos = -1 - pos;
			if (active_nr > pos) {  /* there is a .gitmodules */
				const struct cache_entry *ce = active_cache[pos];
				if (ce_namelen(ce) == 11 &&
				    !memcmp(ce->name, ".gitmodules", 11))
					gitmodules_is_unmerged = 1;
			}
		} else if (pos < active_nr) {
			struct stat st;
			if (lstat(".gitmodules", &st) == 0 &&
			    ce_match_stat(active_cache[pos], &st, 0) & DATA_CHANGED)
				gitmodules_is_modified = 1;
		}

		if (!gitmodules_is_unmerged)
			git_config_from_file(submodule_config, gitmodules_path.buf, NULL);
		strbuf_release(&gitmodules_path);
	}
}
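
/*
 * Translate an --ignore-submodules argument ("all", "untracked", "dirty"
 * or "none") into the corresponding diff option flags.
 */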
void handle_ignore_submodules_arg(struct diff_options *diffopt,
				  const char *arg)
{
	DIFF_OPT_CLR(diffopt, IGNORE_SUBMODULES);
	DIFF_OPT_CLR(diffopt, IGNORE_UNTRACKED_IN_SUBMODULES);
	DIFF_OPT_CLR(diffopt, IGNORE_DIRTY_SUBMODULES);

	if (!strcmp(arg, "all"))
		DIFF_OPT_SET(diffopt, IGNORE_SUBMODULES);
	else if (!strcmp(arg, "untracked"))
		DIFF_OPT_SET(diffopt, IGNORE_UNTRACKED_IN_SUBMODULES);
	else if (!strcmp(arg, "dirty"))
		DIFF_OPT_SET(diffopt, IGNORE_DIRTY_SUBMODULES);
	else if (strcmp(arg, "none"))
		die("bad --ignore-submodules argument: %s", arg);
}
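
/*
 * Set up a left/right revision walk between the two submodule commits,
 * marking their merge bases uninteresting and reporting fast-forward and
 * fast-backward (rewind) situations to the caller.
 */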
static int prepare_submodule_summary(struct rev_info *rev, const char *path,
		struct commit *left, struct commit *right,
		int *fast_forward, int *fast_backward)
{
	struct commit_list *merge_bases, *list;

	init_revisions(rev, NULL);
	setup_revisions(0, NULL, rev, NULL);
	rev->left_right = 1;
	rev->first_parent_only = 1;
	left->object.flags |= SYMMETRIC_LEFT;
	add_pending_object(rev, &left->object, path);
	add_pending_object(rev, &right->object, path);
	merge_bases = get_merge_bases(left, right);
	if (merge_bases) {
		if (merge_bases->item == left)
			*fast_forward = 1;
		else if (merge_bases->item == right)
			*fast_backward = 1;
	}
	for (list = merge_bases; list; list = list->next) {
		list->item->object.flags |= UNINTERESTING;
		add_pending_object(rev, &list->item->object,
			oid_to_hex(&list->item->object.oid));
	}
	return prepare_revision_walk(rev);
}
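
/*
 * Print one line per commit produced by the walk, colored with "add" or
 * "del" depending on which side of the symmetric difference it is on.
 */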
static void print_submodule_summary(struct rev_info *rev, FILE *f,
		const char *line_prefix,
		const char *del, const char *add, const char *reset)
{
	static const char format[] = " %m %s";
	struct strbuf sb = STRBUF_INIT;
	struct commit *commit;

	while ((commit = get_revision(rev))) {
		struct pretty_print_context ctx = {0};
		ctx.date_mode = rev->date_mode;
		ctx.output_encoding = get_log_output_encoding();
		strbuf_setlen(&sb, 0);
		strbuf_addstr(&sb, line_prefix);
		if (commit->object.flags & SYMMETRIC_LEFT) {
			if (del)
				strbuf_addstr(&sb, del);
		}
		else if (add)
			strbuf_addstr(&sb, add);
		format_commit_message(commit, format, &sb, &ctx);
		if (reset)
			strbuf_addstr(&sb, reset);
		strbuf_addch(&sb, '\n');
		fprintf(f, "%s", sb.buf);
	}
	strbuf_release(&sb);
}
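
/*
 * Print the "Submodule <path> <from>..<to>" header for a submodule diff,
 * followed by a summary of the commits between the two states when both
 * are present in the submodule's object database.
 */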
void show_submodule_summary(FILE *f, const char *path,
		const char *line_prefix,
		unsigned char one[20], unsigned char two[20],
		unsigned dirty_submodule, const char *meta,
		const char *del, const char *add, const char *reset)
{
	struct rev_info rev;
	struct commit *left = NULL, *right = NULL;
	const char *message = NULL;
	struct strbuf sb = STRBUF_INIT;
	int fast_forward = 0, fast_backward = 0;

	if (is_null_sha1(two))
		message = "(submodule deleted)";
	else if (add_submodule_odb(path))
		message = "(not checked out)";
	else if (is_null_sha1(one))
		message = "(new submodule)";
	else if (!(left = lookup_commit_reference(one)) ||
		 !(right = lookup_commit_reference(two)))
		message = "(commits not present)";
	else if (prepare_submodule_summary(&rev, path, left, right,
					   &fast_forward, &fast_backward))
		message = "(revision walker failed)";

	if (dirty_submodule & DIRTY_SUBMODULE_UNTRACKED)
		fprintf(f, "%sSubmodule %s contains untracked content\n",
			line_prefix, path);
	if (dirty_submodule & DIRTY_SUBMODULE_MODIFIED)
		fprintf(f, "%sSubmodule %s contains modified content\n",
			line_prefix, path);

	if (!hashcmp(one, two)) {
		strbuf_release(&sb);
		return;
	}

	strbuf_addf(&sb, "%s%sSubmodule %s %s..", line_prefix, meta, path,
			find_unique_abbrev(one, DEFAULT_ABBREV));
	if (!fast_backward && !fast_forward)
		strbuf_addch(&sb, '.');
	strbuf_addf(&sb, "%s", find_unique_abbrev(two, DEFAULT_ABBREV));
	if (message)
		strbuf_addf(&sb, " %s%s\n", message, reset);
	else
		strbuf_addf(&sb, "%s:%s\n", fast_backward ? " (rewind)" : "", reset);
	fwrite(sb.buf, sb.len, 1, f);

	if (!message) /* only NULL if we succeeded in setting up the walk */
		print_submodule_summary(&rev, f, line_prefix, del, add, reset);
	if (left)
		clear_commit_marks(left, ~0);
	if (right)
		clear_commit_marks(right, ~0);
	strbuf_release(&sb);
}

void set_config_fetch_recurse_submodules(int value)
{
	config_fetch_recurse_submodules = value;
}
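
/*
 * Ref iteration callback that aborts at the first ref it sees, so a
 * positive return from for_each_remote_ref_submodule() means the
 * submodule has at least one remote-tracking ref.
 */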
static int has_remote(const char *refname, const struct object_id *oid,
		      int flags, void *cb_data)
{
	return 1;
}
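
/*
 * Check whether the given commit of the submodule at "path" is missing
 * from all of its remote-tracking refs and therefore still needs to be
 * pushed.
 */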
static int submodule_needs_pushing(const char *path, const unsigned char sha1[20])
{
	if (add_submodule_odb(path) || !lookup_commit_reference(sha1))
		return 0;

	if (for_each_remote_ref_submodule(path, has_remote, NULL) > 0) {
		struct child_process cp = CHILD_PROCESS_INIT;
		const char *argv[] = {"rev-list", NULL, "--not", "--remotes", "-n", "1", NULL};
		struct strbuf buf = STRBUF_INIT;
		int needs_pushing = 0;

		argv[1] = sha1_to_hex(sha1);
		cp.argv = argv;
		cp.env = local_repo_env;
		cp.git_cmd = 1;
		cp.no_stdin = 1;
		cp.out = -1;
		cp.dir = path;
		if (start_command(&cp))
			die("Could not run 'git rev-list %s --not --remotes -n 1' command in submodule %s",
				sha1_to_hex(sha1), path);
		if (strbuf_read(&buf, cp.out, 41))
			needs_pushing = 1;
		finish_command(&cp);
		close(cp.out);
		strbuf_release(&buf);
		return needs_pushing;
	}

	return 0;
}
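
/*
 * Diff callback collecting the paths of submodules whose recorded commit
 * has not been pushed to any of their remotes yet.
 */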
static void collect_submodules_from_diff(struct diff_queue_struct *q,
					 struct diff_options *options,
					 void *data)
{
	int i;
	struct string_list *needs_pushing = data;

	for (i = 0; i < q->nr; i++) {
		struct diff_filepair *p = q->queue[i];
		if (!S_ISGITLINK(p->two->mode))
			continue;
		if (submodule_needs_pushing(p->two->path, p->two->sha1))
			string_list_insert(needs_pushing, p->two->path);
	}
}

static void find_unpushed_submodule_commits(struct commit *commit,
		struct string_list *needs_pushing)
{
	struct rev_info rev;

	init_revisions(&rev, NULL);
	rev.diffopt.output_format |= DIFF_FORMAT_CALLBACK;
	rev.diffopt.format_callback = collect_submodules_from_diff;
	rev.diffopt.format_callback_data = needs_pushing;
	diff_tree_combined_merge(commit, 1, &rev);
}
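
/*
 * Walk the commits reachable from new_sha1 but not from the refs of the
 * given remote and collect the submodules they modify; returns the
 * number of submodules that need pushing.
 */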
int find_unpushed_submodules(unsigned char new_sha1[20],
		const char *remotes_name, struct string_list *needs_pushing)
{
	struct rev_info rev;
	struct commit *commit;
	const char *argv[] = {NULL, NULL, "--not", "NULL", NULL};
	int argc = ARRAY_SIZE(argv) - 1;
	char *sha1_copy;
	struct strbuf remotes_arg = STRBUF_INIT;

	strbuf_addf(&remotes_arg, "--remotes=%s", remotes_name);
	init_revisions(&rev, NULL);
	sha1_copy = xstrdup(sha1_to_hex(new_sha1));
	argv[1] = sha1_copy;
	argv[3] = remotes_arg.buf;
	setup_revisions(argc, argv, &rev, NULL);
	if (prepare_revision_walk(&rev))
		die("revision walk setup failed");

	while ((commit = get_revision(&rev)) != NULL)
		find_unpushed_submodule_commits(commit, needs_pushing);

	reset_revision_walk();
	free(sha1_copy);
	strbuf_release(&remotes_arg);

	return needs_pushing->nr;
}
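
/*
 * Run "git push" in the submodule at "path" if it is populated and has a
 * remote configured; returns 0 only if the push itself fails.
 */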
static int push_submodule(const char *path)
{
	if (add_submodule_odb(path))
		return 1;

	if (for_each_remote_ref_submodule(path, has_remote, NULL) > 0) {
		struct child_process cp = CHILD_PROCESS_INIT;
		const char *argv[] = {"push", NULL};

		cp.argv = argv;
		cp.env = local_repo_env;
		cp.git_cmd = 1;
		cp.no_stdin = 1;
		cp.dir = path;
		if (run_command(&cp))
			return 0;
		close(cp.out);
	}

	return 1;
}

int push_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name)
{
	int i, ret = 1;
	struct string_list needs_pushing = STRING_LIST_INIT_DUP;

	if (!find_unpushed_submodules(new_sha1, remotes_name, &needs_pushing))
		return 1;

	for (i = 0; i < needs_pushing.nr; i++) {
		const char *path = needs_pushing.items[i].string;
		fprintf(stderr, "Pushing submodule '%s'\n", path);
		if (!push_submodule(path)) {
			fprintf(stderr, "Unable to push submodule '%s'\n", path);
			ret = 0;
		}
	}

	string_list_clear(&needs_pushing, 0);

	return ret;
}
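
/*
 * Check whether the given commit exists in the submodule at "path" and
 * is reachable from one of the submodule's refs.
 */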
static int is_submodule_commit_present(const char *path, unsigned char sha1[20])
{
	int is_present = 0;
	if (!add_submodule_odb(path) && lookup_commit_reference(sha1)) {
		/* Even if the submodule is checked out and the commit is
		 * present, make sure it is reachable from a ref. */
		struct child_process cp = CHILD_PROCESS_INIT;
		const char *argv[] = {"rev-list", "-n", "1", NULL, "--not", "--all", NULL};
		struct strbuf buf = STRBUF_INIT;

		argv[3] = sha1_to_hex(sha1);
		cp.argv = argv;
		cp.env = local_repo_env;
		cp.git_cmd = 1;
		cp.no_stdin = 1;
		cp.dir = path;
		if (!capture_command(&cp, &buf, 1024) && !buf.len)
			is_present = 1;

		strbuf_release(&buf);
	}
	return is_present;
}
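
/*
 * Diff callback that records the paths of submodules whose recorded
 * commit changed and is not yet present locally, so that they can be
 * fetched on demand later; newly added submodules are skipped for now.
 */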
static void submodule_collect_changed_cb(struct diff_queue_struct *q,
					 struct diff_options *options,
					 void *data)
{
	int i;
	for (i = 0; i < q->nr; i++) {
		struct diff_filepair *p = q->queue[i];
		if (!S_ISGITLINK(p->two->mode))
			continue;
		if (S_ISGITLINK(p->one->mode)) {
			/* NEEDSWORK: We should honor the name configured in
			 * the .gitmodules file of the commit we are examining
			 * here to be able to correctly follow submodules
			 * being moved around. */
			struct string_list_item *path;
			path = unsorted_string_list_lookup(&changed_submodule_paths, p->two->path);
			if (!path && !is_submodule_commit_present(p->two->path, p->two->sha1))
				string_list_append(&changed_submodule_paths, xstrdup(p->two->path));
		} else {
			/* Submodule is new or was moved here */
			/* NEEDSWORK: When the .git directories of submodules
			 * live inside the superprojects .git directory some
			 * day we should fetch new submodules directly into
			 * that location too when config or options request
			 * that so they can be checked out from there. */
			continue;
		}
	}
}

static int add_sha1_to_array(const char *ref, const struct object_id *oid,
			     int flags, void *data)
{
	sha1_array_append(data, oid->hash);
	return 0;
}
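
/*
 * Record the ref tips that existed before the fetch (on the first call)
 * and each new tip being fetched, so the newly fetched commits can later
 * be scanned for changed submodules in a single revision walk.
 */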
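/*
 * Record a ref tip updated by the ongoing fetch. On the first call the
 * tips of all refs that existed before the fetch are snapshotted into
 * ref_tips_before_fetch; every new tip is collected in
 * ref_tips_after_fetch, so calculate_changed_submodule_paths() can later
 * walk "new tips --not old tips" in a single revision walk.
 */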
void check_for_new_submodule_commits(unsigned char new_sha1[20])
{
if (!initialized_fetch_ref_tips) {
for_each_ref(add_sha1_to_array, &ref_tips_before_fetch);
initialized_fetch_ref_tips = 1;
}
sha1_array_append(&ref_tips_after_fetch, new_sha1);
}
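/* Callback for sha1_array_for_each_unique(): push the hex sha1 onto an argv_array. */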
static void add_sha1_to_argv(const unsigned char sha1[20], void *data)
{
argv_array_push(data, sha1_to_hex(sha1));
}
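/*
 * Walk the commits that are reachable from the newly fetched ref tips but
 * not from the tips recorded before the fetch, diff each commit against
 * its parent(s) and let submodule_collect_changed_cb() record the paths of
 * submodules whose gitlink changed, so they can be fetched on demand.
 */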
static void calculate_changed_submodule_paths(void)
{
struct rev_info rev;
struct commit *commit;
struct argv_array argv = ARGV_ARRAY_INIT;
/* No need to check if there are no submodules configured */
if (!submodule_from_path(NULL, NULL))
return;
init_revisions(&rev, NULL);
argv_array_push(&argv, "--"); /* argv[0] program name */
sha1_array_for_each_unique(&ref_tips_after_fetch,
add_sha1_to_argv, &argv);
argv_array_push(&argv, "--not");
sha1_array_for_each_unique(&ref_tips_before_fetch,
add_sha1_to_argv, &argv);
setup_revisions(argv.argc, argv.argv, &rev, NULL);
if (prepare_revision_walk(&rev))
die("revision walk setup failed");
/*
* Collect all submodules (whether checked out or not) for which new
* commits have been recorded upstream in "changed_submodule_paths".
*/
while ((commit = get_revision(&rev))) {
struct commit_list *parent = commit->parents;
while (parent) {
struct diff_options diff_opts;
diff_setup(&diff_opts);
DIFF_OPT_SET(&diff_opts, RECURSIVE);
diff_opts.output_format |= DIFF_FORMAT_CALLBACK;
diff_opts.format_callback = submodule_collect_changed_cb;
diff_setup_done(&diff_opts);
diff_tree_sha1(parent->item->object.oid.hash, commit->object.oid.hash, "", &diff_opts);
diffcore_std(&diff_opts);
diff_flush(&diff_opts);
parent = parent->next;
}
}
argv_array_clear(&argv);
sha1_array_clear(&ref_tips_before_fetch);
sha1_array_clear(&ref_tips_after_fetch);
initialized_fetch_ref_tips = 0;
}
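/* State shared between fetch_populated_submodules() and its parallel-fetch callbacks. */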
struct submodule_parallel_fetch {
int count;
struct argv_array args;
const char *work_tree;
const char *prefix;
int command_line_option;
int quiet;
int result;
};
#define SPF_INIT {0, ARGV_ARRAY_INIT, NULL, NULL, 0, 0, 0}
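/*
 * Callback for run_processes_parallel(): find the next gitlink in the
 * index that should be fetched (honoring the command line option, the
 * per-submodule configuration and the changed-submodule list), fill *cp
 * with a "git fetch" command to be run in that submodule's work tree and
 * return 1; return 0 when no submodules are left to fetch.
 */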
static int get_next_submodule(struct child_process *cp,
struct strbuf *err, void *data, void **task_cb)
{
int ret = 0;
struct submodule_parallel_fetch *spf = data;
for (; spf->count < active_nr; spf->count++) {
struct strbuf submodule_path = STRBUF_INIT;
struct strbuf submodule_git_dir = STRBUF_INIT;
struct strbuf submodule_prefix = STRBUF_INIT;
const struct cache_entry *ce = active_cache[spf->count];
const char *git_dir, *default_argv;
const struct submodule *submodule;
if (!S_ISGITLINK(ce->ce_mode))
continue;
submodule = submodule_from_path(null_sha1, ce->name);
if (!submodule)
submodule = submodule_from_name(null_sha1, ce->name);
default_argv = "yes";
if (spf->command_line_option == RECURSE_SUBMODULES_DEFAULT) {
if (submodule &&
submodule->fetch_recurse !=
RECURSE_SUBMODULES_NONE) {
if (submodule->fetch_recurse ==
RECURSE_SUBMODULES_OFF)
continue;
if (submodule->fetch_recurse ==
RECURSE_SUBMODULES_ON_DEMAND) {
if (!unsorted_string_list_lookup(&changed_submodule_paths, ce->name))
continue;
default_argv = "on-demand";
}
} else {
if ((config_fetch_recurse_submodules == RECURSE_SUBMODULES_OFF) ||
gitmodules_is_unmerged)
continue;
if (config_fetch_recurse_submodules == RECURSE_SUBMODULES_ON_DEMAND) {
if (!unsorted_string_list_lookup(&changed_submodule_paths, ce->name))
continue;
default_argv = "on-demand";
}
}
} else if (spf->command_line_option == RECURSE_SUBMODULES_ON_DEMAND) {
if (!unsorted_string_list_lookup(&changed_submodule_paths, ce->name))
continue;
default_argv = "on-demand";
}
strbuf_addf(&submodule_path, "%s/%s", spf->work_tree, ce->name);
strbuf_addf(&submodule_git_dir, "%s/.git", submodule_path.buf);
strbuf_addf(&submodule_prefix, "%s%s/", spf->prefix, ce->name);
git_dir = read_gitfile(submodule_git_dir.buf);
if (!git_dir)
git_dir = submodule_git_dir.buf;
if (is_directory(git_dir)) {
child_process_init(cp);
cp->dir = strbuf_detach(&submodule_path, NULL);
cp->env = local_repo_env;
cp->git_cmd = 1;
if (!spf->quiet)
strbuf_addf(err, "Fetching submodule %s%s\n",
spf->prefix, ce->name);
argv_array_init(&cp->args);
argv_array_pushv(&cp->args, spf->args.argv);
argv_array_push(&cp->args, default_argv);
argv_array_push(&cp->args, "--submodule-prefix");
argv_array_push(&cp->args, submodule_prefix.buf);
ret = 1;
}
strbuf_release(&submodule_path);
strbuf_release(&submodule_git_dir);
strbuf_release(&submodule_prefix);
if (ret) {
spf->count++;
return 1;
}
}
return 0;
}
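/* Callback for run_processes_parallel(): a submodule fetch could not be started. */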
static int fetch_start_failure(struct child_process *cp,
struct strbuf *err,
void *cb, void *task_cb)
{
struct submodule_parallel_fetch *spf = cb;
spf->result = 1;
return 0;
}
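/* Callback for run_processes_parallel(): a non-zero exit code marks the whole fetch as failed. */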
static int fetch_finish(int retvalue, struct child_process *cp,
struct strbuf *err, void *cb, void *task_cb)
{
struct submodule_parallel_fetch *spf = cb;
if (retvalue)
spf->result = 1;
return 0;
}
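/*
 * Fetch the submodules that are populated in the work tree, running up to
 * max_parallel_jobs "git fetch" processes in parallel. Returns non-zero if
 * any submodule fetch failed.
 */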
int fetch_populated_submodules(const struct argv_array *options,
const char *prefix, int command_line_option,
int quiet, int max_parallel_jobs)
{
int i;
struct submodule_parallel_fetch spf = SPF_INIT;
spf.work_tree = get_git_work_tree();
spf.command_line_option = command_line_option;
spf.quiet = quiet;
spf.prefix = prefix;
if (!spf.work_tree)
goto out;
if (read_cache() < 0)
die("index file corrupt");
argv_array_push(&spf.args, "fetch");
for (i = 0; i < options->argc; i++)
argv_array_push(&spf.args, options->argv[i]);
argv_array_push(&spf.args, "--recurse-submodules-default");
/* the default value and "--submodule-prefix" with its value are added in get_next_submodule() */
calculate_changed_submodule_paths();
run_processes_parallel(max_parallel_jobs,
get_next_submodule,
fetch_start_failure,
fetch_finish,
&spf);
argv_array_clear(&spf.args);
out:
string_list_clear(&changed_submodule_paths, 1);
return spf.result;
}
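/*
 * Run "git status --porcelain" (with "-uno" when untracked files are to be
 * ignored) in the submodule at "path" and report its state as a
 * combination of DIRTY_SUBMODULE_UNTRACKED and DIRTY_SUBMODULE_MODIFIED.
 * A submodule that is not checked out is reported as unmodified.
 */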
unsigned is_submodule_modified(const char *path, int ignore_untracked)
{
ssize_t len;
struct child_process cp = CHILD_PROCESS_INIT;
const char *argv[] = {
"status",
"--porcelain",
NULL,
NULL,
};
struct strbuf buf = STRBUF_INIT;
unsigned dirty_submodule = 0;
const char *line, *next_line;
const char *git_dir;
strbuf_addf(&buf, "%s/.git", path);
git_dir = read_gitfile(buf.buf);
if (!git_dir)
git_dir = buf.buf;
if (!is_directory(git_dir)) {
strbuf_release(&buf);
/* The submodule is not checked out, so it is not modified */
return 0;
}
strbuf_reset(&buf);
if (ignore_untracked)
argv[2] = "-uno";
cp.argv = argv;
cp.env = local_repo_env;
cp.git_cmd = 1;
cp.no_stdin = 1;
cp.out = -1;
cp.dir = path;
if (start_command(&cp))
die("Could not run 'git status --porcelain' in submodule %s", path);
len = strbuf_read(&buf, cp.out, 1024);
line = buf.buf;
while (len > 2) {
if ((line[0] == '?') && (line[1] == '?')) {
dirty_submodule |= DIRTY_SUBMODULE_UNTRACKED;
if (dirty_submodule & DIRTY_SUBMODULE_MODIFIED)
break;
} else {
dirty_submodule |= DIRTY_SUBMODULE_MODIFIED;
if (ignore_untracked ||
(dirty_submodule & DIRTY_SUBMODULE_UNTRACKED))
break;
}
next_line = strchr(line, '\n');
if (!next_line)
break;
next_line++;
len -= (next_line - line);
line = next_line;
}
close(cp.out);
if (finish_command(&cp))
die("'git status --porcelain' failed in submodule %s", path);
strbuf_release(&buf);
return dirty_submodule;
}
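/*
 * Return 1 if the submodule at "path" and all of its nested submodules
 * point at their repositories with a gitfile, 0 otherwise.
 */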
int submodule_uses_gitfile(const char *path)
{
struct child_process cp = CHILD_PROCESS_INIT;
const char *argv[] = {
"submodule",
"foreach",
"--quiet",
"--recursive",
"test -f .git",
NULL,
};
struct strbuf buf = STRBUF_INIT;
const char *git_dir;
strbuf_addf(&buf, "%s/.git", path);
git_dir = read_gitfile(buf.buf);
if (!git_dir) {
strbuf_release(&buf);
return 0;
}
strbuf_release(&buf);
/* Now test that all nested submodules use a gitfile too */
cp.argv = argv;
cp.env = local_repo_env;
cp.git_cmd = 1;
cp.no_stdin = 1;
cp.no_stderr = 1;
cp.no_stdout = 1;
cp.dir = path;
if (run_command(&cp))
return 0;
return 1;
}
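/*
 * Return 1 if the work tree of the submodule at "path" can be removed
 * without losing data: it is missing or empty, or it uses a gitfile and
 * "git status" reports neither modified nor untracked files.
 */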
int ok_to_remove_submodule(const char *path)
{
ssize_t len;
struct child_process cp = CHILD_PROCESS_INIT;
const char *argv[] = {
"status",
"--porcelain",
"-u",
"--ignore-submodules=none",
NULL,
};
struct strbuf buf = STRBUF_INIT;
int ok_to_remove = 1;
if (!file_exists(path) || is_empty_dir(path))
return 1;
if (!submodule_uses_gitfile(path))
return 0;
cp.argv = argv;
cp.env = local_repo_env;
cp.git_cmd = 1;
cp.no_stdin = 1;
cp.out = -1;
cp.dir = path;
if (start_command(&cp))
die("Could not run 'git status --porcelain -uall --ignore-submodules=none' in submodule %s", path);
len = strbuf_read(&buf, cp.out, 1024);
if (len > 2)
ok_to_remove = 0;
close(cp.out);
if (finish_command(&cp))
die("'git status --porcelain -uall --ignore-submodules=none' failed in submodule %s", path);
strbuf_release(&buf);
return ok_to_remove;
}
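/*
 * Collect the merge commits in the submodule at "path" that contain both
 * a and b, drop those that contain another such merge and store the
 * remaining candidates in "result". Returns the number of candidates.
 */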
static int find_first_merges(struct object_array *result, const char *path,
struct commit *a, struct commit *b)
{
int i, j;
struct object_array merges = OBJECT_ARRAY_INIT;
struct commit *commit;
int contains_another;
char merged_revision[42];
const char *rev_args[] = { "rev-list", "--merges", "--ancestry-path",
"--all", merged_revision, NULL };
struct rev_info revs;
struct setup_revision_opt rev_opts;
memset(result, 0, sizeof(struct object_array));
memset(&rev_opts, 0, sizeof(rev_opts));
/* get all revisions that merge commit a */
snprintf(merged_revision, sizeof(merged_revision), "^%s",
oid_to_hex(&a->object.oid));
init_revisions(&revs, NULL);
rev_opts.submodule = path;
setup_revisions(ARRAY_SIZE(rev_args)-1, rev_args, &revs, &rev_opts);
/* save all revisions from the above list that contain b */
if (prepare_revision_walk(&revs))
die("revision walk setup failed");
while ((commit = get_revision(&revs)) != NULL) {
struct object *o = &(commit->object);
if (in_merge_bases(b, commit))
add_object_array(o, NULL, &merges);
}
reset_revision_walk();
/*
 * Now we've got all merges that contain a and b. Prune all
 * merges that contain another found merge and save them in
 * result.
 */
for (i = 0; i < merges.nr; i++) {
struct commit *m1 = (struct commit *) merges.objects[i].item;
contains_another = 0;
for (j = 0; j < merges.nr; j++) {
struct commit *m2 = (struct commit *) merges.objects[j].item;
if (i != j && in_merge_bases(m2, m1)) {
contains_another = 1;
break;
}
}
if (!contains_another)
add_object_array(merges.objects[i].item, NULL, result);
}
free(merges.objects);
return result->nr;
}
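/* Print a one-line description (abbreviated hash and subject) of a commit to stderr. */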
static void print_commit(struct commit *commit)
{
struct strbuf sb = STRBUF_INIT;
struct pretty_print_context ctx = {0};
convert "enum date_mode" into a struct In preparation for adding date modes that may carry extra information beyond the mode itself, this patch converts the date_mode enum into a struct. Most of the conversion is fairly straightforward; we pass the struct as a pointer and dereference the type field where necessary. Locations that declare a date_mode can use a "{}" constructor. However, the tricky case is where we use the enum labels as constants, like: show_date(t, tz, DATE_NORMAL); Ideally we could say: show_date(t, tz, &{ DATE_NORMAL }); but of course C does not allow that. Likewise, we cannot cast the constant to a struct, because we need to pass an actual address. Our options are basically: 1. Manually add a "struct date_mode d = { DATE_NORMAL }" definition to each caller, and pass "&d". This makes the callers uglier, because they sometimes do not even have their own scope (e.g., they are inside a switch statement). 2. Provide a pre-made global "date_normal" struct that can be passed by address. We'd also need "date_rfc2822", "date_iso8601", and so forth. But at least the ugliness is defined in one place. 3. Provide a wrapper that generates the correct struct on the fly. The big downside is that we end up pointing to a single global, which makes our wrapper non-reentrant. But show_date is already not reentrant, so it does not matter. This patch implements 3, along with a minor macro to keep the size of the callers sane. Signed-off-by: Jeff King <peff@peff.net> Signed-off-by: Junio C Hamano <gitster@pobox.com>
2015-06-26 00:55:02 +08:00
ctx.date_mode.type = DATE_NORMAL;
format_commit_message(commit, " %h: %m %s", &sb, &ctx);
fprintf(stderr, "%s\n", sb.buf);
strbuf_release(&sb);
}
#define MERGE_WARNING(path, msg) \
warning("Failed to merge submodule %s (%s)", path, msg);
int merge_submodule(unsigned char result[20], const char *path,
const unsigned char base[20], const unsigned char a[20],
const unsigned char b[20], int search)
{
struct commit *commit_base, *commit_a, *commit_b;
int parent_count;
struct object_array merges;
int i;
/* store a in result in case we fail */
hashcpy(result, a);
/* we cannot handle deletion conflicts */
if (is_null_sha1(base))
return 0;
if (is_null_sha1(a))
return 0;
if (is_null_sha1(b))
return 0;
if (add_submodule_odb(path)) {
MERGE_WARNING(path, "not checked out");
return 0;
}
if (!(commit_base = lookup_commit_reference(base)) ||
!(commit_a = lookup_commit_reference(a)) ||
!(commit_b = lookup_commit_reference(b))) {
MERGE_WARNING(path, "commits not present");
return 0;
}
/* check whether both changes are forward */
if (!in_merge_bases(commit_base, commit_a) ||
!in_merge_bases(commit_base, commit_b)) {
MERGE_WARNING(path, "commits don't follow merge-base");
return 0;
}
/* Case #1: a is contained in b or vice versa */
if (in_merge_bases(commit_a, commit_b)) {
hashcpy(result, b);
return 1;
}
if (in_merge_bases(commit_b, commit_a)) {
hashcpy(result, a);
return 1;
}
/*
* Case #2: There are one or more merges that contain a and b in
* the submodule. If there is only one, then present it as a
* suggestion to the user, but leave it marked unmerged so the
* user needs to confirm the resolution.
*/
/* Skip the search if it makes no sense to the calling context. */
if (!search)
return 0;
/* find commit which merges them */
parent_count = find_first_merges(&merges, path, commit_a, commit_b);
switch (parent_count) {
case 0:
MERGE_WARNING(path, "merge following commits not found");
break;
case 1:
MERGE_WARNING(path, "not fast-forward");
fprintf(stderr, "Found a possible merge resolution "
"for the submodule:\n");
print_commit((struct commit *) merges.objects[0].item);
fprintf(stderr,
"If this is correct simply add it to the index "
"for example\n"
"by using:\n\n"
" git update-index --cacheinfo 160000 %s \"%s\"\n\n"
"which will accept this suggestion.\n",
oid_to_hex(&merges.objects[0].item->oid), path);
break;
default:
MERGE_WARNING(path, "multiple merges found");
for (i = 0; i < merges.nr; i++)
print_commit((struct commit *) merges.objects[i].item);
}
free(merges.objects);
return 0;
}
/* Update gitfile and core.worktree setting to connect work tree and git dir */
void connect_work_tree_and_git_dir(const char *work_tree, const char *git_dir)
{
struct strbuf file_name = STRBUF_INIT;
struct strbuf rel_path = STRBUF_INIT;
const char *real_work_tree = xstrdup(real_path(work_tree));
/* Update gitfile */
strbuf_addf(&file_name, "%s/.git", work_tree);
write_file(file_name.buf, "gitdir: %s",
relative_path(git_dir, real_work_tree, &rel_path));
/* Update core.worktree setting */
strbuf_reset(&file_name);
strbuf_addf(&file_name, "%s/config", git_dir);
if (git_config_set_in_file(file_name.buf, "core.worktree",
relative_path(real_work_tree, git_dir,
&rel_path)))
die(_("Could not set core.worktree in %s"),
file_name.buf);
strbuf_release(&file_name);
strbuf_release(&rel_path);
free((void *)real_work_tree);
}