Skip to content

Commit 149ccd2

Browse files
dscho and derrickstolee committed
scalar clone: support GVFS-enabled remote repositories
With this change, we come a big step closer to feature parity with Scalar: this allows cloning from Azure Repos (which do not support partial clones at time of writing). We use the just-implemented JSON parser to parse the response we got from the `gvfs/config` endpoint. Please note that this response might, or might not, contain information about a cache server. The presence or absence of said cache server, however, has nothing to do with the ability to speak the GVFS protocol (but the presence of the `gvfs/config` endpoint does). An alternative considered during the development of this patch was to perform simple string matching instead of parsing the JSON-formatted data. However, this would have been fragile, as the response contains free-form text (e.g. the repository's description) which might contain parts that would confuse a simple string matcher (but not a proper JSON parser). Note: we need to limit the retry logic in `git clone` to handle only the non-GVFS case: the call to `set_config()` to un-set the partial clone settings would otherwise fail because those settings would not exist in the GVFS protocol case. This will at least give us a clearer reason why such a fetch fails. Co-authored-by: Derrick Stolee <dstolee@microsoft.com> Signed-off-by: Johannes Schindelin <johannes.schindelin@gmx.de> Signed-off-by: Derrick Stolee <dstolee@microsoft.com>
1 parent 276743f commit 149ccd2

File tree

2 files changed

+131
-3
lines changed

2 files changed

+131
-3
lines changed

diagnose.c

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
#include "strvec.h"
88
#include "object-store.h"
99
#include "packfile.h"
10+
#include "config.h"
1011

1112
struct archive_dir {
1213
const char *path;
@@ -172,6 +173,7 @@ int create_diagnostics_archive(struct strbuf *zip_path, enum diagnose_mode mode)
172173
struct strvec archiver_args = STRVEC_INIT;
173174
char **argv_copy = NULL;
174175
int stdout_fd = -1, archiver_fd = -1;
176+
char *cache_server_url = NULL;
175177
struct strbuf buf = STRBUF_INIT;
176178
int res, i;
177179
struct archive_dir archive_dirs[] = {
@@ -207,6 +209,11 @@ int create_diagnostics_archive(struct strbuf *zip_path, enum diagnose_mode mode)
207209
get_version_info(&buf, 1);
208210

209211
strbuf_addf(&buf, "Repository root: %s\n", the_repository->worktree);
212+
213+
git_config_get_string("gvfs.cache-server", &cache_server_url);
214+
strbuf_addf(&buf, "Cache Server: %s\n\n",
215+
cache_server_url ? cache_server_url : "None");
216+
210217
get_disk_info(&buf);
211218
write_or_die(stdout_fd, buf.buf, buf.len);
212219
strvec_pushf(&archiver_args,
@@ -264,6 +271,7 @@ int create_diagnostics_archive(struct strbuf *zip_path, enum diagnose_mode mode)
264271
free(argv_copy);
265272
strvec_clear(&archiver_args);
266273
strbuf_release(&buf);
274+
free(cache_server_url);
267275

268276
return res;
269277
}

scalar.c

Lines changed: 123 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
#include "dir.h"
1515
#include "packfile.h"
1616
#include "help.h"
17+
#include "json-parser.h"
1718

1819
static void setup_enlistment_directory(int argc, const char **argv,
1920
const char * const *usagestr,
@@ -322,6 +323,80 @@ static int set_config(const char *fmt, ...)
322323
return res;
323324
}
324325

326+
/* Find N for which .CacheServers[N].GlobalDefault == true */
327+
static int get_cache_server_index(struct json_iterator *it)
328+
{
329+
const char *p;
330+
char *q;
331+
long l;
332+
333+
if (it->type == JSON_TRUE &&
334+
skip_iprefix(it->key.buf, ".CacheServers[", &p) &&
335+
(l = strtol(p, &q, 10)) >= 0 && p != q &&
336+
!strcasecmp(q, "].GlobalDefault")) {
337+
*(long *)it->fn_data = l;
338+
return 1;
339+
}
340+
341+
return 0;
342+
}
343+
344+
/* State shared with the get_cache_server_url() iterator callback. */
struct cache_server_url_data {
	char *key; /* JSON key to look for, e.g. ".CacheServers[0].Url" */
	char *url; /* matched URL (detached from the iterator), or NULL */
};
347+
348+
/* Get .CacheServers[N].Url */
349+
static int get_cache_server_url(struct json_iterator *it)
350+
{
351+
struct cache_server_url_data *data = it->fn_data;
352+
353+
if (it->type == JSON_STRING &&
354+
!strcasecmp(data->key, it->key.buf)) {
355+
data->url = strbuf_detach(&it->string_value, NULL);
356+
return 1;
357+
}
358+
359+
return 0;
360+
}
361+
362+
/*
363+
* If `cache_server_url` is `NULL`, print the list to `stdout`.
364+
*
365+
* Since `gvfs-helper` requires a Git directory, this _must_ be run in
366+
* a worktree.
367+
*/
368+
static int supports_gvfs_protocol(const char *url, char **cache_server_url)
369+
{
370+
struct child_process cp = CHILD_PROCESS_INIT;
371+
struct strbuf out = STRBUF_INIT;
372+
373+
cp.git_cmd = 1;
374+
strvec_pushl(&cp.args, "gvfs-helper", "--remote", url, "config", NULL);
375+
if (!pipe_command(&cp, NULL, 0, &out, 512, NULL, 0)) {
376+
long l = 0;
377+
struct json_iterator it =
378+
JSON_ITERATOR_INIT(out.buf, get_cache_server_index, &l);
379+
struct cache_server_url_data data = { .url = NULL };
380+
381+
if (iterate_json(&it) < 0) {
382+
strbuf_release(&out);
383+
return error("JSON parse error");
384+
}
385+
data.key = xstrfmt(".CacheServers[%ld].Url", l);
386+
it.fn = get_cache_server_url;
387+
it.fn_data = &data;
388+
if (iterate_json(&it) < 0) {
389+
strbuf_release(&out);
390+
return error("JSON parse error");
391+
}
392+
*cache_server_url = data.url;
393+
free(data.key);
394+
return 1;
395+
}
396+
strbuf_release(&out);
397+
return 0; /* error out quietly */
398+
}
399+
325400
static char *remote_default_branch(const char *url)
326401
{
327402
struct child_process cp = CHILD_PROCESS_INIT;
@@ -423,6 +498,8 @@ static int cmd_clone(int argc, const char **argv)
423498
{
424499
const char *branch = NULL;
425500
int full_clone = 0, single_branch = 0;
501+
const char *cache_server_url = NULL;
502+
char *default_cache_server_url = NULL;
426503
struct option clone_options[] = {
427504
OPT_STRING('b', "branch", &branch, N_("<branch>"),
428505
N_("branch to checkout after clone")),
@@ -431,6 +508,9 @@ static int cmd_clone(int argc, const char **argv)
431508
OPT_BOOL(0, "single-branch", &single_branch,
432509
N_("only download metadata for the branch that will "
433510
"be checked out")),
511+
OPT_STRING(0, "cache-server-url", &cache_server_url,
512+
N_("<url>"),
513+
N_("the url or friendly name of the cache server")),
434514
OPT_END(),
435515
};
436516
const char * const clone_usage[] = {
@@ -441,6 +521,7 @@ static int cmd_clone(int argc, const char **argv)
441521
char *enlistment = NULL, *dir = NULL;
442522
struct strbuf buf = STRBUF_INIT;
443523
int res;
524+
int gvfs_protocol;
444525

445526
argc = parse_options(argc, argv, NULL, clone_options, clone_usage, 0);
446527

@@ -503,13 +584,46 @@ static int cmd_clone(int argc, const char **argv)
503584
set_config("remote.origin.fetch="
504585
"+refs/heads/%s:refs/remotes/origin/%s",
505586
single_branch ? branch : "*",
506-
single_branch ? branch : "*") ||
507-
set_config("remote.origin.promisor=true") ||
508-
set_config("remote.origin.partialCloneFilter=blob:none")) {
587+
single_branch ? branch : "*")) {
509588
res = error(_("could not configure remote in '%s'"), dir);
510589
goto cleanup;
511590
}
512591

592+
if (set_config("credential.https://dev.azure.com.useHttpPath=true")) {
593+
res = error(_("could not configure credential.useHttpPath"));
594+
goto cleanup;
595+
}
596+
597+
gvfs_protocol = cache_server_url ||
598+
supports_gvfs_protocol(url, &default_cache_server_url);
599+
600+
if (gvfs_protocol) {
601+
if (!cache_server_url)
602+
cache_server_url = default_cache_server_url;
603+
if (set_config("core.useGVFSHelper=true") ||
604+
set_config("core.gvfs=150") ||
605+
set_config("http.version=HTTP/1.1")) {
606+
res = error(_("could not turn on GVFS helper"));
607+
goto cleanup;
608+
}
609+
if (cache_server_url &&
610+
set_config("gvfs.cache-server=%s", cache_server_url)) {
611+
res = error(_("could not configure cache server"));
612+
goto cleanup;
613+
}
614+
if (cache_server_url)
615+
fprintf(stderr, "Cache server URL: %s\n",
616+
cache_server_url);
617+
} else {
618+
if (set_config("core.useGVFSHelper=false") ||
619+
set_config("remote.origin.promisor=true") ||
620+
set_config("remote.origin.partialCloneFilter=blob:none")) {
621+
res = error(_("could not configure partial clone in "
622+
"'%s'"), dir);
623+
goto cleanup;
624+
}
625+
}
626+
513627
if (!full_clone &&
514628
(res = run_git("sparse-checkout", "init", "--cone", NULL)))
515629
goto cleanup;
@@ -518,6 +632,11 @@ static int cmd_clone(int argc, const char **argv)
518632
return error(_("could not configure '%s'"), dir);
519633

520634
if ((res = run_git("fetch", "--quiet", "origin", NULL))) {
635+
if (gvfs_protocol) {
636+
res = error(_("failed to prefetch commits and trees"));
637+
goto cleanup;
638+
}
639+
521640
warning(_("partial clone failed; attempting full clone"));
522641

523642
if (set_config("remote.origin.promisor") ||
@@ -548,6 +667,7 @@ static int cmd_clone(int argc, const char **argv)
548667
free(enlistment);
549668
free(dir);
550669
strbuf_release(&buf);
670+
free(default_cache_server_url);
551671
return res;
552672
}
553673

0 commit comments

Comments
 (0)