30 changes: 27 additions & 3 deletions maven_install.json
@@ -1,7 +1,31 @@
{
"__AUTOGENERATED_FILE_DO_NOT_MODIFY_THIS_FILE_MANUALLY": "THERE_IS_NO_DATA_ONLY_ZUUL",
"__INPUT_ARTIFACTS_HASH": 1448912684,
"__RESOLVED_ARTIFACTS_HASH": 771049556,
"__INPUT_ARTIFACTS_HASH": {
"com.google.guava:guava": 1376266919,
"com.google.protobuf:protobuf-java": -414274684,
"io.netty:netty-tcnative-boringssl-static": -1979050407,
"org.hamcrest:hamcrest-core": 466791695,
"repositories": -1949687017
},
"__RESOLVED_ARTIFACTS_HASH": {
"com.google.code.findbugs:jsr305": 870839855,
"com.google.errorprone:error_prone_annotations": -924787181,
"com.google.guava:failureaccess": -1890754729,
"com.google.guava:guava": 1410177884,
"com.google.guava:listenablefuture": 1079558157,
"com.google.j2objc:j2objc-annotations": 880287147,
"com.google.protobuf:protobuf-java": -758252690,
"io.netty:netty-tcnative-boringssl-static": 786460467,
"io.netty:netty-tcnative-boringssl-static:jar:linux-aarch_64": -151974322,
"io.netty:netty-tcnative-boringssl-static:jar:linux-x86_64": -1831640381,
"io.netty:netty-tcnative-boringssl-static:jar:osx-aarch_64": -1661340718,
"io.netty:netty-tcnative-boringssl-static:jar:osx-x86_64": 2101324017,
"io.netty:netty-tcnative-boringssl-static:jar:windows-x86_64": 889950966,
"io.netty:netty-tcnative-classes": 1239547355,
"org.checkerframework:checker-qual": -1034954841,
"org.hamcrest:hamcrest": -533823501,
"org.hamcrest:hamcrest-core": 511008887
},
"artifacts": {
"com.google.code.findbugs:jsr305": {
"shasums": {
@@ -250,5 +274,5 @@
]
},
"services": {},
"version": "2"
"version": "3"
}
9 changes: 6 additions & 3 deletions private/extensions/maven.bzl
@@ -13,7 +13,7 @@ load("//private/lib:toml_parser.bzl", "parse_toml")
load("//private/rules:coursier.bzl", "DEFAULT_AAR_IMPORT_LABEL", "coursier_fetch", "pinned_coursier_fetch")
load("//private/rules:unpinned_maven_pin_command_alias.bzl", "unpinned_maven_pin_command_alias")
load("//private/rules:v1_lock_file.bzl", "v1_lock_file")
load("//private/rules:v2_lock_file.bzl", "v2_lock_file")
load("//private/rules:v3_lock_file.bzl", "v2_lock_file", "v3_lock_file")
load(":download_pinned_deps.bzl", "download_pinned_deps")

DEFAULT_REPOSITORIES = [
@@ -703,12 +703,15 @@ def maven_impl(mctx):
"artifacts": {},
"dependencies": {},
"repositories": {},
"version": "2",
"version": "3",
}
else:
lock_file = json.decode(lock_file_content)

if v2_lock_file.is_valid_lock_file(lock_file):
if v3_lock_file.is_valid_lock_file(lock_file):
artifacts = v3_lock_file.get_artifacts(lock_file)
importer = v3_lock_file
elif v2_lock_file.is_valid_lock_file(lock_file):
artifacts = v2_lock_file.get_artifacts(lock_file)
importer = v2_lock_file
elif v1_lock_file.is_valid_lock_file(lock_file):
50 changes: 38 additions & 12 deletions private/rules/coursier.bzl
@@ -31,7 +31,7 @@ load(
)
load("//private/lib:urls.bzl", "remove_auth_from_url")
load("//private/rules:v1_lock_file.bzl", "v1_lock_file")
load("//private/rules:v2_lock_file.bzl", "v2_lock_file")
load("//private/rules:v3_lock_file.bzl", "v2_lock_file", "v3_lock_file")

_BUILD = """
# package(default_visibility = [{visibilities}]) # https://github.com/bazelbuild/bazel/issues/13681
@@ -283,6 +283,7 @@ def _java_path(repository_ctx):
def _generate_java_jar_command(repository_ctx, jar_path):
coursier_opts = repository_ctx.os.environ.get("COURSIER_OPTS", "")
coursier_opts = coursier_opts.split(" ") if len(coursier_opts) > 0 else []

# if coursier OOMs from a large dependency tree, have it crash instead of hanging
coursier_opts.append("-XX:+ExitOnOutOfMemoryError")
java_path = _java_path(repository_ctx)
@@ -327,6 +328,13 @@ def _stable_artifact(artifact):
keys = sorted(parsed.keys())
return ":".join(["%s=%s" % (key, parsed[key]) for key in keys])

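# Hash one artifact (salted by its role: bom / artifact / excluded_artifact) and
# append it to the bucket for its "group:artifact" coordinate, so repeated
# coordinates accumulate into a list.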
def _add_to_hash_dictionary(dictionary, artifact, salt):
artifact_dict = json.decode(artifact)
key = artifact_dict["group"] + ":" + artifact_dict["artifact"]
value = dictionary.get(key, [])
value.append(hash(_stable_artifact(artifact) + salt))
dictionary[key] = value

# Compute a signature of the list of artifacts that will be used to build
# the dependency tree. This is used as a check to see whether the dependency
# tree needs to be repinned.
@@ -345,24 +353,37 @@ def compute_dependency_inputs_signature(boms = [], artifacts = [], repositories
artifact_inputs = []
excluded_artifact_inputs = []

all_hashes = dict()

if boms and len(boms):
for bom in sorted(boms):
artifact_inputs.append(_stable_artifact(bom))
_add_to_hash_dictionary(all_hashes, bom, "bom")

for artifact in sorted(artifacts):
artifact_inputs.append(_stable_artifact(artifact))
_add_to_hash_dictionary(all_hashes, artifact, "artifact")

for artifact in sorted(excluded_artifacts):
excluded_artifact_inputs.append(_stable_artifact(artifact))
_add_to_hash_dictionary(all_hashes, artifact, "excluded_artifact")

v1_sig = hash(repr(sorted(artifact_inputs))) ^ hash(repr(sorted(repositories)))

hash_parts = [sorted(artifact_inputs), sorted(repositories), sorted(excluded_artifact_inputs)]
current_version_sig = 0
v2_sig = 0
for part in hash_parts:
current_version_sig ^= hash(repr(part))
v2_sig ^= hash(repr(part))

for k, v in all_hashes.items():
if len(v) == 1:
all_hashes[k] = v[0]
else:
all_hashes[k] = hash(repr(sorted(v)))

all_hashes["repositories"] = hash(repr(sorted(repositories)))

return (current_version_sig, [v1_sig])
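# The per-coordinate dictionary is the v3 input signature; the whole-list v1 and
# v2 signatures are returned alongside it so older lock files can still be checked.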
return (all_hashes, [v1_sig, v2_sig])

def get_netrc_lines_from_entries(netrc_entries):
netrc_lines = []
@@ -459,21 +480,26 @@ def _pinned_coursier_fetch_impl(repository_ctx):
"artifacts": {},
"dependencies": {},
"repositories": {},
"version": "2",
"version": "3",
}
else:
maven_install_json_content = json.decode(lock_file_content)

if v1_lock_file.is_valid_lock_file(maven_install_json_content):
if v3_lock_file.is_valid_lock_file(maven_install_json_content):
importer = v3_lock_file
elif v2_lock_file.is_valid_lock_file(maven_install_json_content):
importer = v2_lock_file
elif v1_lock_file.is_valid_lock_file(maven_install_json_content):
importer = v1_lock_file
else:
fail("Unable to read lock file: %s" % repository_ctx.attr.maven_install_json)

# Check if using the most recent lock file format.
if importer != v3_lock_file:
print_if_not_repinning(
repository_ctx,
"Lock file should be updated. Please run `REPIN=1 bazel run @unpinned_%s//:pin`" % repository_ctx.name,
)
elif v2_lock_file.is_valid_lock_file(maven_install_json_content):
importer = v2_lock_file
else:
fail("Unable to read lock file: %s" % repository_ctx.attr.maven_install_json)

# Validation steps for maven_install.json.

@@ -1298,7 +1324,7 @@ def _coursier_fetch_impl(repository_ctx):

repository_ctx.file(
"unsorted_deps.json",
content = v2_lock_file.render_lock_file(
content = v3_lock_file.render_lock_file(
lock_file_contents,
inputs_hash,
),
@@ -1307,7 +1333,7 @@
repository_ctx.report_progress("Generating BUILD targets..")
(generated_imports, jar_versionless_target_labels) = parser.generate_imports(
repository_ctx = repository_ctx,
dependencies = v2_lock_file.get_artifacts(lock_file_contents),
dependencies = v3_lock_file.get_artifacts(lock_file_contents),
explicit_artifacts = {
a["group"] + ":" + a["artifact"] + (":" + a["classifier"] if "classifier" in a else ""): True
for a in artifacts
14 changes: 10 additions & 4 deletions private/rules/pin_dependencies.bzl
@@ -15,7 +15,7 @@ load("//private/rules:coursier.bzl", "compute_dependency_inputs_signature")

_TEMPLATE = """#!/usr/bin/env bash

{resolver_cmd} --jvm_flags={jvm_flags} --argsfile {config} --resolver {resolver} --input_hash '{input_hash}' --output {output}
{resolver_cmd} --jvm_flags={jvm_flags} --argsfile {config} --resolver {resolver} --input-hash-path '{input_hash_path}' --output {output}
"""

def _stringify_exclusions(exclusions):
@@ -63,19 +63,25 @@ def _pin_dependencies_impl(ctx):
content = json.encode_indent(config, indent = " "),
)

input_hash = compute_dependency_inputs_signature(
input_hash, _ = compute_dependency_inputs_signature(
boms = ctx.attr.boms,
artifacts = ctx.attr.artifacts,
repositories = ctx.attr.repositories,
excluded_artifacts = ctx.attr.excluded_artifacts,
)

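# The resolver reads the per-artifact input hashes from this JSON file, which is
# passed below via --input-hash-path.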
hash_file = ctx.actions.declare_file("%s-input-hash.json" % ctx.label.name)
ctx.actions.write(
hash_file,
content = json.encode_indent(input_hash, indent = " "),
)

script = ctx.actions.declare_file(ctx.label.name)
ctx.actions.write(
script,
_TEMPLATE.format(
config = config_file.short_path,
input_hash = input_hash[0],
input_hash_path = hash_file.short_path,
resolver_cmd = ctx.executable._resolver.short_path,
resolver = ctx.attr.resolver,
output = "$BUILD_WORKSPACE_DIRECTORY/" + ctx.attr.lock_file,
@@ -88,7 +94,7 @@ def _pin_dependencies_impl(ctx):
DefaultInfo(
executable = script,
files = depset([script, config_file]),
runfiles = ctx.runfiles(files = [script, config_file]).merge(ctx.attr._resolver[DefaultInfo].default_runfiles),
runfiles = ctx.runfiles(files = [script, config_file, hash_file]).merge(ctx.attr._resolver[DefaultInfo].default_runfiles),
),
]

101 changes: 93 additions & 8 deletions private/rules/v2_lock_file.bzl → private/rules/v3_lock_file.bzl
@@ -15,9 +15,15 @@ load("//private/lib:coordinates.bzl", "to_external_form")

_REQUIRED_KEYS = ["artifacts", "dependencies", "repositories"]

def _is_valid_lock_file(lock_file_contents):
def _is_valid_lock_file_v2(lock_file_contents):
return _is_valid_lock_file(lock_file_contents, "2")

def _is_valid_lock_file_v3(lock_file_contents):
return _is_valid_lock_file(lock_file_contents, "3")

def _is_valid_lock_file(lock_file_contents, desired_version):
version = lock_file_contents.get("version")
if "2" != version:
if desired_version != version:
return False

all_keys = lock_file_contents.keys()
@@ -37,7 +43,7 @@ def _get_input_artifacts_hash(lock_file_contents):
def _get_lock_file_hash(lock_file_contents):
return lock_file_contents.get("__RESOLVED_ARTIFACTS_HASH")

def _compute_lock_file_hash(lock_file_contents):
def _compute_lock_file_hash_v2(lock_file_contents):
to_hash = {}
for key in sorted(_REQUIRED_KEYS):
value = lock_file_contents.get(key)
@@ -47,6 +53,75 @@
to_hash.update({key: json.decode(json.encode(value))})
return hash(repr(to_hash))

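# Compute a hash per artifact that folds in the hashes of its dependencies, so a
# change anywhere in a dependency chain also changes every dependent's hash.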
def _compute_final_hash(all_infos):
final_hashes = dict()

# In case of circular dependencies, fall back to a plain hash of the original info as a starting point.
backup_hashes = {k: hash(repr(v)) for k, v in all_infos.items()}

# Sets are Bazel 8 only, so we use a dict as an ordered set instead.
remaining = {k: 0 for k in all_infos.keys()}

# Starlark does not support recursion, so we emulate it manually with an explicit stack.
stack = []

# Bounded replacement for `while True`, since Starlark has no while loops.
for _ in range(1000000000):
if len(remaining) == 0 and len(stack) == 0:
break

curr = None
if len(stack) == 0:
curr, _ = remaining.popitem()
else:
curr = stack.pop()

if curr in final_hashes:
continue

deps = all_infos[curr].get("dependencies", [])

# make sure all dependencies are processed first
unprocessed = [d for d in deps if d in remaining]
if len(unprocessed) > 0:
dep = unprocessed[0]
stack.append(curr)
stack.append(dep)
remaining.pop(dep, None)
continue

all_infos[curr]["dependency_hashes"] = {dep: final_hashes.get(dep, backup_hashes.get(dep, 0)) for dep in deps}
final_hashes[curr] = hash(repr(all_infos[curr]))

return final_hashes

def _compute_lock_file_hash_v3(lock_file_contents):
all_infos = dict()

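# Split each artifact entry into its checksum map and the remaining metadata;
# every (coordinate, checksum type) pair becomes its own hashable record.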
for dep, dep_info in lock_file_contents["artifacts"].items():
shasums = dep_info["shasums"]
common_info = {k: v for k, v in dep_info.items() if k != "shasums"}

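# A plain "group:artifact" key (a single colon) refers to the default jar packaging.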
is_jar_type = dep.count(":") == 1

for type, sha in shasums.items():
jar_suffix = ":jar" if is_jar_type else ""
suffix = jar_suffix + ":" + type if type != "jar" else ""

type_info = dict()
type_info["standard"] = common_info
type_info["sha"] = sha
all_infos[dep + suffix] = type_info

for repo, artifacts in lock_file_contents["repositories"].items():
for artifact in artifacts:
all_infos[artifact]["repository"] = repo

for dep, dep_info in lock_file_contents["dependencies"].items():
all_infos[dep]["dependencies"] = sorted(dep_info)

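# Rough sketch of the return value (hash values here are illustrative): one entry
# per fully-qualified artifact key, mirroring __RESOLVED_ARTIFACTS_HASH above, e.g.
# {"com.google.guava:guava": 1410177884, "io.netty:netty-tcnative-classes": 1239547355, ...}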
return _compute_final_hash(all_infos)

def _to_m2_path(unpacked):
path = "{group}/{artifact}/{version}/{artifact}-{version}".format(
artifact = unpacked["artifact"],
@@ -188,8 +263,8 @@ def _render_lock_file(lock_file_contents, input_hash):
contents = [
"{",
" \"__AUTOGENERATED_FILE_DO_NOT_MODIFY_THIS_FILE_MANUALLY\": \"THERE_IS_NO_DATA_ONLY_ZUUL\",",
" \"__INPUT_ARTIFACTS_HASH\": %s," % input_hash,
" \"__RESOLVED_ARTIFACTS_HASH\": %s," % _compute_lock_file_hash(lock_file_contents),
" \"__INPUT_ARTIFACTS_HASH\": %s," % json.encode_indent(input_hash, prefix = " ", indent = " "),
" \"__RESOLVED_ARTIFACTS_HASH\": %s," % json.encode_indent(_compute_lock_file_hash_v3(lock_file_contents), prefix = " ", indent = " "),
]
if lock_file_contents.get("conflict_resolution"):
contents.append(" \"conflict_resolution\": %s," % json.encode_indent(lock_file_contents["conflict_resolution"], prefix = " ", indent = " "))
@@ -212,17 +287,27 @@
contents.append(" \"services\": %s," % json.encode_indent(lock_file_contents["services"], prefix = " ", indent = " "))
if lock_file_contents.get("skipped"):
contents.append(" \"skipped\": %s," % json.encode_indent(lock_file_contents["skipped"], prefix = " ", indent = " "))
contents.append(" \"version\": \"2\"")
contents.append(" \"version\": \"3\"")
contents.append("}")
contents.append("")

return "\n".join(contents)

v2_lock_file = struct(
is_valid_lock_file = _is_valid_lock_file,
is_valid_lock_file = _is_valid_lock_file_v2,
get_input_artifacts_hash = _get_input_artifacts_hash,
get_lock_file_hash = _get_lock_file_hash,
compute_lock_file_hash = _compute_lock_file_hash_v2,
get_artifacts = _get_artifacts,
get_netrc_entries = _get_netrc_entries,
has_m2local = _has_m2local,
)

v3_lock_file = struct(
is_valid_lock_file = _is_valid_lock_file_v3,
get_input_artifacts_hash = _get_input_artifacts_hash,
get_lock_file_hash = _get_lock_file_hash,
compute_lock_file_hash = _compute_lock_file_hash,
compute_lock_file_hash = _compute_lock_file_hash_v3,
get_artifacts = _get_artifacts,
get_netrc_entries = _get_netrc_entries,
render_lock_file = _render_lock_file,
@@ -20,7 +20,7 @@
import com.github.bazelbuild.rules_jvm_external.Coordinates;
import com.github.bazelbuild.rules_jvm_external.resolver.Conflict;
import com.github.bazelbuild.rules_jvm_external.resolver.DependencyInfo;
import com.github.bazelbuild.rules_jvm_external.resolver.lockfile.V2LockFile;
import com.github.bazelbuild.rules_jvm_external.resolver.lockfile.V3LockFile;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
@@ -100,7 +100,7 @@ public static void main(String[] args) {
Set<DependencyInfo> infos = converter.getDependencies();
Set<Conflict> conflicts = converter.getConflicts();

Map<String, Object> rendered = new V2LockFile(repositories, infos, conflicts).render();
Map<String, Object> rendered = new V3LockFile(repositories, infos, conflicts).render();

String converted =
new GsonBuilder().setPrettyPrinting().serializeNulls().create().toJson(rendered);