Commit 500fe58

Add mechanism to disable creating new worlds
As a performance optimization, when you are certain that all remaining code will run in a fixed world (i.e. it will not even `eval` a new closure or comprehension), such as when all code is defined in a system image and the process is not being used interactively, you can call Base.Experimental.disable_new_worlds(). This enables performance optimizations by avoiding the tracking of backedges and invalidations.
1 parent c31710a · commit 500fe58
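
A minimal usage sketch of the pattern the commit message describes (the module and function names below are hypothetical, not part of the commit): define everything first, then freeze the world right before the long-running, fixed workload.

# Hypothetical sketch. Call disable_new_worlds() only once every method the
# program will ever need has been defined: no further `eval`, no new method
# definitions, closures, or comprehensions created after this point.
module MyApp

run_workload(n) = sum(abs2, 1:n)   # hypothetical fixed workload

function main()
    # All code is defined by now; freeze the world so the runtime can skip
    # backedge tracking and invalidation bookkeeping from here on.
    Base.Experimental.disable_new_worlds()
    return run_workload(10_000)
end

end # module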

6 files changed: 240 additions, 130 deletions


base/experimental.jl

Lines changed: 9 additions & 0 deletions
@@ -494,4 +494,13 @@ function entrypoint(@nospecialize(argt::Type))
     nothing
 end
 
+"""
+    Base.Experimental.disable_new_worlds()
+
+Mark that no new worlds (methods additions, deletions, etc) are permitted to be
+created, allowing for greater performance and slightly lower memory usage by
+eliminating tracking of those possible invalidation.
+"""
+disable_new_worlds() = ccall(:jl_disable_new_worlds, Cvoid, ())
+
 end
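
Since the C entry point (in src/gf.c below) throws if called while generating output, a cautious caller might guard the call so it never runs during precompilation or system-image generation. A sketch under that assumption, using the same `jl_generating_output` check the C code uses (the helper name is made up):

# Hypothetical guard: skip the call while Julia is generating output
# (precompile files / system images), mirroring the check in jl_disable_new_worlds.
function maybe_disable_new_worlds()
    if ccall(:jl_generating_output, Cint, ()) == 0
        Base.Experimental.disable_new_worlds()
    end
    return nothing
end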

src/gf.c

Lines changed: 104 additions & 48 deletions
@@ -24,6 +24,7 @@
 extern "C" {
 #endif
 
+_Atomic(int) allow_new_worlds = 1;
 JL_DLLEXPORT _Atomic(size_t) jl_world_counter = 1; // uses atomic acquire/release
 jl_mutex_t world_counter_lock;
 JL_DLLEXPORT size_t jl_get_world_counter(void) JL_NOTSAFEPOINT
@@ -1819,76 +1820,84 @@ static void invalidate_backedges(jl_method_instance_t *replaced_mi, size_t max_w
 // add a backedge from callee to caller
 JL_DLLEXPORT void jl_method_instance_add_backedge(jl_method_instance_t *callee, jl_value_t *invokesig, jl_code_instance_t *caller)
 {
+    if (!jl_atomic_load_relaxed(&allow_new_worlds))
+        return;
     if (invokesig == jl_nothing)
         invokesig = NULL; // julia uses `nothing` but C uses NULL (#undef)
     assert(jl_is_method_instance(callee));
     assert(jl_is_code_instance(caller));
     assert(invokesig == NULL || jl_is_type(invokesig));
     JL_LOCK(&callee->def.method->writelock);
-    int found = 0;
-    // TODO: use jl_cache_type_(invokesig) like cache_method does to save memory
-    if (!callee->backedges) {
-        // lazy-init the backedges array
-        callee->backedges = jl_alloc_vec_any(0);
-        jl_gc_wb(callee, callee->backedges);
-    }
-    else {
-        size_t i = 0, l = jl_array_nrows(callee->backedges);
-        for (i = 0; i < l; i++) {
-            // optimized version of while (i < l) i = get_next_edge(callee->backedges, i, &invokeTypes, &mi);
-            jl_value_t *mi = jl_array_ptr_ref(callee->backedges, i);
-            if (mi != (jl_value_t*)caller)
-                continue;
-            jl_value_t *invokeTypes = i > 0 ? jl_array_ptr_ref(callee->backedges, i - 1) : NULL;
-            if (invokeTypes && jl_is_method_instance(invokeTypes))
-                invokeTypes = NULL;
-            if ((invokesig == NULL && invokeTypes == NULL) ||
-                (invokesig && invokeTypes && jl_types_equal(invokesig, invokeTypes))) {
-                found = 1;
-                break;
+    if (jl_atomic_load_relaxed(&allow_new_worlds)) {
+        int found = 0;
+        // TODO: use jl_cache_type_(invokesig) like cache_method does to save memory
+        if (!callee->backedges) {
+            // lazy-init the backedges array
+            callee->backedges = jl_alloc_vec_any(0);
+            jl_gc_wb(callee, callee->backedges);
+        }
+        else {
+            size_t i = 0, l = jl_array_nrows(callee->backedges);
+            for (i = 0; i < l; i++) {
+                // optimized version of while (i < l) i = get_next_edge(callee->backedges, i, &invokeTypes, &mi);
+                jl_value_t *mi = jl_array_ptr_ref(callee->backedges, i);
+                if (mi != (jl_value_t*)caller)
+                    continue;
+                jl_value_t *invokeTypes = i > 0 ? jl_array_ptr_ref(callee->backedges, i - 1) : NULL;
+                if (invokeTypes && jl_is_method_instance(invokeTypes))
+                    invokeTypes = NULL;
+                if ((invokesig == NULL && invokeTypes == NULL) ||
+                    (invokesig && invokeTypes && jl_types_equal(invokesig, invokeTypes))) {
+                    found = 1;
+                    break;
+                }
             }
         }
+        if (!found)
+            push_edge(callee->backedges, invokesig, caller);
     }
-    if (!found)
-        push_edge(callee->backedges, invokesig, caller);
     JL_UNLOCK(&callee->def.method->writelock);
 }
 
 // add a backedge from a non-existent signature to caller
 JL_DLLEXPORT void jl_method_table_add_backedge(jl_methtable_t *mt, jl_value_t *typ, jl_code_instance_t *caller)
 {
     assert(jl_is_code_instance(caller));
+    if (!jl_atomic_load_relaxed(&allow_new_worlds))
+        return;
     JL_LOCK(&mt->writelock);
-    if (!mt->backedges) {
-        // lazy-init the backedges array
-        mt->backedges = jl_alloc_vec_any(2);
-        jl_gc_wb(mt, mt->backedges);
-        jl_array_ptr_set(mt->backedges, 0, typ);
-        jl_array_ptr_set(mt->backedges, 1, caller);
-    }
-    else {
-        // check if the edge is already present and avoid adding a duplicate
-        size_t i, l = jl_array_nrows(mt->backedges);
-        for (i = 1; i < l; i += 2) {
-            if (jl_array_ptr_ref(mt->backedges, i) == (jl_value_t*)caller) {
-                if (jl_types_equal(jl_array_ptr_ref(mt->backedges, i - 1), typ)) {
-                    JL_UNLOCK(&mt->writelock);
-                    return;
+    if (jl_atomic_load_relaxed(&allow_new_worlds)) {
+        if (!mt->backedges) {
+            // lazy-init the backedges array
+            mt->backedges = jl_alloc_vec_any(2);
+            jl_gc_wb(mt, mt->backedges);
+            jl_array_ptr_set(mt->backedges, 0, typ);
+            jl_array_ptr_set(mt->backedges, 1, caller);
+        }
+        else {
+            // check if the edge is already present and avoid adding a duplicate
+            size_t i, l = jl_array_nrows(mt->backedges);
+            for (i = 1; i < l; i += 2) {
+                if (jl_array_ptr_ref(mt->backedges, i) == (jl_value_t*)caller) {
+                    if (jl_types_equal(jl_array_ptr_ref(mt->backedges, i - 1), typ)) {
+                        JL_UNLOCK(&mt->writelock);
+                        return;
+                    }
                 }
             }
-        }
-        // reuse an already cached instance of this type, if possible
-        // TODO: use jl_cache_type_(tt) like cache_method does, instead of this linear scan?
-        for (i = 1; i < l; i += 2) {
-            if (jl_array_ptr_ref(mt->backedges, i) != (jl_value_t*)caller) {
-                if (jl_types_equal(jl_array_ptr_ref(mt->backedges, i - 1), typ)) {
-                    typ = jl_array_ptr_ref(mt->backedges, i - 1);
-                    break;
+            // reuse an already cached instance of this type, if possible
+            // TODO: use jl_cache_type_(tt) like cache_method does, instead of this linear scan?
+            for (i = 1; i < l; i += 2) {
+                if (jl_array_ptr_ref(mt->backedges, i) != (jl_value_t*)caller) {
+                    if (jl_types_equal(jl_array_ptr_ref(mt->backedges, i - 1), typ)) {
+                        typ = jl_array_ptr_ref(mt->backedges, i - 1);
+                        break;
+                    }
                 }
             }
+            jl_array_ptr_1d_push(mt->backedges, typ);
+            jl_array_ptr_1d_push(mt->backedges, (jl_value_t*)caller);
        }
-        jl_array_ptr_1d_push(mt->backedges, typ);
-        jl_array_ptr_1d_push(mt->backedges, (jl_value_t*)caller);
     }
     JL_UNLOCK(&mt->writelock);
 }
@@ -2024,10 +2033,55 @@ static void jl_method_table_invalidate(jl_methtable_t *mt, jl_method_t *replaced
     }
 }
 
+static int erase_method_backedges(jl_typemap_entry_t *def, void *closure)
+{
+    jl_method_t *method = def->func.method;
+    JL_LOCK(&method->writelock);
+    jl_value_t *specializations = jl_atomic_load_relaxed(&method->specializations);
+    if (jl_is_svec(specializations)) {
+        size_t i, l = jl_svec_len(specializations);
+        for (i = 0; i < l; i++) {
+            jl_method_instance_t *mi = (jl_method_instance_t*)jl_svecref(specializations, i);
+            if ((jl_value_t*)mi != jl_nothing) {
+                mi->backedges = NULL;
+            }
+        }
+    }
+    else {
+        jl_method_instance_t *mi = (jl_method_instance_t*)specializations;
+        mi->backedges = NULL;
+    }
+    JL_UNLOCK(&method->writelock);
+    return 1;
+}
+
+static int erase_all_backedges(jl_methtable_t *mt, void *env)
+{
+    // removes all method caches
+    // this might not be entirely safe (GC or MT), thus we only do it very early in bootstrapping
+    JL_LOCK(&mt->writelock);
+    mt->backedges = NULL;
+    JL_UNLOCK(&mt->writelock);
+    jl_typemap_visitor(jl_atomic_load_relaxed(&mt->defs), erase_method_backedges, env);
+    return 1;
+}
+
+JL_DLLEXPORT void jl_disable_new_worlds(void)
+{
+    if (jl_generating_output())
+        jl_error("Disabling Method changes is not possible when generating output.");
+    JL_LOCK(&world_counter_lock);
+    jl_atomic_store_relaxed(&allow_new_worlds, 0);
+    JL_UNLOCK(&world_counter_lock);
+    jl_foreach_reachable_mtable(erase_all_backedges, (void*)NULL);
+}
+
 JL_DLLEXPORT void jl_method_table_disable(jl_methtable_t *mt, jl_method_t *method)
 {
     jl_typemap_entry_t *methodentry = do_typemap_search(mt, method);
     JL_LOCK(&world_counter_lock);
+    if (!jl_atomic_load_relaxed(&allow_new_worlds))
+        jl_error("Method changes have been disabled via a call to disable_new_worlds.");
     JL_LOCK(&mt->writelock);
     // Narrow the world age on the method to make it uncallable
     size_t world = jl_atomic_load_relaxed(&jl_world_counter);
@@ -2341,6 +2395,8 @@ JL_DLLEXPORT void jl_method_table_insert(jl_methtable_t *mt, jl_method_t *method
     jl_typemap_entry_t *newentry = jl_method_table_add(mt, method, simpletype);
     JL_GC_PUSH1(&newentry);
     JL_LOCK(&world_counter_lock);
+    if (!jl_atomic_load_relaxed(&allow_new_worlds))
+        jl_error("Method changes have been disabled via a call to disable_new_worlds.");
     size_t world = jl_atomic_load_relaxed(&jl_world_counter) + 1;
     jl_atomic_store_relaxed(&method->primary_world, world);
     jl_atomic_store_relaxed(&method->deleted_world, ~(size_t)0);
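
A rough sketch of the behavior these checks imply, seen from the Julia side (the method names are illustrative): already-defined methods keep working after the call, while defining or deleting a method afterwards should raise the error string added above.

f(x) = x + 1                          # defined before the world is frozen
Base.Experimental.disable_new_worlds()

f(41)                                  # existing methods still compile and run

try
    @eval f(x::Float64) = x + 2.0      # a new method would need a new world
catch err
    # Expected, per jl_method_table_insert above:
    # "Method changes have been disabled via a call to disable_new_worlds."
    showerror(stderr, err)
end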

src/staticdata.c

Lines changed: 23 additions & 9 deletions
@@ -89,6 +89,7 @@ External links:
 #include "julia_assert.h"
 
 static const size_t WORLD_AGE_REVALIDATION_SENTINEL = 0x1;
+size_t jl_require_world = ~(size_t)0;
 
 #include "staticdata_utils.c"
 #include "precompile_utils.c"
@@ -2678,7 +2679,6 @@ jl_genericmemory_t *jl_global_roots_list;
 jl_genericmemory_t *jl_global_roots_keyset;
 jl_mutex_t global_roots_lock;
 extern jl_mutex_t world_counter_lock;
-extern size_t jl_require_world;
 
 jl_mutex_t precompile_field_replace_lock;
 jl_svec_t *precompile_field_replace JL_GLOBALLY_ROOTED;
@@ -4044,16 +4044,30 @@ static jl_value_t *jl_restore_package_image_from_stream(void* pkgimage_handle, i
     // Add roots to methods
     jl_copy_roots(method_roots_list, jl_worklist_key((jl_array_t*)restored));
     // Insert method extensions and handle edges
+    int new_methods = jl_array_nrows(extext_methods) > 0;
+    if (!new_methods) {
+        size_t i, l = jl_array_nrows(internal_methods);
+        for (i = 0; i < l; i++) {
+            jl_value_t *obj = jl_array_ptr_ref(internal_methods, i);
+            if (jl_is_method(obj)) {
+                new_methods = 1;
+                break;
+            }
+        }
+    }
     JL_LOCK(&world_counter_lock);
-    // allocate a world for the new methods, and insert them there, invalidating content as needed
-    size_t world = jl_atomic_load_relaxed(&jl_world_counter) + 1;
-    jl_activate_methods(extext_methods, internal_methods, world);
-    // TODO: inject new_ext_cis into caches here, so the system can see them immediately as potential candidates (before validation)
-    // allow users to start running in this updated world
-    jl_atomic_store_release(&jl_world_counter, world);
-    // now permit more methods to be added again
+    // allocate a world for the new methods, and insert them there, invalidating content as needed
+    size_t world = jl_atomic_load_relaxed(&jl_world_counter);
+    if (new_methods)
+        world += 1;
+    jl_activate_methods(extext_methods, internal_methods, world, pkgname);
+    // TODO: inject new_ext_cis into caches here, so the system can see them immediately as potential candidates (before validation)
+    // allow users to start running in this updated world
+    if (new_methods)
+        jl_atomic_store_release(&jl_world_counter, world);
+    // now permit more methods to be added again
     JL_UNLOCK(&world_counter_lock);
-    // but one of those immediate users is going to be our cache insertions
+    // but one of those immediate users is going to be our cache insertions
    jl_insert_backedges((jl_array_t*)edges, (jl_array_t*)new_ext_cis); // restore existing caches (needs to be last)
     // reinit ccallables
     jl_reinit_ccallable(&ccallable_list, base, pkgimage_handle);
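
The hunk above only advances jl_world_counter when the loaded package image actually contributes method definitions (extext_methods, or a method among internal_methods). A rough, hedged way to observe that from Julia, assuming a hypothetical package SomePkg whose pkgimage adds no methods, is to compare the world counter around the load; other session activity can still advance the counter, so treat this as illustrative only.

before = Base.get_world_counter()
using SomePkg                # hypothetical package that defines no new methods
after  = Base.get_world_counter()
# With this change, activating the image itself no longer forces a fresh world
# when no methods were added (previously the counter was bumped unconditionally);
# unrelated definitions in the session may still advance it.
@show before after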
