diff --git a/.github/workflows/discord-posts.yml b/.github/workflows/discord-posts.yml index 3895e6f231..0c2e5ef3c7 100644 --- a/.github/workflows/discord-posts.yml +++ b/.github/workflows/discord-posts.yml @@ -9,14 +9,37 @@ jobs: runs-on: ubuntu-latest if: github.event.pull_request.merged == true && github.event.pull_request.base.ref == 'master' + env: + CHECK_NAME: Internal Tests + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: + - name: Set up GitHub CLI + run: | + curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo tee /usr/share/keyrings/githubcli-archive-keyring.gpg > /dev/null + sudo apt-get install -y apt-transport-https + echo "deb [arch=amd64 signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list + sudo apt-get update + sudo apt-get install gh + + - name: Fetch Check Run Results + run: | + RESULT="$(gh pr checks "${{github.event.pull_request.html_url}}" --json 'name,state' | + jq -r ".[] | select(.name==\"${CHECK_NAME}\").state")" + + if [ -z "$RESULT" ]; then + RESULT="The check did not run!" 
+ fi + + echo "CHECK_RESULT=${RESULT}" >> $GITHUB_ENV + - name: Send Discord notification env: DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_WEBHOOK_URL }} PR_TITLE: ${{ github.event.pull_request.title }} PR_NUMBER: ${{ github.event.pull_request.number }} PR_URL: ${{ github.event.pull_request.html_url }} + CHECK_RESULT: ${{ env.CHECK_RESULT }} run: | curl -X POST -H 'Content-Type: application/json' -d '{ - "content": "'"PR merged: [(#${PR_NUMBER}) ${PR_TITLE}](${PR_URL})"'" + "content": "'"PR merged: [(#${PR_NUMBER}) ${PR_TITLE}](${PR_URL})\\n${CHECK_NAME} result: ${CHECK_RESULT}"'" }' ${DISCORD_WEBHOOK_URL} diff --git a/.gitignore b/.gitignore index 2dd1541c7f..689a13a310 100644 --- a/.gitignore +++ b/.gitignore @@ -202,6 +202,7 @@ __pycache__/ cs-src/ crates/bench/spacetime.svg crates/bench/sqlite.svg +.vs/ # benchmark files out.json diff --git a/Cargo.lock b/Cargo.lock index 09c2325748..e7da5c26d5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -513,9 +513,9 @@ checksum = "ad152d03a2c813c80bb94fedbf3a3f02b28f793e39e7c214c8a0bcc196343de7" [[package]] name = "bytemuck" -version = "1.16.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78834c15cb5d5efe3452d58b1e8ba890dd62d21907f867f383358198e56ebca5" +checksum = "94bbb0ad554ad961ddc5da507a12a29b14e4ae5bda06b19f575a3e6079d2e2ae" dependencies = [ "bytemuck_derive", ] @@ -1519,6 +1519,12 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + [[package]] name = "flate2" version = "1.0.28" @@ -2852,6 +2858,16 @@ dependencies = [ "spacetimedb", ] +[[package]] +name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", + "indexmap 2.0.2", +] + [[package]] name = "pg_interval" version = "0.4.2" @@ -4375,6 +4391,7 @@ dependencies = [ "sqlparser", "strum", "tempfile", + "thin-vec", "thiserror", "tokio", "tokio-stream", @@ -4468,10 +4485,24 @@ version = "0.12.0" dependencies = [ "bitflags 2.4.1", "either", + "itertools 0.12.0", "nohash-hasher", "proptest", ] +[[package]] +name = "spacetimedb-query-planner" +version = "0.12.0" +dependencies = [ + "derive_more", + "spacetimedb-lib", + "spacetimedb-primitives", + "spacetimedb-sats", + "spacetimedb-schema", + "spacetimedb-sql-parser", + "thiserror", +] + [[package]] name = "spacetimedb-quickstart-module" version = "0.1.0" @@ -4514,9 +4545,11 @@ dependencies = [ name = "spacetimedb-schema" version = "0.12.0" dependencies = [ + "enum-as-inner", "hashbrown 0.14.1", "itertools 0.12.0", "lazy_static", + "petgraph", "proptest", "serde_json", "smallvec", @@ -5051,6 +5084,12 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" +[[package]] +name = "thin-vec" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a38c90d48152c236a3ab59271da4f4ae63d678c5d7ad6b7714d7cb9760be5e4b" + [[package]] name = "thiserror" version = "1.0.49" diff --git a/Cargo.toml b/Cargo.toml index d7776b61cf..7a22802ce2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,6 +13,7 @@ members = [ "crates/durability", "crates/fs-utils", "crates/lib", + "crates/planner", "crates/metrics", "crates/primitives", "crates/sats", @@ -35,7 +36,7 @@ members = [ "crates/sdk/tests/test-client", "crates/sdk/tests/test-counter", "crates/sdk/tests/connect_disconnect_client", - "tools/upgrade-version", + "tools/upgrade-version", ] default-members = ["crates/cli"] # cargo feature graph resolver. 
v2 is default in edition2021 but workspace @@ -120,7 +121,7 @@ bitflags = "2.3.3" blake3 = "1.5.1" brotli = "3.5" byte-unit = "4.0.18" -bytemuck = { version = "1.16", features = ["must_cast"] } +bytemuck = { version = "1.16.2", features = ["must_cast"] } bytes = "1.2.1" bytestring = { version = "1.2.0", features = ["serde"] } cargo_metadata = "0.17.0" @@ -175,6 +176,7 @@ nohash-hasher = "0.2" once_cell = "1.16" parking_lot = { version = "0.12.1", features = ["send_guard", "arc_lock"] } paste = "1.0" +petgraph = { version = "0.6.5", default-features = false } pin-project-lite = "0.2.9" postgres-types = "0.2.5" pretty_assertions = "1.4" @@ -221,6 +223,7 @@ tar = "0.4" tempdir = "0.3.7" tempfile = "3.8" termcolor = "1.2.0" +thin-vec = "0.2.13" thiserror = "1.0.37" tokio = { version = "1.37", features = ["full"] } tokio-postgres = { version = "0.7.8", features = ["with-chrono-0_4"] } diff --git a/crates/bindings-csharp/.editorconfig b/crates/bindings-csharp/.editorconfig index 02a75fb58b..f9da54f786 100644 --- a/crates/bindings-csharp/.editorconfig +++ b/crates/bindings-csharp/.editorconfig @@ -1,5 +1,111 @@ root = true [*.cs] -csharp_using_directive_placement = inside_namespace -csharp_style_namespace_declarations = file_scoped +csharp_using_directive_placement = inside_namespace:warning +csharp_style_namespace_declarations = file_scoped:warning +csharp_prefer_simple_using_statement = true:warning +csharp_prefer_braces = true:silent +csharp_style_prefer_method_group_conversion = true:silent +csharp_style_prefer_top_level_statements = true:silent +csharp_style_prefer_primary_constructors = true:warning +csharp_style_expression_bodied_methods = when_on_single_line:silent +csharp_style_expression_bodied_constructors = when_on_single_line:silent +csharp_style_expression_bodied_operators = when_on_single_line:silent +csharp_style_expression_bodied_properties = when_on_single_line:silent +csharp_style_expression_bodied_indexers = when_on_single_line:silent 
+csharp_style_expression_bodied_accessors = when_on_single_line:silent +csharp_style_expression_bodied_lambdas = when_on_single_line:silent +csharp_style_expression_bodied_local_functions = when_on_single_line:silent +csharp_indent_labels = one_less_than_current +csharp_style_inlined_variable_declaration = true:warning +csharp_style_deconstructed_variable_declaration = true:suggestion +csharp_style_var_for_built_in_types = true:warning +csharp_style_var_when_type_is_apparent = true:warning +csharp_style_var_elsewhere = true:suggestion +csharp_style_prefer_extended_property_pattern = true:suggestion +csharp_style_prefer_not_pattern = true:suggestion +csharp_style_pattern_matching_over_as_with_null_check = true:suggestion +csharp_style_pattern_matching_over_is_with_cast_check = true:suggestion +csharp_style_prefer_pattern_matching = true:warning +csharp_style_prefer_switch_expression = true:warning +csharp_style_conditional_delegate_call = true:warning +csharp_style_prefer_readonly_struct_member = true:warning +csharp_style_prefer_readonly_struct = true:warning +csharp_prefer_static_anonymous_function = true:suggestion +csharp_prefer_static_local_function = true:suggestion +csharp_space_around_binary_operators = before_and_after + +[*.{cs,vb}] +#### Naming styles #### + +# Naming rules + +dotnet_naming_rule.interface_should_be_begins_with_i.severity = suggestion +dotnet_naming_rule.interface_should_be_begins_with_i.symbols = interface +dotnet_naming_rule.interface_should_be_begins_with_i.style = begins_with_i + +dotnet_naming_rule.types_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.types_should_be_pascal_case.symbols = types +dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case + +dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members +dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case + +# 
Symbol specifications + +dotnet_naming_symbols.interface.applicable_kinds = interface +dotnet_naming_symbols.interface.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.interface.required_modifiers = + +dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum +dotnet_naming_symbols.types.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.types.required_modifiers = + +dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method +dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.non_field_members.required_modifiers = + +# Naming styles + +dotnet_naming_style.begins_with_i.required_prefix = I +dotnet_naming_style.begins_with_i.required_suffix = +dotnet_naming_style.begins_with_i.word_separator = +dotnet_naming_style.begins_with_i.capitalization = pascal_case + +dotnet_naming_style.pascal_case.required_prefix = +dotnet_naming_style.pascal_case.required_suffix = +dotnet_naming_style.pascal_case.word_separator = +dotnet_naming_style.pascal_case.capitalization = pascal_case + +dotnet_naming_style.pascal_case.required_prefix = +dotnet_naming_style.pascal_case.required_suffix = +dotnet_naming_style.pascal_case.word_separator = +dotnet_naming_style.pascal_case.capitalization = pascal_case +dotnet_style_coalesce_expression = true:suggestion +dotnet_style_null_propagation = true:suggestion +dotnet_style_prefer_is_null_check_over_reference_equality_method = true:suggestion +dotnet_style_prefer_auto_properties = true:silent +dotnet_style_object_initializer = true:suggestion +dotnet_style_collection_initializer = true:suggestion +dotnet_style_prefer_simplified_boolean_expressions = true:suggestion +dotnet_style_prefer_conditional_expression_over_assignment = true:silent 
+dotnet_style_prefer_conditional_expression_over_return = true:silent +dotnet_style_explicit_tuple_names = true:suggestion +dotnet_style_operator_placement_when_wrapping = beginning_of_line +tab_width = 4 +indent_size = 4 +dotnet_style_qualification_for_event = false:silent +dotnet_style_qualification_for_method = false:silent +dotnet_style_qualification_for_property = false:silent +dotnet_style_qualification_for_field = false:silent +dotnet_style_parentheses_in_other_operators = never_if_unnecessary:silent +dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity:silent +dotnet_style_parentheses_in_other_binary_operators = always_for_clarity:silent +dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity:silent +dotnet_code_quality_unused_parameters = all:suggestion +dotnet_style_require_accessibility_modifiers = all:silent +dotnet_style_namespace_match_folder = true:suggestion + +# https://csharpier.com/docs/IntegratingWithLinters +dotnet_diagnostic.IDE0055.severity = none diff --git a/crates/bindings-csharp/BSATN.Codegen/Type.cs b/crates/bindings-csharp/BSATN.Codegen/Type.cs index 6c0bc947b4..ff0c2795a1 100644 --- a/crates/bindings-csharp/BSATN.Codegen/Type.cs +++ b/crates/bindings-csharp/BSATN.Codegen/Type.cs @@ -257,7 +257,9 @@ public void Initialize(IncrementalGeneratorInitializationContext context) { // structs and classes should be always processed if (node is not EnumDeclarationSyntax enumType) + { return true; + } // Ensure variants are contiguous as SATS enums don't support explicit tags. 
if (enumType.Members.Any(m => m.EqualsValue is not null)) diff --git a/crates/bindings-csharp/BSATN.Runtime/Attrs.cs b/crates/bindings-csharp/BSATN.Runtime/Attrs.cs index be7102aece..6dc073d987 100644 --- a/crates/bindings-csharp/BSATN.Runtime/Attrs.cs +++ b/crates/bindings-csharp/BSATN.Runtime/Attrs.cs @@ -1,6 +1,6 @@ -using System.Runtime.CompilerServices; +namespace SpacetimeDB; -namespace SpacetimeDB; +using System.Runtime.CompilerServices; [AttributeUsage( AttributeTargets.Struct | AttributeTargets.Class | AttributeTargets.Enum, diff --git a/crates/bindings-csharp/BSATN.Runtime/BSATN/Runtime.cs b/crates/bindings-csharp/BSATN.Runtime/BSATN/Runtime.cs index 995d9c75f5..a31ae445ac 100644 --- a/crates/bindings-csharp/BSATN.Runtime/BSATN/Runtime.cs +++ b/crates/bindings-csharp/BSATN.Runtime/BSATN/Runtime.cs @@ -1,7 +1,7 @@ -using System.Text; - namespace SpacetimeDB.BSATN; +using System.Text; + public interface IStructuralReadWrite { void ReadFields(BinaryReader reader); diff --git a/crates/bindings-csharp/BSATN.Runtime/Builtins.cs b/crates/bindings-csharp/BSATN.Runtime/Builtins.cs index 40f4b7a634..89301a0070 100644 --- a/crates/bindings-csharp/BSATN.Runtime/Builtins.cs +++ b/crates/bindings-csharp/BSATN.Runtime/Builtins.cs @@ -1,9 +1,9 @@ +namespace SpacetimeDB; + using System.Diagnostics; using SpacetimeDB.BSATN; using SpacetimeDB.Internal; -namespace SpacetimeDB; - public readonly partial struct Unit { // Custom BSATN that returns an inline empty product type that can be recognised by SpacetimeDB. 
diff --git a/crates/bindings-csharp/BSATN.Runtime/Internal/ByteArrayComparer.cs b/crates/bindings-csharp/BSATN.Runtime/Internal/ByteArrayComparer.cs index 51d4e05b6c..9b49d853e0 100644 --- a/crates/bindings-csharp/BSATN.Runtime/Internal/ByteArrayComparer.cs +++ b/crates/bindings-csharp/BSATN.Runtime/Internal/ByteArrayComparer.cs @@ -1,7 +1,7 @@ -using System.Runtime.CompilerServices; - namespace SpacetimeDB.Internal; +using System.Runtime.CompilerServices; + // Note: this utility struct is used by the C# client SDK so it needs to be public. public readonly struct ByteArrayComparer : IEqualityComparer { @@ -25,7 +25,7 @@ public bool Equals(byte[]? left, byte[]? right) [MethodImpl(MethodImplOptions.AggressiveInlining)] static bool EqualsUnvectorized(byte[] left, byte[] right) { - for (int i = 0; i < left.Length; i++) + for (var i = 0; i < left.Length; i++) { if (left[i] != right[i]) { @@ -38,8 +38,8 @@ static bool EqualsUnvectorized(byte[] left, byte[] right) public int GetHashCode(byte[] obj) { - int hash = 17; - foreach (byte b in obj) + var hash = 17; + foreach (var b in obj) { hash = hash * 31 + b; } diff --git a/crates/bindings-csharp/Codegen.Tests/fixtures/client/Lib.cs b/crates/bindings-csharp/Codegen.Tests/fixtures/client/Lib.cs index fe079756be..e9bc0c7218 100644 --- a/crates/bindings-csharp/Codegen.Tests/fixtures/client/Lib.cs +++ b/crates/bindings-csharp/Codegen.Tests/fixtures/client/Lib.cs @@ -1,10 +1,10 @@ using System; using System.Collections.Generic; -using System.IO; -using System.Linq; using System.Runtime.InteropServices; using SpacetimeDB; +#pragma warning disable CA1050 // Declare types in namespaces - this is a test fixture, no need for a namespace. 
+ [SpacetimeDB.Type] public partial struct CustomStruct { diff --git a/crates/bindings-csharp/Codegen.Tests/fixtures/server/Lib.cs b/crates/bindings-csharp/Codegen.Tests/fixtures/server/Lib.cs index 448116f520..cace016747 100644 --- a/crates/bindings-csharp/Codegen.Tests/fixtures/server/Lib.cs +++ b/crates/bindings-csharp/Codegen.Tests/fixtures/server/Lib.cs @@ -1,6 +1,8 @@ using System.Runtime.InteropServices; using SpacetimeDB; +#pragma warning disable CA1050 // Declare types in namespaces - this is a test fixture, no need for a namespace. + [SpacetimeDB.Type] public partial struct CustomStruct { diff --git a/crates/bindings-csharp/Codegen/Module.cs b/crates/bindings-csharp/Codegen/Module.cs index 9c69d396c8..efd020c185 100644 --- a/crates/bindings-csharp/Codegen/Module.cs +++ b/crates/bindings-csharp/Codegen/Module.cs @@ -424,7 +424,9 @@ public void Initialize(IncrementalGeneratorInitializationContext context) var addReducers = tuple.Right.Sort((a, b) => a.Key.CompareTo(b.Key)); // Don't generate the FFI boilerplate if there are no tables or reducers. 
if (tableNames.IsEmpty && addReducers.IsEmpty) + { return; + } context.AddSource( "FFI.cs", $$""" diff --git a/crates/bindings-csharp/Directory.Build.props b/crates/bindings-csharp/Directory.Build.props index 32a1e8058c..42d61d1181 100644 --- a/crates/bindings-csharp/Directory.Build.props +++ b/crates/bindings-csharp/Directory.Build.props @@ -17,9 +17,16 @@ enable enable latest-Minimum + Recommended true + + + true + $(NoWarn);CS1591;CS1574 + + diff --git a/crates/bindings-csharp/Runtime/Exceptions.cs b/crates/bindings-csharp/Runtime/Exceptions.cs index 0695b9b413..57113dc0c4 100644 --- a/crates/bindings-csharp/Runtime/Exceptions.cs +++ b/crates/bindings-csharp/Runtime/Exceptions.cs @@ -42,6 +42,11 @@ public class NoSuchIterException : StdbException public override string Message => "The provided row iterator does not exist"; } +public class NoSuchLogStopwatch : StdbException +{ + public override string Message => "The provided stopwatch does not exist"; +} + public class NoSuchBytesException : StdbException { public override string Message => "The provided bytes source or sink does not exist"; diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawColumnDefV8.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawColumnDefV8.cs index 72c6d2cf12..03c46c42c0 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawColumnDefV8.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawColumnDefV8.cs @@ -30,9 +30,11 @@ SpacetimeDB.BSATN.AlgebraicType ColType this.ColType = ColType; } - public RawColumnDefV8() : this( - "", - default! 
- ) { } + public RawColumnDefV8() + { + this.ColName = ""; + this.ColType = null!; + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawConstraintDataV9.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawConstraintDataV9.cs new file mode 100644 index 0000000000..bb51f8a231 --- /dev/null +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawConstraintDataV9.cs @@ -0,0 +1,17 @@ +// THIS FILE IS AUTOMATICALLY GENERATED BY SPACETIMEDB. EDITS TO THIS FILE +// WILL NOT BE SAVED. MODIFY TABLES IN RUST INSTEAD. +// + +#nullable enable + +using System; +using SpacetimeDB; + +namespace SpacetimeDB.Internal +{ + [SpacetimeDB.Type] + public partial record RawConstraintDataV9 : SpacetimeDB.TaggedEnum<( + SpacetimeDB.Internal.RawUniqueConstraintDataV9 Unique, + SpacetimeDB.Unit _Reserved + )>; +} diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawConstraintDefV8.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawConstraintDefV8.cs index a4d3d30a2f..d08bcef32b 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawConstraintDefV8.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawConstraintDefV8.cs @@ -34,10 +34,11 @@ System.Collections.Generic.List Columns this.Columns = Columns; } - public RawConstraintDefV8() : this( - "", - default!, - new() - ) { } + public RawConstraintDefV8() + { + this.ConstraintName = ""; + this.Columns = new(); + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawConstraintDefV9.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawConstraintDefV9.cs new file mode 100644 index 0000000000..ecd300be15 --- /dev/null +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawConstraintDefV9.cs @@ -0,0 +1,40 @@ +// THIS FILE IS AUTOMATICALLY GENERATED BY SPACETIMEDB. EDITS TO THIS FILE +// WILL NOT BE SAVED. MODIFY TABLES IN RUST INSTEAD. 
+// + +#nullable enable + +using System; +using SpacetimeDB; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; + +namespace SpacetimeDB.Internal +{ + [SpacetimeDB.Type] + [DataContract] + public partial class RawConstraintDefV9 + { + [DataMember(Name = "name")] + public string Name; + [DataMember(Name = "data")] + public SpacetimeDB.Internal.RawConstraintDataV9 Data; + + public RawConstraintDefV9( + string Name, + SpacetimeDB.Internal.RawConstraintDataV9 Data + ) + { + this.Name = Name; + this.Data = Data; + } + + public RawConstraintDefV9() + { + this.Name = ""; + this.Data = null!; + } + + } +} diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawIndexDefV8.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawIndexDefV8.cs index 15b9825ea9..64c15e3683 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawIndexDefV8.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawIndexDefV8.cs @@ -38,11 +38,11 @@ System.Collections.Generic.List Columns this.Columns = Columns; } - public RawIndexDefV8() : this( - "", - default!, - default!, - new() - ) { } + public RawIndexDefV8() + { + this.IndexName = ""; + this.Columns = new(); + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawIndexDefV9.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawIndexDefV9.cs index ca8c9b3ff2..71aa4c3a6e 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawIndexDefV9.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawIndexDefV9.cs @@ -34,10 +34,11 @@ SpacetimeDB.Internal.RawIndexAlgorithm Algorithm this.Algorithm = Algorithm; } - public RawIndexDefV9() : this( - "", - default!, - default! 
- ) { } + public RawIndexDefV9() + { + this.Name = ""; + this.Algorithm = null!; + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawModuleDefV8.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawModuleDefV8.cs index 45fbc5a141..c4933cd334 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawModuleDefV8.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawModuleDefV8.cs @@ -38,11 +38,13 @@ public RawModuleDefV8( this.MiscExports = MiscExports; } - public RawModuleDefV8() : this( - new(), - new(), - new(), - new() - ) { } + public RawModuleDefV8() + { + this.Typespace = new(); + this.Tables = new(); + this.Reducers = new(); + this.MiscExports = new(); + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawModuleDefV9.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawModuleDefV9.cs index a5d4b71995..a6979b6159 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawModuleDefV9.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawModuleDefV9.cs @@ -42,12 +42,14 @@ public RawModuleDefV9( this.MiscExports = MiscExports; } - public RawModuleDefV9() : this( - new(), - new(), - new(), - new(), - new() - ) { } + public RawModuleDefV9() + { + this.Typespace = new(); + this.Tables = new(); + this.Reducers = new(); + this.Types = new(); + this.MiscExports = new(); + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawReducerDefV9.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawReducerDefV9.cs index a4bed12292..0d3268a10a 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawReducerDefV9.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawReducerDefV9.cs @@ -34,10 +34,11 @@ public RawReducerDefV9( this.Lifecycle = Lifecycle; } - public RawReducerDefV9() : this( - "", - new(), - default! 
- ) { } + public RawReducerDefV9() + { + this.Name = ""; + this.Params = new(); + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawScheduleDefV9.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawScheduleDefV9.cs index 7527c7a53e..260bb28c21 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawScheduleDefV9.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawScheduleDefV9.cs @@ -30,9 +30,11 @@ string ReducerName this.ReducerName = ReducerName; } - public RawScheduleDefV9() : this( - "", - "" - ) { } + public RawScheduleDefV9() + { + this.Name = ""; + this.ReducerName = ""; + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawScopedTypeNameV9.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawScopedTypeNameV9.cs index a1e7593683..b2329d0a22 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawScopedTypeNameV9.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawScopedTypeNameV9.cs @@ -30,9 +30,11 @@ string Name this.Name = Name; } - public RawScopedTypeNameV9() : this( - new(), - "" - ) { } + public RawScopedTypeNameV9() + { + this.Scope = new(); + this.Name = ""; + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawSequenceDefV8.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawSequenceDefV8.cs index 830dbf586e..abc372d315 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawSequenceDefV8.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawSequenceDefV8.cs @@ -50,14 +50,10 @@ I128 Allocated this.Allocated = Allocated; } - public RawSequenceDefV8() : this( - "", - default!, - default!, - default!, - default!, - default!, - default! 
- ) { } + public RawSequenceDefV8() + { + this.SequenceName = ""; + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawSequenceDefV9.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawSequenceDefV9.cs index dd631ebc05..768eb01176 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawSequenceDefV9.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawSequenceDefV9.cs @@ -46,13 +46,10 @@ I128 Increment this.Increment = Increment; } - public RawSequenceDefV9() : this( - "", - default!, - default!, - default!, - default!, - default! - ) { } + public RawSequenceDefV9() + { + this.Name = ""; + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawTableDefV8.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawTableDefV8.cs index ee0650dab0..844b4d134b 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawTableDefV8.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawTableDefV8.cs @@ -54,15 +54,16 @@ public RawTableDefV8( this.Scheduled = Scheduled; } - public RawTableDefV8() : this( - "", - new(), - new(), - new(), - new(), - "", - "", - default! - ) { } + public RawTableDefV8() + { + this.TableName = ""; + this.Columns = new(); + this.Indexes = new(); + this.Constraints = new(); + this.Sequences = new(); + this.TableType = ""; + this.TableAccess = ""; + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawTableDefV9.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawTableDefV9.cs index 09c9c6cdd0..506da87e29 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawTableDefV9.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawTableDefV9.cs @@ -24,8 +24,8 @@ public partial class RawTableDefV9 public ushort? 
PrimaryKey; [DataMember(Name = "indexes")] public System.Collections.Generic.List Indexes; - [DataMember(Name = "unique_constraints")] - public System.Collections.Generic.List UniqueConstraints; + [DataMember(Name = "constraints")] + public System.Collections.Generic.List Constraints; [DataMember(Name = "sequences")] public System.Collections.Generic.List Sequences; [DataMember(Name = "schedule")] @@ -40,7 +40,7 @@ public RawTableDefV9( uint ProductTypeRef, ushort? PrimaryKey, System.Collections.Generic.List Indexes, - System.Collections.Generic.List UniqueConstraints, + System.Collections.Generic.List Constraints, System.Collections.Generic.List Sequences, SpacetimeDB.Internal.RawScheduleDefV9? Schedule, SpacetimeDB.Internal.TableType TableType, @@ -51,23 +51,20 @@ SpacetimeDB.Internal.TableAccess TableAccess this.ProductTypeRef = ProductTypeRef; this.PrimaryKey = PrimaryKey; this.Indexes = Indexes; - this.UniqueConstraints = UniqueConstraints; + this.Constraints = Constraints; this.Sequences = Sequences; this.Schedule = Schedule; this.TableType = TableType; this.TableAccess = TableAccess; } - public RawTableDefV9() : this( - "", - default!, - default!, - new(), - new(), - new(), - default!, - default!, - default! - ) { } + public RawTableDefV9() + { + this.Name = ""; + this.Indexes = new(); + this.Constraints = new(); + this.Sequences = new(); + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawTypeDefV9.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawTypeDefV9.cs index 4d2af6c33f..62fdcc0f9b 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawTypeDefV9.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawTypeDefV9.cs @@ -34,10 +34,10 @@ bool CustomOrdering this.CustomOrdering = CustomOrdering; } - public RawTypeDefV9() : this( - new(), - default!, - default! 
- ) { } + public RawTypeDefV9() + { + this.Name = new(); + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/RawUniqueConstraintDefV9.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/RawUniqueConstraintDataV9.cs similarity index 69% rename from crates/bindings-csharp/Runtime/Internal/Autogen/RawUniqueConstraintDefV9.cs rename to crates/bindings-csharp/Runtime/Internal/Autogen/RawUniqueConstraintDataV9.cs index a264a26a56..93171c5352 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/RawUniqueConstraintDefV9.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/RawUniqueConstraintDataV9.cs @@ -14,25 +14,22 @@ namespace SpacetimeDB.Internal { [SpacetimeDB.Type] [DataContract] - public partial class RawUniqueConstraintDefV9 + public partial class RawUniqueConstraintDataV9 { - [DataMember(Name = "name")] - public string Name; [DataMember(Name = "columns")] public System.Collections.Generic.List Columns; - public RawUniqueConstraintDefV9( - string Name, + public RawUniqueConstraintDataV9( System.Collections.Generic.List Columns ) { - this.Name = Name; this.Columns = Columns; } - public RawUniqueConstraintDefV9() : this( - "", - new() - ) { } + public RawUniqueConstraintDataV9() + { + this.Columns = new(); + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/ReducerDef.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/ReducerDef.cs index 50c5b84f35..c7592876b2 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/ReducerDef.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/ReducerDef.cs @@ -30,9 +30,11 @@ public ReducerDef( this.Args = Args; } - public ReducerDef() : this( - "", - new() - ) { } + public ReducerDef() + { + this.Name = ""; + this.Args = new(); + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/TableDesc.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/TableDesc.cs index 3cd6d77c3f..59190a1cc9 100644 --- 
a/crates/bindings-csharp/Runtime/Internal/Autogen/TableDesc.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/TableDesc.cs @@ -30,9 +30,10 @@ uint Data this.Data = Data; } - public TableDesc() : this( - new(), - default! - ) { } + public TableDesc() + { + this.Schema = new(); + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/TypeAlias.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/TypeAlias.cs index c2eb1d665c..ab3dea846b 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/TypeAlias.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/TypeAlias.cs @@ -30,9 +30,10 @@ uint Ty this.Ty = Ty; } - public TypeAlias() : this( - "", - default! - ) { } + public TypeAlias() + { + this.Name = ""; + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/Autogen/Typespace.cs b/crates/bindings-csharp/Runtime/Internal/Autogen/Typespace.cs index 3bd98ea857..36811803be 100644 --- a/crates/bindings-csharp/Runtime/Internal/Autogen/Typespace.cs +++ b/crates/bindings-csharp/Runtime/Internal/Autogen/Typespace.cs @@ -26,8 +26,10 @@ public Typespace( this.Types = Types; } - public Typespace() : this( - new() - ) { } + public Typespace() + { + this.Types = new(); + } + } } diff --git a/crates/bindings-csharp/Runtime/Internal/FFI.cs b/crates/bindings-csharp/Runtime/Internal/FFI.cs index 6cd773d257..aa9ee4c957 100644 --- a/crates/bindings-csharp/Runtime/Internal/FFI.cs +++ b/crates/bindings-csharp/Runtime/Internal/FFI.cs @@ -27,6 +27,7 @@ public enum Errno : short BSATN_DECODE_ERROR = 3, NO_SUCH_TABLE = 4, NO_SUCH_ITER = 6, + NO_SUCH_CONSOLE_TIMER = 7, NO_SUCH_BYTES = 8, NO_SPACE = 9, BUFFER_TOO_SMALL = 11, @@ -75,6 +76,7 @@ public static CheckedStatus ConvertToManaged(Errno status) Errno.BSATN_DECODE_ERROR => new BsatnDecodeException(), Errno.NO_SUCH_TABLE => new NoSuchTableException(), Errno.NO_SUCH_ITER => new NoSuchIterException(), + Errno.NO_SUCH_CONSOLE_TIMER => new NoSuchLogStopwatch(), Errno.NO_SUCH_BYTES => new 
NoSuchBytesException(), Errno.NO_SPACE => new NoSpaceException(), Errno.BUFFER_TOO_SMALL => new BufferTooSmallException(), @@ -221,4 +223,35 @@ public static partial CheckedStatus _bytes_sink_write( ReadOnlySpan buffer, ref uint buffer_len ); + + [NativeMarshalling(typeof(ConsoleTimerIdMarshaller))] + [StructLayout(LayoutKind.Sequential)] + public readonly struct ConsoleTimerId + { + private readonly uint timer_id; + + private ConsoleTimerId(uint id) + { + timer_id = id; + } + + //LayoutKind.Sequential is apparently not enough for this struct to be returnable in PInvoke, so we need a custom marshaller unfortunately + [CustomMarshaller( + typeof(ConsoleTimerId), + MarshalMode.Default, + typeof(ConsoleTimerIdMarshaller) + )] + internal static class ConsoleTimerIdMarshaller + { + public static ConsoleTimerId ConvertToManaged(uint id) => new ConsoleTimerId(id); + + public static uint ConvertToUnmanaged(ConsoleTimerId id) => id.timer_id; + } + } + + [LibraryImport(StdbNamespace)] + public static partial ConsoleTimerId _console_timer_start([In] byte[] name, uint name_len); + + [LibraryImport(StdbNamespace)] + public static partial CheckedStatus _console_timer_end(ConsoleTimerId stopwatch_id); } diff --git a/crates/bindings-csharp/Runtime/Internal/Module.cs b/crates/bindings-csharp/Runtime/Internal/Module.cs index f2e2e4d948..2df5e436ed 100644 --- a/crates/bindings-csharp/Runtime/Internal/Module.cs +++ b/crates/bindings-csharp/Runtime/Internal/Module.cs @@ -26,11 +26,6 @@ private static string GetFriendlyName(Type type) => private void RegisterTypeName(AlgebraicType.Ref typeRef) { - // If it's a table, it doesn't need an alias as name will be registered automatically. 
- if (typeof(T).IsDefined(typeof(TableAttribute), false)) - { - return; - } MiscExports.Add( new MiscModuleExport.TypeAlias(new(GetFriendlyName(typeof(T)), (uint)typeRef.Ref_)) ); @@ -197,11 +192,10 @@ BytesSink error try { - Runtime.Random = new((int)timestamp.MicrosecondsSinceEpoch); - using var stream = new MemoryStream(args.Consume()); using var reader = new BinaryReader(stream); - reducers[(int)id].Invoke(reader, new(sender, address, timestamp.ToStd())); + var context = new ReducerContext(sender, address, timestamp); + reducers[(int)id].Invoke(reader, context); if (stream.Position != stream.Length) { throw new Exception("Unrecognised extra bytes in the reducer arguments"); diff --git a/crates/bindings-csharp/Runtime/LogStopwatch.cs b/crates/bindings-csharp/Runtime/LogStopwatch.cs new file mode 100644 index 0000000000..7965b98782 --- /dev/null +++ b/crates/bindings-csharp/Runtime/LogStopwatch.cs @@ -0,0 +1,30 @@ +using System.Text; +using SpacetimeDB.Internal; + +namespace SpacetimeDB; + +public sealed class LogStopwatch : IDisposable +{ + private readonly FFI.ConsoleTimerId StopwatchId; + private bool WasStopped; + + public LogStopwatch(string name) + { + var name_bytes = Encoding.UTF8.GetBytes(name); + StopwatchId = FFI._console_timer_start(name_bytes, (uint)name_bytes.Length); + } + + void IDisposable.Dispose() + { + if (!WasStopped) + { + End(); + } + } + + public void End() + { + FFI._console_timer_end(StopwatchId); + WasStopped = true; + } +} diff --git a/crates/bindings-csharp/Runtime/Runtime.cs b/crates/bindings-csharp/Runtime/Runtime.cs index a135393b4c..c1afcfe3c7 100644 --- a/crates/bindings-csharp/Runtime/Runtime.cs +++ b/crates/bindings-csharp/Runtime/Runtime.cs @@ -11,15 +11,21 @@ public class ReducerContext public readonly DateTimeOffset Time; public readonly Address? Address; + /// + /// A reducer-specific instance of `System.Random` that is seeded by current reducer's timestamp. 
This object is unchanged throughout the entire reducer call + /// + public readonly Random Rng; + internal ReducerContext( Identity senderIdentity, Address? senderAddress, - DateTimeOffset timestamp + DateTimeOffsetRepr timestamp ) { Sender = senderIdentity; Address = senderAddress; - Time = timestamp; + Time = timestamp.ToStd(); + Rng = new Random((int)timestamp.MicrosecondsSinceEpoch); } } @@ -112,7 +118,4 @@ public static void Log( (uint)text_bytes.Length ); } - - // An instance of `System.Random` that is reseeded by each reducer's timestamp. - public static Random Random { get; internal set; } = new(); } diff --git a/crates/bindings-csharp/Runtime/bindings.c b/crates/bindings-csharp/Runtime/bindings.c index 8a2571cdbb..1ce9b03a09 100644 --- a/crates/bindings-csharp/Runtime/bindings.c +++ b/crates/bindings-csharp/Runtime/bindings.c @@ -21,6 +21,7 @@ OPAQUE_TYPEDEF(LogLevel, uint8_t); OPAQUE_TYPEDEF(BytesSink, uint32_t); OPAQUE_TYPEDEF(BytesSource, uint32_t); OPAQUE_TYPEDEF(RowIter, uint32_t); +OPAQUE_TYPEDEF(ConsoleTimerId, uint32_t); #define CSTR(s) (uint8_t*)s, sizeof(s) - 1 @@ -79,6 +80,12 @@ IMPORT(int16_t, _bytes_source_read, (BytesSource source, uint8_t* buffer_ptr, si (source, buffer_ptr, buffer_len_ptr)); IMPORT(uint16_t, _bytes_sink_write, (BytesSink sink, const uint8_t* buffer_ptr, size_t* buffer_len_ptr), (sink, buffer_ptr, buffer_len_ptr)); +IMPORT(ConsoleTimerId, _console_timer_start, + (const uint8_t* name, size_t name_len), + (name, name_len)); +IMPORT(Status, _console_timer_end, + (ConsoleTimerId stopwatch_id), + (stopwatch_id)); #ifndef EXPERIMENTAL_WASM_AOT static MonoClass* ffi_class; diff --git a/crates/bindings-csharp/SpacetimeSharpSATS.sln b/crates/bindings-csharp/SpacetimeSharpSATS.sln index ed0d3283e3..f76a5c6ce3 100644 --- a/crates/bindings-csharp/SpacetimeSharpSATS.sln +++ b/crates/bindings-csharp/SpacetimeSharpSATS.sln @@ -3,24 +3,26 @@ Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio Version 17
VisualStudioVersion = 17.0.31903.59 MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Codegen", "Codegen\Codegen.csproj", "{1E37FF71-567A-4AC8-947E-117098F01142}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Codegen", "Codegen\Codegen.csproj", "{1E37FF71-567A-4AC8-947E-117098F01142}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Runtime", "Runtime\Runtime.csproj", "{003DDE57-BB32-49F0-B94E-88F2D0414D19}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runtime", "Runtime\Runtime.csproj", "{003DDE57-BB32-49F0-B94E-88F2D0414D19}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BSATN.Codegen", "BSATN.Codegen\BSATN.Codegen.csproj", "{771D5368-E850-4441-8B82-90B38BF3DD9E}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BSATN.Codegen", "BSATN.Codegen\BSATN.Codegen.csproj", "{771D5368-E850-4441-8B82-90B38BF3DD9E}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BSATN.Runtime", "BSATN.Runtime\BSATN.Runtime.csproj", "{A3AFB5AD-15DE-46CC-ACEE-E5819C07BE58}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BSATN.Runtime", "BSATN.Runtime\BSATN.Runtime.csproj", "{A3AFB5AD-15DE-46CC-ACEE-E5819C07BE58}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Codegen.Tests", "Codegen.Tests\Codegen.Tests.csproj", "{2C282EBD-8E37-4F4C-8EE1-E91E21E75FEE}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Codegen.Tests", "Codegen.Tests\Codegen.Tests.csproj", "{2C282EBD-8E37-4F4C-8EE1-E91E21E75FEE}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "config", "config", "{12907A52-3915-43D6-B4D0-94E2938CB647}" + ProjectSection(SolutionItems) = preProject + .editorconfig = .editorconfig + EndProjectSection EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU Release|Any CPU = Release|Any CPU EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE 
- EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {1E37FF71-567A-4AC8-947E-117098F01142}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {1E37FF71-567A-4AC8-947E-117098F01142}.Debug|Any CPU.Build.0 = Debug|Any CPU @@ -43,4 +45,10 @@ Global {2C282EBD-8E37-4F4C-8EE1-E91E21E75FEE}.Release|Any CPU.ActiveCfg = Release|Any CPU {2C282EBD-8E37-4F4C-8EE1-E91E21E75FEE}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {8A5DE392-1C9D-4806-B6C7-EDD4D33C5D1E} + EndGlobalSection EndGlobal diff --git a/crates/bindings-macro/src/lib.rs b/crates/bindings-macro/src/lib.rs index 488a697275..67a5111116 100644 --- a/crates/bindings-macro/src/lib.rs +++ b/crates/bindings-macro/src/lib.rs @@ -887,7 +887,7 @@ fn table_impl(mut args: TableArgs, mut item: MutItem) -> syn:: let deserialize_impl = derive_deserialize(&sats_ty); let serialize_impl = derive_serialize(&sats_ty); - let schema_impl = derive_satstype(&sats_ty, *original_struct_ident != table_name); + let schema_impl = derive_satstype(&sats_ty, true); let column_attrs = columns.iter().map(|col| { Ident::new( ColumnAttribute::FLAGS diff --git a/crates/bindings-sys/src/lib.rs b/crates/bindings-sys/src/lib.rs index f357008894..07f7dadaa2 100644 --- a/crates/bindings-sys/src/lib.rs +++ b/crates/bindings-sys/src/lib.rs @@ -8,11 +8,11 @@ use core::mem::MaybeUninit; use core::num::NonZeroU16; use std::ptr; -use spacetimedb_primitives::{errno, errnos, ColId, TableId}; +use spacetimedb_primitives::{errno, errnos, ColId, IndexId, TableId}; /// Provides a raw set of sys calls which abstractions can be built atop of. pub mod raw { - use spacetimedb_primitives::{ColId, TableId}; + use spacetimedb_primitives::{ColId, IndexId, TableId}; // this module identifier determines the abi version that modules built with this crate depend // on. 
Any non-breaking additions to the abi surface should be put in a new `extern {}` block @@ -40,6 +40,26 @@ pub mod raw { /// - `NO_SUCH_TABLE`, when `name` is not the name of a table. pub fn _table_id_from_name(name: *const u8, name_len: usize, out: *mut TableId) -> u16; + /// Queries the `index_id` associated with the given (index) `name` + /// where `name` is the UTF-8 slice in WASM memory at `name_ptr[..name_len]`. + /// + /// The index id is written into the `out` pointer. + /// + /// # Traps + /// + /// Traps if: + /// - `name_ptr` is NULL or `name` is not in bounds of WASM memory. + /// - `name` is not valid UTF-8. + /// - `out` is NULL or `out[..size_of::()]` is not in bounds of WASM memory. + /// + /// # Errors + /// + /// Returns an error: + /// + /// - `NOT_IN_TRANSACTION`, when called outside of a transaction. + /// - `NO_SUCH_INDEX`, when `name` is not the name of an index. + pub fn _index_id_from_name(name_ptr: *const u8, name_len: usize, out: *mut IndexId) -> u16; + /// Writes the number of rows currently in table identified by `table_id` to `out`. /// /// # Traps @@ -72,6 +92,80 @@ pub mod raw { /// - `NO_SUCH_TABLE`, when `table_id` is not a known ID of a table. pub fn _datastore_table_scan_bsatn(table_id: TableId, out: *mut RowIter) -> u16; + /// Finds all rows in the index identified by `index_id`, + /// according to the: + /// - `prefix = prefix_ptr[..prefix_len]`, + /// - `rstart = rstart_ptr[..rstart_len]`, + /// - `rend = rend_ptr[..rend_len]`, + /// in WASM memory. + /// + /// The index itself has a schema/type. + /// The `prefix` is decoded to the initial `prefix_elems` `AlgebraicType`s + /// whereas `rstart` and `rend` are decoded to the `prefix_elems + 1` `AlgebraicType` + /// where the `AlgebraicValue`s are wrapped in `Bound`. + /// That is, `rstart, rend` are BSATN-encoded `Bound`s. 
+ /// + /// Matching is then defined by equating `prefix` + /// to the initial `prefix_elems` columns of the index + /// and then imposing `rstart` as the starting bound + /// and `rend` as the ending bound on the `prefix_elems + 1` column of the index. + /// Remaining columns of the index are then unbounded. + /// Note that the `prefix` in this case can be empty (`prefix_elems = 0`), + /// in which case this becomes a ranged index scan on a single-col index + /// or even a full table scan if `rstart` and `rend` are both unbounded. + /// + /// The relevant table for the index is found implicitly via the `index_id`, + /// which is unique for the module. + /// + /// On success, the iterator handle is written to the `out` pointer. + /// This handle can be advanced by [`row_iter_bsatn_advance`]. + /// + /// # Non-obvious queries + /// + /// For an index on columns `[a, b, c]`: + /// + /// - `a = x, b = y` is encoded as a prefix `[x, y]` + /// and a range `Range::Unbounded`, + /// or as a prefix `[x]` and a range `rstart = rend = Range::Inclusive(y)`. + /// - `a = x, b = y, c = z` is encoded as a prefix `[x, y]` + /// and a range `rstart = rend = Range::Inclusive(z)`. + /// - A sorted full scan is encoded as an empty prefix + /// and a range `Range::Unbounded`. + /// + /// # Traps + /// + /// Traps if: + /// - `prefix_elems > 0` + /// and (`prefix_ptr` is NULL or `prefix` is not in bounds of WASM memory). + /// - `rstart` is NULL or `rstart` is not in bounds of WASM memory. + /// - `rend` is NULL or `rend` is not in bounds of WASM memory. + /// - `out` is NULL or `out[..size_of::()]` is not in bounds of WASM memory. + /// + /// # Errors + /// + /// Returns an error: + /// + /// - `NOT_IN_TRANSACTION`, when called outside of a transaction. + /// - `NO_SUCH_INDEX`, when `index_id` is not a known ID of an index. + /// - `WRONG_INDEX_ALGO` if the index is not a btree index. 
+ /// - `BSATN_DECODE_ERROR`, when `prefix` cannot be decoded to + /// a `prefix_elems` number of `AlgebraicValue` + /// typed at the initial `prefix_elems` `AlgebraicType`s of the index's key type. + /// Or when `rstart` or `rend` cannot be decoded to an `Bound` + /// where the inner `AlgebraicValue`s are + /// typed at the `prefix_elems + 1` `AlgebraicType` of the index's key type. + pub fn _datastore_btree_scan_bsatn( + index_id: IndexId, + prefix_ptr: *const u8, + prefix_len: usize, + prefix_elems: ColId, + rstart_ptr: *const u8, // Bound + rstart_len: usize, + rend_ptr: *const u8, // Bound + rend_len: usize, + out: *mut RowIter, + ) -> u16; + /// Finds all rows in the table identified by `table_id`, /// where the row has a column, identified by `col_id`, /// with data matching the byte string, in WASM memory, pointed to at by `val`. @@ -120,6 +214,54 @@ pub mod raw { out: *mut u32, ) -> u16; + /// Deletes all rows found in the index identified by `index_id`, + /// according to the: + /// - `prefix = prefix_ptr[..prefix_len]`, + /// - `rstart = rstart_ptr[..rstart_len]`, + /// - `rend = rend_ptr[..rend_len]`, + /// in WASM memory. + /// + /// This syscall will delete all the rows found by + /// [`datastore_btree_scan_bsatn`] with the same arguments passed, + /// including `prefix_elems`. + /// See `datastore_btree_scan_bsatn` for details. + /// + /// The number of rows deleted is written to the WASM pointer `out`. + /// + /// # Traps + /// + /// Traps if: + /// - `prefix_elems > 0` + /// and (`prefix_ptr` is NULL or `prefix` is not in bounds of WASM memory). + /// - `rstart` is NULL or `rstart` is not in bounds of WASM memory. + /// - `rend` is NULL or `rend` is not in bounds of WASM memory. + /// - `out` is NULL or `out[..size_of::()]` is not in bounds of WASM memory. + /// + /// # Errors + /// + /// Returns an error: + /// + /// - `NOT_IN_TRANSACTION`, when called outside of a transaction. 
+ /// - `NO_SUCH_INDEX`, when `index_id` is not a known ID of an index. + /// - `WRONG_INDEX_ALGO` if the index is not a btree index. + /// - `BSATN_DECODE_ERROR`, when `prefix` cannot be decoded to + /// a `prefix_elems` number of `AlgebraicValue` + /// typed at the initial `prefix_elems` `AlgebraicType`s of the index's key type. + /// Or when `rstart` or `rend` cannot be decoded to an `Bound` + /// where the inner `AlgebraicValue`s are + /// typed at the `prefix_elems + 1` `AlgebraicType` of the index's key type. + pub fn _datastore_delete_by_btree_scan_bsatn( + index_id: IndexId, + prefix_ptr: *const u8, + prefix_len: usize, + prefix_elems: ColId, + rstart_ptr: *const u8, // Bound + rstart_len: usize, + rend_ptr: *const u8, // Bound + rend_len: usize, + out: *mut u32, + ) -> u16; + /// Deletes those rows, in the table identified by `table_id`, /// that match any row in the byte string `rel = rel_ptr[..rel_len]` in WASM memory. /// @@ -597,12 +739,28 @@ unsafe fn call(f: impl FnOnce(*mut T) -> u16) -> Result { /// /// Returns an error: /// +/// - `NOT_IN_TRANSACTION`, when called outside of a transaction. /// - `NO_SUCH_TABLE`, when `name` is not the name of a table. #[inline] pub fn table_id_from_name(name: &str) -> Result { unsafe { call(|out| raw::_table_id_from_name(name.as_ptr(), name.len(), out)) } } +/// Queries the `index_id` associated with the given (index) `name`. +/// +/// The index id is returned. +/// +/// # Errors +/// +/// Returns an error: +/// +/// - `NOT_IN_TRANSACTION`, when called outside of a transaction. +/// - `NO_SUCH_INDEX`, when `name` is not the name of an index. +#[inline] +pub fn index_id_from_name(name: &str) -> Result { + unsafe { call(|out| raw::_index_id_from_name(name.as_ptr(), name.len(), out)) } +} + /// Returns the number of rows currently in table identified by `table_id`. 
/// /// # Errors @@ -714,6 +872,127 @@ pub fn datastore_table_scan_bsatn(table_id: TableId) -> Result { Ok(RowIter { raw }) } +/// Finds all rows in the index identified by `index_id`, +/// according to the `prefix`, `rstart`, and `rend`. +/// +/// The index itself has a schema/type. +/// The `prefix` is decoded to the initial `prefix_elems` `AlgebraicType`s +/// whereas `rstart` and `rend` are decoded to the `prefix_elems + 1` `AlgebraicType` +/// where the `AlgebraicValue`s are wrapped in `Bound`. +/// That is, `rstart, rend` are BSATN-encoded `Bound`s. +/// +/// Matching is then defined by equating `prefix` +/// to the initial `prefix_elems` columns of the index +/// and then imposing `rstart` as the starting bound +/// and `rend` as the ending bound on the `prefix_elems + 1` column of the index. +/// Remaining columns of the index are then unbounded. +/// Note that the `prefix` in this case can be empty (`prefix_elems = 0`), +/// in which case this becomes a ranged index scan on a single-col index +/// or even a full table scan if `rstart` and `rend` are both unbounded. +/// +/// The relevant table for the index is found implicitly via the `index_id`, +/// which is unique for the module. +/// +/// On success, the iterator handle is written to the `out` pointer. +/// This handle can be advanced by [`row_iter_bsatn_advance`]. +/// +/// # Non-obvious queries +/// +/// For an index on columns `[a, b, c]`: +/// +/// - `a = x, b = y` is encoded as a prefix `[x, y]` +/// and a range `Range::Unbounded`, +/// or as a prefix `[x]` and a range `rstart = rend = Range::Inclusive(y)`. +/// - `a = x, b = y, c = z` is encoded as a prefix `[x, y]` +/// and a range `rstart = rend = Range::Inclusive(z)`. +/// - A sorted full scan is encoded as an empty prefix +/// and a range `Range::Unbounded`. +/// +/// # Errors +/// +/// Returns an error: +/// +/// - `NOT_IN_TRANSACTION`, when called outside of a transaction. 
+/// - `NO_SUCH_INDEX`, when `index_id` is not a known ID of an index. +/// - `WRONG_INDEX_ALGO` if the index is not a btree index. +/// - `BSATN_DECODE_ERROR`, when `prefix` cannot be decoded to +/// a `prefix_elems` number of `AlgebraicValue` +/// typed at the initial `prefix_elems` `AlgebraicType`s of the index's key type. +/// Or when `rstart` or `rend` cannot be decoded to an `Bound` +/// where the inner `AlgebraicValue`s are +/// typed at the `prefix_elems + 1` `AlgebraicType` of the index's key type. +pub fn datastore_btree_scan_bsatn( + index_id: IndexId, + prefix: &[u8], + prefix_elems: ColId, + rstart: &[u8], + rend: &[u8], +) -> Result { + let raw = unsafe { + call(|out| { + raw::_datastore_btree_scan_bsatn( + index_id, + prefix.as_ptr(), + prefix.len(), + prefix_elems, + rstart.as_ptr(), + rstart.len(), + rend.as_ptr(), + rend.len(), + out, + ) + })? + }; + Ok(RowIter { raw }) +} + +/// Deletes all rows found in the index identified by `index_id`, +/// according to the `prefix`, `rstart`, and `rend`. +/// +/// This syscall will delete all the rows found by +/// [`datastore_btree_scan_bsatn`] with the same arguments passed, +/// including `prefix_elems`. +/// See `datastore_btree_scan_bsatn` for details. +/// +/// The number of rows deleted is returned on success. +/// +/// # Errors +/// +/// Returns an error: +/// +/// - `NOT_IN_TRANSACTION`, when called outside of a transaction. +/// - `NO_SUCH_INDEX`, when `index_id` is not a known ID of an index. +/// - `WRONG_INDEX_ALGO` if the index is not a btree index. +/// - `BSATN_DECODE_ERROR`, when `prefix` cannot be decoded to +/// a `prefix_elems` number of `AlgebraicValue` +/// typed at the initial `prefix_elems` `AlgebraicType`s of the index's key type. +/// Or when `rstart` or `rend` cannot be decoded to an `Bound` +/// where the inner `AlgebraicValue`s are +/// typed at the `prefix_elems + 1` `AlgebraicType` of the index's key type. 
+pub fn datastore_delete_by_btree_scan_bsatn( + index_id: IndexId, + prefix: &[u8], + prefix_elems: ColId, + rstart: &[u8], + rend: &[u8], +) -> Result { + unsafe { + call(|out| { + raw::_datastore_delete_by_btree_scan_bsatn( + index_id, + prefix.as_ptr(), + prefix.len(), + prefix_elems, + rstart.as_ptr(), + rstart.len(), + rend.as_ptr(), + rend.len(), + out, + ) + }) + } +} + /// Iterate through a table, filtering by an encoded `spacetimedb_lib::filter::Expr`. /// /// # Errors diff --git a/crates/bindings/src/lib.rs b/crates/bindings/src/lib.rs index 6105adb19d..6102b4924b 100644 --- a/crates/bindings/src/lib.rs +++ b/crates/bindings/src/lib.rs @@ -4,12 +4,12 @@ #[macro_use] mod io; mod impls; +pub mod log_stopwatch; mod logger; #[cfg(feature = "rand")] mod rng; #[doc(hidden)] pub mod rt; -pub mod time_span; mod timestamp; use spacetimedb_lib::buffer::{BufReader, BufWriter, Cursor, DecodeError}; diff --git a/crates/bindings/src/time_span.rs b/crates/bindings/src/log_stopwatch.rs similarity index 63% rename from crates/bindings/src/time_span.rs rename to crates/bindings/src/log_stopwatch.rs index 9380a951e2..b1081865a2 100644 --- a/crates/bindings/src/time_span.rs +++ b/crates/bindings/src/log_stopwatch.rs @@ -1,12 +1,12 @@ -pub struct Span { - span_id: u32, +pub struct LogStopwatch { + stopwatch_id: u32, } -impl Span { - pub fn start(name: &str) -> Self { +impl LogStopwatch { + pub fn new(name: &str) -> Self { let name = name.as_bytes(); let id = unsafe { spacetimedb_bindings_sys::raw::_console_timer_start(name.as_ptr(), name.len()) }; - Self { span_id: id } + Self { stopwatch_id: id } } pub fn end(self) { @@ -14,10 +14,10 @@ impl Span { } } -impl std::ops::Drop for Span { +impl std::ops::Drop for LogStopwatch { fn drop(&mut self) { unsafe { - spacetimedb_bindings_sys::raw::_console_timer_end(self.span_id); + spacetimedb_bindings_sys::raw::_console_timer_end(self.stopwatch_id); } } } diff --git 
a/crates/bindings/tests/snapshots/deps__spacetimedb_bindings_dependencies.snap b/crates/bindings/tests/snapshots/deps__spacetimedb_bindings_dependencies.snap index 1fbac2d5aa..c28f1ca376 100644 --- a/crates/bindings/tests/snapshots/deps__spacetimedb_bindings_dependencies.snap +++ b/crates/bindings/tests/snapshots/deps__spacetimedb_bindings_dependencies.snap @@ -39,6 +39,8 @@ spacetimedb │ ├── spacetimedb_primitives │ │ ├── bitflags │ │ ├── either +│ │ ├── itertools +│ │ │ └── either │ │ └── nohash_hasher │ └── syn │ ├── proc_macro2 (*) @@ -56,8 +58,7 @@ spacetimedb │ │ ├── quote (*) │ │ └── syn (*) │ ├── hex -│ ├── itertools -│ │ └── either +│ ├── itertools (*) │ ├── spacetimedb_bindings_macro (*) │ ├── spacetimedb_data_structures │ │ ├── hashbrown diff --git a/crates/cli/examples/regen-csharp-moduledef.rs b/crates/cli/examples/regen-csharp-moduledef.rs index e52a055b51..19becad395 100644 --- a/crates/cli/examples/regen-csharp-moduledef.rs +++ b/crates/cli/examples/regen-csharp-moduledef.rs @@ -21,7 +21,11 @@ fn main() -> anyhow::Result<()> { module.add_type::(); }); - let mut results = generate(module, Language::Csharp, "SpacetimeDB.Internal")?; + let mut results = generate( + RawModuleDef::V8BackCompat(module), + Language::Csharp, + "SpacetimeDB.Internal", + )?; // Someday we might replace custom BSATN types with autogenerated ones as well, // but for now they're not very large and our copies are somewhat more optimised. 
diff --git a/crates/cli/src/subcommands/call.rs b/crates/cli/src/subcommands/call.rs index d85f493f36..744948bff9 100644 --- a/crates/cli/src/subcommands/call.rs +++ b/crates/cli/src/subcommands/call.rs @@ -1,7 +1,6 @@ use crate::common_args; use crate::config::Config; use crate::edit_distance::{edit_distance, find_best_match_for_name}; -use crate::generate::rust::{write_arglist_no_delimiters, write_type}; use crate::util; use crate::util::{add_auth_header_opt, database_address, get_auth_header_only}; use anyhow::{bail, Context, Error}; @@ -191,10 +190,10 @@ fn reducer_signature(schema_json: Value, reducer_name: &str) -> Option { fn ctx(typespace: &Typespace, r: AlgebraicTypeRef) -> String { let ty = &typespace[r]; let mut ty_str = String::new(); - write_type(&|r| ctx(typespace, r), &mut ty_str, ty).unwrap(); + write_type::write_type(&|r| ctx(typespace, r), &mut ty_str, ty).unwrap(); ty_str } - write_arglist_no_delimiters(&|r| ctx(&typespace, r), &mut args, ¶ms, None).unwrap(); + write_type::write_arglist_no_delimiters(&|r| ctx(&typespace, r), &mut args, ¶ms, None).unwrap(); let args = args.trim().trim_end_matches(',').replace('\n', " "); // Print the full signature to `reducer_fmt`. @@ -322,3 +321,136 @@ fn typespace(value: &serde_json::Value) -> Option { let types = value.as_object()?.get("typespace")?; deserialize_from(types).map(Typespace::new).ok() } + +// this is an old version of code in generate::rust that got +// refactored, but reducer_signature() was using it +// TODO: port reducer_signature() to use AlgebraicTypeUse et al, somehow. 
+mod write_type { + use super::*; + use convert_case::{Case, Casing}; + use spacetimedb_lib::sats::ArrayType; + use spacetimedb_lib::ProductType; + use std::fmt; + use std::ops::Deref; + + pub fn write_type( + ctx: &impl Fn(AlgebraicTypeRef) -> String, + out: &mut W, + ty: &AlgebraicType, + ) -> fmt::Result { + match ty { + p if p.is_identity() => write!(out, "Identity")?, + p if p.is_address() => write!(out, "Address")?, + p if p.is_schedule_at() => write!(out, "ScheduleAt")?, + AlgebraicType::Sum(sum_type) => { + if let Some(inner_ty) = sum_type.as_option() { + write!(out, "Option<")?; + write_type(ctx, out, inner_ty)?; + write!(out, ">")?; + } else { + write!(out, "enum ")?; + print_comma_sep_braced(out, &sum_type.variants, |out: &mut W, elem: &_| { + if let Some(name) = &elem.name { + write!(out, "{name}: ")?; + } + write_type(ctx, out, &elem.algebraic_type) + })?; + } + } + AlgebraicType::Product(ProductType { elements }) => { + print_comma_sep_braced(out, elements, |out: &mut W, elem: &ProductTypeElement| { + if let Some(name) = &elem.name { + write!(out, "{name}: ")?; + } + write_type(ctx, out, &elem.algebraic_type) + })?; + } + AlgebraicType::Bool => write!(out, "bool")?, + AlgebraicType::I8 => write!(out, "i8")?, + AlgebraicType::U8 => write!(out, "u8")?, + AlgebraicType::I16 => write!(out, "i16")?, + AlgebraicType::U16 => write!(out, "u16")?, + AlgebraicType::I32 => write!(out, "i32")?, + AlgebraicType::U32 => write!(out, "u32")?, + AlgebraicType::I64 => write!(out, "i64")?, + AlgebraicType::U64 => write!(out, "u64")?, + AlgebraicType::I128 => write!(out, "i128")?, + AlgebraicType::U128 => write!(out, "u128")?, + AlgebraicType::I256 => write!(out, "i256")?, + AlgebraicType::U256 => write!(out, "u256")?, + AlgebraicType::F32 => write!(out, "f32")?, + AlgebraicType::F64 => write!(out, "f64")?, + AlgebraicType::String => write!(out, "String")?, + AlgebraicType::Array(ArrayType { elem_ty }) => { + write!(out, "Vec<")?; + write_type(ctx, out, elem_ty)?; + 
write!(out, ">")?; + } + AlgebraicType::Map(ty) => { + write!(out, "Map<")?; + write_type(ctx, out, &ty.key_ty)?; + write!(out, ", ")?; + write_type(ctx, out, &ty.ty)?; + write!(out, ">")?; + } + AlgebraicType::Ref(r) => { + write!(out, "{}", ctx(*r))?; + } + } + Ok(()) + } + + fn print_comma_sep_braced( + out: &mut W, + elems: &[T], + on: impl Fn(&mut W, &T) -> fmt::Result, + ) -> fmt::Result { + write!(out, "{{")?; + + let mut iter = elems.iter(); + + // First factor. + if let Some(elem) = iter.next() { + write!(out, " ")?; + on(out, elem)?; + } + // Other factors. + for elem in iter { + write!(out, ", ")?; + on(out, elem)?; + } + + if !elems.is_empty() { + write!(out, " ")?; + } + + write!(out, "}}")?; + + Ok(()) + } + + pub fn write_arglist_no_delimiters( + ctx: &impl Fn(AlgebraicTypeRef) -> String, + out: &mut impl Write, + elements: &[ProductTypeElement], + + // Written before each line. Useful for `pub`. + prefix: Option<&str>, + ) -> fmt::Result { + for elt in elements { + if let Some(prefix) = prefix { + write!(out, "{prefix} ")?; + } + + let Some(name) = &elt.name else { + panic!("Product type element has no name: {elt:?}"); + }; + let name = name.deref().to_case(Case::Snake); + + write!(out, "{name}: ")?; + write_type(ctx, out, &elt.algebraic_type)?; + writeln!(out, ",")?; + } + Ok(()) + } +} diff --git a/crates/cli/src/subcommands/generate/mod.rs b/crates/cli/src/subcommands/generate/mod.rs index 0215984c37..dd349188f8 100644 --- a/crates/cli/src/subcommands/generate/mod.rs +++ b/crates/cli/src/subcommands/generate/mod.rs @@ -5,13 +5,18 @@ use clap::ArgAction::SetTrue; use convert_case::{Case, Casing}; use core::mem; use duct::cmd; +use itertools::Itertools; use spacetimedb::host::wasmtime::{Mem, MemView, WasmPointee as _}; -use spacetimedb_lib::db::raw_def::RawColumnDefV8; +use spacetimedb_data_structures::map::HashSet; use spacetimedb_lib::de::serde::DeserializeWrapper; -use spacetimedb_lib::sats::{AlgebraicType, Typespace}; -use 
spacetimedb_lib::{bsatn, MiscModuleExport, RawModuleDefV8, ReducerDef, TableDesc, TypeAlias}; +use spacetimedb_lib::sats::{AlgebraicType, AlgebraicTypeRef, Typespace}; +use spacetimedb_lib::{bsatn, RawModuleDefV8, TableDesc, TypeAlias}; use spacetimedb_lib::{RawModuleDef, MODULE_ABI_MAJOR_VERSION}; use spacetimedb_primitives::errno; +use spacetimedb_schema; +use spacetimedb_schema::def::{ModuleDef, ReducerDef, ScopedTypeName, TableDef, TypeDef}; +use spacetimedb_schema::identifier::Identifier; +use spacetimedb_schema::schema::TableSchema; use std::fs; use std::io::Write; use std::ops::Deref; @@ -240,114 +245,128 @@ pub struct GenCtx { names: Vec>, } -pub fn generate(module: RawModuleDefV8, lang: Language, namespace: &str) -> anyhow::Result> { - let (ctx, items) = extract_from_moduledef(module); - let items: Vec = items.collect(); - let mut files: Vec<(String, String)> = items - .iter() - .filter_map(|item| item.generate(&ctx, lang, namespace)) - .collect(); - files.extend(generate_globals(&ctx, lang, namespace, &items)); - - Ok(files) +pub fn generate(module: RawModuleDef, lang: Language, namespace: &str) -> anyhow::Result> { + let module = ModuleDef::try_from(module)?; + Ok(match lang { + Language::Rust => generate_lang(&module, rust::Rust, namespace), + Language::Csharp | Language::TypeScript => { + let ctx = GenCtx { + typespace: module.typespace().clone(), + names: (0..module.typespace().types.len()) + .map(|r| { + module + .type_def_from_ref(AlgebraicTypeRef(r as _)) + .map(|(name, _)| name.name_segments().join(".")) + }) + .collect(), + }; + + let tableset = module.tables().map(|t| t.product_type_ref).collect::>(); + let tables = module + .tables() + .map(|table| TableDesc { + schema: TableSchema::from_module_def(table, 0.into()).into(), + data: table.product_type_ref, + }) + .sorted_by(|a, b| a.schema.table_name.cmp(&b.schema.table_name)); + + // HACK: Patch the fields to have the types that point to `AlgebraicTypeRef` because all generators depend on that + 
// `register_table` in rt.rs resolve the types early, but the generators do it late. This impact enums where + // the enum name is not preserved in the `AlgebraicType`. + // x.schema.columns = + // RawColumnDefV8::from_product_type(typespace[x.data].as_product().unwrap().clone()); + + let types = module.types().filter(|typ| !tableset.contains(&typ.ty)).map(|typ| { + GenItem::TypeAlias(TypeAlias { + name: typ.name.name_segments().join("."), + ty: typ.ty, + }) + }); + + let reducers = module + .reducers() + .map(|reducer| spacetimedb_lib::ReducerDef { + name: reducer.name.clone().into(), + args: reducer.params.elements.to_vec(), + }) + .sorted_by(|a, b| a.name.cmp(&b.name)); + + let items = itertools::chain!( + types, + tables.into_iter().map(GenItem::Table), + reducers + .filter(|r| !(r.name.starts_with("__") && r.name.ends_with("__"))) + .map(GenItem::Reducer), + ); + + let items: Vec = items.collect(); + let mut files: Vec<(String, String)> = items + .iter() + .filter_map(|item| item.generate(&ctx, lang, namespace)) + .collect(); + files.extend(generate_globals(&ctx, lang, namespace, &items)); + files + } + }) } -fn generate_globals(ctx: &GenCtx, lang: Language, namespace: &str, items: &[GenItem]) -> Vec<(String, String)> { - match lang { - Language::Csharp => csharp::autogen_csharp_globals(ctx, items, namespace), - Language::TypeScript => typescript::autogen_typescript_globals(ctx, items), - Language::Rust => rust::autogen_rust_globals(ctx, items), - } +fn generate_lang(module: &ModuleDef, lang: impl Lang, namespace: &str) -> Vec<(String, String)> { + let table_refs = module.tables().map(|tbl| tbl.product_type_ref).collect::>(); + itertools::chain!( + module.tables().map(|tbl| { + ( + lang.table_filename(module, tbl), + lang.generate_table(module, namespace, tbl), + ) + }), + module.types().filter(|typ| !table_refs.contains(&typ.ty)).map(|typ| { + ( + lang.type_filename(&typ.name), + lang.generate_type(module, namespace, typ), + ) + }), + 
module.reducers().filter(|r| r.lifecycle.is_none()).map(|reducer| { + ( + lang.reducer_filename(&reducer.name), + lang.generate_reducer(module, namespace, reducer), + ) + }), + lang.generate_globals(module, namespace), + ) + .collect() } -pub fn extract_from_moduledef(module: RawModuleDefV8) -> (GenCtx, impl Iterator) { - let RawModuleDefV8 { - typespace, - tables, - reducers, - misc_exports, - } = module; - // HACK: Patch the fields to have the types that point to `AlgebraicTypeRef` because all generators depend on that - // `register_table` in rt.rs resolve the types early, but the generators do it late. This impact enums where - // the enum name is not preserved in the `AlgebraicType`. - let tables: Vec<_> = tables - .into_iter() - .map(|mut x| { - x.schema.columns = RawColumnDefV8::from_product_type(typespace[x.data].as_product().unwrap().clone()); - x - }) - .collect(); - let tableset = tables - .iter() - .map(|t| t.data) - .collect::>(); - - let mut names = vec![None; typespace.types.len()]; - let name_info = itertools::chain!( - tables.iter().map(|t| (t.data, &*t.schema.table_name)), - misc_exports - .iter() - .map(|MiscModuleExport::TypeAlias(a)| (a.ty, &*a.name)), - ); - for (typeref, name) in name_info { - names[typeref.idx()] = Some(name.into()) - } - let ctx = GenCtx { typespace, names }; - let iter = itertools::chain!( - misc_exports - .into_iter() - .filter(move |MiscModuleExport::TypeAlias(a)| !tableset.contains(&a.ty)) - .map(GenItem::from_misc_export), - tables.into_iter().map(GenItem::Table), - reducers - .into_iter() - .filter(|r| !(r.name.starts_with("__") && r.name.ends_with("__"))) - .map(GenItem::Reducer), - ); - (ctx, iter) +trait Lang { + fn table_filename(&self, module: &ModuleDef, table: &TableDef) -> String; + fn type_filename(&self, type_name: &ScopedTypeName) -> String; + fn reducer_filename(&self, reducer_name: &Identifier) -> String; + + fn generate_table(&self, module: &ModuleDef, namespace: &str, tbl: &TableDef) -> String; + fn 
generate_type(&self, module: &ModuleDef, namespace: &str, typ: &TypeDef) -> String; + fn generate_reducer(&self, module: &ModuleDef, namespace: &str, reducer: &ReducerDef) -> String; + fn generate_globals(&self, module: &ModuleDef, namespace: &str) -> Vec<(String, String)>; } pub enum GenItem { Table(TableDesc), TypeAlias(TypeAlias), - Reducer(ReducerDef), + Reducer(spacetimedb_lib::ReducerDef), } -impl GenItem { - fn from_misc_export(exp: MiscModuleExport) -> Self { - match exp { - MiscModuleExport::TypeAlias(a) => Self::TypeAlias(a), - } +fn generate_globals(ctx: &GenCtx, lang: Language, namespace: &str, items: &[GenItem]) -> Vec<(String, String)> { + match lang { + Language::Csharp => csharp::autogen_csharp_globals(ctx, items, namespace), + Language::TypeScript => typescript::autogen_typescript_globals(ctx, items), + Language::Rust => unreachable!(), } +} +impl GenItem { fn generate(&self, ctx: &GenCtx, lang: Language, namespace: &str) -> Option<(String, String)> { match lang { Language::Csharp => self.generate_csharp(ctx, namespace), Language::TypeScript => self.generate_typescript(ctx), - Language::Rust => self.generate_rust(ctx), - } - } - - fn generate_rust(&self, ctx: &GenCtx) -> Option<(String, String)> { - match self { - GenItem::Table(table) => { - let code = rust::autogen_rust_table(ctx, table); - // TODO: this is not ideal (should use table name, not row type name) - let tyname = ctx.names[table.data.idx()].as_ref().unwrap(); - Some((rust::rust_type_file_name(tyname), code)) - } - GenItem::TypeAlias(TypeAlias { name, ty }) => { - let code = match &ctx.typespace[*ty] { - AlgebraicType::Sum(sum) => rust::autogen_rust_sum(ctx, name, sum), - AlgebraicType::Product(prod) => rust::autogen_rust_tuple(ctx, name, prod), - _ => todo!(), - }; - Some((rust::rust_type_file_name(name), code)) - } - GenItem::Reducer(reducer) => { - let code = rust::autogen_rust_reducer(ctx, reducer); - Some((rust::rust_reducer_file_name(&reducer.name), code)) - } + Language::Rust => 
unreachable!(), } } @@ -407,7 +426,7 @@ impl GenItem { } } -pub fn extract_descriptions(wasm_file: &Path) -> anyhow::Result { +pub fn extract_descriptions(wasm_file: &Path) -> anyhow::Result { let engine = wasmtime::Engine::default(); let t = std::time::Instant::now(); let module = wasmtime::Module::from_file(&engine, wasm_file)?; @@ -460,10 +479,6 @@ pub fn extract_descriptions(wasm_file: &Path) -> anyhow::Result // TODO: shouldn't we return an error here? None => RawModuleDef::V8BackCompat(RawModuleDefV8::default()), }; - let module = match module { - RawModuleDef::V8BackCompat(v8) => v8, - _ => anyhow::bail!("Unimplemented module definition version"), - }; Ok(module) } diff --git a/crates/cli/src/subcommands/generate/rust.rs b/crates/cli/src/subcommands/generate/rust.rs index 8eef1e9944..d87d5ea14e 100644 --- a/crates/cli/src/subcommands/generate/rust.rs +++ b/crates/cli/src/subcommands/generate/rust.rs @@ -1,12 +1,14 @@ use super::code_indenter::CodeIndenter; -use super::{GenCtx, GenItem}; use convert_case::{Case, Casing}; -use spacetimedb_lib::sats::{ - AlgebraicType, AlgebraicTypeRef, ArrayType, ProductType, ProductTypeElement, SumType, SumTypeVariant, -}; -use spacetimedb_lib::{ReducerDef, TableDesc}; +use itertools::Itertools; +use spacetimedb_lib::sats::AlgebraicTypeRef; use spacetimedb_primitives::ColList; +use spacetimedb_schema::def::{ModuleDef, ReducerDef, ScopedTypeName, TableDef, TypeDef}; +use spacetimedb_schema::identifier::Identifier; use spacetimedb_schema::schema::TableSchema; +use spacetimedb_schema::type_for_generate::{ + AlgebraicTypeDef, AlgebraicTypeUse, PlainEnumTypeDef, PrimitiveType, ProductTypeDef, SumTypeDef, +}; use std::collections::BTreeSet; use std::fmt::{self, Write}; use std::ops::Deref; @@ -16,60 +18,88 @@ type Indenter = CodeIndenter; /// Pairs of (module_name, TypeName). 
type Imports = BTreeSet<(String, String)>; -fn write_type_ctx(ctx: &GenCtx, out: &mut Indenter, ty: &AlgebraicType) { - write_type(&|r| type_name(ctx, r), out, ty).unwrap() +pub struct Rust; + +impl super::Lang for Rust { + fn table_filename(&self, module: &ModuleDef, table: &TableDef) -> String { + let (name, _) = module.type_def_from_ref(table.product_type_ref).unwrap(); + collect_case(Case::Snake, name.name_segments()) + ".rs" + } + + fn type_filename(&self, type_name: &ScopedTypeName) -> String { + collect_case(Case::Snake, type_name.name_segments()) + ".rs" + } + + fn reducer_filename(&self, reducer_name: &Identifier) -> String { + reducer_name.deref().to_case(Case::Snake) + "_reducer.rs" + } + + fn generate_table(&self, module: &ModuleDef, _namespace: &str, table: &TableDef) -> String { + autogen_rust_table(module, table) + } + + fn generate_type(&self, module: &ModuleDef, _namespace: &str, typ: &TypeDef) -> String { + let name = &collect_case(Case::Pascal, typ.name.name_segments()); + match &module.typespace_for_generate()[typ.ty] { + AlgebraicTypeDef::Product(product) => autogen_rust_tuple(module, name, product), + AlgebraicTypeDef::Sum(sum_type) => autogen_rust_sum(module, name, sum_type), + AlgebraicTypeDef::PlainEnum(plain_enum) => autogen_rust_plain_enum(name, plain_enum), + } + } + + fn generate_reducer(&self, module: &ModuleDef, _namespace: &str, reducer: &ReducerDef) -> String { + autogen_rust_reducer(module, reducer) + } + + fn generate_globals(&self, module: &ModuleDef, _namespace: &str) -> Vec<(String, String)> { + autogen_rust_globals(module) + } +} + +fn collect_case<'a>(case: Case, segs: impl Iterator) -> String { + segs.map(|s| s.deref().to_case(case)).join(case.delim()) +} + +fn write_type(module: &ModuleDef, out: &mut Indenter, ty: &AlgebraicTypeUse) { + write_type_generic(module, out, ty).ok(); } -pub fn write_type(ctx: &impl Fn(AlgebraicTypeRef) -> String, out: &mut W, ty: &AlgebraicType) -> fmt::Result { +pub fn write_type_generic(module: 
&ModuleDef, out: &mut W, ty: &AlgebraicTypeUse) -> fmt::Result { match ty { - p if p.is_identity() => write!(out, "Identity")?, - p if p.is_address() => write!(out, "Address")?, - p if p.is_schedule_at() => write!(out, "ScheduleAt")?, - AlgebraicType::Sum(sum_type) => { - if let Some(inner_ty) = sum_type.as_option() { - write!(out, "Option::<")?; - write_type(ctx, out, inner_ty)?; - write!(out, ">")?; - } else { - write!(out, "enum ")?; - print_comma_sep_braced(out, &sum_type.variants, |out: &mut W, elem: &_| { - if let Some(name) = &elem.name { - write!(out, "{name}: ")?; - } - write_type(ctx, out, &elem.algebraic_type) - })?; - } - } - AlgebraicType::Product(ProductType { elements }) => { - print_comma_sep_braced(out, elements, |out: &mut W, elem: &ProductTypeElement| { - if let Some(name) = &elem.name { - write!(out, "{name}: ")?; - } - write_type(ctx, out, &elem.algebraic_type) - })?; + AlgebraicTypeUse::Unit => write!(out, "()")?, + AlgebraicTypeUse::Never => write!(out, "std::convert::Infallible")?, + AlgebraicTypeUse::Identity => write!(out, "Identity")?, + AlgebraicTypeUse::Address => write!(out, "Address")?, + AlgebraicTypeUse::ScheduleAt => write!(out, "ScheduleAt")?, + AlgebraicTypeUse::Option(inner_ty) => { + write!(out, "Option::<")?; + write_type_generic(module, out, inner_ty)?; + write!(out, ">")?; } - AlgebraicType::Bool => write!(out, "bool")?, - AlgebraicType::I8 => write!(out, "i8")?, - AlgebraicType::U8 => write!(out, "u8")?, - AlgebraicType::I16 => write!(out, "i16")?, - AlgebraicType::U16 => write!(out, "u16")?, - AlgebraicType::I32 => write!(out, "i32")?, - AlgebraicType::U32 => write!(out, "u32")?, - AlgebraicType::I64 => write!(out, "i64")?, - AlgebraicType::U64 => write!(out, "u64")?, - AlgebraicType::I128 => write!(out, "i128")?, - AlgebraicType::U128 => write!(out, "u128")?, - AlgebraicType::I256 => write!(out, "i256")?, - AlgebraicType::U256 => write!(out, "u256")?, - AlgebraicType::F32 => write!(out, "f32")?, - AlgebraicType::F64 => 
write!(out, "f64")?, - AlgebraicType::String => write!(out, "String")?, - AlgebraicType::Array(ArrayType { elem_ty }) => { + AlgebraicTypeUse::Primitive(prim) => match prim { + PrimitiveType::Bool => write!(out, "bool")?, + PrimitiveType::I8 => write!(out, "i8")?, + PrimitiveType::U8 => write!(out, "u8")?, + PrimitiveType::I16 => write!(out, "i16")?, + PrimitiveType::U16 => write!(out, "u16")?, + PrimitiveType::I32 => write!(out, "i32")?, + PrimitiveType::U32 => write!(out, "u32")?, + PrimitiveType::I64 => write!(out, "i64")?, + PrimitiveType::U64 => write!(out, "u64")?, + PrimitiveType::I128 => write!(out, "i128")?, + PrimitiveType::U128 => write!(out, "u128")?, + PrimitiveType::I256 => write!(out, "i256")?, + PrimitiveType::U256 => write!(out, "u256")?, + PrimitiveType::F32 => write!(out, "f32")?, + PrimitiveType::F64 => write!(out, "f64")?, + }, + AlgebraicTypeUse::String => write!(out, "String")?, + AlgebraicTypeUse::Array(elem_ty) => { write!(out, "Vec::<")?; - write_type(ctx, out, elem_ty)?; + write_type_generic(module, out, elem_ty)?; write!(out, ">")?; } - AlgebraicType::Map(ty) => { + AlgebraicTypeUse::Map { key, value } => { // TODO: Should `AlgebraicType::Map` translate to `HashMap`? This requires // that any map-key type implement `Hash`. We'll have to derive hash // on generated types, and notably, `HashMap` is not itself `Hash`, @@ -79,55 +109,24 @@ pub fn write_type(ctx: &impl Fn(AlgebraicTypeRef) -> String, out: &mut // This will require deriving `Ord` for generated types, // and is likely to be a big headache. 
write!(out, "HashMap::<")?; - write_type(ctx, out, &ty.key_ty)?; + write_type_generic(module, out, key)?; write!(out, ", ")?; - write_type(ctx, out, &ty.ty)?; + write_type_generic(module, out, value)?; write!(out, ">")?; } - AlgebraicType::Ref(r) => { - write!(out, "{}", ctx(*r))?; + AlgebraicTypeUse::Ref(r) => { + write!(out, "{}", type_name(module, *r))?; } } Ok(()) } -fn print_comma_sep_braced( - out: &mut W, - elems: &[T], - on: impl Fn(&mut W, &T) -> fmt::Result, -) -> fmt::Result { - write!(out, "{{")?; - - let mut iter = elems.iter(); - - // First factor. - if let Some(elem) = iter.next() { - write!(out, " ")?; - on(out, elem)?; - } - // Other factors. - for elem in iter { - write!(out, ", ")?; - on(out, elem)?; - } - - if !elems.is_empty() { - write!(out, " ")?; - } - - write!(out, "}}")?; - - Ok(()) -} - // This is (effectively) duplicated in [typescript.rs] as `typescript_typename` and in // [csharp.rs] as `csharp_typename`, and should probably be lifted to a shared utils // module. -fn type_name(ctx: &GenCtx, typeref: AlgebraicTypeRef) -> String { - ctx.names[typeref.idx()] - .as_deref() - .expect("TypeRefs should have names") - .to_case(Case::Pascal) +fn type_name(module: &ModuleDef, typeref: AlgebraicTypeRef) -> String { + let (name, _def) = module.type_def_from_ref(typeref).unwrap(); + collect_case(Case::Pascal, name.name_segments()) } fn print_lines(output: &mut Indenter, lines: &[&str]) { @@ -191,7 +190,7 @@ fn print_enum_derives(output: &mut Indenter) { } /// Generate a file which defines an `enum` corresponding to the `sum_type`. 
-pub fn autogen_rust_sum(ctx: &GenCtx, name: &str, sum_type: &SumType) -> String { +pub fn autogen_rust_sum(module: &ModuleDef, name: &str, sum_type: &SumTypeDef) -> String { let mut output = CodeIndenter::new(String::new()); let out = &mut output; @@ -207,7 +206,7 @@ pub fn autogen_rust_sum(ctx: &GenCtx, name: &str, sum_type: &SumType) -> String // For some reason, deref coercion doesn't work on `&sum_type.variants` here - rustc // wants to pass it as `&Vec<_>`, not `&[_]`. The slicing index `[..]` forces passing // as a slice. - gen_and_print_imports(ctx, out, &sum_type.variants[..], generate_imports_variants, this_file); + gen_and_print_imports(module, out, &sum_type.variants[..], this_file); out.newline(); @@ -218,8 +217,8 @@ pub fn autogen_rust_sum(ctx: &GenCtx, name: &str, sum_type: &SumType) -> String out.delimited_block( "{", |out| { - for variant in &*sum_type.variants { - write_enum_variant(ctx, out, variant); + for (name, ty) in &*sum_type.variants { + write_enum_variant(module, out, name, ty); out.newline(); } }, @@ -229,14 +228,11 @@ pub fn autogen_rust_sum(ctx: &GenCtx, name: &str, sum_type: &SumType) -> String output.into_inner() } -fn write_enum_variant(ctx: &GenCtx, out: &mut Indenter, variant: &SumTypeVariant) { - let Some(name) = &variant.name else { - panic!("Sum type variant has no name: {variant:?}"); - }; +fn write_enum_variant(module: &ModuleDef, out: &mut Indenter, name: &Identifier, ty: &AlgebraicTypeUse) { let name = name.deref().to_case(Case::Pascal); write!(out, "{name}"); - match &variant.algebraic_type { - AlgebraicType::Product(ProductType { elements }) if elements.is_empty() => { + match ty { + AlgebraicTypeUse::Unit => { // If the contained type is the unit type, i.e. this variant has no members, // write it without parens or braces, like // ``` @@ -248,97 +244,70 @@ fn write_enum_variant(ctx: &GenCtx, out: &mut Indenter, variant: &SumTypeVariant // If the contained type is not a product, i.e. 
this variant has a single // member, write it tuple-style, with parens. write!(out, "("); - write_type_ctx(ctx, out, otherwise); + write_type(module, out, otherwise); write!(out, "),"); } } } -fn write_struct_type_fields_in_braces( - ctx: &GenCtx, - out: &mut Indenter, - elements: &[ProductTypeElement], +/// Generate a file which defines an `enum` corresponding to the `sum_type`. +pub fn autogen_rust_plain_enum(name: &str, plain_enum: &PlainEnumTypeDef) -> String { + let mut output = CodeIndenter::new(String::new()); + let out = &mut output; - // Whether to print a `pub` qualifier on the fields. Necessary for `struct` defns, - // disallowed for `enum` defns. - pub_qualifier: bool, -) { - out.delimited_block( - "{", - |out| write_arglist_no_delimiters_ctx(ctx, out, elements, pub_qualifier.then_some("pub")), - "}", - ); -} + let sum_type_name = name.replace("r#", "").to_case(Case::Pascal); -fn write_arglist_no_delimiters_ctx( - ctx: &GenCtx, - out: &mut Indenter, - elements: &[ProductTypeElement], + print_file_header(out); + out.newline(); - // Written before each line. Useful for `pub`. - prefix: Option<&str>, -) { - write_arglist_no_delimiters(&|r| type_name(ctx, r), out, elements, prefix).unwrap() -} + print_enum_derives(out); -pub fn write_arglist_no_delimiters( - ctx: &impl Fn(AlgebraicTypeRef) -> String, - out: &mut impl Write, - elements: &[ProductTypeElement], - - // Written before each line. Useful for `pub`. - prefix: Option<&str>, -) -> fmt::Result { - for elt in elements { - if let Some(prefix) = prefix { - write!(out, "{prefix} ")?; - } + write!(out, "pub enum {sum_type_name} "); - let Some(name) = &elt.name else { - panic!("Product type element has no name: {elt:?}"); - }; - let name = name.deref().to_case(Case::Snake); + out.delimited_block( + "{", + |out| { + for name in &plain_enum.variants[..] 
{ + writeln!(out, "{name},"); + } + }, + "}\n", + ); - write!(out, "{name}: ")?; - write_type(ctx, out, &elt.algebraic_type)?; - writeln!(out, ",")?; - } - Ok(()) + output.into_inner() } /// Generate a file which defines a `struct` corresponding to the `product` type. -pub fn autogen_rust_tuple(ctx: &GenCtx, name: &str, product: &ProductType) -> String { +pub fn autogen_rust_tuple(module: &ModuleDef, name: &str, product: &ProductTypeDef) -> String { let mut output = CodeIndenter::new(String::new()); let out = &mut output; let type_name = name.to_case(Case::Pascal); - begin_rust_struct_def_shared(ctx, out, &type_name, &product.elements); + begin_rust_struct_def_shared(module, out, &type_name, product); output.into_inner() } -fn find_product_type(ctx: &GenCtx, ty: AlgebraicTypeRef) -> &ProductType { - ctx.typespace[ty].as_product().unwrap() -} - /// Generate a file which defines a `struct` corresponding to the `table`'s `ProductType`, /// and implements `spacetimedb_sdk::table::TableType` for it. 
-#[allow(deprecated)] -pub fn autogen_rust_table(ctx: &GenCtx, table: &TableDesc) -> String { +pub fn autogen_rust_table(module: &ModuleDef, table: &TableDef) -> String { let mut output = CodeIndenter::new(String::new()); let out = &mut output; - let type_name = type_name(ctx, table.data); + let type_name = type_name(module, table.product_type_ref); - begin_rust_struct_def_shared(ctx, out, &type_name, &find_product_type(ctx, table.data).elements); + let product_def = module.typespace_for_generate()[table.product_type_ref] + .as_product() + .unwrap(); + begin_rust_struct_def_shared(module, out, &type_name, product_def); out.newline(); - let table = TableSchema::from_def(0.into(), table.schema.clone()) + let table = TableSchema::from_module_def(table, 0.into()) .validated() .expect("Failed to generate table due to validation errors"); - print_impl_tabletype(ctx, out, &type_name, &table); + print_impl_tabletype(module, out, &type_name, product_def, &table); output.into_inner() } @@ -368,7 +337,7 @@ fn print_struct_derives(output: &mut Indenter) { print_lines(output, STRUCT_DERIVES); } -fn begin_rust_struct_def_shared(ctx: &GenCtx, out: &mut Indenter, name: &str, elements: &[ProductTypeElement]) { +fn begin_rust_struct_def_shared(module: &ModuleDef, out: &mut Indenter, name: &str, def: &ProductTypeDef) { print_file_header(out); // Pass this file into `gen_and_print_imports` to avoid recursively importing self @@ -379,7 +348,7 @@ fn begin_rust_struct_def_shared(ctx: &GenCtx, out: &mut Indenter, name: &str, el let file_name = name.to_case(Case::Snake); let this_file = (file_name.as_str(), name); - gen_and_print_imports(ctx, out, elements, generate_imports_elements, this_file); + gen_and_print_imports(module, out, &def.elements, this_file); out.newline(); @@ -388,9 +357,16 @@ fn begin_rust_struct_def_shared(ctx: &GenCtx, out: &mut Indenter, name: &str, el write!(out, "pub struct {name} "); // TODO: if elements is empty, define a unit struct with no brace-delimited list of 
fields. - write_struct_type_fields_in_braces( - ctx, out, elements, // `pub`-qualify fields. - true, + out.delimited_block( + "{", + |out| { + for (name, ty) in def { + write!(out, "pub {}: ", name.deref().to_case(Case::Snake)); + write_type(module, out, ty); + writeln!(out, ","); + } + }, + "}", ); out.newline(); @@ -400,7 +376,13 @@ fn find_primary_key_column_index(table: &TableSchema) -> Option { table.pk().map(|x| x.col_pos.idx()) } -fn print_impl_tabletype(ctx: &GenCtx, out: &mut Indenter, type_name: &str, table: &TableSchema) { +fn print_impl_tabletype( + module: &ModuleDef, + out: &mut Indenter, + type_name: &str, + product_def: &ProductTypeDef, + table: &TableSchema, +) { write!(out, "impl TableType for {type_name} "); out.delimited_block( @@ -422,7 +404,7 @@ fn print_impl_tabletype(ctx: &GenCtx, out: &mut Indenter, type_name: &str, table "{", |out| { write!(out, "type PrimaryKey = "); - write_type_ctx(ctx, out, &pk_field.col_type); + write_type(module, out, &product_def.elements[pk_field.col_pos.idx()].1); writeln!(out, ";"); out.delimited_block( @@ -437,10 +419,16 @@ fn print_impl_tabletype(ctx: &GenCtx, out: &mut Indenter, type_name: &str, table out.newline(); - print_table_filter_methods(ctx, out, type_name, table); + print_table_filter_methods(module, out, type_name, product_def, table); } -fn print_table_filter_methods(ctx: &GenCtx, out: &mut Indenter, table_type_name: &str, table: &TableSchema) { +fn print_table_filter_methods( + module: &ModuleDef, + out: &mut Indenter, + table_type_name: &str, + product_def: &ProductTypeDef, + table: &TableSchema, +) { write!(out, "impl {table_type_name} "); let constraints = table.column_constraints(); out.delimited_block( @@ -448,15 +436,12 @@ fn print_table_filter_methods(ctx: &GenCtx, out: &mut Indenter, table_type_name: |out| { for field in table.columns() { let field_name = field.col_name.deref().to_case(Case::Snake); - match &field.col_type { - AlgebraicType::Product(prod) if prod.is_special() => {} - 
AlgebraicType::Product(_) - | AlgebraicType::Ref(_) - | AlgebraicType::Sum(_) - | AlgebraicType::Array(_) - | AlgebraicType::Map(_) => { - continue; - } + let col_ty = &product_def.elements[field.col_pos.idx()].1; + match col_ty { + AlgebraicTypeUse::Ref(_) + | AlgebraicTypeUse::Array(_) + | AlgebraicTypeUse::Map { .. } + | AlgebraicTypeUse::Option(_) => continue, _ => {} } writeln!(out, "{ALLOW_UNUSED}"); @@ -466,7 +451,7 @@ fn print_table_filter_methods(ctx: &GenCtx, out: &mut Indenter, table_type_name: // fields should take &[T]. Determine if integer types should be by // value. Is there a trait for this? // Look at `Borrow` or Deref or AsRef? - write_type_ctx(ctx, out, &field.col_type); + write_type(module, out, col_ty); write!(out, ") -> "); let ct = constraints[&ColList::new(field.col_pos)]; @@ -487,7 +472,7 @@ fn print_table_filter_methods(ctx: &GenCtx, out: &mut Indenter, table_type_name: if ct.has_unique() { writeln!(out, "{ALLOW_UNUSED}"); write!(out, "pub fn find_by_{field_name}({field_name}: "); - write_type_ctx(ctx, out, &field.col_type); + write_type(module, out, col_ty); write!(out, ") -> Option "); out.delimited_block( "{", @@ -521,26 +506,14 @@ fn reducer_function_name(reducer: &ReducerDef) -> String { reducer.name.deref().to_case(Case::Snake) } -fn iter_reducer_arg_names(reducer: &ReducerDef) -> impl Iterator> + '_ { - reducer - .args - .iter() - .map(|elt| elt.name.as_ref().map(|name| name.deref().to_case(Case::Snake))) -} - -fn iter_reducer_arg_types(reducer: &'_ ReducerDef) -> impl Iterator { - reducer.args.iter().map(|elt| &elt.algebraic_type) -} - fn print_reducer_struct_literal(out: &mut Indenter, reducer: &ReducerDef) { write!(out, "{} ", reducer_type_name(reducer)); // TODO: if reducer.args is empty, write a unit struct. 
out.delimited_block( "{", |out| { - for arg_name in iter_reducer_arg_names(reducer) { - let name = arg_name.unwrap(); - writeln!(out, "{name},"); + for (name, _) in &reducer.params_for_generate { + writeln!(out, "{},", name.deref().to_case(Case::Snake)); } }, "}", @@ -550,14 +523,14 @@ fn print_reducer_struct_literal(out: &mut Indenter, reducer: &ReducerDef) { /// Generate a file which defines a struct corresponding to the `reducer`'s arguments, /// implements `spacetimedb_sdk::table::Reducer` for it, and defines a helper /// function which invokes the reducer. -pub fn autogen_rust_reducer(ctx: &GenCtx, reducer: &ReducerDef) -> String { +pub fn autogen_rust_reducer(module: &ModuleDef, reducer: &ReducerDef) -> String { let func_name = reducer_function_name(reducer); let type_name = reducer_type_name(reducer); let mut output = CodeIndenter::new(String::new()); let out = &mut output; - begin_rust_struct_def_shared(ctx, out, &type_name, &reducer.args); + begin_rust_struct_def_shared(module, out, &type_name, &reducer.params_for_generate); out.newline(); @@ -580,7 +553,13 @@ pub fn autogen_rust_reducer(ctx: &GenCtx, reducer: &ReducerDef) -> String { // TODO: if reducer.args is empty, just write "()" with no newlines out.delimited_block( "(", - |out| write_arglist_no_delimiters_ctx(ctx, out, &reducer.args, None), + |out| { + for (name, ty) in &reducer.params_for_generate { + write!(out, "{}: ", name.deref().to_case(Case::Snake)); + write_type(module, out, ty); + writeln!(out, ","); + } + }, ") ", ); @@ -596,91 +575,54 @@ pub fn autogen_rust_reducer(ctx: &GenCtx, reducer: &ReducerDef) -> String { out.newline(); - // Function definition for convenient callback function, - // which takes a closure fromunpacked args, - // and wraps it in a closure from the args struct. - writeln!(out, "{ALLOW_UNUSED}"); - write!( - out, - "pub fn on_{func_name}(mut __callback: impl FnMut(&Identity, Option
, &Status" - ); - for arg_type in iter_reducer_arg_types(reducer) { - write!(out, ", &"); - write_type_ctx(ctx, out, arg_type); - } - writeln!(out, ") + Send + 'static) -> ReducerCallbackId<{type_name}> "); - out.delimited_block( - "{", - |out| { - write!(out, "{type_name}"); - out.delimited_block( - "::on_reducer(move |__identity, __addr, __status, __args| {", - |out| { - write!(out, "let "); - print_reducer_struct_literal(out, reducer); - writeln!(out, " = __args;"); - out.delimited_block( - "__callback(", - |out| { - writeln!(out, "__identity,"); - writeln!(out, "__addr,"); - writeln!(out, "__status,"); - for arg_name in iter_reducer_arg_names(reducer) { - writeln!(out, "{},", arg_name.unwrap()); - } - }, - ");\n", - ); - }, - "})\n", - ); - }, - "}\n", - ); - - out.newline(); + let mut on_func = |on_prefix, mut_, fn_kind| { + // Function definition for convenient callback function, + // which takes a closure fromunpacked args, + // and wraps it in a closure from the args struct. + writeln!(out, "{ALLOW_UNUSED}"); + write!( + out, + "pub fn {on_prefix}_{func_name}({mut_}__callback: impl {fn_kind}(&Identity, Option
, &Status" + ); + for (_, arg_type) in &reducer.params_for_generate { + write!(out, ", &"); + write_type(module, out, arg_type); + } + writeln!(out, ") + Send + 'static) -> ReducerCallbackId<{type_name}> "); + out.delimited_block( + "{", + |out| { + write!(out, "{type_name}::{on_prefix}_reducer"); + out.delimited_block( + "(move |__identity, __addr, __status, __args| {", + |out| { + write!(out, "let "); + print_reducer_struct_literal(out, reducer); + writeln!(out, " = __args;"); + out.delimited_block( + "__callback(", + |out| { + writeln!(out, "__identity,"); + writeln!(out, "__addr,"); + writeln!(out, "__status,"); + for (arg_name, _) in &reducer.params_for_generate { + writeln!(out, "{},", arg_name.deref().to_case(Case::Snake)); + } + }, + ");\n", + ); + }, + "})\n", + ); + }, + "}\n", + ); - // Function definition for convenient once_on callback function. - writeln!(out, "{ALLOW_UNUSED}"); - write!( - out, - "pub fn once_on_{func_name}(__callback: impl FnOnce(&Identity, Option
, &Status" - ); - for arg_type in iter_reducer_arg_types(reducer) { - write!(out, ", &"); - write_type_ctx(ctx, out, arg_type); - } - writeln!(out, ") + Send + 'static) -> ReducerCallbackId<{type_name}> "); - out.delimited_block( - "{", - |out| { - write!(out, "{type_name}"); - out.delimited_block( - "::once_on_reducer(move |__identity, __addr, __status, __args| {", - |out| { - write!(out, "let "); - print_reducer_struct_literal(out, reducer); - writeln!(out, " = __args;"); - out.delimited_block( - "__callback(", - |out| { - writeln!(out, "__identity,"); - writeln!(out, "__addr,"); - writeln!(out, "__status,"); - for arg_name in iter_reducer_arg_names(reducer) { - writeln!(out, "{},", arg_name.unwrap()); - } - }, - ");\n", - ); - }, - "})\n", - ) - }, - "}\n", - ); + out.newline(); + }; - out.newline(); + on_func("on", "mut ", "FnMut"); + on_func("once_on", "", "FnOnce"); // Function definition for callback-canceling `remove_on_{reducer}` function. writeln!(out, "{ALLOW_UNUSED}"); @@ -717,7 +659,7 @@ pub fn autogen_rust_reducer(ctx: &GenCtx, reducer: &ReducerDef) -> String { /// to connect to a remote database, and passes the `handle_row_update` /// and `handle_event` functions so the `BackgroundDbConnection` can spawn workers /// which use those functions to dispatch on the content of messages. -pub fn autogen_rust_globals(ctx: &GenCtx, items: &[GenItem]) -> Vec<(String, String)> { +pub fn autogen_rust_globals(module: &ModuleDef) -> Vec<(String, String)> { let mut output = CodeIndenter::new(String::new()); let out = &mut output; @@ -729,21 +671,21 @@ pub fn autogen_rust_globals(ctx: &GenCtx, items: &[GenItem]) -> Vec<(String, Str out.newline(); // Declare `pub mod` for each of the files generated. - print_module_decls(ctx, out, items); + print_module_decls(module, out); out.newline(); // Re-export all the modules for the generated files. - print_module_reexports(ctx, out, items); + print_module_reexports(module, out); out.newline(); // Define `enum ReducerEvent`. 
- print_reducer_event_defn(out, items); + print_reducer_event_defn(module, out); out.newline(); - print_spacetime_module_struct_defn(ctx, out, items); + print_spacetime_module_struct_defn(module, out); out.newline(); @@ -769,38 +711,38 @@ fn print_dispatch_imports(out: &mut Indenter) { print_lines(out, DISPATCH_IMPORTS); } -fn iter_reducer_items(items: &[GenItem]) -> impl Iterator { - items.iter().filter_map(|item| match item { - GenItem::Reducer(reducer) => Some(reducer), - _ => None, - }) +fn iter_module_names(module: &ModuleDef) -> impl Iterator + '_ { + dbg!(module.types().map(|ty| (&ty.name, ty.ty)).collect::>()); + itertools::chain!( + module + .types() + .sorted_by_key(|ty| &ty.name) + .map(|ty| collect_case(Case::Snake, ty.name.name_segments())), + iter_reducers(module).map(reducer_module_name), + ) } -fn iter_table_items(items: &[GenItem]) -> impl Iterator { - items.iter().filter_map(|item| match item { - GenItem::Table(table) => Some(table), - _ => None, - }) +fn iter_tables(module: &ModuleDef) -> impl Iterator { + module.tables().sorted_by_key(|tbl| &tbl.name) } -fn iter_module_names<'a>(ctx: &'a GenCtx, items: &'a [GenItem]) -> impl Iterator + 'a { - items.iter().map(|item| match item { - GenItem::Table(table) => type_name(ctx, table.data).to_case(Case::Snake), - GenItem::TypeAlias(ty) => ty.name.to_case(Case::Snake), - GenItem::Reducer(reducer) => reducer_module_name(reducer), - }) +fn iter_reducers(module: &ModuleDef) -> impl Iterator { + module + .reducers() + .filter(|r| r.lifecycle.is_none()) + .sorted_by_key(|r| &r.name) } /// Print `pub mod` declarations for all the files that will be generated for `items`. 
-fn print_module_decls(ctx: &GenCtx, out: &mut Indenter, items: &[GenItem]) { - for module_name in iter_module_names(ctx, items) { +fn print_module_decls(module: &ModuleDef, out: &mut Indenter) { + for module_name in iter_module_names(module) { writeln!(out, "pub mod {module_name};"); } } /// Print `pub use *` declarations for all the files that will be generated for `items`. -fn print_module_reexports(ctx: &GenCtx, out: &mut Indenter, items: &[GenItem]) { - for module_name in iter_module_names(ctx, items) { +fn print_module_reexports(module: &ModuleDef, out: &mut Indenter) { + for module_name in iter_module_names(module) { writeln!(out, "pub use {module_name}::*;"); } } @@ -825,7 +767,7 @@ fn print_module_reexports(ctx: &GenCtx, out: &mut Indenter, items: &[GenItem]) { /// /// - `handle_event`, which serves the same role as `handle_table_update`, but for /// reducers. -fn print_spacetime_module_struct_defn(ctx: &GenCtx, out: &mut Indenter, items: &[GenItem]) { +fn print_spacetime_module_struct_defn(module: &ModuleDef, out: &mut Indenter) { // Muffle unused warning for `Module`, which is not supposed to be visible to // users. It will be used if and only if `connect` is used, so that unused warning is // sufficient, and not as confusing. 
@@ -834,10 +776,10 @@ fn print_spacetime_module_struct_defn(ctx: &GenCtx, out: &mut Indenter, items: & out.delimited_block( "impl SpacetimeModule for Module {", |out| { - print_handle_table_update_defn(ctx, out, items); - print_invoke_row_callbacks_defn(ctx, out, items); - print_handle_event_defn(out, items); - print_handle_resubscribe_defn(ctx, out, items); + print_handle_table_update_defn(module, out); + print_invoke_row_callbacks_defn(module, out); + print_handle_event_defn(module, out); + print_handle_resubscribe_defn(module, out); }, "}\n", ); @@ -847,7 +789,7 @@ fn print_spacetime_module_struct_defn(ctx: &GenCtx, out: &mut Indenter, items: & /// which dispatches on the table name in a `TableUpdate` message /// to call an appropriate method on the `ClientCache`. #[allow(deprecated)] -fn print_handle_table_update_defn(ctx: &GenCtx, out: &mut Indenter, items: &[GenItem]) { +fn print_handle_table_update_defn(module: &ModuleDef, out: &mut Indenter) { out.delimited_block( "fn handle_table_update(&self, table_update: TableUpdate, client_cache: &mut ClientCache, callbacks: &mut RowCallbackReminders) {", |out| { @@ -855,8 +797,8 @@ fn print_handle_table_update_defn(ctx: &GenCtx, out: &mut Indenter, items: &[Gen out.delimited_block( "match table_name {", |out| { - for table_desc in iter_table_items(items) { - let table = TableSchema::from_def(0.into(), table_desc.schema.clone()).validated().unwrap(); + for table_desc in iter_tables(module) { + let table = TableSchema::from_module_def(table_desc, 0.into()).validated().unwrap(); writeln!( out, "{:?} => client_cache.{}::<{}::{}>(callbacks, table_update),", @@ -866,8 +808,8 @@ fn print_handle_table_update_defn(ctx: &GenCtx, out: &mut Indenter, items: &[Gen } else { "handle_table_update_no_primary_key" }, - type_name(ctx, table_desc.data).to_case(Case::Snake), - type_name(ctx, table_desc.data).to_case(Case::Pascal), + type_name(module, table_desc.product_type_ref).to_case(Case::Snake), + type_name(module, 
table_desc.product_type_ref).to_case(Case::Pascal), ); } writeln!( @@ -884,16 +826,16 @@ fn print_handle_table_update_defn(ctx: &GenCtx, out: &mut Indenter, items: &[Gen /// Define the `invoke_row_callbacks` function, /// which does `RowCallbackReminders::invoke_callbacks` on each table type defined in the `items`. -fn print_invoke_row_callbacks_defn(ctx: &GenCtx, out: &mut Indenter, items: &[GenItem]) { +fn print_invoke_row_callbacks_defn(module: &ModuleDef, out: &mut Indenter) { out.delimited_block( "fn invoke_row_callbacks(&self, reminders: &mut RowCallbackReminders, worker: &mut DbCallbacks, reducer_event: Option>, state: &Arc) {", |out| { - for table in iter_table_items(items) { + for table in iter_tables(module) { writeln!( out, "reminders.invoke_callbacks::<{}::{}>(worker, &reducer_event, state);", - type_name(ctx, table.data).to_case(Case::Snake), - type_name(ctx, table.data).to_case(Case::Pascal), + type_name(module, table.product_type_ref).to_case(Case::Snake), + type_name(module, table.product_type_ref).to_case(Case::Pascal), ); } }, @@ -904,7 +846,7 @@ fn print_invoke_row_callbacks_defn(ctx: &GenCtx, out: &mut Indenter, items: &[Ge /// Define the `handle_resubscribe` function, /// which dispatches on the table name in a `TableUpdate` /// to invoke `ClientCache::handle_resubscribe_for_type` with an appropriate type arg. 
-fn print_handle_resubscribe_defn(ctx: &GenCtx, out: &mut Indenter, items: &[GenItem]) { +fn print_handle_resubscribe_defn(module: &ModuleDef, out: &mut Indenter) { out.delimited_block( "fn handle_resubscribe(&self, new_subs: TableUpdate, client_cache: &mut ClientCache, callbacks: &mut RowCallbackReminders) {", |out| { @@ -912,13 +854,13 @@ fn print_handle_resubscribe_defn(ctx: &GenCtx, out: &mut Indenter, items: &[GenI out.delimited_block( "match table_name {", |out| { - for table in iter_table_items(items) { + for table in iter_tables(module) { writeln!( out, "{:?} => client_cache.handle_resubscribe_for_type::<{}::{}>(callbacks, new_subs),", - table.schema.table_name, - type_name(ctx, table.data).to_case(Case::Snake), - type_name(ctx, table.data).to_case(Case::Pascal), + table.name, + type_name(module, table.product_type_ref).to_case(Case::Snake), + type_name(module, table.product_type_ref).to_case(Case::Pascal), ); } writeln!( @@ -936,7 +878,7 @@ fn print_handle_resubscribe_defn(ctx: &GenCtx, out: &mut Indenter, items: &[GenI /// Define the `handle_event` function, /// which dispatches on the reducer name in an `Event` /// to `ReducerCallbacks::handle_event_of_type` with an appropriate type argument. -fn print_handle_event_defn(out: &mut Indenter, items: &[GenItem]) { +fn print_handle_event_defn(module: &ModuleDef, out: &mut Indenter) { out.delimited_block( "fn handle_event(&self, event: TransactionUpdate, _reducer_callbacks: &mut ReducerCallbacks, _state: Arc) -> Option> {", |out| { @@ -952,7 +894,7 @@ fn print_handle_event_defn(out: &mut Indenter, items: &[GenItem]) { out.delimited_block( "match &reducer_call.reducer_name[..] 
{", |out| { - for reducer in iter_reducer_items(items) { + for reducer in iter_reducers(module) { writeln!( out, "{:?} => _reducer_callbacks.handle_event_of_type::<{}::{}, ReducerEvent>(event, _state, ReducerEvent::{}),", @@ -1011,14 +953,14 @@ where ); } -fn print_reducer_event_defn(out: &mut Indenter, items: &[GenItem]) { +fn print_reducer_event_defn(module: &ModuleDef, out: &mut Indenter) { writeln!(out, "{ALLOW_UNUSED}"); print_enum_derives(out); out.delimited_block( "pub enum ReducerEvent {", |out| { - for reducer in iter_reducer_items(items) { + for reducer in iter_reducers(module) { writeln!( out, "{}({}::{}),", @@ -1032,42 +974,10 @@ fn print_reducer_event_defn(out: &mut Indenter, items: &[GenItem]) { ); } -fn generate_imports_variants(ctx: &GenCtx, imports: &mut Imports, variants: &[SumTypeVariant]) { - for variant in variants { - generate_imports(ctx, imports, &variant.algebraic_type); - } -} - -fn generate_imports_elements(ctx: &GenCtx, imports: &mut Imports, elements: &[ProductTypeElement]) { - for element in elements { - generate_imports(ctx, imports, &element.algebraic_type); - } -} - fn module_name(name: &str) -> String { name.to_case(Case::Snake) } -fn generate_imports(ctx: &GenCtx, imports: &mut Imports, ty: &AlgebraicType) { - match ty { - AlgebraicType::Array(ArrayType { elem_ty }) => generate_imports(ctx, imports, elem_ty), - AlgebraicType::Map(map_type) => { - generate_imports(ctx, imports, &map_type.key_ty); - generate_imports(ctx, imports, &map_type.ty); - } - AlgebraicType::Ref(r) => { - let type_name = type_name(ctx, *r); - let module_name = module_name(&type_name); - imports.insert((module_name, type_name)); - } - // Recurse into variants of anonymous sum types, e.g. for `Option`, import `T`. - AlgebraicType::Sum(s) => generate_imports_variants(ctx, imports, &s.variants), - // Products, scalars, and strings. - // Do we need to generate imports for fields of anonymous product types? 
- _ => {} - } -} - /// Print `use super::` imports for each of the `imports`, except `this_file`. /// /// `this_file` is passed and excluded for the case of recursive types: @@ -1086,17 +996,20 @@ fn print_imports(out: &mut Indenter, imports: Imports, this_file: (&str, &str)) /// `this_file` is passed and excluded for the case of recursive types: /// without it, the definition for a type like `struct Foo { foos: Vec }` /// would attempt to include `import super::foo::Foo`, which fails to compile. -fn gen_and_print_imports( - ctx: &GenCtx, +fn gen_and_print_imports( + module: &ModuleDef, out: &mut Indenter, - roots: Roots, - search_fn: SearchFn, + roots: &[(Identifier, AlgebraicTypeUse)], this_file: (&str, &str), -) where - SearchFn: FnOnce(&GenCtx, &mut Imports, Roots), -{ +) { let mut imports = BTreeSet::new(); - search_fn(ctx, &mut imports, roots); + for (_, ty) in roots { + ty.for_each_ref(|r| { + let type_name = type_name(module, r); + let module_name = module_name(&type_name); + imports.insert((module_name, type_name)); + }); + } print_imports(out, imports, this_file); } diff --git a/crates/cli/tests/snapshots/codegen__codegen_rust.snap b/crates/cli/tests/snapshots/codegen__codegen_rust.snap index dc85f775c1..1d45c242ca 100644 --- a/crates/cli/tests/snapshots/codegen__codegen_rust.snap +++ b/crates/cli/tests/snapshots/codegen__codegen_rust.snap @@ -337,17 +337,17 @@ use spacetimedb_sdk::global_connection::with_connection_mut; use spacetimedb_sdk::spacetime_module::SpacetimeModule; use std::sync::Arc; -pub mod test_b; -pub mod namespace.test_c; -pub mod namespace.test_f; pub mod has_special_stuff; pub mod pk_multi_identity; pub mod point; pub mod private; pub mod repeating_test_arg; pub mod test_a; +pub mod test_b; pub mod test_d; pub mod test_e; +pub mod namespace_test_c; +pub mod namespace_test_f; pub mod add_player_reducer; pub mod add_private_reducer; pub mod delete_player_reducer; @@ -356,17 +356,17 @@ pub mod query_private_reducer; pub mod 
repeating_test_reducer; pub mod test_reducer; -pub use test_b::*; -pub use namespace.test_c::*; -pub use namespace.test_f::*; pub use has_special_stuff::*; pub use pk_multi_identity::*; pub use point::*; pub use private::*; pub use repeating_test_arg::*; pub use test_a::*; +pub use test_b::*; pub use test_d::*; pub use test_e::*; +pub use namespace_test_c::*; +pub use namespace_test_f::*; pub use add_player_reducer::*; pub use add_private_reducer::*; pub use delete_player_reducer::*; @@ -475,11 +475,9 @@ where }; #[derive(Serialize, Deserialize, Clone, PartialEq, Debug)] -pub enum Namespace.testC { +pub enum NamespaceTestC { Foo, - Bar, - } ''' "namespace_test_f.rs" = ''' @@ -497,7 +495,7 @@ pub enum Namespace.testC { }; #[derive(Serialize, Deserialize, Clone, PartialEq, Debug)] -pub enum Namespace.testF { +pub enum NamespaceTestF { Foo, Bar, @@ -732,6 +730,10 @@ impl RepeatingTestArg { #[allow(unused)] pub fn find_by_scheduled_id(scheduled_id: u64) -> Option { Self::find(|row| row.scheduled_id == scheduled_id) +} + #[allow(unused)] + pub fn filter_by_scheduled_at(scheduled_at: ScheduleAt) -> TableIter { + Self::filter(|row| row.scheduled_at == scheduled_at) } } ''' @@ -879,11 +881,11 @@ pub struct TestB { spacetimedb_lib, anyhow::{Result, anyhow}, }; -use super::namespace.test_c::Namespace.testC; +use super::namespace_test_c::NamespaceTestC; #[derive(Serialize, Deserialize, Clone, PartialEq, Debug)] pub struct TestD { - pub test_c: Option::, + pub test_c: Option::, } impl TableType for TestD { @@ -955,8 +957,8 @@ impl TestE { spacetimedb_lib, anyhow::{Result, anyhow}, }; -use super::namespace.test_c::Namespace.testC; -use super::namespace.test_f::Namespace.testF; +use super::namespace_test_c::NamespaceTestC; +use super::namespace_test_f::NamespaceTestF; use super::test_a::TestA; use super::test_b::TestB; @@ -964,8 +966,8 @@ use super::test_b::TestB; pub struct TestArgs { pub arg: TestA, pub arg_2: TestB, - pub arg_3: Namespace.testC, - pub arg_4: Namespace.testF, 
+ pub arg_3: NamespaceTestC, + pub arg_4: NamespaceTestF, } impl Reducer for TestArgs { @@ -976,8 +978,8 @@ impl Reducer for TestArgs { pub fn test( arg: TestA, arg_2: TestB, - arg_3: Namespace.testC, - arg_4: Namespace.testF, + arg_3: NamespaceTestC, + arg_4: NamespaceTestF, ) { TestArgs { arg, @@ -988,7 +990,7 @@ pub fn test( } #[allow(unused)] -pub fn on_test(mut __callback: impl FnMut(&Identity, Option
, &Status, &TestA, &TestB, &Namespace.testC, &Namespace.testF) + Send + 'static) -> ReducerCallbackId +pub fn on_test(mut __callback: impl FnMut(&Identity, Option
, &Status, &TestA, &TestB, &NamespaceTestC, &NamespaceTestF) + Send + 'static) -> ReducerCallbackId { TestArgs::on_reducer(move |__identity, __addr, __status, __args| { let TestArgs { @@ -1010,7 +1012,7 @@ __callback( } #[allow(unused)] -pub fn once_on_test(__callback: impl FnOnce(&Identity, Option
, &Status, &TestA, &TestB, &Namespace.testC, &Namespace.testF) + Send + 'static) -> ReducerCallbackId +pub fn once_on_test(__callback: impl FnOnce(&Identity, Option
, &Status, &TestA, &TestB, &NamespaceTestC, &NamespaceTestF) + Send + 'static) -> ReducerCallbackId { TestArgs::once_on_reducer(move |__identity, __addr, __status, __args| { let TestArgs { diff --git a/crates/client-api-messages/examples/get_ws_schema.rs b/crates/client-api-messages/examples/get_ws_schema.rs index dede6e0ce2..7b50e80f94 100644 --- a/crates/client-api-messages/examples/get_ws_schema.rs +++ b/crates/client-api-messages/examples/get_ws_schema.rs @@ -1,12 +1,13 @@ use spacetimedb_client_api_messages::websocket::{ClientMessage, ServerMessage}; use spacetimedb_lib::ser::serde::SerializeWrapper; -use spacetimedb_lib::RawModuleDefV8; +use spacetimedb_lib::{RawModuleDef, RawModuleDefV8}; fn main() -> Result<(), serde_json::Error> { let module = RawModuleDefV8::with_builder(|module| { module.add_type::(); module.add_type::(); }); + let module = RawModuleDef::V8BackCompat(module); serde_json::to_writer(std::io::stdout().lock(), SerializeWrapper::from_ref(&module)) } diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 886b96dd19..635753c713 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -85,6 +85,7 @@ sqlparser.workspace = true strum.workspace = true tempfile.workspace = true thiserror.workspace = true +thin-vec.workspace = true tokio-util.workspace = true tokio.workspace = true tokio-stream = "0.1" diff --git a/crates/core/src/db/datastore/locking_tx_datastore/committed_state.rs b/crates/core/src/db/datastore/locking_tx_datastore/committed_state.rs index 1f2a68935d..8f67112831 100644 --- a/crates/core/src/db/datastore/locking_tx_datastore/committed_state.rs +++ b/crates/core/src/db/datastore/locking_tx_datastore/committed_state.rs @@ -2,7 +2,7 @@ use super::{ datastore::Result, sequence::{Sequence, SequencesState}, state_view::{Iter, IterByColRange, ScanIterByColRange, StateView}, - tx_state::{DeleteTable, TxState}, + tx_state::{DeleteTable, IndexIdMap, TxState}, }; use crate::{ db::{ @@ -29,10 +29,9 @@ use 
spacetimedb_lib::{ address::Address, db::auth::{StAccess, StTableType}, }; -use spacetimedb_primitives::{ColList, TableId}; +use spacetimedb_primitives::{ColList, IndexId, TableId}; use spacetimedb_sats::{AlgebraicValue, ProductValue}; use spacetimedb_schema::schema::TableSchema; - use spacetimedb_table::{ blob_store::{BlobStore, HashMapBlobStore}, indexes::{RowPointer, SquashedOffset}, @@ -51,6 +50,8 @@ pub struct CommittedState { pub(crate) next_tx_offset: u64, pub(crate) tables: IntMap, pub(crate) blob_store: HashMapBlobStore, + /// Provides fast lookup for index id -> an index. + pub(super) index_id_map: IndexIdMap, } impl StateView for CommittedState { @@ -331,7 +332,9 @@ impl CommittedState { panic!("Cannot create index for table which doesn't exist in committed state"); }; let index = table.new_index(index_row.index_id, &index_row.columns, index_row.is_unique)?; - table.insert_index(blob_store, index_row.columns, index); + table.insert_index(blob_store, index_row.columns.clone(), index); + self.index_id_map + .insert(index_row.index_id, (index_row.table_id, index_row.columns)); } Ok(()) } @@ -443,9 +446,11 @@ impl CommittedState { // Then, apply inserts. This will re-fill the holes freed by deletions // before allocating new pages. - self.merge_apply_inserts(&mut tx_data, tx_state.insert_tables, tx_state.blob_store); + // Merge index id fast-lookup map changes. + self.merge_index_map(tx_state.index_id_map, &tx_state.index_id_map_removals); + // If the TX will be logged, record its projected tx offset, // then increment the counter. 
if self.tx_consumes_offset(&tx_data, ctx) { @@ -537,6 +542,13 @@ impl CommittedState { } } + fn merge_index_map(&mut self, index_id_map: IndexIdMap, index_id_map_removals: &[IndexId]) { + for index_id in index_id_map_removals { + self.index_id_map.remove(index_id); + } + self.index_id_map.extend(index_id_map); + } + pub(super) fn get_table(&self, table_id: TableId) -> Option<&Table> { self.tables.get(&table_id) } diff --git a/crates/core/src/db/datastore/locking_tx_datastore/mut_tx.rs b/crates/core/src/db/datastore/locking_tx_datastore/mut_tx.rs index 0bb17142e9..03d02c7824 100644 --- a/crates/core/src/db/datastore/locking_tx_datastore/mut_tx.rs +++ b/crates/core/src/db/datastore/locking_tx_datastore/mut_tx.rs @@ -4,7 +4,7 @@ use super::{ sequence::{Sequence, SequencesState}, state_view::{IndexSeekIterMutTxId, Iter, IterByColRange, ScanIterByColRange, StateView}, tx::TxId, - tx_state::TxState, + tx_state::{IndexIdMap, TxState}, SharedMutexGuard, SharedWriteGuard, }; use crate::db::datastore::{ @@ -20,17 +20,24 @@ use crate::{ execution_context::ExecutionContext, }; use core::ops::RangeBounds; +use core::{iter, ops::Bound}; use smallvec::SmallVec; use spacetimedb_lib::{ address::Address, + bsatn::Deserializer, db::{ auth::StAccess, error::SchemaErrors, raw_def::{RawConstraintDefV8, RawIndexDefV8, RawSequenceDefV8, RawTableDefV8, SEQUENCE_ALLOCATION_STEP}, }, + de::DeserializeSeed, }; use spacetimedb_primitives::{ColId, ColList, ConstraintId, Constraints, IndexId, SequenceId, TableId}; -use spacetimedb_sats::{AlgebraicValue, ProductType, ProductValue}; +use spacetimedb_sats::{ + bsatn::{self, DecodeError}, + de::WithBound, + AlgebraicType, AlgebraicValue, ProductType, ProductValue, WithTypespace, +}; use spacetimedb_schema::schema::{ConstraintSchema, IndexSchema, SequenceSchema, TableSchema}; use spacetimedb_table::{ blob_store::{BlobStore, HashMapBlobStore}, @@ -42,6 +49,8 @@ use std::{ time::{Duration, Instant}, }; +type DecodeResult = core::result::Result; + /// 
Represents a Mutable transaction. Holds locks for its duration /// /// The initialization of this struct is sensitive because improper @@ -285,17 +294,32 @@ impl MutTxId { } /// Retrieves or creates the insert tx table for `table_id`. + #[allow(clippy::type_complexity)] fn get_or_create_insert_table_mut( &mut self, table_id: TableId, - ) -> Result<(&mut Table, &mut dyn BlobStore, Option<&Table>, &HashMapBlobStore)> { + ) -> Result<( + &mut Table, + &mut dyn BlobStore, + &mut IndexIdMap, + Option<&Table>, + &HashMapBlobStore, + )> { let commit_table = self.committed_state_write_lock.get_table(table_id); // Get the insert table, so we can write the row into it. self.tx_state .get_table_and_blob_store_or_maybe_create_from(table_id, commit_table) .ok_or_else(|| TableError::IdNotFoundState(table_id).into()) - .map(|(tx, bs, _)| (tx, bs, commit_table, &self.committed_state_write_lock.blob_store)) + .map(|(tx, bs, idx_map, _)| { + ( + tx, + bs, + idx_map, + commit_table, + &self.committed_state_write_lock.blob_store, + ) + }) } /// Set the table access of `table_id` to `access`. @@ -352,40 +376,35 @@ impl MutTxId { // Construct the index schema. #[allow(deprecated)] - let mut index = IndexSchema::from_def(table_id, index.clone()); + let mut index = IndexSchema::from_def(table_id, index); index.index_id = index_id; // Add the index to the transaction's insert table. - let (table, blob_store, commit_table, commit_blob_store) = self.get_or_create_insert_table_mut(table_id)?; + let (table, blob_store, idx_map, commit_table, commit_blob_store) = + self.get_or_create_insert_table_mut(table_id)?; // Create and build the index. 
- let mut insert_index = table.new_index(index.index_id, &index.columns, is_unique)?; - insert_index.build_from_rows(&index.columns, table.scan_rows(blob_store))?; + let mut insert_index = table.new_index(index.index_id, &columns, is_unique)?; + insert_index.build_from_rows(&columns, table.scan_rows(blob_store))?; // NOTE: Also add all the rows in the already committed table to the index. // FIXME: Is this correct? Index scan iterators (incl. the existing `Locking` versions) // appear to assume that a table's index refers only to rows within that table, // and does not handle the case where a `TxState` index refers to `CommittedState` rows. if let Some(committed_table) = commit_table { - insert_index.build_from_rows(&index.columns, committed_table.scan_rows(commit_blob_store))?; + insert_index.build_from_rows(&columns, committed_table.scan_rows(commit_blob_store))?; } table.indexes.insert(columns.clone(), insert_index); + // Associate `index_id -> (table_id, col_list)` for fast lookup. + idx_map.insert(index_id, (table_id, columns.clone())); log::trace!( "INDEX CREATED: {} for table: {} and col(s): {:?}", index_id, table_id, - index.columns + columns ); // Update the table's schema. // This won't clone-write when creating a table but likely to otherwise. - let schema = IndexSchema { - table_id, - columns, - index_name: index.index_name, - is_unique, - index_id, - index_type: index.index_type, - }; - table.with_mut_schema(|s| s.indexes.push(schema)); + table.with_mut_schema(|s| s.indexes.push(index)); Ok(index_id) } @@ -453,7 +472,7 @@ impl MutTxId { // Remove the index in the transaction's insert table. // By altering the insert table, this gets moved over to the committed state on merge. - let (table, ..) = self.get_or_create_insert_table_mut(table_id)?; + let (table, _, idx_map, ..) 
= self.get_or_create_insert_table_mut(table_id)?; if let Some(col) = table .indexes .iter() @@ -465,6 +484,9 @@ impl MutTxId { table.with_mut_schema(|s| s.indexes.retain(|x| x.columns != col)); table.indexes.remove(&col); } + // Remove the `index_id -> (table_id, col_list)` association. + idx_map.remove(&index_id); + self.tx_state.index_id_map_removals.push(index_id); log::trace!("INDEX DROPPED: {}", index_id); Ok(()) @@ -472,9 +494,187 @@ impl MutTxId { pub fn index_id_from_name(&self, index_name: &str, database_address: Address) -> Result> { let ctx = ExecutionContext::internal(database_address); - let name = &>::from(index_name).into(); - self.iter_by_col_eq(&ctx, ST_INDEX_ID, StIndexFields::IndexName, name) - .map(|mut iter| iter.next().map(|row| row.read_col(StIndexFields::IndexId).unwrap())) + let name = &index_name.into(); + let row = self + .iter_by_col_eq(&ctx, ST_INDEX_ID, StIndexFields::IndexName, name)? + .next(); + Ok(row.map(|row| row.read_col(StIndexFields::IndexId).unwrap())) + } + + /// Returns an iterator yielding rows by performing a btree index scan + /// on the btree index identified by `index_id`. + /// + /// The `prefix` is equated to the first `prefix_elems` values of the index key + /// and then `prefix_elem`th value is bounded to the left by by `rstart` + /// and to the right by `rend`. + pub fn btree_scan<'a>( + &'a self, + index_id: IndexId, + prefix: &[u8], + prefix_elems: ColId, + rstart: &[u8], + rend: &[u8], + ) -> Result<(TableId, impl Iterator>)> { + // Extract the table and index type for the tx state. + let (table_id, col_list, tx_idx_key_type) = self + .get_table_and_index_type(index_id) + .ok_or_else(|| IndexError::NotFound(index_id))?; + + // TODO(centril): Once we have more index types than `btree`, + // we'll need to enforce that `index_id` refers to a btree index. + + // We have the index key type, so we can decode everything. 
+ let bounds = Self::btree_decode_bounds(tx_idx_key_type, prefix, prefix_elems, rstart, rend) + .map_err(IndexError::Decode)?; + + // Get an index seek iterator for the tx and committed state. + let tx_iter = self.tx_state.index_seek(table_id, col_list, &bounds).unwrap(); + let commit_iter = self.committed_state_write_lock.index_seek(table_id, col_list, &bounds); + + // Chain together the indexed rows in the tx and committed state, + // but don't yield rows deleted in the tx state. + enum Choice { + A(A), + B(B), + C(C), + } + impl, B: Iterator, C: Iterator> Iterator for Choice { + type Item = T; + fn next(&mut self) -> Option { + match self { + Self::A(i) => i.next(), + Self::B(i) => i.next(), + Self::C(i) => i.next(), + } + } + } + let iter = match commit_iter { + None => Choice::A(tx_iter), + Some(commit_iter) => match self.tx_state.delete_tables.get(&table_id) { + None => Choice::B(tx_iter.chain(commit_iter)), + Some(tx_dels) => { + Choice::C(tx_iter.chain(commit_iter.filter(move |row| !tx_dels.contains(&row.pointer())))) + } + }, + }; + Ok((table_id, iter)) + } + + /// Translate `index_id` to the table id, the column list and index key type. + fn get_table_and_index_type(&self, index_id: IndexId) -> Option<(TableId, &ColList, &AlgebraicType)> { + // The order of querying the committed vs. tx state for the translation is not important. + // But it is vastly more likely that it is in the committed state, + // so query that first to avoid two lookups. + let (table_id, col_list) = self + .committed_state_write_lock + .index_id_map + .get(&index_id) + .or_else(|| self.tx_state.index_id_map.get(&index_id))?; + // The tx state must have the index. + // If the index was e.g., dropped from the tx state but exists physically in the committed state, + // the index does not exist, semantically. 
+ let key_ty = self.tx_state.get_table_and_index_type(*table_id, col_list)?; + Some((*table_id, col_list, key_ty)) + } + + /// Decode the bounds for a btree scan for an index typed at `key_type`. + fn btree_decode_bounds( + key_type: &AlgebraicType, + mut prefix: &[u8], + prefix_elems: ColId, + rstart: &[u8], + rend: &[u8], + ) -> DecodeResult<(Bound, Bound)> { + match key_type { + // Multi-column index case. + AlgebraicType::Product(key_types) => { + let key_types = &key_types.elements; + // Split into types for the prefix and for the rest. + // TODO(centril): replace with `.split_at_checked(...)`. + if key_types.len() < prefix_elems.idx() { + return Err(DecodeError::Other( + "index key type has too few fields compared to prefix".into(), + )); + } + let (prefix_types, rest_types) = key_types.split_at(prefix_elems.idx()); + + // The `rstart` and `rend`s must be typed at `Bound`. + // Extract that type and determine the length of the suffix. + let Some((range_type, suffix_types)) = rest_types.split_first() else { + return Err(DecodeError::Other( + "prefix length leaves no room for a range in btree index scan".into(), + )); + }; + let suffix_len = suffix_types.len(); + + // We now have the types, + // so proceed to decoding the prefix, and the start/end bounds. + // Finally combine all of these to a single bound pair. + let prefix = bsatn::decode(prefix_types, &mut prefix)?; + let (start, end) = Self::btree_decode_ranges(&range_type.algebraic_type, rstart, rend)?; + Ok(Self::btree_combine_prefix_and_bounds(prefix, start, end, suffix_len)) + } + // Single-column index case. We implicitly have a PT of len 1. + _ if !prefix.is_empty() && prefix_elems.idx() != 0 => Err(DecodeError::Other( + "a single-column index cannot be prefix scanned".into(), + )), + ty => Self::btree_decode_ranges(ty, rstart, rend), + } + } + + /// Decode `rstart` and `rend` as `Bound`. 
+ fn btree_decode_ranges( + ty: &AlgebraicType, + mut rstart: &[u8], + mut rend: &[u8], + ) -> DecodeResult<(Bound, Bound)> { + let range_type = WithBound(WithTypespace::empty(ty)); + let range_start = range_type.deserialize(Deserializer::new(&mut rstart))?; + let range_end = range_type.deserialize(Deserializer::new(&mut rend))?; + Ok((range_start, range_end)) + } + + /// Combines `prefix` equality constraints with `start` and `end` bounds + /// filling with `suffix_len` to ensure that the number of fields matches + /// that of the index type. + fn btree_combine_prefix_and_bounds( + prefix: ProductValue, + start: Bound, + end: Bound, + suffix_len: usize, + ) -> (Bound, Bound) { + let prefix_is_empty = prefix.elements.is_empty(); + // Concatenate prefix, value, and the most permissive value for the suffix. + let concat = |prefix: ProductValue, val, fill| { + let mut vals: Vec<_> = prefix.elements.into(); + vals.reserve(1 + suffix_len); + vals.push(val); + vals.extend(iter::repeat(fill).take(suffix_len)); + AlgebraicValue::product(vals) + }; + // The start endpoint needs `Min` as the suffix-filling element, + // as it imposes the least and acts like `Unbounded`. + let concat_start = |val| concat(prefix.clone(), val, AlgebraicValue::Min); + let range_start = match start { + Bound::Included(r) => Bound::Included(concat_start(r)), + Bound::Excluded(r) => Bound::Excluded(concat_start(r)), + // Prefix is empty, and suffix will be `Min`, + // so simplify `(Min, Min, ...)` to `Unbounded`. + Bound::Unbounded if prefix_is_empty => Bound::Unbounded, + Bound::Unbounded => Bound::Included(concat_start(AlgebraicValue::Min)), + }; + // The end endpoint needs `Max` as the suffix-filling element, + // as it imposes the least and acts like `Unbounded`. 
+ let concat_end = |val| concat(prefix, val, AlgebraicValue::Max); + let range_end = match end { + Bound::Included(r) => Bound::Included(concat_end(r)), + Bound::Excluded(r) => Bound::Excluded(concat_end(r)), + // Prefix is empty, and suffix will be `Max`, + // so simplify `(Max, Max, ...)` to `Unbounded`. + Bound::Unbounded if prefix_is_empty => Bound::Unbounded, + Bound::Unbounded => Bound::Included(concat_end(AlgebraicValue::Max)), + }; + (range_start, range_end) } pub fn get_next_sequence_value(&mut self, seq_id: SequenceId, database_address: Address) -> Result { @@ -892,7 +1092,7 @@ impl MutTxId { } // Get the insert table, so we can write the row into it. - let (tx_table, tx_blob_store, delete_table) = self + let (tx_table, tx_blob_store, _, delete_table) = self .tx_state .get_table_and_blob_store_or_maybe_create_from(table_id, commit_table) .ok_or(TableError::IdNotFoundState(table_id))?; @@ -1014,7 +1214,7 @@ impl MutTxId { // If the tx table exists, get it. // If it doesn't exist, but the commit table does, // create the tx table using the commit table as a template. 
- let Some((tx_table, tx_blob_store, _)) = self + let Some((tx_table, tx_blob_store, ..)) = self .tx_state .get_table_and_blob_store_or_maybe_create_from(table_id, commit_table.as_deref()) else { diff --git a/crates/core/src/db/datastore/locking_tx_datastore/tx_state.rs b/crates/core/src/db/datastore/locking_tx_datastore/tx_state.rs index 0bd03ba350..b993bb3c08 100644 --- a/crates/core/src/db/datastore/locking_tx_datastore/tx_state.rs +++ b/crates/core/src/db/datastore/locking_tx_datastore/tx_state.rs @@ -1,15 +1,20 @@ use core::ops::RangeBounds; -use spacetimedb_primitives::{ColList, TableId}; -use spacetimedb_sats::AlgebraicValue; +use spacetimedb_data_structures::map::IntMap; +use spacetimedb_primitives::{ColList, IndexId, TableId}; +use spacetimedb_sats::{AlgebraicType, AlgebraicValue}; use spacetimedb_table::{ blob_store::{BlobStore, HashMapBlobStore}, indexes::{RowPointer, SquashedOffset}, table::{IndexScanIter, RowRef, Table}, }; use std::collections::{btree_map, BTreeMap, BTreeSet}; +use thin_vec::ThinVec; pub(super) type DeleteTable = BTreeSet; +/// A mapping to find the actual index given an `IndexId`. +pub(super) type IndexIdMap = IntMap; + /// `TxState` tracks all of the modifications made during a particular transaction. /// Rows inserted during a transaction will be added to insert_tables, and similarly, /// rows deleted in the transaction will be added to delete_tables. @@ -61,6 +66,12 @@ pub(super) struct TxState { /// and free each of them during rollback. /// - Traverse all rows in the `insert_tables` and free each of their blobs during rollback. pub(super) blob_store: HashMapBlobStore, + + /// Provides fast lookup for index id -> an index. + pub(super) index_id_map: IndexIdMap, + + /// Lists all the `IndexId` that are to be removed from `CommittedState::index_id_map`. 
+ pub(super) index_id_map_removals: ThinVec, } impl TxState { @@ -143,10 +154,16 @@ impl TxState { &'this mut self, table_id: TableId, template: Option<&Table>, - ) -> Option<(&'this mut Table, &'this mut dyn BlobStore, &'this mut DeleteTable)> { + ) -> Option<( + &'this mut Table, + &'this mut dyn BlobStore, + &'this mut IndexIdMap, + &'this mut DeleteTable, + )> { let insert_tables = &mut self.insert_tables; let delete_tables = &mut self.delete_tables; let blob_store = &mut self.blob_store; + let idx_map = &mut self.index_id_map; let tbl = match insert_tables.entry(table_id) { btree_map::Entry::Vacant(e) => { let new_table = template?.clone_structure(SquashedOffset::TX_STATE); @@ -154,6 +171,13 @@ impl TxState { } btree_map::Entry::Occupied(e) => e.into_mut(), }; - Some((tbl, blob_store, delete_tables.entry(table_id).or_default())) + Some((tbl, blob_store, idx_map, delete_tables.entry(table_id).or_default())) + } + + /// Returns the table and index associated with the given `table_id` and `col_list`, if any. 
+ pub(super) fn get_table_and_index_type(&self, table_id: TableId, col_list: &ColList) -> Option<&AlgebraicType> { + let table = self.insert_tables.get(&table_id)?; + let index = table.indexes.get(col_list)?; + Some(&index.key_type) } } diff --git a/crates/core/src/db/relational_db.rs b/crates/core/src/db/relational_db.rs index fe36dfaa60..f5f692e2fa 100644 --- a/crates/core/src/db/relational_db.rs +++ b/crates/core/src/db/relational_db.rs @@ -1,5 +1,5 @@ use super::datastore::locking_tx_datastore::committed_state::CommittedState; -use super::datastore::locking_tx_datastore::state_view::StateView as _; +use super::datastore::locking_tx_datastore::state_view::StateView; use super::datastore::system_tables::ST_MODULE_ID; use super::datastore::traits::{ IsolationLevel, Metadata, MutTx as _, MutTxDatastore, Program, RowTypeForTable, Tx as _, TxDatastore, @@ -938,6 +938,10 @@ impl RelationalDB { self.inner.table_name_from_id_mut_tx(ctx, tx, table_id) } + pub fn index_id_from_name_mut(&self, tx: &MutTx, index_name: &str) -> Result, DBError> { + self.inner.index_id_from_name_mut_tx(tx, index_name) + } + pub fn table_row_count_mut(&self, tx: &MutTx, table_id: TableId) -> Option { // TODO(Centril): Go via MutTxDatastore trait instead. // Doing this for now to ship this quicker. 
@@ -1061,6 +1065,18 @@ impl RelationalDB { self.inner.iter_by_col_range_tx(ctx, tx, table_id.into(), cols, range) } + pub fn btree_scan<'a>( + &'a self, + tx: &'a MutTx, + index_id: IndexId, + prefix: &[u8], + prefix_elems: ColId, + rstart: &[u8], + rend: &[u8], + ) -> Result<(TableId, impl Iterator>), DBError> { + tx.btree_scan(index_id, prefix, prefix_elems, rstart, rend) + } + pub fn insert<'a>( &'a self, tx: &'a mut MutTx, diff --git a/crates/core/src/error.rs b/crates/core/src/error.rs index 14f09de7bf..df1ff5626a 100644 --- a/crates/core/src/error.rs +++ b/crates/core/src/error.rs @@ -80,6 +80,8 @@ pub enum IndexError { UniqueConstraintViolation(#[from] UniqueConstraintViolation), #[error("Attempt to define a index with more than 1 auto_inc column: Table: {0:?}, Columns: {1:?}")] OneAutoInc(TableId, Vec), + #[error("Could not decode arguments to index scan")] + Decode(DecodeError), } #[derive(Error, Debug, PartialEq, Eq)] @@ -322,6 +324,8 @@ pub enum NodesError { DecodeFilter(#[source] DecodeError), #[error("table with provided name or id doesn't exist")] TableNotFound, + #[error("index with provided name or id doesn't exist")] + IndexNotFound, #[error("column is out of bounds")] BadColumn, #[error("can't perform operation; not inside transaction")] @@ -347,6 +351,8 @@ impl From for NodesError { DBError::Table(TableError::System(name)) => Self::SystemName(name), DBError::Table(TableError::IdNotFound(_, _) | TableError::NotFound(_)) => Self::TableNotFound, DBError::Table(TableError::ColumnNotFound(_)) => Self::BadColumn, + DBError::Index(IndexError::NotFound(_)) => Self::IndexNotFound, + DBError::Index(IndexError::Decode(e)) => Self::DecodeRow(e), _ => Self::Internal(Box::new(e)), } } diff --git a/crates/core/src/host/instance_env.rs b/crates/core/src/host/instance_env.rs index e12148322d..406e136b9a 100644 --- a/crates/core/src/host/instance_env.rs +++ b/crates/core/src/host/instance_env.rs @@ -15,7 +15,7 @@ use crate::vm::{build_query, TxMode}; use 
spacetimedb_lib::filter::CmpArgs; use spacetimedb_lib::operator::OpQuery; use spacetimedb_lib::relation::FieldName; -use spacetimedb_primitives::{ColId, TableId}; +use spacetimedb_primitives::{ColId, IndexId, TableId}; use spacetimedb_sats::Typespace; use spacetimedb_sats::{AlgebraicValue, ProductValue}; use spacetimedb_vm::expr::{FieldExpr, FieldOp, NoInMemUsed, QueryExpr}; @@ -175,6 +175,27 @@ impl InstanceEnv { Ok(stdb.delete(tx, table_id, rows_to_delete)) } + #[tracing::instrument(skip_all)] + pub fn datastore_delete_by_btree_scan_bsatn( + &self, + index_id: IndexId, + prefix: &[u8], + prefix_elems: ColId, + rstart: &[u8], + rend: &[u8], + ) -> Result { + let stdb = &*self.dbic.relational_db; + let tx = &mut *self.tx.get()?; + + // Find all rows in the table to delete. + let (table_id, iter) = stdb.btree_scan(tx, index_id, prefix, prefix_elems, rstart, rend)?; + // Re. `SmallVec`, `delete_by_field` only cares about 1 element, so optimize for that. + let rows_to_delete = iter.map(|row_ref| row_ref.pointer()).collect::>(); + + // Delete them and count how many we deleted. + Ok(stdb.delete(tx, table_id, rows_to_delete)) + } + /// Deletes all rows in the table identified by `table_id` /// where the rows match one in `relation` /// which is a bsatn encoding of `Vec`. @@ -212,6 +233,20 @@ impl InstanceEnv { .ok_or(NodesError::TableNotFound) } + /// Returns the `index_id` associated with the given `index_name`. + /// + /// Errors with `GetTxError` if not in a transaction + /// and `IndexNotFound` if the index does not exist. + #[tracing::instrument(skip_all)] + pub fn index_id_from_name(&self, index_name: &str) -> Result { + let stdb = &*self.dbic.relational_db; + let tx = &mut *self.get_tx()?; + + // Query the index id from the name. + stdb.index_id_from_name_mut(tx, index_name)? + .ok_or(NodesError::IndexNotFound) + } + /// Returns the number of rows in the table identified by `table_id`. 
/// /// Errors with `GetTxError` if not in a transaction @@ -263,6 +298,23 @@ impl InstanceEnv { Ok(chunks) } + #[tracing::instrument(skip_all)] + pub fn datastore_btree_scan_bsatn_chunks( + &self, + index_id: IndexId, + prefix: &[u8], + prefix_elems: ColId, + rstart: &[u8], + rend: &[u8], + ) -> Result>, NodesError> { + let stdb = &*self.dbic.relational_db; + let tx = &mut *self.tx.get()?; + + let (_, iter) = stdb.btree_scan(tx, index_id, prefix, prefix_elems, rstart, rend)?; + let chunks = ChunkedWriter::collect_iter(iter); + Ok(chunks) + } + pub fn iter_filtered_chunks( &self, ctx: &ExecutionContext, diff --git a/crates/core/src/host/mod.rs b/crates/core/src/host/mod.rs index 41affa131c..b4ae1836c0 100644 --- a/crates/core/src/host/mod.rs +++ b/crates/core/src/host/mod.rs @@ -142,11 +142,14 @@ fn from_json_seed<'de, T: serde::de::DeserializeSeed<'de>>(s: &'de str, seed: T) #[derive(Debug, Display, Enum, Clone, Copy, strum::AsRefStr)] pub enum AbiCall { TableIdFromName, + IndexIdFromName, DatastoreTableRowCount, DatastoreTableScanBsatn, + DatastoreBtreeScanBsatn, RowIterBsatnAdvance, RowIterBsatnClose, DatastoreInsertBsatn, + DatastoreDeleteByBtreeScanBsatn, DatastoreDeleteAllByEqBsatn, BytesSourceRead, BytesSinkWrite, diff --git a/crates/core/src/host/wasm_common.rs b/crates/core/src/host/wasm_common.rs index 45bc604979..9efdb64f5e 100644 --- a/crates/core/src/host/wasm_common.rs +++ b/crates/core/src/host/wasm_common.rs @@ -322,6 +322,7 @@ pub fn err_to_errno(err: &NodesError) -> Option { NodesError::NotInTransaction => Some(errno::NOT_IN_TRANSACTION), NodesError::DecodeRow(_) => Some(errno::BSATN_DECODE_ERROR), NodesError::TableNotFound => Some(errno::NO_SUCH_TABLE), + NodesError::IndexNotFound => Some(errno::NO_SUCH_INDEX), NodesError::ScheduleError(ScheduleError::DelayTooLong(_)) => Some(errno::SCHEDULE_AT_DELAY_TOO_LONG), NodesError::AlreadyExists(_) => Some(errno::UNIQUE_ALREADY_EXISTS), NodesError::Internal(internal) => match **internal { diff --git 
a/crates/core/src/host/wasmtime/wasm_instance_env.rs b/crates/core/src/host/wasmtime/wasm_instance_env.rs index feffcc23f2..98f77c6905 100644 --- a/crates/core/src/host/wasmtime/wasm_instance_env.rs +++ b/crates/core/src/host/wasmtime/wasm_instance_env.rs @@ -364,6 +364,41 @@ impl WasmInstanceEnv { }) } + /// Queries the `index_id` associated with the given (index) `name` + /// where `name` is the UTF-8 slice in WASM memory at `name_ptr[..name_len]`. + /// + /// The index id is written into the `out` pointer. + /// + /// # Traps + /// + /// Traps if: + /// - `name_ptr` is NULL or `name` is not in bounds of WASM memory. + /// - `name` is not valid UTF-8. + /// - `out` is NULL or `out[..size_of::()]` is not in bounds of WASM memory. + /// + /// # Errors + /// + /// Returns an error: + /// + /// - `NOT_IN_TRANSACTION`, when called outside of a transaction. + /// - `NO_SUCH_INDEX`, when `name` is not the name of an index. + #[tracing::instrument(skip_all)] + pub fn index_id_from_name( + caller: Caller<'_, Self>, + name: WasmPtr, + name_len: u32, + out: WasmPtr, + ) -> RtResult { + Self::cvt_ret::(caller, AbiCall::IndexIdFromName, out, |caller| { + let (mem, env) = Self::mem_env(caller); + // Read the index name from WASM memory. + let name = mem.deref_str(name, name_len)?; + + // Query the index id. + Ok(env.instance_env.index_id_from_name(name)?.into()) + }) + } + /// Writes the number of rows currently in table identified by `table_id` to `out`. /// /// # Traps @@ -421,6 +456,107 @@ impl WasmInstanceEnv { }) } + /// Finds all rows in the index identified by `index_id`, + /// according to the: + /// - `prefix = prefix_ptr[..prefix_len]`, + /// - `rstart = rstart_ptr[..rstart_len]`, + /// - `rend = rend_ptr[..rend_len]`, + /// in WASM memory. + /// + /// The index itself has a schema/type. 
+ /// The `prefix` is decoded to the initial `prefix_elems` `AlgebraicType`s + /// whereas `rstart` and `rend` are decoded to the `prefix_elems + 1` `AlgebraicType` + /// where the `AlgebraicValue`s are wrapped in `Bound`. + /// That is, `rstart, rend` are BSATN-encoded `Bound`s. + /// + /// Matching is then defined by equating `prefix` + /// to the initial `prefix_elems` columns of the index + /// and then imposing `rstart` as the starting bound + /// and `rend` as the ending bound on the `prefix_elems + 1` column of the index. + /// Remaining columns of the index are then unbounded. + /// Note that the `prefix` in this case can be empty (`prefix_elems = 0`), + /// in which case this becomes a ranged index scan on a single-col index + /// or even a full table scan if `rstart` and `rend` are both unbounded. + /// + /// The relevant table for the index is found implicitly via the `index_id`, + /// which is unique for the module. + /// + /// On success, the iterator handle is written to the `out` pointer. + /// This handle can be advanced by [`row_iter_bsatn_advance`]. + /// + /// # Non-obvious queries + /// + /// For an index on columns `[a, b, c]`: + /// + /// - `a = x, b = y` is encoded as a prefix `[x, y]` + /// and a range `Range::Unbounded`, + /// or as a prefix `[x]` and a range `rstart = rend = Range::Inclusive(y)`. + /// - `a = x, b = y, c = z` is encoded as a prefix `[x, y]` + /// and a range `rstart = rend = Range::Inclusive(z)`. + /// - A sorted full scan is encoded as an empty prefix + /// and a range `Range::Unbounded`. + /// + /// # Traps + /// + /// Traps if: + /// - `prefix_elems > 0` + /// and (`prefix_ptr` is NULL or `prefix` is not in bounds of WASM memory). + /// - `rstart` is NULL or `rstart` is not in bounds of WASM memory. + /// - `rend` is NULL or `rend` is not in bounds of WASM memory. + /// - `out` is NULL or `out[..size_of::()]` is not in bounds of WASM memory. 
+ /// + /// # Errors + /// + /// Returns an error: + /// + /// - `NOT_IN_TRANSACTION`, when called outside of a transaction. + /// - `NO_SUCH_INDEX`, when `index_id` is not a known ID of an index. + /// - `WRONG_INDEX_ALGO` if the index is not a btree index. + /// - `BSATN_DECODE_ERROR`, when `prefix` cannot be decoded to + /// a `prefix_elems` number of `AlgebraicValue` + /// typed at the initial `prefix_elems` `AlgebraicType`s of the index's key type. + /// Or when `rstart` or `rend` cannot be decoded to an `Bound` + /// where the inner `AlgebraicValue`s are + /// typed at the `prefix_elems + 1` `AlgebraicType` of the index's key type. + fn datastore_btree_scan_bsatn( + caller: Caller<'_, Self>, + index_id: u32, + prefix_ptr: WasmPtr, + prefix_len: u32, + prefix_elems: u32, + rstart_ptr: WasmPtr, // Bound + rstart_len: u32, + rend_ptr: WasmPtr, // Bound + rend_len: u32, + out: WasmPtr, + ) -> RtResult { + Self::cvt_ret(caller, AbiCall::DatastoreBtreeScanBsatn, out, |caller| { + let prefix_elems = Self::convert_u32_to_col_id(prefix_elems)?; + + let (mem, env) = Self::mem_env(caller); + // Read the prefix and range start & end from WASM memory. + let prefix = if prefix_elems.idx() == 0 { + &[] + } else { + mem.deref_slice(prefix_ptr, prefix_len)? + }; + let rstart = mem.deref_slice(rstart_ptr, rstart_len)?; + let rend = mem.deref_slice(rend_ptr, rend_len)?; + + // Find the relevant rows. + let chunks = env.instance_env.datastore_btree_scan_bsatn_chunks( + index_id.into(), + prefix, + prefix_elems, + rstart, + rend, + )?; + + // Insert the encoded + concatenated rows into a new buffer and return its id. + Ok(env.iters.insert(chunks.into_iter())) + }) + } + /// Finds all rows in the table identified by `table_id`, /// where the row has a column, identified by `cols`, /// with data matching the byte string, in WASM memory, pointed to at by `val`. 
@@ -688,6 +824,78 @@ impl WasmInstanceEnv { }) } + /// Deletes all rows found in the index identified by `index_id`, + /// according to the: + /// - `prefix = prefix_ptr[..prefix_len]`, + /// - `rstart = rstart_ptr[..rstart_len]`, + /// - `rend = rend_ptr[..rend_len]`, + /// in WASM memory. + /// + /// This syscall will delete all the rows found by + /// [`datastore_btree_scan_bsatn`] with the same arguments passed, + /// including `prefix_elems`. + /// See `datastore_btree_scan_bsatn` for details. + /// + /// The number of rows deleted is written to the WASM pointer `out`. + /// + /// # Traps + /// + /// Traps if: + /// - `prefix_elems > 0` + /// and (`prefix_ptr` is NULL or `prefix` is not in bounds of WASM memory). + /// - `rstart` is NULL or `rstart` is not in bounds of WASM memory. + /// - `rend` is NULL or `rend` is not in bounds of WASM memory. + /// - `out` is NULL or `out[..size_of::()]` is not in bounds of WASM memory. + /// + /// # Errors + /// + /// Returns an error: + /// + /// - `NOT_IN_TRANSACTION`, when called outside of a transaction. + /// - `NO_SUCH_INDEX`, when `index_id` is not a known ID of an index. + /// - `WRONG_INDEX_ALGO` if the index is not a btree index. + /// - `BSATN_DECODE_ERROR`, when `prefix` cannot be decoded to + /// a `prefix_elems` number of `AlgebraicValue` + /// typed at the initial `prefix_elems` `AlgebraicType`s of the index's key type. + /// Or when `rstart` or `rend` cannot be decoded to an `Bound` + /// where the inner `AlgebraicValue`s are + /// typed at the `prefix_elems + 1` `AlgebraicType` of the index's key type. 
+ pub fn datastore_delete_by_btree_scan_bsatn( + caller: Caller<'_, Self>, + index_id: u32, + prefix_ptr: WasmPtr, + prefix_len: u32, + prefix_elems: u32, + rstart_ptr: WasmPtr, // Bound + rstart_len: u32, + rend_ptr: WasmPtr, // Bound + rend_len: u32, + out: WasmPtr, + ) -> RtResult { + Self::cvt_ret(caller, AbiCall::DatastoreDeleteByBtreeScanBsatn, out, |caller| { + let prefix_elems = Self::convert_u32_to_col_id(prefix_elems)?; + + let (mem, env) = Self::mem_env(caller); + // Read the prefix and range start & end from WASM memory. + let prefix = if prefix_elems.idx() == 0 { + &[] + } else { + mem.deref_slice(prefix_ptr, prefix_len)? + }; + let rstart = mem.deref_slice(rstart_ptr, rstart_len)?; + let rend = mem.deref_slice(rend_ptr, rend_len)?; + + // Delete the relevant rows. + Ok(env.instance_env.datastore_delete_by_btree_scan_bsatn( + index_id.into(), + prefix, + prefix_elems, + rstart, + rend, + )?) + }) + } + /// Deletes those rows, in the table identified by `table_id`, /// that match any row in the byte string `rel = rel_ptr[..rel_len]` in WASM memory. /// diff --git a/crates/lib/src/db/raw_def/v9.rs b/crates/lib/src/db/raw_def/v9.rs index 99ffcf05db..75cd34599e 100644 --- a/crates/lib/src/db/raw_def/v9.rs +++ b/crates/lib/src/db/raw_def/v9.rs @@ -87,9 +87,9 @@ pub struct RawModuleDefV9 { /// constraints, sequences, type, and access rights. /// /// Validation rules: -/// - The table name must be a valid [crate::db::identifier::Identifier]. +/// - The table name must be a valid [`crate::db::identifier::Identifier`]. /// - The table's indexes, constraints, and sequences need not be sorted; they will be sorted according to their respective ordering rules. -/// - The table's column types may refer only to types in the containing RawDatabaseDef's typespace. +/// - The table's column types may refer only to types in the containing `RawModuleDefV9`'s typespace. /// - The table's column names must be unique. 
#[derive(Debug, Clone, SpacetimeType)] #[sats(crate = crate)] @@ -120,7 +120,7 @@ pub struct RawTableDefV9 { pub indexes: Vec, /// Any unique constraints on the table. - pub unique_constraints: Vec, + pub constraints: Vec, /// The sequences for the table. pub sequences: Vec, @@ -192,7 +192,7 @@ impl From for StAccess { #[sats(crate = crate)] #[cfg_attr(feature = "test", derive(PartialEq, Eq, PartialOrd, Ord))] pub struct RawSequenceDefV9 { - /// The name of the sequence. Must be unique within the containing `RawDatabaseDef`. + /// The name of the sequence. Must be unique within the containing `RawModuleDefV9`. pub name: RawIdentifier, /// The position of the column associated with this sequence. @@ -273,7 +273,7 @@ pub enum RawIndexAlgorithm { #[sats(crate = crate)] #[cfg_attr(feature = "test", derive(PartialEq, Eq, PartialOrd, Ord))] pub struct RawUniqueConstraintDefV9 { - /// The name of the unique constraint. Must be unique within the containing `RawDatabaseDef`. + /// The name of the unique constraint. Must be unique within the containing `RawModuleDefV9`. pub name: RawIdentifier, /// The columns that must be unique. @@ -289,13 +289,44 @@ pub struct RawUniqueConstraintDefV9 { #[sats(crate = crate)] #[cfg_attr(feature = "test", derive(PartialEq, Eq, PartialOrd, Ord))] pub struct RawScheduleDefV9 { - /// The name of the schedule. Must be unique within the containing `RawDatabaseDef`. + /// The name of the schedule. Must be unique within the containing `RawModuleDefV9`. pub name: RawIdentifier, /// The name of the reducer to call. pub reducer_name: RawIdentifier, } +/// A constraint definition attached to a table. +#[derive(Debug, Clone, SpacetimeType)] +#[sats(crate = crate)] +#[cfg_attr(feature = "test", derive(PartialEq, Eq, PartialOrd, Ord))] +pub struct RawConstraintDefV9 { + /// The name of the constraint. Must be unique within the containing `RawModuleDefV9`. + pub name: RawIdentifier, + + /// The data for the constraint. 
+ pub data: RawConstraintDataV9, +} + +#[derive(Debug, Clone, SpacetimeType)] +#[sats(crate = crate)] +#[cfg_attr(feature = "test", derive(PartialEq, Eq, PartialOrd, Ord))] +#[non_exhaustive] +pub enum RawConstraintDataV9 { + Unique(RawUniqueConstraintDataV9), +} + +/// Requires that the projection of the table onto these `columns` is a bijection. +/// +/// That is, there must be a one-to-one relationship between a row and the `columns` of that row. +#[derive(Debug, Clone, SpacetimeType)] +#[sats(crate = crate)] +#[cfg_attr(feature = "test", derive(PartialEq, Eq, PartialOrd, Ord))] +pub struct RawUniqueConstraintDataV9 { + /// The columns that must be unique. + pub columns: ColList, +} + /// A miscellaneous module export. #[derive(Debug, Clone, SpacetimeType)] #[sats(crate = crate)] @@ -314,6 +345,7 @@ pub struct RawTypeDefV9 { pub name: RawScopedTypeNameV9, /// The type to which the declaration refers. + /// This must point to an `AlgebraicType::Product` or an `AlgebraicType::Sum` in the module's typespace. pub ty: AlgebraicTypeRef, /// Whether this type has a custom ordering. @@ -411,7 +443,7 @@ impl RawModuleDefV9Builder { name, product_type_ref, indexes: vec![], - unique_constraints: vec![], + constraints: vec![], sequences: vec![], schedule: None, primary_key: None, @@ -583,9 +615,10 @@ impl<'a> RawTableDefBuilder<'a> { /// Generates a [UniqueConstraintDef] using the supplied `columns`. 
pub fn with_unique_constraint(mut self, columns: ColList, name: Option) -> Self { let name = name.unwrap_or_else(|| self.generate_unique_constraint_name(&columns)); - self.table - .unique_constraints - .push(RawUniqueConstraintDefV9 { name, columns }); + self.table.constraints.push(RawConstraintDefV9 { + name, + data: RawConstraintDataV9::Unique(RawUniqueConstraintDataV9 { columns }), + }); self } diff --git a/crates/lib/src/lib.rs b/crates/lib/src/lib.rs index b895660c3c..3eb5190ea1 100644 --- a/crates/lib/src/lib.rs +++ b/crates/lib/src/lib.rs @@ -252,8 +252,11 @@ impl ModuleDefBuilder { algebraic_type: c.col_type.clone(), }) .collect(); - // do NOT add a `TypeAlias`: in v8, the `RawTableDef` itself serves as a `TypeAlias`. let data = self.module.typespace.add(ty.into()); + self.add_type_alias(TypeAlias { + name: schema.table_name.clone().into(), + ty: data, + }); self.add_table(TableDesc { schema, data }); data } @@ -379,6 +382,12 @@ pub fn from_hex_pad, T: AsRef<[u8]>>( hex: T, ) -> Result { let hex = hex.as_ref(); - let hex = if hex.starts_with(b"0x") { &hex[2..] } else { hex }; + let hex = if hex.starts_with(b"0x") { + &hex[2..] 
+ } else if hex.starts_with(b"X'") { + &hex[2..hex.len()] + } else { + hex + }; hex::FromHex::from_hex(hex) } diff --git a/crates/planner/Cargo.toml b/crates/planner/Cargo.toml new file mode 100644 index 0000000000..3832810378 --- /dev/null +++ b/crates/planner/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "spacetimedb-query-planner" +version.workspace = true +edition.workspace = true +license-file = "LICENSE" + +[dependencies] +derive_more.workspace = true +thiserror.workspace = true +spacetimedb-lib.workspace = true +spacetimedb-sats.workspace = true +spacetimedb-schema.workspace = true +spacetimedb-sql-parser.workspace = true + +[dev-dependencies] +spacetimedb-lib.workspace = true +spacetimedb-primitives.workspace = true diff --git a/crates/planner/LICENSE b/crates/planner/LICENSE new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/planner/src/lib.rs b/crates/planner/src/lib.rs new file mode 100644 index 0000000000..82d25f24e8 --- /dev/null +++ b/crates/planner/src/lib.rs @@ -0,0 +1 @@ +pub mod logical; diff --git a/crates/planner/src/logical/bind.rs b/crates/planner/src/logical/bind.rs new file mode 100644 index 0000000000..31b64451c8 --- /dev/null +++ b/crates/planner/src/logical/bind.rs @@ -0,0 +1,414 @@ +use std::sync::Arc; + +use spacetimedb_lib::{from_hex_pad, Address, AlgebraicValue, Identity}; +use spacetimedb_sats::{AlgebraicType, ProductType, ProductTypeElement}; +use spacetimedb_schema::schema::TableSchema; +use spacetimedb_sql_parser::{ + ast::{ + self, + sub::{SqlAst, SqlSelect}, + BinOp, ProjectElem, SqlExpr, SqlFrom, SqlLiteral, + }, + parser::sub::parse_subscription, +}; + +use super::errors::{ConstraintViolation, ResolutionError, TypingError, Unsupported}; +use super::expr::{Expr, Ref, RelExpr, Type, Vars}; + +pub type TypingResult = core::result::Result; + +pub trait SchemaView { + fn schema(&self, name: &str, case_sensitive: bool) -> Option>; +} + +/// Parse and type check a subscription query +pub fn parse_and_type_sub(sql: 
&str, tx: &impl SchemaView) -> TypingResult { + expect_table_type(type_ast(parse_subscription(sql)?, tx)?) +} + +/// Type check and lower a [SqlAst] into a [RelExpr]. +/// This includes name resolution and variable binding. +pub fn type_ast(expr: SqlAst, tx: &impl SchemaView) -> TypingResult { + match expr { + SqlAst::Union(a, b) => { + let a = type_ast(*a, tx)?; + let b = type_ast(*b, tx)?; + assert_eq_types(a.ty(), b.ty())?; + Ok(RelExpr::Union(Box::new(a), Box::new(b))) + } + SqlAst::Minus(a, b) => { + let a = type_ast(*a, tx)?; + let b = type_ast(*b, tx)?; + assert_eq_types(a.ty(), b.ty())?; + Ok(RelExpr::Minus(Box::new(a), Box::new(b))) + } + SqlAst::Select(SqlSelect { + project, + from, + filter: None, + }) => { + let (arg, vars) = type_from(from, tx)?; + type_proj(project, arg, vars) + } + SqlAst::Select(SqlSelect { + project, + from, + filter: Some(expr), + }) => { + let (from, vars) = type_from(from, tx)?; + let arg = type_select(expr, from, vars.clone())?; + type_proj(project, arg, vars.clone()) + } + } +} + +/// Type check and lower a [SqlFrom] +pub fn type_from(from: SqlFrom, tx: &impl SchemaView) -> TypingResult<(RelExpr, Vars)> { + match from { + SqlFrom::Expr(expr, None) => type_rel(expr, tx), + SqlFrom::Expr(expr, Some(alias)) => { + let (expr, _) = type_rel(expr, tx)?; + let ty = expr.ty().clone(); + Ok((expr, vec![(alias.name, ty)].into())) + } + SqlFrom::Join(r, alias, joins) => { + let (mut vars, mut args, mut exprs) = (Vars::default(), Vec::new(), Vec::new()); + + let (r, _) = type_rel(r, tx)?; + let ty = r.ty().clone(); + + args.push(r); + vars.push((alias.name, ty)); + + for join in joins { + let (r, _) = type_rel(join.expr, tx)?; + let ty = r.ty().clone(); + + args.push(r); + vars.push((join.alias.name, ty)); + + if let Some(on) = join.on { + exprs.push(type_expr(&vars, on, Some(&Type::BOOL))?); + } + } + let types = vars.iter().map(|(_, ty)| ty.clone()).collect(); + let input = RelExpr::Join(args.into(), Type::Tup(types)); + 
Ok((RelExpr::select(input, vars.clone(), exprs), vars)) + } + } +} + +/// Type check and lower a [ast::RelExpr] +fn type_rel(expr: ast::RelExpr, tx: &impl SchemaView) -> TypingResult<(RelExpr, Vars)> { + match expr { + ast::RelExpr::Var(var) => tx + .schema(&var.name, var.case_sensitive) + .ok_or_else(|| ResolutionError::unresolved_table(&var.name).into()) + .map(|schema| { + ( + RelExpr::RelVar(schema.clone(), Type::Var(schema.clone())), + vec![(var.name, Type::Var(schema))].into(), + ) + }), + ast::RelExpr::Ast(ast) => Ok((type_ast(*ast, tx)?, Vars::default())), + } +} + +/// Type check and lower a [SqlExpr] +fn type_select(expr: SqlExpr, input: RelExpr, vars: Vars) -> TypingResult { + let exprs = vec![type_expr(&vars, expr, Some(&Type::BOOL))?]; + Ok(RelExpr::select(input, vars, exprs)) +} + +/// Type check and lower a [ast::Project] +fn type_proj(proj: ast::Project, input: RelExpr, vars: Vars) -> TypingResult { + match proj { + ast::Project::Star(None) => Ok(input), + ast::Project::Star(Some(var)) => { + let (i, ty) = vars.expect_var(&var.name, None)?; + let ty = ty.clone(); + let refs = vec![Ref::Var(i, ty.clone())]; + Ok(RelExpr::project(input, vars, refs, ty)) + } + ast::Project::Exprs(elems) => { + let (mut refs, mut fields) = (Vec::new(), Vec::new()); + for ProjectElem(expr, alias) in elems { + if let SqlExpr::Var(_) = expr { + return Err(Unsupported::UnqualifiedProjectExpr.into()); + } + let SqlExpr::Field(table, field) = expr else { + return Err(Unsupported::ProjectExpr.into()); + }; + let (i, j, ty) = vars.expect_field(&table.name, &field.name, None)?; + refs.push(Ref::Field(i, j, Type::Alg(ty.clone()))); + if let Some(alias) = alias { + fields.push((alias.name, ty.clone())); + } else { + fields.push((field.name, ty.clone())); + } + } + let ty = Type::Row(ProductType::from_iter( + fields + .into_iter() + .map(|(name, t)| ProductTypeElement::new_named(t, name.into_boxed_str())), + )); + Ok(RelExpr::project(input, vars, refs, ty)) + } + } +} + +/// Type 
check and lower a [SqlExpr] into a logical [Expr]. +fn type_expr(vars: &Vars, expr: SqlExpr, expected: Option<&Type>) -> TypingResult { + match (expr, expected) { + (SqlExpr::Lit(SqlLiteral::Bool(v)), None | Some(Type::Alg(AlgebraicType::Bool))) => Ok(Expr::bool(v)), + (SqlExpr::Lit(SqlLiteral::Bool(_)), Some(t)) => Err(unexpected_type(&Type::BOOL, t)), + (SqlExpr::Lit(SqlLiteral::Str(v)), None | Some(Type::Alg(AlgebraicType::String))) => Ok(Expr::str(v)), + (SqlExpr::Lit(SqlLiteral::Str(_)), Some(t)) => Err(unexpected_type(&Type::STR, t)), + (SqlExpr::Lit(SqlLiteral::Num(_) | SqlLiteral::Hex(_)), None) => Err(ResolutionError::UntypedLiteral.into()), + (SqlExpr::Lit(SqlLiteral::Num(v) | SqlLiteral::Hex(v)), Some(t)) => parse(v, t), + (SqlExpr::Var(var), expected) => vars.expect_var_ref(&var.name, expected), + (SqlExpr::Field(table, field), expected) => vars.expect_field_ref(&table.name, &field.name, expected), + (SqlExpr::Bin(a, b, op), None | Some(Type::Alg(AlgebraicType::Bool))) => match (*a, *b) { + (a, b @ SqlExpr::Lit(_)) | (b @ SqlExpr::Lit(_), a) | (a, b) => { + let a = expect_op_type(op, type_expr(vars, a, None)?)?; + let b = expect_op_type(op, type_expr(vars, b, Some(a.ty()))?)?; + Ok(Expr::Bin(op, Box::new(a), Box::new(b))) + } + }, + (SqlExpr::Bin(..), Some(t)) => Err(unexpected_type(&Type::BOOL, t)), + } +} + +/// Parses a source text literal as a particular type +fn parse(v: String, ty: &Type) -> TypingResult { + let constraint_err = |v, ty| TypingError::from(ConstraintViolation::lit(v, ty)); + match ty { + Type::Alg(AlgebraicType::I8) => v + .parse::() + .map(AlgebraicValue::I8) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(AlgebraicType::U8) => v + .parse::() + .map(AlgebraicValue::U8) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(AlgebraicType::I16) => v + .parse::() + .map(AlgebraicValue::I16) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, 
ty)), + Type::Alg(AlgebraicType::U16) => v + .parse::() + .map(AlgebraicValue::U16) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(AlgebraicType::I32) => v + .parse::() + .map(AlgebraicValue::I32) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(AlgebraicType::U32) => v + .parse::() + .map(AlgebraicValue::U32) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(AlgebraicType::I64) => v + .parse::() + .map(AlgebraicValue::I64) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(AlgebraicType::U64) => v + .parse::() + .map(AlgebraicValue::U64) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(AlgebraicType::F32) => v + .parse::() + .map(|v| AlgebraicValue::F32(v.into())) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(AlgebraicType::F64) => v + .parse::() + .map(|v| AlgebraicValue::F64(v.into())) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(AlgebraicType::I128) => v + .parse::() + .map(|v| AlgebraicValue::I128(v.into())) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(AlgebraicType::U128) => v + .parse::() + .map(|v| AlgebraicValue::U128(v.into())) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(t) if t.is_bytes() => from_hex_pad::, _>(&v) + .map(|v| AlgebraicValue::Bytes(v.into_boxed_slice())) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(t) if t.is_identity() => Identity::from_hex(&v) + .map(AlgebraicValue::from) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + Type::Alg(t) if t.is_address() => Address::from_hex(&v) + .map(AlgebraicValue::from) + .map(|v| Expr::Lit(v, ty.clone())) + .map_err(|_| constraint_err(&v, ty)), + _ => 
Err(constraint_err(&v, ty)), + } +} + +/// Returns a type constraint violation for an unexpected type +fn unexpected_type(expected: &Type, actual: &Type) -> TypingError { + ConstraintViolation::eq(expected, actual).into() +} + +/// Returns an error if the input type is not a table type [Type::Var] +fn expect_table_type(expr: RelExpr) -> TypingResult { + match expr.ty() { + Type::Var(_) => Ok(expr), + _ => Err(Unsupported::SubReturnType.into()), + } +} + +/// Assert that this type is compatible with this operator +fn expect_op_type(op: BinOp, expr: Expr) -> TypingResult { + match (op, expr.ty()) { + // Logic operators take booleans + (BinOp::And | BinOp::Or, Type::Alg(AlgebraicType::Bool)) => Ok(expr), + // Comparison operators take integers or floats + (BinOp::Lt | BinOp::Gt | BinOp::Lte | BinOp::Gte, Type::Alg(t)) if t.is_integer() || t.is_float() => Ok(expr), + // Equality supports numerics, strings, and bytes + (BinOp::Eq | BinOp::Ne, Type::Alg(t)) + if t.is_bool() + || t.is_integer() + || t.is_float() + || t.is_string() + || t.is_bytes() + || t.is_identity() + || t.is_address() => + { + Ok(expr) + } + (op, ty) => Err(ConstraintViolation::op(op, ty).into()), + } +} + +fn assert_eq_types(a: &Type, b: &Type) -> TypingResult<()> { + if a == b { + Ok(()) + } else { + Err(unexpected_type(a, b)) + } +} + +#[cfg(test)] +mod tests { + use spacetimedb_lib::{db::raw_def::v9::RawModuleDefV9Builder, AlgebraicType, ProductType}; + use spacetimedb_primitives::TableId; + use spacetimedb_schema::{def::ModuleDef, schema::TableSchema}; + use std::sync::Arc; + + use super::{parse_and_type_sub, SchemaView}; + + fn module_def() -> ModuleDef { + let mut builder = RawModuleDefV9Builder::new(); + builder.build_table_with_new_type( + "t", + ProductType::from([ + ("u32", AlgebraicType::U32), + ("f32", AlgebraicType::F32), + ("str", AlgebraicType::String), + ("arr", AlgebraicType::array(AlgebraicType::String)), + ]), + true, + ); + builder.build_table_with_new_type( + "s", + 
ProductType::from([ + ("id", AlgebraicType::identity()), + ("u32", AlgebraicType::U32), + ("arr", AlgebraicType::array(AlgebraicType::String)), + ("bytes", AlgebraicType::bytes()), + ]), + true, + ); + builder.finish().try_into().expect("failed to generate module def") + } + + struct SchemaViewer(ModuleDef); + + impl SchemaView for SchemaViewer { + fn schema(&self, name: &str, _: bool) -> Option> { + self.0.table(name).map(|def| { + Arc::new(TableSchema::from_module_def( + def, + TableId(if *def.name == *"t" { 0 } else { 1 }), + )) + }) + } + } + + #[test] + fn valid() { + let tx = SchemaViewer(module_def()); + + for sql in [ + "select * from t", + "select * from t where true", + "select * from t where t.u32 = 1", + "select * from t where t.u32 = 1 or t.str = ''", + "select * from s where s.bytes = 0xABCD", + "select * from s where s.bytes = X'ABCD'", + "select * from s as r where r.bytes = 0xABCD", + "select * from (select t.* from t join s)", + "select * from (select t.* from t join s on t.u32 = s.u32 where t.f32 = 0.1)", + "select * from (select t.* from t join (select s.u32 from s) s on t.u32 = s.u32)", + ] { + let result = parse_and_type_sub(sql, &tx).inspect_err(|_| { + // println!("sql: {}\n\n\terr: {}\n", sql, err); + }); + assert!(result.is_ok()); + } + } + + #[test] + fn invalid() { + let tx = SchemaViewer(module_def()); + + for sql in [ + // Table r does not exist + "select * from r", + // Field u32 is not in scope + "select * from t where u32 = 1", + // Field a does not exist on table t + "select * from t where t.a = 1", + // Field a does not exist on table t + "select * from t as r where r.a = 1", + // Field u32 is not a string + "select * from t where t.u32 = 'str'", + // Field u32 is not a float + "select * from t where t.u32 = 1.3", + // t is not in scope after alias + "select * from t as r where t.u32 = 5", + // Field u32 is not in scope + "select u32 from t", + // Subscriptions must be typed to a single table + "select t.u32 from t", + // 
Subscriptions must be typed to a single table + "select * from t join s", + // Product values are not comparable + "select * from (select t.* from t join s on t.arr = s.arr)", + // Subscriptions must be typed to a single table + "select * from (select s.* from t join (select s.u32 from s) s on t.u32 = s.u32)", + // Field bytes is no longer in scope + "select * from (select t.* from t join (select s.u32 from s) s on s.bytes = 0xABCD)", + ] { + let result = parse_and_type_sub(sql, &tx).inspect_err(|_| { + // println!("sql: {}\n\n\terr: {}\n", sql, err); + }); + assert!(result.is_err()); + } + } +} diff --git a/crates/planner/src/logical/errors.rs b/crates/planner/src/logical/errors.rs new file mode 100644 index 0000000000..3234b25159 --- /dev/null +++ b/crates/planner/src/logical/errors.rs @@ -0,0 +1,98 @@ +use spacetimedb_sql_parser::{ast::BinOp, parser::errors::SqlParseError}; +use thiserror::Error; + +use super::expr::Type; + +#[derive(Error, Debug)] +pub enum ConstraintViolation { + #[error("(expected) {expected} != (actual) {actual}")] + Eq { expected: Type, actual: Type }, + #[error("{0} is not a numeric type")] + Num(Type), + #[error("{0} cannot be interpreted as a byte array")] + Hex(Type), + #[error("{0} cannot be parsed as {1}")] + Lit(String, Type), + #[error("{1} is not supported by the binary operator {0}")] + Op(BinOp, Type), +} + +impl ConstraintViolation { + // Types are not equal + pub fn eq(expected: &Type, actual: &Type) -> Self { + let expected = expected.clone(); + let actual = actual.clone(); + Self::Eq { expected, actual } + } + + // Not a numeric type + pub fn num(t: &Type) -> Self { + Self::Num(t.clone()) + } + + // Not a type that can be compared to a hex value + pub fn hex(t: &Type) -> Self { + Self::Hex(t.clone()) + } + + // This literal expression cannot be parsed as this type + pub fn lit(v: &str, ty: &Type) -> Self { + Self::Lit(v.to_string(), ty.clone()) + } + + // This type is not supported by this operator + pub fn op(op: BinOp, ty: 
&Type) -> Self { + Self::Op(op, ty.clone()) + } +} + +#[derive(Error, Debug)] +pub enum ResolutionError { + #[error("Cannot resolve {0}")] + Var(String), + #[error("Cannot resolve table {0}")] + Table(String), + #[error("Cannot resolve field {1} in {0}")] + Field(String, String), + #[error("Cannot resolve type for literal expression")] + UntypedLiteral, +} + +impl ResolutionError { + /// Cannot resolve name + pub fn unresolved_var(name: &str) -> Self { + Self::Var(name.to_string()) + } + + /// Cannot resolve table name + pub fn unresolved_table(name: &str) -> Self { + Self::Table(name.to_string()) + } + + /// Cannot resolve field name within table + pub fn unresolved_field(table: &str, field: &str) -> Self { + Self::Field(table.to_string(), field.to_string()) + } +} + +#[derive(Error, Debug)] +pub enum Unsupported { + #[error("Subscriptions must return a single table type")] + SubReturnType, + #[error("Unsupported expression in projection")] + ProjectExpr, + #[error("Unqualified column projections are not supported")] + UnqualifiedProjectExpr, +} + +#[derive(Error, Debug)] +pub enum TypingError { + #[error(transparent)] + Unsupported(#[from] Unsupported), + #[error(transparent)] + Constraint(#[from] ConstraintViolation), + #[error(transparent)] + ResolutionError(#[from] ResolutionError), + #[error(transparent)] + ParseError(#[from] SqlParseError), +} diff --git a/crates/planner/src/logical/expr.rs b/crates/planner/src/logical/expr.rs new file mode 100644 index 0000000000..53f9daa34c --- /dev/null +++ b/crates/planner/src/logical/expr.rs @@ -0,0 +1,317 @@ +use std::fmt::{Display, Formatter}; +use std::sync::Arc; + +use spacetimedb_lib::AlgebraicValue; +use spacetimedb_sats::algebraic_type::fmt::{fmt_algebraic_type, fmt_product_type}; +use spacetimedb_sats::AlgebraicType; +use spacetimedb_sats::ProductType; +use spacetimedb_schema::schema::{ColumnSchema, TableSchema}; +use spacetimedb_sql_parser::ast::BinOp; + +use crate::static_assert_size; + +use 
super::bind::TypingResult; +use super::errors::{ConstraintViolation, ResolutionError, TypingError}; + +/// The type of a relation or scalar expression +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Type { + /// A base relation + Var(Arc), + /// A derived relation + Row(ProductType), + /// A join relation + Tup(Box<[Type]>), + /// A column type + Alg(AlgebraicType), +} + +static_assert_size!(Type, 24); + +impl Display for Type { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Self::Alg(ty) => write!(f, "{}", fmt_algebraic_type(ty)), + Self::Var(schema) => write!(f, "{}", fmt_product_type(schema.get_row_type())), + Self::Row(ty) => write!(f, "{}", fmt_product_type(ty)), + Self::Tup(types) => { + write!(f, "(")?; + write!(f, "{}", types[0])?; + for t in &types[1..] { + write!(f, ", {}", t)?; + } + write!(f, ")") + } + } + } +} + +impl Type { + /// A constant for the bool type + pub const BOOL: Self = Self::Alg(AlgebraicType::Bool); + + /// A constant for the string type + pub const STR: Self = Self::Alg(AlgebraicType::String); + + /// Is this a numeric type? + pub fn is_num(&self) -> bool { + match self { + Self::Alg(t) => t.is_integer() || t.is_float(), + _ => false, + } + } + + /// Is this a hex type? + pub fn is_hex(&self) -> bool { + match self { + Self::Alg(t) => t.is_bytes() || t.is_identity() || t.is_address(), + _ => false, + } + } + + /// Find a field and its position in a Row or Var type + pub fn find(&self, field: &str) -> Option<(usize, &AlgebraicType)> { + match self { + Self::Var(schema) => schema + .columns() + .iter() + .enumerate() + .find(|(_, ColumnSchema { col_name, .. })| col_name.as_ref() == field) + .map(|(i, ColumnSchema { col_type, .. 
})| (i, col_type)), + Self::Row(row) => row + .elements + .iter() + .enumerate() + .find(|(_, elem)| elem.has_name(field)) + .map(|(i, elem)| (i, &elem.algebraic_type)), + _ => None, + } + } +} + +/// A logical relational expression +#[derive(Debug)] +pub enum RelExpr { + /// A base table + RelVar(Arc, Type), + /// A filter + Select(Box