Avoid nested JSON elements in ps_crud #108

Merged (1 commit, Jul 11, 2025)

3 changes: 2 additions & 1 deletion crates/core/src/schema/mod.rs
@@ -6,7 +6,8 @@ use serde::Deserialize;
use sqlite::ResultCode;
use sqlite_nostd as sqlite;
pub use table_info::{
DiffIncludeOld, PendingStatement, PendingStatementValue, RawTable, Table, TableInfoFlags,
Column, DiffIncludeOld, PendingStatement, PendingStatementValue, RawTable, Table,
TableInfoFlags,
};

#[derive(Deserialize, Default)]
17 changes: 15 additions & 2 deletions crates/core/src/schema/table_info.rs
@@ -45,8 +45,21 @@ impl Table {
}
}

pub fn column_names(&self) -> impl Iterator<Item = &str> {
self.columns.iter().map(|c| c.name.as_str())
pub fn filtered_columns<'a>(
&'a self,
names: impl Iterator<Item = &'a str>,
) -> impl Iterator<Item = &'a Column> {
// First, sort all columns by name for faster lookups by name.
let mut sorted_by_name: Vec<&Column> = self.columns.iter().collect();
sorted_by_name.sort_by_key(|c| &*c.name);

names.filter_map(move |name| {
let index = sorted_by_name
.binary_search_by_key(&name, |c| c.name.as_str())
.ok()?;

Some(sorted_by_name[index])
})
}
}

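As an aside (not part of the diff): a minimal standalone sketch of the lookup strategy used by `filtered_columns` above, with a simplified `Column` shape and hypothetical helper names.

```rust
struct Column {
    name: String,
}

// Sort column references by name once, then binary-search each requested name,
// silently skipping names that don't match any column.
fn filtered<'a>(columns: &'a [Column], names: &[&str]) -> Vec<&'a Column> {
    let mut sorted: Vec<&Column> = columns.iter().collect();
    // The real implementation borrows the key instead of cloning it.
    sorted.sort_by_key(|c| c.name.clone());

    names
        .iter()
        .copied()
        .filter_map(|name| {
            let index = sorted
                .binary_search_by_key(&name, |c| c.name.as_str())
                .ok()?;
            Some(sorted[index])
        })
        .collect()
}

fn main() {
    let columns = vec![
        Column { name: "description".into() },
        Column { name: "created_at".into() },
    ];
    // Unknown names are dropped rather than causing an error.
    let selected = filtered(&columns, &["description", "missing"]);
    assert_eq!(selected.len(), 1);
    assert_eq!(selected[0].name, "description");
}
```

Sorting once keeps each lookup at O(log n) instead of scanning the full column list per requested name.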
28 changes: 26 additions & 2 deletions crates/core/src/util.rs
@@ -1,7 +1,9 @@
extern crate alloc;

use core::fmt::{Display, Write};

use alloc::format;
use alloc::string::String;
use alloc::string::{String, ToString};

#[cfg(not(feature = "getrandom"))]
use crate::sqlite;
@@ -13,7 +15,7 @@ use uuid::Uuid;
use uuid::Builder;

pub fn quote_string(s: &str) -> String {
format!("'{:}'", s.replace("'", "''"))
return QuotedString(s).to_string();
}

pub fn quote_json_path(s: &str) -> String {
@@ -32,6 +34,28 @@ pub fn quote_internal_name(name: &str, local_only: bool) -> String {
}
}

/// A string that [Display]s as a SQLite string literal.
pub struct QuotedString<'a>(pub &'a str);

impl<'a> Display for QuotedString<'a> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
const SINGLE_QUOTE: char = '\'';
const ESCAPE_SEQUENCE: &'static str = "''";

f.write_char(SINGLE_QUOTE)?;

for (i, group) in self.0.split(SINGLE_QUOTE).enumerate() {
if i != 0 {
f.write_str(ESCAPE_SEQUENCE)?;
}

f.write_str(group)?;
}

f.write_char(SINGLE_QUOTE)
}
}

pub fn quote_identifier_prefixed(prefix: &str, name: &str) -> String {
return format!("\"{:}{:}\"", prefix, name.replace("\"", "\"\""));
}
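For reference, a hedged sketch of the literal format `QuotedString` is meant to produce (the import path is illustrative; adjust for where the type is actually exported):

```rust
// Embedded single quotes are doubled and the whole value is wrapped in single
// quotes, matching SQLite string-literal syntax; quote_string() now delegates
// to this Display implementation.
use crate::util::QuotedString; // illustrative path

fn demo() {
    assert_eq!(format!("{}", QuotedString("list")), "'list'");
    assert_eq!(format!("{}", QuotedString("it's")), "'it''s'");
}
```

Using the `Display` impl directly inside larger `format!` calls (as `json_object_fragment` now does) also avoids the intermediate `String` that the old `str::replace`-based `quote_string` allocated.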
58 changes: 38 additions & 20 deletions crates/core/src/views.rs
@@ -12,7 +12,7 @@ use sqlite_nostd::{self as sqlite};

use crate::create_sqlite_text_fn;
use crate::error::PowerSyncError;
use crate::schema::{DiffIncludeOld, Table};
use crate::schema::{Column, DiffIncludeOld, Table};
use crate::util::*;

fn powersync_view_sql_impl(
@@ -88,11 +88,12 @@ fn powersync_trigger_delete_sql_impl(
match &table_info.diff_include_old {
Some(include_old) => {
let mut json = match include_old {
DiffIncludeOld::OnlyForColumns { columns } => {
json_object_fragment("OLD", &mut columns.iter().map(|c| c.as_str()))
}
DiffIncludeOld::OnlyForColumns { columns } => json_object_fragment(
"OLD",
&mut table_info.filtered_columns(columns.iter().map(|c| c.as_str())),
),
DiffIncludeOld::ForAllColumns => {
json_object_fragment("OLD", &mut table_info.column_names())
json_object_fragment("OLD", &mut table_info.columns.iter())
}
}?;

@@ -174,7 +175,7 @@ fn powersync_trigger_insert_sql_impl(
let trigger_name = quote_identifier_prefixed("ps_view_insert_", view_name);
let type_string = quote_string(name);

let json_fragment = json_object_fragment("NEW", &mut table_info.column_names())?;
let json_fragment = json_object_fragment("NEW", &mut table_info.columns.iter())?;

let (metadata_key, metadata_value) = if table_info.flags.include_metadata() {
(",metadata", ",NEW._metadata")
@@ -247,15 +248,15 @@ fn powersync_trigger_update_sql_impl(
let trigger_name = quote_identifier_prefixed("ps_view_update_", view_name);
let type_string = quote_string(name);

let json_fragment_new = json_object_fragment("NEW", &mut table_info.column_names())?;
let json_fragment_old = json_object_fragment("OLD", &mut table_info.column_names())?;
let json_fragment_new = json_object_fragment("NEW", &mut table_info.columns.iter())?;
let json_fragment_old = json_object_fragment("OLD", &mut table_info.columns.iter())?;

let mut old_values_fragment = match &table_info.diff_include_old {
None => None,
Some(DiffIncludeOld::ForAllColumns) => Some(json_fragment_old.clone()),
Some(DiffIncludeOld::OnlyForColumns { columns }) => Some(json_object_fragment(
"OLD",
&mut columns.iter().map(|c| c.as_str()),
&mut table_info.filtered_columns(columns.iter().map(|c| c.as_str())),
)?),
};

@@ -266,9 +267,12 @@
let filtered_new_fragment = match &table_info.diff_include_old {
// When include_old_only_when_changed is combined with a column filter, make sure we
// only include the powersync_diff of columns matched by the filter.
Some(DiffIncludeOld::OnlyForColumns { columns }) => Cow::Owned(
json_object_fragment("NEW", &mut columns.iter().map(|c| c.as_str()))?,
),
Some(DiffIncludeOld::OnlyForColumns { columns }) => {
Cow::Owned(json_object_fragment(
"NEW",
&mut table_info.filtered_columns(columns.iter().map(|c| c.as_str())),
)?)
}
_ => Cow::Borrowed(json_fragment_new.as_str()),
};

@@ -401,21 +405,35 @@ pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> {
/// Example output with prefix "NEW": "json_object('id', NEW.id, 'name', NEW.name, 'age', NEW.age)".
fn json_object_fragment<'a>(
prefix: &str,
name_results: &mut dyn Iterator<Item = &'a str>,
columns: &mut dyn Iterator<Item = &'a Column>,
) -> Result<String, PowerSyncError> {
// floor(SQLITE_MAX_FUNCTION_ARG / 2).
// To keep databases portable, we use the default limit of 100 args for this,
// and don't try to query the limit dynamically.
const MAX_ARG_COUNT: usize = 50;

let mut column_names_quoted: Vec<String> = alloc::vec![];
while let Some(name) = name_results.next() {
let quoted: String = format!(
"{:}, {:}.{:}",
quote_string(name),
prefix,
quote_identifier(name)
);
while let Some(column) = columns.next() {
let name = &*column.name;
let quoted = match &*column.type_name {
// We really want the individual columns here to appear as they show up in the database.
// For text columns however, it's possible that e.g. NEW.column was created by a JSON
// function, meaning that it has a JSON subtype active - causing the json_object() call
// we're about to emit to include it as a subobject instead of a string.
"TEXT" | "text" => format!(
"{:}, concat({:}.{:})",
QuotedString(name),
prefix,
quote_identifier(name)
),
_ => format!(
"{:}, {:}.{:}",
QuotedString(name),
prefix,
quote_identifier(name)
),
};

column_names_quoted.push(quoted);
}

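The JSON-subtype comment above is the crux of this PR. A standalone way to observe that SQLite behaviour (not part of the change; this sketch uses the `rusqlite` crate purely for illustration and needs a SQLite build that provides `concat()`, i.e. 3.44 or newer):

```rust
use rusqlite::Connection;

fn main() -> rusqlite::Result<()> {
    let db = Connection::open_in_memory()?;

    // json_object() notices the JSON subtype on the inner value and nests it:
    let nested: String =
        db.query_row("SELECT json_object('col', json_object())", [], |row| row.get(0))?;
    assert_eq!(nested, r#"{"col":{}}"#);

    // concat() returns plain TEXT, dropping the subtype, so the value is kept
    // as a string, which is what ps_crud entries should contain:
    let as_text: String = db.query_row(
        "SELECT json_object('col', concat(json_object()))",
        [],
        |row| row.get(0),
    )?;
    assert_eq!(as_text, r#"{"col":"{}"}"#);

    Ok(())
}
```

The generated triggers wrap only TEXT columns in `concat()`, since those are the columns whose `NEW.`/`OLD.` values could carry a JSON subtype from an earlier `json()` or `json_object()` call.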
20 changes: 20 additions & 0 deletions dart/test/crud_test.dart
@@ -661,5 +661,25 @@ void main() {
// The update which didn't change any rows should not be recorded.
expect(db.select('SELECT * FROM ps_crud'), hasLength(1));
});

test('json values are included as text', () {
db
..execute('select powersync_replace_schema(?)', [
json.encode({
'tables': [
{
'name': 'items',
'columns': [
{'name': 'col', 'type': 'text'}
],
}
]
})
])
..execute('INSERT INTO items (id, col) VALUES (uuid(), json_object())');

final [update] = db.select('SELECT data FROM ps_crud');
expect(json.decode(update['data']), containsPair('data', {'col': '{}'}));
});
});
}
24 changes: 12 additions & 12 deletions dart/test/utils/migration_fixtures.dart
@@ -536,8 +536,8 @@ END
THEN RAISE (FAIL, 'id should be text')
END;
INSERT INTO "ps_data__lists"
SELECT NEW.id, json_object('description', NEW."description");
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', 'lists', 'id', NEW.id, 'data', json(powersync_diff('{}', json_object('description', NEW."description")))));
SELECT NEW.id, json_object('description', concat(NEW."description"));
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', 'lists', 'id', NEW.id, 'data', json(powersync_diff('{}', json_object('description', concat(NEW."description"))))));
INSERT INTO ps_oplog(bucket, op_id, op, row_type, row_id, hash, superseded)
SELECT '$local',
1,
@@ -557,9 +557,9 @@ BEGIN
THEN RAISE (FAIL, 'Cannot update id')
END;
UPDATE "ps_data__lists"
SET data = json_object('description', NEW."description")
SET data = json_object('description', concat(NEW."description"))
WHERE id = NEW.id;
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PATCH', 'type', 'lists', 'id', NEW.id, 'data', json(powersync_diff(json_object('description', OLD."description"), json_object('description', NEW."description")))));
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PATCH', 'type', 'lists', 'id', NEW.id, 'data', json(powersync_diff(json_object('description', concat(OLD."description")), json_object('description', concat(NEW."description"))))));
INSERT INTO ps_oplog(bucket, op_id, op, row_type, row_id, hash, superseded)
SELECT '$local',
1,
Expand Down Expand Up @@ -598,8 +598,8 @@ END
WHEN (typeof(NEW.id) != 'text')
THEN RAISE (FAIL, 'id should be text')
END;
INSERT INTO "ps_data__lists" SELECT NEW.id, json_object('description', NEW."description");
INSERT INTO powersync_crud(op,id,type,data) VALUES ('PUT',NEW.id,'lists',json(powersync_diff('{}', json_object('description', NEW."description"))));
INSERT INTO "ps_data__lists" SELECT NEW.id, json_object('description', concat(NEW."description"));
INSERT INTO powersync_crud(op,id,type,data) VALUES ('PUT',NEW.id,'lists',json(powersync_diff('{}', json_object('description', concat(NEW."description")))));
END
;CREATE TRIGGER "ps_view_update_lists"
INSTEAD OF UPDATE ON "lists"
@@ -610,9 +610,9 @@ BEGIN
THEN RAISE (FAIL, 'Cannot update id')
END;
UPDATE "ps_data__lists"
SET data = json_object('description', NEW."description")
SET data = json_object('description', concat(NEW."description"))
WHERE id = NEW.id;
INSERT INTO powersync_crud(op,type,id,data,options) VALUES ('PATCH','lists',NEW.id,json(powersync_diff(json_object('description', OLD."description"), json_object('description', NEW."description"))),0);
INSERT INTO powersync_crud(op,type,id,data,options) VALUES ('PATCH','lists',NEW.id,json(powersync_diff(json_object('description', concat(OLD."description")), json_object('description', concat(NEW."description")))),0);
END
''';

@@ -636,8 +636,8 @@ END
WHEN (typeof(NEW.id) != 'text')
THEN RAISE (FAIL, 'id should be text')
END;
INSERT INTO "ps_data__lists" SELECT NEW.id, json_object('description', NEW."description");
INSERT INTO powersync_crud(op,id,type,data) VALUES ('PUT',NEW.id,'lists',json(powersync_diff('{}', json_object('description', NEW."description"))));
INSERT INTO "ps_data__lists" SELECT NEW.id, json_object('description', concat(NEW."description"));
INSERT INTO powersync_crud(op,id,type,data) VALUES ('PUT',NEW.id,'lists',json(powersync_diff('{}', json_object('description', concat(NEW."description")))));
END
;CREATE TRIGGER "ps_view_update_lists"
INSTEAD OF UPDATE ON "lists"
@@ -648,8 +648,8 @@ BEGIN
THEN RAISE (FAIL, 'Cannot update id')
END;
UPDATE "ps_data__lists"
SET data = json_object('description', NEW."description")
SET data = json_object('description', concat(NEW."description"))
WHERE id = NEW.id;
INSERT INTO powersync_crud(op,type,id,data,options) VALUES ('PATCH','lists',NEW.id,json(powersync_diff(json_object('description', OLD."description"), json_object('description', NEW."description"))),0);
INSERT INTO powersync_crud(op,type,id,data,options) VALUES ('PATCH','lists',NEW.id,json(powersync_diff(json_object('description', concat(OLD."description")), json_object('description', concat(NEW."description")))),0);
END
''';