Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 8 additions & 2 deletions server/mergin/sync/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
PushChangeType,
)
from .interfaces import WorkspaceRole
from .storages.disk import move_to_tmp
from .storages.disk import copy_file, move_to_tmp
from ..app import db
from .storages import DiskStorage
from .utils import (
Expand Down Expand Up @@ -1021,8 +1021,14 @@ def construct_checkpoint(self) -> bool:

project: Project = basefile.file.project
os.makedirs(project.storage.diffs_dir, exist_ok=True)

try:
project.storage.geodiff.concat_changes(diffs_paths, self.abs_path)
if len(diffs_paths) == 1:
# if there is only one diff, we can just copy it as a checkpoint without merging
# geodiff.concat_changes is not able to concat one diff
copy_file(diffs_paths[0], self.abs_path)
else:
project.storage.geodiff.concat_changes(diffs_paths, self.abs_path)
except (GeoDiffLibError, GeoDiffLibConflictError):
logging.error(
f"Geodiff: Failed to merge diffs for file {self.file_path_id}"
Expand Down
20 changes: 19 additions & 1 deletion server/mergin/tests/test_public_api_v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,7 +261,7 @@ def test_create_diff_checkpoint(diff_project):
shutil.copy(
os.path.join(diff_project.storage.project_dir, "v9", "test.gpkg"), base_gpkg
)
for i in range(22):
for i in range(23):
sql = f"UPDATE simple SET rating={i}"
execute_query(base_gpkg, sql)
pv = push_change(
Expand Down Expand Up @@ -348,6 +348,24 @@ def test_create_diff_checkpoint(diff_project):
assert mock.called
assert not os.path.exists(diff.abs_path)

# testing checkpoint with one diff only, no merging should happen, just copy the diff file
individual_diffs = (
FileDiff.query.filter_by(file_path_id=file_path_id, rank=0)
.filter(FileDiff.version.between(33, 36))
.all()
)
create_blank_version(diff_project) # v34
create_blank_version(diff_project) # v35
create_blank_version(diff_project) # v36
diff = FileDiff(
basefile=basefile, path=f"test.gpkg-diff-{uuid.uuid4()}", version=36, rank=1
)
db.session.add(diff)
db.session.commit()
diff.construct_checkpoint()
assert os.path.exists(diff.abs_path)
assert diffs_are_equal(diff.abs_path, individual_diffs[0].abs_path)


def test_can_create_checkpoint(diff_project):
"""Test if diff file checkpoint can be created"""
Expand Down
30 changes: 18 additions & 12 deletions server/migrations/community/bd1ec73db389_create_file_diff_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,27 +62,28 @@ def upgrade():
# migrate data
conn = op.get_bind()
conn.execute(
"""
sa.text(
"""
WITH diffs AS (
SELECT *
FROM file_history
SELECT *
FROM file_history
WHERE diff IS NOT NULL
),
basefiles AS (
SELECT DISTINCT
fh.id AS basefile_id,
SELECT DISTINCT
fh.id AS basefile_id,
fh.file_path_id,
fh.project_version_name AS basefile_version
FROM diffs d
LEFT OUTER JOIN file_history fh ON fh.file_path_id = d.file_path_id
WHERE
WHERE
fh.change = ANY(ARRAY['create'::push_change_type, 'update'::push_change_type])
),
relevant_basefiles AS (
SELECT
d.id,
d.project_version_name,
b.basefile_id,
SELECT
d.id,
d.project_version_name,
b.basefile_id,
b.basefile_version
FROM diffs d
LEFT OUTER JOIN basefiles b ON b.file_path_id = d.file_path_id AND b.basefile_version < d.project_version_name
Expand All @@ -104,6 +105,7 @@ def upgrade():
-- it seems that some projects / files might be broken so we need to play it safe here
SELECT * FROM file_diffs WHERE basefile_id IS NOT NULL;
"""
)
)

op.drop_column("file_history", "diff")
Expand All @@ -123,7 +125,8 @@ def downgrade():
# migrate data
conn = op.get_bind()
conn.execute(
"""
sa.text(
"""
UPDATE file_history fh
SET diff = jsonb_build_object(
'path', fd.path,
Expand All @@ -134,11 +137,13 @@ def downgrade():
FROM file_diff fd
WHERE fh.file_path_id = fd.file_path_id AND fh.project_version_name = fd.version AND fd.rank = 0;
"""
)
)

# if there were any broken gpkg files (omitted in upgrade), let's add a dummy diff there
conn.execute(
"""
sa.text(
"""
UPDATE file_history fh
SET diff = jsonb_build_object(
'path', 'missing-diff',
Expand All @@ -148,6 +153,7 @@ def downgrade():
)
WHERE fh.change = 'update_diff' AND fh.diff IS NULL;
"""
)
)

# add back consistency constraint
Expand Down
Loading