[feat] add a simple conflict resolution mechanism.
The server tries to merge conflicting saves of the same layer. What it does: - use the `If-Unmodified-Since` header to check whether changes happened to the stored data; - compare the incoming version with its reference version to get a diff; - reapply the diff on top of the latest version; - if the merge is not possible, return a "412 Precondition Failed" HTTP response; - if the merge worked, return the merged document, to be updated by the client.
This commit is contained in:
parent
34e0461ca8
commit
f37ed700f6
5 changed files with 336 additions and 24 deletions
|
@ -1535,6 +1535,12 @@ L.U.DataLayer = L.Evented.extend({
|
|||
this.map.post(this.getSaveUrl(), {
|
||||
data: formData,
|
||||
callback: function (data, response) {
|
||||
// Response contains geojson only if save has conflicted and conflicts have
|
||||
// been resolved. So we need to reload to get extra data (saved from someone else)
|
||||
if (data.geojson) {
|
||||
this.clear()
|
||||
this.fromGeoJSON(data.geojson)
|
||||
}
|
||||
this._geojson = geojson
|
||||
this._last_modified = response.getResponseHeader('Last-Modified')
|
||||
this.setUmapId(data.id)
|
||||
|
|
|
@ -1,8 +1,11 @@
|
|||
import json
|
||||
import time
|
||||
from copy import deepcopy
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from django.core.files.base import ContentFile
|
||||
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||
from django.urls import reverse
|
||||
|
||||
from umap.models import DataLayer, Map
|
||||
|
@ -19,7 +22,10 @@ def post_data():
|
|||
"display_on_load": True,
|
||||
"settings": '{"displayOnLoad": true, "browsable": true, "name": "name"}',
|
||||
"rank": 0,
|
||||
"geojson": '{"type":"FeatureCollection","features":[{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-3.1640625,53.014783245859235],[-3.1640625,51.86292391360244],[-0.50537109375,51.385495069223204],[1.16455078125,52.38901106223456],[-0.41748046875,53.91728101547621],[-2.109375,53.85252660044951],[-3.1640625,53.014783245859235]]]},"properties":{"_umap_options":{},"name":"Ho god, sounds like a polygouine"}},{"type":"Feature","geometry":{"type":"LineString","coordinates":[[1.8017578124999998,51.16556659836182],[-0.48339843749999994,49.710272582105695],[-3.1640625,50.0923932109388],[-5.60302734375,51.998410382390325]]},"properties":{"_umap_options":{},"name":"Light line"}},{"type":"Feature","geometry":{"type":"Point","coordinates":[0.63720703125,51.15178610143037]},"properties":{"_umap_options":{},"name":"marker he"}}],"_umap_options":{"displayOnLoad":true,"name":"new name","id":1668,"remoteData":{},"color":"LightSeaGreen","description":"test"}}',
|
||||
"geojson": SimpleUploadedFile(
|
||||
"name.json",
|
||||
b'{"type":"FeatureCollection","features":[{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-3.1640625,53.014783245859235],[-3.1640625,51.86292391360244],[-0.50537109375,51.385495069223204],[1.16455078125,52.38901106223456],[-0.41748046875,53.91728101547621],[-2.109375,53.85252660044951],[-3.1640625,53.014783245859235]]]},"properties":{"_umap_options":{},"name":"Ho god, sounds like a polygouine"}},{"type":"Feature","geometry":{"type":"LineString","coordinates":[[1.8017578124999998,51.16556659836182],[-0.48339843749999994,49.710272582105695],[-3.1640625,50.0923932109388],[-5.60302734375,51.998410382390325]]},"properties":{"_umap_options":{},"name":"Light line"}},{"type":"Feature","geometry":{"type":"Point","coordinates":[0.63720703125,51.15178610143037]},"properties":{"_umap_options":{},"name":"marker he"}}],"_umap_options":{"displayOnLoad":true,"name":"new name","id":1668,"remoteData":{},"color":"LightSeaGreen","description":"test"}}',
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
|
@ -385,3 +391,159 @@ def test_anonymous_user_can_edit_if_inherit_and_map_in_public_mode(
|
|||
assert response.status_code == 200
|
||||
modified_datalayer = DataLayer.objects.get(pk=datalayer.pk)
|
||||
assert modified_datalayer.name == name
|
||||
|
||||
|
||||
@pytest.fixture
def reference_data():
    """Baseline GeoJSON layer: Point (-1, 2), LineString (2, 3), Point (3, 4)."""

    def _feature(geom_type, coordinates, name):
        # Keep each feature literal short and uniform.
        return {
            "type": "Feature",
            "geometry": {"type": geom_type, "coordinates": coordinates},
            "properties": {"_umap_options": {}, "name": name},
        }

    return {
        "type": "FeatureCollection",
        "features": [
            _feature("Point", [-1, 2], "foo"),
            _feature("LineString", [2, 3], "bar"),
            _feature("Point", [3, 4], "marker"),
        ],
        "_umap_options": {
            "displayOnLoad": True,
            "name": "new name",
            "id": 1668,
            "remoteData": {},
            "color": "LightSeaGreen",
            "description": "test",
        },
    }
|
||||
|
||||
|
||||
def test_optimistic_merge_both_added(client, datalayer, map, reference_data):
    """Two clients append different features from the same reference: both are kept."""
    url = reverse("datalayer_update", args=(map.pk, datalayer.pk))
    client.login(username=map.owner.username, password="123123")

    # Reference data is:
    # Point (-1, 2); Linestring (2, 3); Point (3, 4).

    post_data = {
        "name": "name",
        "display_on_load": True,
        "rank": 0,
        "geojson": SimpleUploadedFile(
            "foo.json", json.dumps(reference_data).encode("utf-8")
        ),
    }
    response = client.post(url, post_data, follow=True)
    assert response.status_code == 200

    # Fetch the stored layer to learn its Last-Modified timestamp; both
    # clients below pretend they edited on top of this exact version.
    response = client.get(reverse("datalayer_view", args=(map.pk, datalayer.pk)))
    reference_timestamp = response["Last-Modified"]

    # Client 1 adds "Point 5, 6" to the existing data
    client1_feature = {
        "type": "Feature",
        "geometry": {"type": "Point", "coordinates": [5, 6]},
        "properties": {"_umap_options": {}, "name": "marker"},
    }
    client1_data = deepcopy(reference_data)
    client1_data["features"].append(client1_feature)
    # Sleep to change the current timestamp (used in the If-Unmodified-Since header)
    time.sleep(1)
    post_data["geojson"] = SimpleUploadedFile(
        "foo.json",
        json.dumps(client1_data).encode("utf-8"),
    )
    response = client.post(
        url, post_data, follow=True, HTTP_IF_UNMODIFIED_SINCE=reference_timestamp
    )
    assert response.status_code == 200

    # Client 2 adds "Point 7, 8" instead, on the same reference.
    client2_feature = {
        "type": "Feature",
        "geometry": {"type": "Point", "coordinates": [7, 8]},
        "properties": {"_umap_options": {}, "name": "marker"},
    }
    client2_data = deepcopy(reference_data)
    client2_data["features"].append(client2_feature)

    post_data["geojson"] = SimpleUploadedFile(
        "foo.json",
        json.dumps(client2_data).encode("utf-8"),
    )
    # Client 2 still sends the ORIGINAL (now stale) timestamp, so the server
    # must merge its change with the one client 1 already saved.
    response = client.post(
        url, post_data, follow=True, HTTP_IF_UNMODIFIED_SINCE=reference_timestamp
    )
    assert response.status_code == 200
    modified_datalayer = DataLayer.objects.get(pk=datalayer.pk)
    merged_features = json.load(modified_datalayer.geojson)["features"]

    # Every original feature survived the merge...
    for reference_feature in reference_data["features"]:
        assert reference_feature in merged_features

    # ...and both concurrent additions are present.
    assert client1_feature in merged_features
    assert client2_feature in merged_features
|
||||
|
||||
|
||||
def test_optimistic_merge_conflicting_change_raises(
    client, datalayer, map, reference_data
):
    """Two clients change the SAME feature from the same reference: expect 412."""
    url = reverse("datalayer_update", args=(map.pk, datalayer.pk))
    client.login(username=map.owner.username, password="123123")

    # Reference data is:
    # Point (-1, 2); Linestring (2, 3); Point (3, 4).

    post_data = {
        "name": "name",
        "display_on_load": True,
        "rank": 0,
        "geojson": SimpleUploadedFile(
            "foo.json", json.dumps(reference_data).encode("utf-8")
        ),
    }
    response = client.post(url, post_data, follow=True)
    assert response.status_code == 200

    # Record the Last-Modified of the stored version both clients start from.
    response = client.get(reverse("datalayer_view", args=(map.pk, datalayer.pk)))
    reference_timestamp = response["Last-Modified"]

    # First client changes the first feature.
    client1_data = deepcopy(reference_data)
    client1_data["features"][0]["geometry"] = {"type": "Point", "coordinates": [5, 6]}

    # Sleep to change the current timestamp (used in the If-Unmodified-Since header)
    time.sleep(1)
    post_data["geojson"] = SimpleUploadedFile(
        "foo.json",
        json.dumps(client1_data).encode("utf-8"),
    )
    response = client.post(
        url, post_data, follow=True, HTTP_IF_UNMODIFIED_SINCE=reference_timestamp
    )
    assert response.status_code == 200

    # Second client changes the first feature as well.
    client2_data = deepcopy(reference_data)
    client2_data["features"][0]["geometry"] = {"type": "Point", "coordinates": [7, 8]}

    post_data["geojson"] = SimpleUploadedFile(
        "foo.json",
        json.dumps(client2_data).encode("utf-8"),
    )
    # Same stale timestamp: two edits of the same feature cannot be
    # reconciled, so the server must answer 412 Precondition Failed.
    response = client.post(
        url, post_data, follow=True, HTTP_IF_UNMODIFIED_SINCE=reference_timestamp
    )
    assert response.status_code == 412

    # Check that the server rejected conflicting changes.
    modified_datalayer = DataLayer.objects.get(pk=datalayer.pk)
    merged_features = json.load(modified_datalayer.geojson)["features"]
    # Only client 1's save went through.
    assert merged_features == client1_data["features"]
|
||||
|
|
67
umap/tests/test_merge_features.py
Normal file
67
umap/tests/test_merge_features.py
Normal file
|
@ -0,0 +1,67 @@
|
|||
import pytest
|
||||
|
||||
from umap.utils import merge_features
|
||||
|
||||
|
||||
def test_adding_one_element():
    # Each side appended one distinct element: both additions must survive.
    reference = ["A", "B"]
    latest = ["A", "B", "C"]
    incoming = ["A", "B", "D"]
    assert merge_features(reference, latest, incoming) == ["A", "B", "C", "D"]
|
||||
|
||||
|
||||
def test_adding_elements():
    # Two elements added on each side: all four additions are kept.
    merged = merge_features(["A", "B"], ["A", "B", "C", "D"], ["A", "B", "E", "F"])
    assert merged == ["A", "B", "C", "D", "E", "F"]
    # Order does not count
    merged = merge_features(["A", "B"], ["B", "C", "D", "A"], ["A", "B", "E", "F"])
    assert merged == ["B", "C", "D", "A", "E", "F"]
|
||||
|
||||
|
||||
def test_adding_one_removing_one():
    # Latest replaced "B" with "C"; incoming only added "D": no conflict.
    expected = ["A", "C", "D"]
    assert merge_features(["A", "B"], ["A", "C"], ["A", "B", "D"]) == expected
|
||||
|
||||
|
||||
def test_removing_same_element():
    # No added element (otherwise we cannot know if "new" elements are old
    # modified or old removed and new added).
    result = merge_features(["A", "B", "C"], ["A", "B"], ["A", "B"])
    assert result == ["A", "B"]
|
||||
|
||||
|
||||
def test_removing_changed_element():
    # Incoming removed "B" while latest changed it: that is a conflict.
    with pytest.raises(ValueError):
        merge_features(["A", "B"], ["A", "C"], ["A"])
|
||||
|
||||
|
||||
def test_changing_removed_element():
    # Latest removed "B" while incoming changed it: that is a conflict.
    with pytest.raises(ValueError):
        merge_features(["A", "B"], ["A"], ["A", "C"])
|
||||
|
||||
|
||||
def test_changing_same_element():
    # Both sides changed "B", to different values: that is a conflict.
    with pytest.raises(ValueError):
        merge_features(["A", "B"], ["A", "D"], ["A", "C"])
    # Order does not count
    with pytest.raises(ValueError):
        merge_features(["A", "B", "C"], ["B", "D", "A"], ["A", "E", "B"])
|
|
@ -116,3 +116,32 @@ def gzip_file(from_path, to_path):
|
|||
|
||||
def is_ajax(request):
    """Return whether the client flagged the request as an XMLHttpRequest."""
    requested_with = request.headers.get("x-requested-with")
    return requested_with == "XMLHttpRequest"
|
||||
|
||||
|
||||
class ConflictError(ValueError):
    """Raised when concurrent edits of the same data cannot be merged."""

    pass


def merge_features(reference: list, latest: list, incoming: list):
    """Finds the changes between reference and incoming, and reapplies them on top of latest.

    `reference` is the version the incoming edit was based on; `latest` is the
    version currently stored. Returns the merged list.

    Raises ConflictError when an item removed or changed by `incoming` is no
    longer present in `latest` (it was also changed or removed there): the two
    edits cannot be told apart, so no automatic merge is possible.
    """
    # Fast path: nothing diverged.
    if latest == incoming:
        return latest

    # Diff between what the client started from and what it sent back.
    removed = [item for item in reference if item not in incoming]
    added = [item for item in incoming if item not in reference]

    # Ensure that items changed in the reference weren't also changed in the latest.
    for item in removed:
        if item not in latest:
            raise ConflictError()

    merged = latest[:]

    # Reapply the changes on top of the latest.
    for item in removed:
        # Bug fix: `list` has no `delete` method — the original
        # `merged.delete(item)` raised AttributeError whenever an incoming
        # save removed a feature untouched by `latest`.
        merged.remove(item)

    for item in added:
        merged.append(item)

    return merged
|
||||
|
|
|
@ -5,6 +5,7 @@ import re
|
|||
import socket
|
||||
from datetime import datetime, timedelta
|
||||
from http.client import InvalidURL
|
||||
from io import BytesIO
|
||||
from pathlib import Path
|
||||
from urllib.error import HTTPError, URLError
|
||||
from urllib.parse import quote, urlparse
|
||||
|
@ -61,7 +62,7 @@ from .forms import (
|
|||
UserProfileForm,
|
||||
)
|
||||
from .models import DataLayer, Licence, Map, Pictogram, Star, TileLayer
|
||||
from .utils import get_uri_template, gzip_file, is_ajax
|
||||
from .utils import ConflictError, get_uri_template, gzip_file, is_ajax, merge_features
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
@ -851,6 +852,10 @@ class GZipMixin(object):
|
|||
def gzip_path(self):
|
||||
return Path(f"{self.path}{self.EXT}")
|
||||
|
||||
def compute_last_modified(self, path):
    """Return the mtime of `path` formatted as an HTTP-date string."""
    mtime = os.stat(path).st_mtime
    return http_date(mtime)
|
||||
|
||||
@property
|
||||
def last_modified(self):
|
||||
# Prior to 1.3.0 we did not set gzip mtime as geojson mtime,
|
||||
|
@ -864,8 +869,7 @@ class GZipMixin(object):
|
|||
if self.accepts_gzip and self.gzip_path.exists()
|
||||
else self.path
|
||||
)
|
||||
stat = os.stat(path)
|
||||
return http_date(stat.st_mtime)
|
||||
return self.compute_last_modified(path)
|
||||
|
||||
@property
|
||||
def accepts_gzip(self):
|
||||
|
@ -929,34 +933,78 @@ class DataLayerUpdate(FormLessEditMixin, GZipMixin, UpdateView):
|
|||
model = DataLayer
|
||||
form_class = DataLayerForm
|
||||
|
||||
def form_valid(self, form):
    # Persist the uploaded layer data.
    self.object = form.save()
    # Simple response with only metadatas (client should not reload all data
    # on save)
    response = simple_json_response(
        **self.object.metadata(self.request.user, self.request)
    )
    # Expose the new storage timestamp so the client can send it back in the
    # If-Unmodified-Since header on its next save.
    response["Last-Modified"] = self.last_modified
    return response
|
||||
def has_been_modified_since(self, if_unmodified_since):
    """Whether the stored data changed since the timestamp the client sent.

    Falsy when no `If-Unmodified-Since` value was provided at all (the
    header's own falsy value is returned in that case, as before).
    """
    if not if_unmodified_since:
        return if_unmodified_since
    return self.last_modified != if_unmodified_since
|
||||
|
||||
def is_unmodified(self):
    """Optimistic concurrency control.

    True when no `If-Unmodified-Since` header was sent, or when it matches
    the current `last_modified` of the stored data.
    """
    header = self.request.META.get("HTTP_IF_UNMODIFIED_SINCE")
    if header and self.last_modified != header:
        return False
    return True
|
||||
def merge(self, if_unmodified_since):
    """
    Attempt to apply the incoming changes to the document the client was using, and
    then merge it with the last document we have on storage.

    Returns either None (if the merge failed) or the merged python GeoJSON object.
    """

    # Use If-Unmodified-Since to find the reference version in our storage,
    # i.e. the stored version the client's edit was based on.
    for name in self.object.get_versions():
        path = os.path.join(settings.MEDIA_ROOT, self.object.get_version_path(name))
        if if_unmodified_since == self.compute_last_modified(path):
            with open(path) as f:
                reference = json.loads(f.read())
            break
    else:
        # If the document is not found, we can't merge.
        return None

    # New data received in the request.
    entrant = json.loads(self.request.FILES["geojson"].read())

    # Latest known version of the data.
    with open(self.path) as f:
        latest = json.loads(f.read())

    try:
        # Reapply the client's diff (reference -> entrant) on top of latest.
        merged_features = merge_features(
            reference["features"], latest["features"], entrant["features"]
        )
        latest["features"] = merged_features
        return latest
    except ConflictError:
        # Conflicting edits of the same feature: caller answers 412.
        return None
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
self.object = self.get_object()
|
||||
if self.object.map.pk != int(self.kwargs["map_id"]):
|
||||
return HttpResponseForbidden()
|
||||
|
||||
if not self.object.can_edit(user=self.request.user, request=self.request):
|
||||
return HttpResponseForbidden()
|
||||
if not self.is_unmodified():
|
||||
|
||||
ius_header = self.request.META.get("HTTP_IF_UNMODIFIED_SINCE")
|
||||
|
||||
if self.has_been_modified_since(ius_header):
|
||||
merged = self.merge(ius_header)
|
||||
if not merged:
|
||||
return HttpResponse(status=412)
|
||||
return super(DataLayerUpdate, self).post(request, *args, **kwargs)
|
||||
|
||||
# Replace the uploaded file by the merged version.
|
||||
self.request.FILES["geojson"].file = BytesIO(
|
||||
json.dumps(merged).encode("utf-8")
|
||||
)
|
||||
|
||||
# Mark the data to be reloaded by form_valid
|
||||
self.request.session["needs_reload"] = True
|
||||
return super().post(request, *args, **kwargs)
|
||||
|
||||
def form_valid(self, form):
    # Persist the uploaded (possibly merged) layer data.
    self.object = form.save()
    data = {**self.object.metadata(self.request.user, self.request)}
    # The response carries geojson only when a merge happened in post():
    # the client must then reload data it did not know about.
    if self.request.session.get("needs_reload"):
        data["geojson"] = json.loads(self.object.geojson.read().decode())
        # NOTE(review): this flag lives in the session, which is shared by
        # all requests of the same session — a concurrent save could consume
        # or overwrite it; confirm this is acceptable.
        self.request.session["needs_reload"] = False
    response = simple_json_response(**data)

    # New storage timestamp, to be used as reference for the next save.
    response["Last-Modified"] = self.last_modified
    return response
|
||||
|
||||
|
||||
class DataLayerDelete(DeleteView):
|
||||
|
|
Loading…
Reference in a new issue