fix: make sure to order datalayer versions by time
When we changed from id to uuid, we broke the sorting, which relied on every version of a layer sharing a constant id as string prefix from one version to the next.
parent 258262ae14
commit 99207638d9
3 changed files with 51 additions and 29 deletions
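For context (an illustration added here, not part of the commit): version files are named "<prefix>_<timestamp>.geojson", and the old code sorted the raw file names. That only orders by time while every version of a layer shares the same prefix, which stopped being true once new versions got the uuid prefix while older files kept the integer id. A minimal sketch, using the uuid and old id that appear in the test below:

    # Hypothetical file names for one datalayer after the id -> uuid switch.
    names = [
        "0f1161c0-c07f-4ba4-86c5-8d8981d8a813_1440924889.geojson",  # newest, uuid prefix
        "17_1440918637.geojson",  # oldest, kept from before the switch
    ]
    names.sort(reverse=True)  # the old "recent first" sort
    print(names[0])  # -> "17_1440918637.geojson": the oldest file comes out first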
umap/models.py

@@ -1,4 +1,5 @@
 import json
+import operator
 import os
 import time
 import uuid

@@ -471,17 +472,14 @@ class DataLayer(NamedModel):
             "size": self.geojson.storage.size(self.get_version_path(name)),
         }

-    def get_versions(self):
+    @property
+    def versions(self):
         root = self.storage_root()
         names = self.geojson.storage.listdir(root)[1]
         names = [name for name in names if self.is_valid_version(name)]
-        names.sort(reverse=True)  # Recent first.
-        return names
-
-    @property
-    def versions(self):
-        names = self.get_versions()
-        return [self.version_metadata(name) for name in names]
+        versions = [self.version_metadata(name) for name in names]
+        versions.sort(reverse=True, key=operator.itemgetter("at"))
+        return versions

     def get_version(self, name):
         path = self.get_version_path(name)
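A rough sketch (assuming version_metadata() returns a dict holding at least the "name" and the "at" timestamp parsed from the file name, alongside the "size" shown in the context lines above) of what the reworked property now guarantees:

    import operator

    # Assumed metadata shape; only "at" matters for the ordering.
    versions = [
        {"name": "17_1440918637.geojson", "at": 1440918637},
        {"name": "0f1161c0-c07f-4ba4-86c5-8d8981d8a813_1440924889.geojson", "at": 1440924889},
    ]
    versions.sort(reverse=True, key=operator.itemgetter("at"))
    assert [v["at"] for v in versions] == [1440924889, 1440918637]  # recent first, whatever the prefix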
@@ -493,8 +491,13 @@ class DataLayer(NamedModel):

     def purge_old_versions(self):
         root = self.storage_root()
-        names = self.get_versions()[settings.UMAP_KEEP_VERSIONS :]
-        for name in names:
+        versions = self.versions[settings.UMAP_KEEP_VERSIONS :]
+        for version in versions:
+            name = version["name"]
+            # Should not be in the list, but ensure to not delete the file
+            # currently used in database
+            if self.geojson.name.endswith(name):
+                continue
             try:
                 self.geojson.storage.delete(os.path.join(root, name))
             except FileNotFoundError:
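The retention itself is just a slice of that recent-first list; a small sketch with a stand-in value of 3 for settings.UMAP_KEEP_VERSIONS and made-up file names:

    UMAP_KEEP_VERSIONS = 3
    versions = [{"name": f"layer_{ts}.geojson", "at": ts} for ts in (500, 400, 300, 200, 100)]
    to_delete = versions[UMAP_KEEP_VERSIONS:]
    assert [v["at"] for v in to_delete] == [200, 100]  # only the two oldest get purged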
@@ -503,8 +506,12 @@ class DataLayer(NamedModel):
     def purge_gzip(self):
         root = self.storage_root()
         names = self.geojson.storage.listdir(root)[1]
+        prefixes = [f"{self.pk}_"]
+        if self.old_id:
+            prefixes.append(f"{self.old_id}_")
+        prefixes = tuple(prefixes)
         for name in names:
-            if name.startswith(f"{self.pk}_") and name.endswith(".gz"):
+            if name.startswith(prefixes) and name.endswith(".gz"):
                 self.geojson.storage.delete(os.path.join(root, name))

     def can_edit(self, user=None, request=None):
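One note on the prefixes change above: str.startswith() accepts a tuple of prefixes but not a list, hence the tuple() conversion before the loop. A quick illustration with made-up names:

    prefixes = ["0f1161c0-c07f-4ba4-86c5-8d8981d8a813_", "17_"]
    name = "17_1440918637.geojson.gz"
    # name.startswith(prefixes) would raise TypeError: startswith wants a str or a tuple of str.
    assert name.startswith(tuple(prefixes)) and name.endswith(".gz")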
umap/tests/test_datalayer.py

@@ -1,4 +1,5 @@
 import os
+from pathlib import Path

 import pytest
 from django.core.files.base import ContentFile
@@ -60,30 +61,43 @@ def test_clone_should_clone_geojson_too(datalayer):
     assert clone.geojson.path != datalayer.geojson.path


-def test_should_remove_old_versions_on_save(datalayer, map, settings):
+def test_should_remove_old_versions_on_save(map, settings):
+    datalayer = DataLayerFactory(uuid="0f1161c0-c07f-4ba4-86c5-8d8981d8a813", old_id=17)
     settings.UMAP_KEEP_VERSIONS = 3
-    root = datalayer.storage_root()
+    root = Path(datalayer.storage_root())
     before = len(datalayer.geojson.storage.listdir(root)[1])
-    newer = f"{root}/{datalayer.pk}_1440924889.geojson"
-    medium = f"{root}/{datalayer.pk}_1440923687.geojson"
-    older = f"{root}/{datalayer.pk}_1440918637.geojson"
-    other = f"{root}/123456_1440918637.geojson"
-    for path in [medium, newer, older, other]:
-        datalayer.geojson.storage.save(path, ContentFile("{}"))
-        datalayer.geojson.storage.save(path + ".gz", ContentFile("{}"))
-    assert len(datalayer.geojson.storage.listdir(root)[1]) == 8 + before
+    newer = f"{datalayer.pk}_1440924889.geojson"
+    medium = f"{datalayer.pk}_1440923687.geojson"
+    older = f"{datalayer.pk}_1440918637.geojson"
+    with_old_id = f"{datalayer.old_id}_1440918537.geojson"
+    other = "123456_1440918637.geojson"
+    for path in [medium, newer, older, with_old_id, other]:
+        datalayer.geojson.storage.save(root / path, ContentFile("{}"))
+        datalayer.geojson.storage.save(root / f"{path}.gz", ContentFile("{}"))
+    assert len(datalayer.geojson.storage.listdir(root)[1]) == 10 + before
+    files = datalayer.geojson.storage.listdir(root)[1]
+    # Those files should be present before save, which will purge them
+    assert older in files
+    assert older + ".gz" in files
+    assert with_old_id in files
+    assert with_old_id + ".gz" in files
     datalayer.save()
     files = datalayer.geojson.storage.listdir(root)[1]
     # Flat + gz files, but not latest gz, which is created at first datalayer read.
+    # older and with_old_id should have been removed
     assert len(files) == 5
-    assert os.path.basename(newer) in files
-    assert os.path.basename(medium) in files
-    assert os.path.basename(datalayer.geojson.path) in files
+    assert newer in files
+    assert medium in files
+    assert Path(datalayer.geojson.path).name in files
     # File from another datalayer, purge should not have impacted it.
-    assert os.path.basename(other) in files
-    assert os.path.basename(other + ".gz") in files
-    assert os.path.basename(older) not in files
-    assert os.path.basename(older + ".gz") not in files
+    assert other in files
+    assert other + ".gz" in files
+    assert older not in files
+    assert older + ".gz" not in files
+    assert with_old_id not in files
+    assert with_old_id + ".gz" not in files
+    names = [v["name"] for v in datalayer.versions]
+    assert names == [Path(datalayer.geojson.name).name, newer, medium]


 def test_anonymous_cannot_edit_in_editors_mode(datalayer):
umap/views.py

@@ -1073,7 +1073,8 @@ class DataLayerUpdate(FormLessEditMixin, GZipMixin, UpdateView):
         """

         # Use the provided info to find the correct version in our storage.
-        for name in self.object.get_versions():
+        for version in self.object.versions:
+            name = version["name"]
             path = Path(settings.MEDIA_ROOT) / self.object.get_version_path(name)
             if reference_version == self.read_version(path):
                 with open(path) as f: