From 3b12796ec137a11cd5fc3f917c4ec78ab92c4a7a Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Sun, 7 Apr 2019 23:34:22 -0500 Subject: [PATCH 01/17] Add custom basefilenode_version through table to represent the many-to-many relationship between files and versions. - Add migration to move the file/version through table into to the custom through table, along with the version name - Expose version name in the API - Refactor adding versions since we can't do versions.add() any longer since the manytomany field has its custom through table. Using the custom through table's manager instead. - Modify move/copy operations to use new table - Renaming changes the latest version name as well. --- addons/osfstorage/models.py | 3 +- addons/osfstorage/tests/test_models.py | 41 +++++++++- addons/osfstorage/tests/test_utils.py | 3 +- addons/osfstorage/tests/test_views.py | 81 ++++++++++++++++--- addons/osfstorage/views.py | 14 ++-- api/files/serializers.py | 5 ++ api_tests/files/views/test_file_detail.py | 2 + osf/management/commands/force_archive.py | 3 +- .../0160_add_custom_file_versions_through.py | 29 +++++++ .../0161_populate_file_versions_through.py | 59 ++++++++++++++ .../0162_remove_basefilenode_versions.py | 19 +++++ osf/migrations/0163_basefilenode_versions.py | 20 +++++ osf/models/__init__.py | 1 + osf/models/files.py | 31 ++++++- tests/test_addons.py | 10 +-- tests/test_websitefiles.py | 5 +- website/files/utils.py | 17 +++- 17 files changed, 310 insertions(+), 33 deletions(-) create mode 100644 osf/migrations/0160_add_custom_file_versions_through.py create mode 100644 osf/migrations/0161_populate_file_versions_through.py create mode 100644 osf/migrations/0162_remove_basefilenode_versions.py create mode 100644 osf/migrations/0163_basefilenode_versions.py diff --git a/addons/osfstorage/models.py b/addons/osfstorage/models.py index 54703208bee..cdc49cc937b 100644 --- a/addons/osfstorage/models.py +++ b/addons/osfstorage/models.py @@ -307,7 +307,8 @@ def create_version(self, creator, location, metadata=None): version._find_matching_archive(save=False) version.save() - self.versions.add(version) + # Adds version to the list of file versions - using custom through table + self.add_version(version) self.save() return version diff --git a/addons/osfstorage/tests/test_models.py b/addons/osfstorage/tests/test_models.py index d7bc8d601d8..7b3dc3a7f96 100644 --- a/addons/osfstorage/tests/test_models.py +++ b/addons/osfstorage/tests/test_models.py @@ -298,11 +298,24 @@ def test_materialized_path_nested(self): def test_copy(self): to_copy = self.node_settings.get_root().append_file('Carp') copy_to = self.node_settings.get_root().append_folder('Cloud') + version = to_copy.create_version( + self.user, + { + 'service': 'cloud', + settings.WATERBUTLER_RESOURCE: 'osf', + 'object': '06d80e', + }, { + 'sha256': 'existing', + 'vault': 'the cloud', + 'archive': 'erchiv' + }) + assert_equal(to_copy.versions.first().get_basefilenode_version(to_copy).version_name, 'Carp') copied = to_copy.copy_under(copy_to) assert_not_equal(copied, to_copy) assert_equal(copied.parent, copy_to) + assert_equal(copied.versions.first().get_basefilenode_version(copied).version_name, 'Carp') assert_equal(to_copy.parent, self.node_settings.get_root()) def test_copy_node_file_to_preprint(self): @@ -336,8 +349,21 @@ def test_move_nested(self): def test_copy_rename(self): to_copy = self.node_settings.get_root().append_file('Carp') copy_to = self.node_settings.get_root().append_folder('Cloud') + version = to_copy.create_version( + 
self.user, + { + 'service': 'cloud', + settings.WATERBUTLER_RESOURCE: 'osf', + 'object': '06d80e', + }, { + 'sha256': 'existing', + 'vault': 'the cloud', + 'archive': 'erchiv' + }) + assert_equal(to_copy.versions.first().get_basefilenode_version(to_copy).version_name, 'Carp') copied = to_copy.copy_under(copy_to, name='But') + assert_equal(copied.versions.first().get_basefilenode_version(copied).version_name, 'But') assert_equal(copied.name, 'But') assert_not_equal(copied, to_copy) @@ -356,12 +382,25 @@ def test_move(self): def test_move_and_rename(self): to_move = self.node_settings.get_root().append_file('Carp') + version = to_move.create_version( + self.user, + { + 'service': 'cloud', + settings.WATERBUTLER_RESOURCE: 'osf', + 'object': '06d80e', + }, { + 'sha256': 'existing', + 'vault': 'the cloud', + 'archive': 'erchiv' + }) move_to = self.node_settings.get_root().append_folder('Cloud') + assert_equal(to_move.versions.first().get_basefilenode_version(to_move).version_name, 'Carp') moved = to_move.move_under(move_to, name='Tuna') assert_equal(to_move, moved) assert_equal(to_move.name, 'Tuna') + assert_equal(moved.versions.first().get_basefilenode_version(moved).version_name, 'Tuna') assert_equal(moved.parent, move_to) def test_move_preprint_primary_file_to_node(self): @@ -624,7 +663,7 @@ def test_after_fork_copies_versions(self): for _ in range(num_versions): version = factories.FileVersionFactory() - record.versions.add(version) + record.add_version(version) fork = self.project.fork_node(self.auth_obj) fork_node_settings = fork.get_addon('osfstorage') diff --git a/addons/osfstorage/tests/test_utils.py b/addons/osfstorage/tests/test_utils.py index 7675db8bf36..7e5e82d2711 100644 --- a/addons/osfstorage/tests/test_utils.py +++ b/addons/osfstorage/tests/test_utils.py @@ -12,6 +12,7 @@ from addons.osfstorage import utils from addons.osfstorage.tests.utils import StorageTestCase +from website.files.utils import attach_versions @pytest.mark.django_db @@ -25,7 +26,7 @@ def setUp(self): factories.FileVersionFactory(creator=self.user) for __ in range(3) ] - self.record.versions = self.versions + attach_versions(self.record, self.versions) self.record.save() def test_serialize_revision(self): diff --git a/addons/osfstorage/tests/test_views.py b/addons/osfstorage/tests/test_views.py index 4fc86d2c5bb..d04dae7eda5 100644 --- a/addons/osfstorage/tests/test_views.py +++ b/addons/osfstorage/tests/test_views.py @@ -33,15 +33,16 @@ from addons.osfstorage import utils from addons.base.views import make_auth from addons.osfstorage import settings as storage_settings -from api_tests.utils import create_test_file +from api_tests.utils import create_test_file, create_test_preprint_file from api.caching.settings import STORAGE_USAGE_KEY from osf_tests.factories import ProjectFactory, ApiOAuth2PersonalTokenFactory, PreprintFactory +from website.files.utils import attach_versions def create_record_with_version(path, node_settings, **kwargs): version = factories.FileVersionFactory(**kwargs) record = node_settings.get_root().append_file(path) - record.versions.add(version) + record.add_version(version) record.save() return record @@ -76,7 +77,7 @@ def test_file_metdata(self): path = u'kind/of/magíc.mp3' record = recursively_create_file(self.node_settings, path) version = factories.FileVersionFactory() - record.versions.add(version) + record.add_version(version) record.save() res = self.send_hook( 'osfstorage_get_metadata', @@ -91,7 +92,7 @@ def test_preprint_primary_file_metadata(self): preprint = 
PreprintFactory() record = preprint.primary_file version = factories.FileVersionFactory() - record.versions.add(version) + record.add_version(version) record.save() res = self.send_hook( 'osfstorage_get_metadata', @@ -106,7 +107,7 @@ def test_children_metadata(self): path = u'kind/of/magíc.mp3' record = recursively_create_file(self.node_settings, path) version = factories.FileVersionFactory() - record.versions.add(version) + record.add_version(version) record.save() res = self.send_hook( 'osfstorage_get_children', @@ -141,7 +142,7 @@ def test_children_metadata_preprint(self): preprint = PreprintFactory() record = preprint.primary_file version = factories.FileVersionFactory() - record.versions.add(version) + record.add_version(version) record.save() res = self.send_hook( 'osfstorage_get_children', @@ -274,7 +275,9 @@ def test_upload_create(self): assert_is_not(version, None) assert_equal([version], list(record.versions.all())) assert_not_in(version, self.record.versions.all()) + assert_equal(version.get_basefilenode_version(record).version_name, record.name) assert_equal(record.serialize(), res.json['data']) + assert_equal(version.get_basefilenode_version(record).version_name, record.name) assert_equal(res.json['data']['downloads'], self.record.get_download_count()) def test_upload_update(self): @@ -321,6 +324,7 @@ def test_upload_create_child(self): record = parent.find_child_by_name(name) assert_in(version, record.versions.all()) assert_equals(record.name, name) + assert_equals(record.versions.first().get_basefilenode_version(record).version_name, name) assert_equals(record.parent, parent) def test_upload_create_child_with_same_name(self): @@ -341,6 +345,7 @@ def test_upload_create_child_with_same_name(self): record = parent.find_child_by_name(name) assert_in(version, record.versions.all()) assert_equals(record.name, name) + assert_equals(record.versions.first().get_basefilenode_version(record).version_name, name) assert_equals(record.parent, parent) def test_upload_fail_to_create_version_due_to_checkout(self): @@ -637,7 +642,7 @@ def setUp(self): self.path = 'greasy/pízza.png' self.record = recursively_create_file(self.node_settings, self.path) self.version = factories.FileVersionFactory() - self.record.versions = [self.version] + self.record.add_version(self.version) self.record.save() self.payload = { 'metadata': { @@ -711,7 +716,7 @@ def setUp(self): self.record = self.preprint.primary_file self.path = 'greasy/pízza.png' self.version = factories.FileVersionFactory() - self.record.versions = [self.version] + self.record.add_version(self.version) self.record.save() self.payload = { 'metadata': { @@ -783,7 +788,7 @@ def setUp(self): super(TestGetRevisions, self).setUp() self.path = 'tie/your/mother/down.mp3' self.record = recursively_create_file(self.node_settings, self.path) - self.record.versions = [factories.FileVersionFactory() for __ in range(15)] + attach_versions(self.record, [factories.FileVersionFactory() for __ in range(15)]) self.record.save() def get_revisions(self, fid=None, guid=None, **kwargs): @@ -1165,6 +1170,35 @@ def test_move_file_out_of_node(self): ) assert_equal(res.status_code, 200) + def test_can_rename_file(self): + file = create_test_file(self.node, self.user, filename='road_dogg.mp3') + new_name = 'JesseJames.mp3' + + res = self.send_hook( + 'osfstorage_move_hook', + {'guid': self.node._id}, + payload={ + 'action': 'rename', + 'source': file._id, + 'target': self.root_node._id, + 'user': self.user._id, + 'name': file.name, + 'destination': { + 'parent': 
self.root_node._id, + 'target': self.node._id, + 'name': new_name, + } + }, + target=self.node, + method='post_json', + expect_errors=True, + ) + file.reload() + + assert_equal(res.status_code, 200) + assert_equal(file.name, new_name) + assert_equal(file.versions.first().get_basefilenode_version(file).version_name, new_name) + def test_can_move_file_out_of_quickfiles_node(self): quickfiles_node = QuickFilesNode.objects.get_for_user(self.user) quickfiles_file = create_test_file(quickfiles_node, self.user, filename='slippery.mp3') @@ -1254,6 +1288,35 @@ def test_move_primary_file_out_of_preprint(self): ) assert_equal(res.status_code, 403) + def test_can_rename_file(self): + file = create_test_preprint_file(self.node, self.user, filename='road_dogg.mp3') + new_name = 'JesseJames.mp3' + + res = self.send_hook( + 'osfstorage_move_hook', + {'guid': self.node._id}, + payload={ + 'action': 'rename', + 'source': file._id, + 'target': self.root_node._id, + 'user': self.user._id, + 'name': file.name, + 'destination': { + 'parent': self.root_node._id, + 'target': self.node._id, + 'name': new_name, + } + }, + target=self.node, + method='post_json', + expect_errors=True, + ) + file.reload() + + assert_equal(res.status_code, 200) + assert_equal(file.name, new_name) + assert_equal(file.versions.first().get_basefilenode_version(file).version_name, new_name) + @pytest.mark.django_db class TestMoveHookProjectsOnly(TestMoveHook): diff --git a/addons/osfstorage/views.py b/addons/osfstorage/views.py index 2c29472ed21..769dd394c38 100644 --- a/addons/osfstorage/views.py +++ b/addons/osfstorage/views.py @@ -191,7 +191,7 @@ def osfstorage_get_children(file_node, **kwargs): , 'kind', 'file' , 'size', LATEST_VERSION.size , 'downloads', COALESCE(DOWNLOAD_COUNT, 0) - , 'version', (SELECT COUNT(*) FROM osf_basefilenode_versions WHERE osf_basefilenode_versions.basefilenode_id = F.id) + , 'version', (SELECT COUNT(*) FROM osf_basefileversionsthrough WHERE osf_basefileversionsthrough.basefilenode_id = F.id) , 'contentType', LATEST_VERSION.content_type , 'modified', LATEST_VERSION.created , 'created', EARLIEST_VERSION.created @@ -212,15 +212,15 @@ def osfstorage_get_children(file_node, **kwargs): FROM osf_basefilenode AS F LEFT JOIN LATERAL ( SELECT * FROM osf_fileversion - JOIN osf_basefilenode_versions ON osf_fileversion.id = osf_basefilenode_versions.fileversion_id - WHERE osf_basefilenode_versions.basefilenode_id = F.id + JOIN osf_basefileversionsthrough ON osf_fileversion.id = osf_basefileversionsthrough.fileversion_id + WHERE osf_basefileversionsthrough.basefilenode_id = F.id ORDER BY created DESC LIMIT 1 ) LATEST_VERSION ON TRUE LEFT JOIN LATERAL ( SELECT * FROM osf_fileversion - JOIN osf_basefilenode_versions ON osf_fileversion.id = osf_basefilenode_versions.fileversion_id - WHERE osf_basefilenode_versions.basefilenode_id = F.id + JOIN osf_basefileversionsthrough ON osf_fileversion.id = osf_basefileversionsthrough.fileversion_id + WHERE osf_basefileversionsthrough.basefilenode_id = F.id ORDER BY created ASC LIMIT 1 ) EARLIEST_VERSION ON TRUE @@ -239,9 +239,9 @@ def osfstorage_get_children(file_node, **kwargs): SELECT EXISTS( SELECT (1) FROM osf_fileversionusermetadata INNER JOIN osf_fileversion ON osf_fileversionusermetadata.file_version_id = osf_fileversion.id - INNER JOIN osf_basefilenode_versions ON osf_fileversion.id = osf_basefilenode_versions.fileversion_id + INNER JOIN osf_basefileversionsthrough ON osf_fileversion.id = osf_basefileversionsthrough.fileversion_id WHERE osf_fileversionusermetadata.user_id = %s 
- AND osf_basefilenode_versions.basefilenode_id = F.id + AND osf_basefileversionsthrough.basefilenode_id = F.id LIMIT 1 ) ) SEEN_FILE ON TRUE diff --git a/api/files/serializers.py b/api/files/serializers.py index 4e4bbbd5501..2e2782a4850 100644 --- a/api/files/serializers.py +++ b/api/files/serializers.py @@ -410,6 +410,7 @@ class FileVersionSerializer(JSONAPISerializer): size = ser.IntegerField(read_only=True, help_text='The size of this file at this version') content_type = ser.CharField(read_only=True, help_text='The mime type of this file at this verison') date_created = VersionedDateTimeField(source='created', read_only=True, help_text='The date that this version was created') + name = ser.SerializerMethodField() links = LinksField({ 'self': 'self_url', 'html': 'absolute_url', @@ -417,6 +418,10 @@ class FileVersionSerializer(JSONAPISerializer): 'render': 'get_render_link', }) + def get_name(self, obj): + file = self.context['file'] + return obj.get_basefilenode_version(file).version_name + class Meta: type_ = 'file_versions' diff --git a/api_tests/files/views/test_file_detail.py b/api_tests/files/views/test_file_detail.py index 5294e92cba9..d2e8c19f0ab 100644 --- a/api_tests/files/views/test_file_detail.py +++ b/api_tests/files/views/test_file_detail.py @@ -674,7 +674,9 @@ def test_listing(self, app, user, file): assert res.status_code == 200 assert len(res.json['data']) == 2 assert res.json['data'][0]['id'] == '2' + assert res.json['data'][0]['attributes']['name'] == file.name assert res.json['data'][1]['id'] == '1' + assert res.json['data'][1]['attributes']['name'] == file.name def test_load_and_property(self, app, user, file): # test_by_id diff --git a/osf/management/commands/force_archive.py b/osf/management/commands/force_archive.py index 0f42f53f3a6..ae73cb52663 100644 --- a/osf/management/commands/force_archive.py +++ b/osf/management/commands/force_archive.py @@ -38,6 +38,7 @@ from scripts import utils as script_utils from website.archiver import ARCHIVER_SUCCESS from website.settings import ARCHIVE_TIMEOUT_TIMEDELTA, ARCHIVE_PROVIDER +from website.files.utils import attach_versions logger = logging.getLogger(__name__) @@ -194,7 +195,7 @@ def manually_archive(tree, reg, node_settings, parent=None): if file_obj.versions.exists() and filenode['version']: # Min version identifier is 1 if not cloned.versions.filter(identifier=filenode['version']).exists(): - cloned.versions.add(*file_obj.versions.filter(identifier__lte=filenode['version'])) + attach_versions(cloned, file_obj.versions.filter(identifier__lte=filenode['version'])) if filenode.get('children'): manually_archive(filenode['children'], reg, node_settings, parent=cloned) diff --git a/osf/migrations/0160_add_custom_file_versions_through.py b/osf/migrations/0160_add_custom_file_versions_through.py new file mode 100644 index 00000000000..d8c8b24b67e --- /dev/null +++ b/osf/migrations/0160_add_custom_file_versions_through.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-04-07 23:20 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0159_merge_20190331_2301'), + ] + + operations = [ + migrations.CreateModel( + name='BaseFileVersionsThrough', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('version_name', models.TextField(blank=True)), + ('basefilenode', 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='osf.BaseFileNode')), + ('fileversion', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='osf.FileVersion')), + ], + ), + migrations.AlterUniqueTogether( + name='basefileversionsthrough', + unique_together=set([('basefilenode', 'fileversion')]), + ), + ] diff --git a/osf/migrations/0161_populate_file_versions_through.py b/osf/migrations/0161_populate_file_versions_through.py new file mode 100644 index 00000000000..8c766552d4e --- /dev/null +++ b/osf/migrations/0161_populate_file_versions_through.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-03-03 17:52 +from __future__ import unicode_literals + +import logging +from math import ceil + +from django.db import migrations, connection + +logger = logging.getLogger(__file__) + +increment = 10000 + +def noop(*args, **kwargs): + pass + + +# Batching adapted from strategy in website/search_migration/migrate.py +def populate_fileversion_name(state, schema): + BaseFileNodeVersion = state.get_model('osf.basefilenode_versions') + max_thru_id = getattr(BaseFileNodeVersion.objects.order_by('id').last(), 'id', 0) + + sql = """ + INSERT INTO osf_basefileversionsthrough (basefilenode_id, fileversion_id, version_name) + SELECT F.id, V.id, F.name + FROM osf_basefilenode_versions THRU, osf_fileversion V, osf_basefilenode F + WHERE THRU.fileversion_id = V.id + AND THRU.basefilenode_id = F.id + AND THRU.id > {} + AND THRU.id <= {}; + """ + + total_pages = int(ceil(max_thru_id / float(increment))) + page_start = 0 + page_end = 0 + page = 0 + while page_end <= (max_thru_id): + page += 1 + page_end += increment + if page <= total_pages: + logger.info('Transferring BaseFileNodeVersions to custom through table: {} / {}'.format(page_end / increment, total_pages)) + with connection.cursor() as cursor: + cursor.execute(sql.format( + page_start, + page_end, + page_start, + page_end + )) + page_start = page_end + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0160_add_custom_file_versions_through'), + ] + + operations = [ + migrations.RunPython(populate_fileversion_name, migrations.RunPython.noop) + ] diff --git a/osf/migrations/0162_remove_basefilenode_versions.py b/osf/migrations/0162_remove_basefilenode_versions.py new file mode 100644 index 00000000000..a1e198f3fe8 --- /dev/null +++ b/osf/migrations/0162_remove_basefilenode_versions.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-04-08 00:51 +from __future__ import unicode_literals + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0161_populate_file_versions_through'), + ] + + operations = [ + migrations.RemoveField( + model_name='basefilenode', + name='versions', + ), + ] diff --git a/osf/migrations/0163_basefilenode_versions.py b/osf/migrations/0163_basefilenode_versions.py new file mode 100644 index 00000000000..09031f82a2b --- /dev/null +++ b/osf/migrations/0163_basefilenode_versions.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-04-08 00:52 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0162_remove_basefilenode_versions'), + ] + + operations = [ + migrations.AddField( + model_name='basefilenode', + name='versions', + field=models.ManyToManyField(through='osf.BaseFileVersionsThrough', to='osf.FileVersion'), + ), + ] diff --git 
a/osf/models/__init__.py b/osf/models/__init__.py index 714c009ae9b..32faf3cb3bd 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -29,6 +29,7 @@ from osf.models.identifiers import Identifier # noqa from osf.models.files import ( # noqa BaseFileNode, + BaseFileVersionsThrough, File, Folder, # noqa FileVersion, TrashedFile, TrashedFileNode, TrashedFolder, FileVersionUserMetadata, # noqa ) # noqa diff --git a/osf/models/files.py b/osf/models/files.py index 63884ce8acd..fbc1b47b9d5 100644 --- a/osf/models/files.py +++ b/osf/models/files.py @@ -89,7 +89,7 @@ class BaseFileNode(TypedModel, CommentableMixin, OptionalGuidMixin, Taggable, Ob # Add regardless it can be pinned to a version or not _history = DateTimeAwareJSONField(default=list, blank=True) # A concrete version of a FileNode, must have an identifier - versions = models.ManyToManyField('FileVersion') + versions = models.ManyToManyField('FileVersion', through='BaseFileVersionsThrough') target_content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) target_object_id = models.PositiveIntegerField() @@ -234,6 +234,10 @@ def to_storage(self, **kwargs): storage.pop(key) return storage + def add_version(self, version): + BaseFileVersionsThrough.objects.create(fileversion=version, basefilenode=self, version_name=self.name) + return + @classmethod def files_checked_out(cls, user): """ @@ -359,10 +363,17 @@ def copy_under(self, destination_parent, name=None): return utils.copy_files(self, destination_parent.target, destination_parent, name=name) def move_under(self, destination_parent, name=None): + renaming = name != self.name self.name = name or self.name self.parent = destination_parent self._update_node(save=True) # Trust _update_node to save us + if renaming and self.is_file and self.versions.exists(): + newest_version = self.versions.first() + node_file_version = newest_version.get_basefilenode_version(self) + node_file_version.version_name = self.name + node_file_version.save() + return self def belongs_to_node(self, target_id): @@ -443,6 +454,7 @@ def __repr__(self): self.target ) + class UnableToRestore(Exception): pass @@ -458,7 +470,7 @@ def kind(self): def update(self, revision, data, user=None, save=True): """Using revision and data update all data pretaining to self :param str or None revision: The revision that data points to - :param dict data: Metadata recieved from waterbutler + :param dict data: Metadata received from waterbutler :returns: FileVersion """ self.name = data['name'] @@ -479,7 +491,8 @@ def update(self, revision, data, user=None, save=True): # Dont save the latest information if revision is not None: version.save() - self.versions.add(version) + # Adds version to the list of file versions - using custom through table + self.add_version(version) for entry in self.history: # Some entry might have an undefined modified field if data['modified'] is not None and entry['modified'] is not None and data['modified'] < entry['modified']: @@ -750,6 +763,9 @@ def archive(self): def is_duplicate(self, other): return self.location_hash == other.location_hash + def get_basefilenode_version(self, file): + return self.basefileversionsthrough_set.get(basefilenode=file) + def update_metadata(self, metadata, save=True): self.metadata.update(metadata) # metadata has no defined structure so only attempt to set attributes @@ -806,3 +822,12 @@ def serialize_waterbutler_settings(self, node_id, root_id): class Meta: ordering = ('-created',) + + +class BaseFileVersionsThrough(models.Model): + basefilenode = 
models.ForeignKey(BaseFileNode) + fileversion = models.ForeignKey(FileVersion) + version_name = models.TextField(blank=True) + + class Meta: + unique_together = (('basefilenode', 'fileversion'),) diff --git a/tests/test_addons.py b/tests/test_addons.py index 45f382c4611..5d2fe35d6e0 100644 --- a/tests/test_addons.py +++ b/tests/test_addons.py @@ -170,7 +170,7 @@ def test_action_downloads_marks_version_as_seen(self): # Add a new version, make sure that does not have a record version = FileVersionFactory() - test_file.versions.add(version) + test_file.add_version(version) test_file.save() versions = test_file.versions.order_by('created') @@ -737,7 +737,7 @@ def get_test_file(self): materialized_path='/test/Test', ) ret.save() - ret.versions.add(version) + ret.add_version(version) return ret def get_second_test_file(self): @@ -750,7 +750,7 @@ def get_second_test_file(self): materialized_path='/test/Test2', ) ret.save() - ret.versions.add(version) + ret.add_version(version) return ret def get_uppercased_ext_test_file(self): @@ -763,7 +763,7 @@ def get_uppercased_ext_test_file(self): materialized_path='/test/Test2', ) ret.save() - ret.versions.add(version) + ret.add_version(version) return ret def get_ext_test_file(self): @@ -776,7 +776,7 @@ def get_ext_test_file(self): materialized_path='/test/Test2', ) ret.save() - ret.versions.add(version) + ret.add_version(version) return ret def get_mako_return(self): diff --git a/tests/test_websitefiles.py b/tests/test_websitefiles.py index e4e0c5231f4..ab51f807227 100644 --- a/tests/test_websitefiles.py +++ b/tests/test_websitefiles.py @@ -12,6 +12,7 @@ from tests.base import OsfTestCase from osf_tests.factories import AuthUserFactory, ProjectFactory from website.files import exceptions +from website.files.utils import attach_versions from osf import models @@ -495,7 +496,7 @@ def test_get_version(self): ) file.save() - file.versions.add(*[v1, v2]) + attach_versions(file, [v1, v2]) assert_equals(file.get_version('1'), v1) assert_equals(file.get_version('2', required=True), v2) @@ -519,7 +520,7 @@ def test_update_version_metadata(self): file.save() - file.versions.add(v1) + file.add_version(v1) file.update_version_metadata(None, {'size': 1337}) with assert_raises(exceptions.VersionNotFoundError): diff --git a/website/files/utils.py b/website/files/utils.py index 022fcfc8706..a75d1180311 100644 --- a/website/files/utils.py +++ b/website/files/utils.py @@ -6,6 +6,7 @@ def copy_files(src, target_node, parent=None, name=None): :param Folder parent: The parent of to attach the clone of src to, if applicable """ assert not parent or not parent.is_file, 'Parent must be a folder' + renaming = src.name != name cloned = src.clone() cloned.parent = parent @@ -20,14 +21,20 @@ def copy_files(src, target_node, parent=None, name=None): most_recent_fileversion = fileversions.first() if most_recent_fileversion.region and most_recent_fileversion.region != target_node.osfstorage_region: # add all original version except the most recent - cloned.versions.add(*fileversions[1:]) + attach_versions(cloned, fileversions[1:]) # create a new most recent version and update the region before adding new_fileversion = most_recent_fileversion.clone() new_fileversion.region = target_node.osfstorage_region new_fileversion.save() - cloned.versions.add(new_fileversion) + attach_versions(cloned, [new_fileversion]) else: - cloned.versions.add(*src.versions.all()) + attach_versions(cloned, src.versions.all()) + + if renaming: + latest_version = cloned.versions.first() + node_file_version = 
latest_version.get_basefilenode_version(cloned) + node_file_version.version_name = cloned.name + node_file_version.save() # copy over file metadata records if cloned.provider == 'osfstorage': @@ -40,3 +47,7 @@ def copy_files(src, target_node, parent=None, name=None): copy_files(child, target_node, parent=cloned) return cloned + +def attach_versions(file, versions_list): + for version in versions_list: + file.add_version(version) From 68488c04925a2ea94b1df021d300c3707fdcb7aa Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Thu, 13 Jun 2019 12:44:36 -0500 Subject: [PATCH 02/17] Rebase migrations and add indices. --- ...y => 0162_add_custom_file_versions_through.py} | 6 +++++- ....py => 0163_populate_file_versions_through.py} | 15 +++++++++++---- ...ns.py => 0164_remove_basefilenode_versions.py} | 2 +- ..._versions.py => 0165_basefilenode_versions.py} | 2 +- osf/models/files.py | 7 +++++-- 5 files changed, 23 insertions(+), 9 deletions(-) rename osf/migrations/{0160_add_custom_file_versions_through.py => 0162_add_custom_file_versions_through.py} (82%) rename osf/migrations/{0161_populate_file_versions_through.py => 0163_populate_file_versions_through.py} (76%) rename osf/migrations/{0162_remove_basefilenode_versions.py => 0164_remove_basefilenode_versions.py} (86%) rename osf/migrations/{0163_basefilenode_versions.py => 0165_basefilenode_versions.py} (89%) diff --git a/osf/migrations/0160_add_custom_file_versions_through.py b/osf/migrations/0162_add_custom_file_versions_through.py similarity index 82% rename from osf/migrations/0160_add_custom_file_versions_through.py rename to osf/migrations/0162_add_custom_file_versions_through.py index d8c8b24b67e..2ee766b885d 100644 --- a/osf/migrations/0160_add_custom_file_versions_through.py +++ b/osf/migrations/0162_add_custom_file_versions_through.py @@ -9,7 +9,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0159_merge_20190331_2301'), + ('osf', '0161_add_spam_fields_to_user'), ] operations = [ @@ -26,4 +26,8 @@ class Migration(migrations.Migration): name='basefileversionsthrough', unique_together=set([('basefilenode', 'fileversion')]), ), + migrations.AlterIndexTogether( + name='basefileversionsthrough', + index_together=set([('basefilenode', 'fileversion')]), + ), ] diff --git a/osf/migrations/0161_populate_file_versions_through.py b/osf/migrations/0163_populate_file_versions_through.py similarity index 76% rename from osf/migrations/0161_populate_file_versions_through.py rename to osf/migrations/0163_populate_file_versions_through.py index 8c766552d4e..986fb73ab9b 100644 --- a/osf/migrations/0161_populate_file_versions_through.py +++ b/osf/migrations/0163_populate_file_versions_through.py @@ -11,8 +11,15 @@ increment = 10000 -def noop(*args, **kwargs): - pass +def restore_default_through_table(state, schema): + sql = """ + + INSERT INTO osf_basefilenode_versions (basefilenode_id, fileversion_id) + SELECT new_thru.basefilenode_id, new_thru.fileversion_id + FROM osf_basefileversionsthrough new_thru; + """ + with connection.cursor() as cursor: + cursor.execute(sql) # Batching adapted from strategy in website/search_migration/migrate.py @@ -51,9 +58,9 @@ def populate_fileversion_name(state, schema): class Migration(migrations.Migration): dependencies = [ - ('osf', '0160_add_custom_file_versions_through'), + ('osf', '0162_add_custom_file_versions_through'), ] operations = [ - migrations.RunPython(populate_fileversion_name, migrations.RunPython.noop) + migrations.RunPython(populate_fileversion_name, 
restore_default_through_table) ] diff --git a/osf/migrations/0162_remove_basefilenode_versions.py b/osf/migrations/0164_remove_basefilenode_versions.py similarity index 86% rename from osf/migrations/0162_remove_basefilenode_versions.py rename to osf/migrations/0164_remove_basefilenode_versions.py index a1e198f3fe8..f296eb87d7c 100644 --- a/osf/migrations/0162_remove_basefilenode_versions.py +++ b/osf/migrations/0164_remove_basefilenode_versions.py @@ -8,7 +8,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0161_populate_file_versions_through'), + ('osf', '0163_populate_file_versions_through'), ] operations = [ diff --git a/osf/migrations/0163_basefilenode_versions.py b/osf/migrations/0165_basefilenode_versions.py similarity index 89% rename from osf/migrations/0163_basefilenode_versions.py rename to osf/migrations/0165_basefilenode_versions.py index 09031f82a2b..9ae89b10f85 100644 --- a/osf/migrations/0163_basefilenode_versions.py +++ b/osf/migrations/0165_basefilenode_versions.py @@ -8,7 +8,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0162_remove_basefilenode_versions'), + ('osf', '0164_remove_basefilenode_versions'), ] operations = [ diff --git a/osf/models/files.py b/osf/models/files.py index fbc1b47b9d5..1fd1d1f4bb4 100644 --- a/osf/models/files.py +++ b/osf/models/files.py @@ -825,9 +825,12 @@ class Meta: class BaseFileVersionsThrough(models.Model): - basefilenode = models.ForeignKey(BaseFileNode) - fileversion = models.ForeignKey(FileVersion) + basefilenode = models.ForeignKey(BaseFileNode, db_index=True) + fileversion = models.ForeignKey(FileVersion, db_index=True) version_name = models.TextField(blank=True) class Meta: unique_together = (('basefilenode', 'fileversion'),) + index_together = ( + ('basefilenode', 'fileversion', ) + ) From b64c0c545e1c6749093f0513a3d0485dffaeb6d3 Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Thu, 13 Jun 2019 16:41:07 -0500 Subject: [PATCH 03/17] If uploading a new version of a file with the `osfstorage_create_child` hook, a `new_name` key in the payload will rename the file and rename the latest version. 
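
A minimal sketch of the hook payload this adds support for, assuming the field
names read by `osfstorage_create_child` in this patch (`user`, `kind`, `name`,
`new_name`, `metadata`); values are illustrative and other upload fields are
omitted:

    payload = {
        'user': 'abc12',                  # OSFUser._id of the uploader
        'kind': 'file',
        'name': 'data.csv',               # existing file receiving a new version
        'new_name': 'renamed_data.csv',   # optional: rename file and latest version
        'metadata': {
            'name': 'data.csv',
            # ...plus the usual waterbutler metadata for the new version
        },
    }

When `new_name` is present, the view renames the file node before creating the
new version, so the new version's through-table row records the new name.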
--- addons/osfstorage/tests/test_views.py | 23 ++++++++++++++++++----- addons/osfstorage/views.py | 6 ++++++ osf/models/files.py | 2 ++ 3 files changed, 26 insertions(+), 5 deletions(-) diff --git a/addons/osfstorage/tests/test_views.py b/addons/osfstorage/tests/test_views.py index d04dae7eda5..cc24e821428 100644 --- a/addons/osfstorage/tests/test_views.py +++ b/addons/osfstorage/tests/test_views.py @@ -241,7 +241,8 @@ def send_upload_hook(self, parent, target=None, payload=None, **kwargs): def make_payload(self, **kwargs): user = kwargs.pop('user', self.user) name = kwargs.pop('name', self.name) - return make_payload(user=user, name=name, **kwargs) + new_name = kwargs.pop('new_name', '') + return make_payload(user=user, name=name, new_name=new_name, **kwargs) def test_upload_create(self): name = 'slightly-mad' @@ -510,15 +511,27 @@ def test_upload_create(self): assert_equal(res.json['data']['downloads'], self.record.get_download_count()) def test_upload_update(self): - delta = Delta(lambda: self.record.versions.count(), lambda value: value + 1) - with AssertDeltas(delta): - res = self.send_upload_hook(self.preprint.root_folder, self.preprint, self.make_payload()) - self.record.reload() + assert_equal(self.record.versions.first().get_basefilenode_version(self.record).version_name, self.name) + assert_equal(self.record.versions.count(), 1) + + res = self.send_upload_hook(self.preprint.root_folder, self.preprint, self.make_payload()) + self.record.reload() + assert_equal(self.record.versions.count(), 2) assert_equal(res.status_code, 200) assert_equal(res.json['status'], 'success') version = models.FileVersion.load(res.json['version']) assert_is_not(version, None) assert_in(version, self.record.versions.all()) + assert_equal(self.record.versions.first().get_basefilenode_version(self.record).version_name, self.name) + + # upload version of same file with new name + new_payload = self.make_payload(new_name='new_preprint.jpg') + new_payload['metadata']['name'] = 'newdata' + res = self.send_upload_hook(self.preprint.root_folder, self.preprint, new_payload) + self.record.reload() + assert_equal(self.record.versions.count(), 3) + assert_equal(self.record.versions.first().get_basefilenode_version(self.record).version_name, 'new_preprint.jpg') + assert_equal(self.record.name, 'new_preprint.jpg') def test_upload_duplicate(self): location = { diff --git a/addons/osfstorage/views.py b/addons/osfstorage/views.py index 769dd394c38..d1fb20ba0aa 100644 --- a/addons/osfstorage/views.py +++ b/addons/osfstorage/views.py @@ -282,6 +282,8 @@ def osfstorage_get_children(file_node, **kwargs): def osfstorage_create_child(file_node, payload, **kwargs): parent = file_node # Just for clarity name = payload.get('name') + # If you are uploading a new version of an existing file with a new name. 
+ new_name = payload.get('new_name') user = OSFUser.load(payload.get('user')) is_folder = payload.get('kind') == 'folder' @@ -335,6 +337,10 @@ def osfstorage_create_child(file_node, payload, **kwargs): )) except KeyError: raise HTTPError(httplib.BAD_REQUEST) + + if new_name: + file_node.name = new_name + file_node._update_node(save=True) current_version = file_node.get_version() new_version = file_node.create_version(user, location, metadata) diff --git a/osf/models/files.py b/osf/models/files.py index 1fd1d1f4bb4..e31b23fcdc3 100644 --- a/osf/models/files.py +++ b/osf/models/files.py @@ -764,6 +764,8 @@ def is_duplicate(self, other): return self.location_hash == other.location_hash def get_basefilenode_version(self, file): + # Returns the throughtable object - the record that links this version + # to the given file. return self.basefileversionsthrough_set.get(basefilenode=file) def update_metadata(self, metadata, save=True): From 9c19ab9910e266c6101ebb39bce625b10e701d65 Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Fri, 14 Jun 2019 11:58:54 -0500 Subject: [PATCH 04/17] Revert changes to upload methods. --- addons/osfstorage/tests/test_views.py | 22 +++++----------------- addons/osfstorage/views.py | 5 ----- 2 files changed, 5 insertions(+), 22 deletions(-) diff --git a/addons/osfstorage/tests/test_views.py b/addons/osfstorage/tests/test_views.py index cc24e821428..1a016a35029 100644 --- a/addons/osfstorage/tests/test_views.py +++ b/addons/osfstorage/tests/test_views.py @@ -241,8 +241,7 @@ def send_upload_hook(self, parent, target=None, payload=None, **kwargs): def make_payload(self, **kwargs): user = kwargs.pop('user', self.user) name = kwargs.pop('name', self.name) - new_name = kwargs.pop('new_name', '') - return make_payload(user=user, name=name, new_name=new_name, **kwargs) + return make_payload(user=user, name=name, **kwargs) def test_upload_create(self): name = 'slightly-mad' @@ -511,12 +510,10 @@ def test_upload_create(self): assert_equal(res.json['data']['downloads'], self.record.get_download_count()) def test_upload_update(self): - assert_equal(self.record.versions.first().get_basefilenode_version(self.record).version_name, self.name) - assert_equal(self.record.versions.count(), 1) - - res = self.send_upload_hook(self.preprint.root_folder, self.preprint, self.make_payload()) - self.record.reload() - assert_equal(self.record.versions.count(), 2) + delta = Delta(lambda: self.record.versions.count(), lambda value: value + 1) + with AssertDeltas(delta): + res = self.send_upload_hook(self.preprint.root_folder, self.preprint, self.make_payload()) + self.record.reload() assert_equal(res.status_code, 200) assert_equal(res.json['status'], 'success') version = models.FileVersion.load(res.json['version']) @@ -524,15 +521,6 @@ def test_upload_update(self): assert_in(version, self.record.versions.all()) assert_equal(self.record.versions.first().get_basefilenode_version(self.record).version_name, self.name) - # upload version of same file with new name - new_payload = self.make_payload(new_name='new_preprint.jpg') - new_payload['metadata']['name'] = 'newdata' - res = self.send_upload_hook(self.preprint.root_folder, self.preprint, new_payload) - self.record.reload() - assert_equal(self.record.versions.count(), 3) - assert_equal(self.record.versions.first().get_basefilenode_version(self.record).version_name, 'new_preprint.jpg') - assert_equal(self.record.name, 'new_preprint.jpg') - def test_upload_duplicate(self): location = { 'service': 'cloud', diff --git a/addons/osfstorage/views.py 
b/addons/osfstorage/views.py index d1fb20ba0aa..45ad0a1485d 100644 --- a/addons/osfstorage/views.py +++ b/addons/osfstorage/views.py @@ -282,8 +282,6 @@ def osfstorage_get_children(file_node, **kwargs): def osfstorage_create_child(file_node, payload, **kwargs): parent = file_node # Just for clarity name = payload.get('name') - # If you are uploading a new version of an existing file with a new name. - new_name = payload.get('new_name') user = OSFUser.load(payload.get('user')) is_folder = payload.get('kind') == 'folder' @@ -338,9 +336,6 @@ def osfstorage_create_child(file_node, payload, **kwargs): except KeyError: raise HTTPError(httplib.BAD_REQUEST) - if new_name: - file_node.name = new_name - file_node._update_node(save=True) current_version = file_node.get_version() new_version = file_node.create_version(user, location, metadata) From 489fd0233bde4664c7dc374250eef88bba4ad6db Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Fri, 14 Jun 2019 15:29:57 -0500 Subject: [PATCH 05/17] Don't join file version table. --- osf/migrations/0163_populate_file_versions_through.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/osf/migrations/0163_populate_file_versions_through.py b/osf/migrations/0163_populate_file_versions_through.py index 986fb73ab9b..f642ce3a63f 100644 --- a/osf/migrations/0163_populate_file_versions_through.py +++ b/osf/migrations/0163_populate_file_versions_through.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__file__) -increment = 10000 +increment = 100000 def restore_default_through_table(state, schema): sql = """ @@ -29,10 +29,9 @@ def populate_fileversion_name(state, schema): sql = """ INSERT INTO osf_basefileversionsthrough (basefilenode_id, fileversion_id, version_name) - SELECT F.id, V.id, F.name - FROM osf_basefilenode_versions THRU, osf_fileversion V, osf_basefilenode F - WHERE THRU.fileversion_id = V.id - AND THRU.basefilenode_id = F.id + SELECT THRU.basefilenode_id, THRU.fileversion_id, F.name + FROM osf_basefilenode_versions THRU, osf_basefilenode F + WHERE THRU.basefilenode_id = F.id AND THRU.id > {} AND THRU.id <= {}; """ @@ -53,6 +52,7 @@ def populate_fileversion_name(state, schema): page_start, page_end )) + print page page_start = page_end class Migration(migrations.Migration): From 37ffa2ec217eada10b47d2a8b0f69bedca4c9f3c Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Sat, 22 Jun 2019 21:47:30 -0500 Subject: [PATCH 06/17] Optimize migration creating new custom thru table. 
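
The populate step now builds the through table with a single
CREATE TABLE ... AS SELECT (plus constraints and indexes) instead of batched
INSERTs. A hypothetical sanity check, run from a Django shell after the populate
migration while the old osf_basefilenode_versions table still exists (table
names as used in this migration):

    from django.db import connection

    # Row counts of the old and new through tables should match after the copy.
    with connection.cursor() as cursor:
        cursor.execute('SELECT COUNT(*) FROM osf_basefilenode_versions')
        old_count = cursor.fetchone()[0]
        cursor.execute('SELECT COUNT(*) FROM osf_basefileversionsthrough')
        new_count = cursor.fetchone()[0]
    assert new_count == old_count, (old_count, new_count)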
--- .../0163_populate_file_versions_through.py | 96 ++++++++++++------- 1 file changed, 64 insertions(+), 32 deletions(-) diff --git a/osf/migrations/0163_populate_file_versions_through.py b/osf/migrations/0163_populate_file_versions_through.py index f642ce3a63f..b60c99bfda3 100644 --- a/osf/migrations/0163_populate_file_versions_through.py +++ b/osf/migrations/0163_populate_file_versions_through.py @@ -3,57 +3,89 @@ from __future__ import unicode_literals import logging -from math import ceil from django.db import migrations, connection logger = logging.getLogger(__file__) -increment = 100000 def restore_default_through_table(state, schema): sql = """ + DROP TABLE osf_basefilenode_versions; + CREATE TABLE osf_basefilenode_versions AS + SELECT + new_thru.basefilenode_id, + new_thru.fileversion_id + FROM + osf_basefileversionsthrough AS new_thru; - INSERT INTO osf_basefilenode_versions (basefilenode_id, fileversion_id) - SELECT new_thru.basefilenode_id, new_thru.fileversion_id - FROM osf_basefileversionsthrough new_thru; + ALTER TABLE osf_basefilenode_versions ADD COLUMN id SERIAL PRIMARY KEY; + ALTER TABLE osf_basefilenode_versions ADD CONSTRAINT osf_basefilenod_basefilenode_id_b0knah27_fk_osf_basefilenode_id FOREIGN KEY (basefilenode_id) REFERENCES osf_basefilenode DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE osf_basefilenode_versions ALTER COLUMN basefilenode_id + SET + DATA TYPE INTEGER; + ALTER TABLE osf_basefilenode_versions ALTER COLUMN fileversion_id + SET + NOT NULL; + ALTER TABLE osf_basefilenode_versions ALTER COLUMN fileversion_id + SET + DATA TYPE INTEGER; + ALTER TABLE osf_basefilenode_versions ALTER COLUMN basefilenode_id + SET + NOT NULL; + ALTER TABLE osf_basefilenode_versions ADD CONSTRAINT osf_basefilenode__fileversion_id_93etanfc_fk_osf_fileversion_id FOREIGN KEY (fileversion_id) REFERENCES osf_fileversion DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE osf_basefilenode_versions ADD CONSTRAINT osf_basefilenode__fileversion_uniq564 UNIQUE (basefilenode_id, fileversion_id); + CREATE INDEX + ON osf_basefilenode_versions (basefilenode_id, fileversion_id); + CREATE INDEX + ON osf_basefilenode_versions (basefilenode_id); + CREATE INDEX + ON osf_basefilenode_versions (fileversion_id); """ with connection.cursor() as cursor: cursor.execute(sql) -# Batching adapted from strategy in website/search_migration/migrate.py def populate_fileversion_name(state, schema): - BaseFileNodeVersion = state.get_model('osf.basefilenode_versions') - max_thru_id = getattr(BaseFileNodeVersion.objects.order_by('id').last(), 'id', 0) sql = """ - INSERT INTO osf_basefileversionsthrough (basefilenode_id, fileversion_id, version_name) - SELECT THRU.basefilenode_id, THRU.fileversion_id, F.name - FROM osf_basefilenode_versions THRU, osf_basefilenode F - WHERE THRU.basefilenode_id = F.id - AND THRU.id > {} - AND THRU.id <= {}; + DROP TABLE osf_basefileversionsthrough; + CREATE TABLE osf_basefileversionsthrough AS + SELECT + obfv.basefilenode_id, + obfv.fileversion_id, + ob.name as version_name + FROM + osf_basefilenode_versions obfv + LEFT JOIN + osf_basefilenode ob + ON obfv.basefilenode_id = ob.id; + ALTER TABLE osf_basefileversionsthrough ADD COLUMN id SERIAL PRIMARY KEY; + ALTER TABLE osf_basefileversionsthrough ADD CONSTRAINT osf_basefilenod_basefilenode_id_b0nwad27_fk_osf_basefilenode_id FOREIGN KEY (basefilenode_id) REFERENCES osf_basefilenode DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE osf_basefileversionsthrough ALTER COLUMN basefilenode_id + SET + DATA TYPE INTEGER; + ALTER TABLE 
osf_basefileversionsthrough ALTER COLUMN fileversion_id + SET + NOT NULL; + ALTER TABLE osf_basefileversionsthrough ALTER COLUMN fileversion_id + SET + DATA TYPE INTEGER; + ALTER TABLE osf_basefileversionsthrough ALTER COLUMN basefilenode_id + SET + NOT NULL; + ALTER TABLE osf_basefileversionsthrough ADD CONSTRAINT osf_basefilenode__fileversion_id_93nwadfc_fk_osf_fileversion_id FOREIGN KEY (fileversion_id) REFERENCES osf_fileversion DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE osf_basefileversionsthrough ADD CONSTRAINT osf_basefilenode__fileversion_uniq UNIQUE (basefilenode_id, fileversion_id); + CREATE INDEX + ON osf_basefileversionsthrough (basefilenode_id, fileversion_id); + CREATE INDEX + ON osf_basefileversionsthrough (basefilenode_id); + CREATE INDEX + ON osf_basefileversionsthrough (fileversion_id); """ - total_pages = int(ceil(max_thru_id / float(increment))) - page_start = 0 - page_end = 0 - page = 0 - while page_end <= (max_thru_id): - page += 1 - page_end += increment - if page <= total_pages: - logger.info('Transferring BaseFileNodeVersions to custom through table: {} / {}'.format(page_end / increment, total_pages)) - with connection.cursor() as cursor: - cursor.execute(sql.format( - page_start, - page_end, - page_start, - page_end - )) - print page - page_start = page_end + with connection.cursor() as cursor: + cursor.execute(sql) class Migration(migrations.Migration): From 6de4ad533d4e0cfacedd5af57b1f1c2c67ad147b Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Sat, 22 Jun 2019 22:34:15 -0500 Subject: [PATCH 07/17] Rebase migrations. --- ...ions_through.py => 0169_add_custom_file_versions_through.py} | 2 +- ...rsions_through.py => 0170_populate_file_versions_through.py} | 2 +- ...ilenode_versions.py => 0171_remove_basefilenode_versions.py} | 2 +- ...5_basefilenode_versions.py => 0172_basefilenode_versions.py} | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) rename osf/migrations/{0162_add_custom_file_versions_through.py => 0169_add_custom_file_versions_through.py} (95%) rename osf/migrations/{0163_populate_file_versions_through.py => 0170_populate_file_versions_through.py} (98%) rename osf/migrations/{0164_remove_basefilenode_versions.py => 0171_remove_basefilenode_versions.py} (86%) rename osf/migrations/{0165_basefilenode_versions.py => 0172_basefilenode_versions.py} (89%) diff --git a/osf/migrations/0162_add_custom_file_versions_through.py b/osf/migrations/0169_add_custom_file_versions_through.py similarity index 95% rename from osf/migrations/0162_add_custom_file_versions_through.py rename to osf/migrations/0169_add_custom_file_versions_through.py index 2ee766b885d..a537f0c4e25 100644 --- a/osf/migrations/0162_add_custom_file_versions_through.py +++ b/osf/migrations/0169_add_custom_file_versions_through.py @@ -9,7 +9,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0161_add_spam_fields_to_user'), + ('osf', '0168_merge_20190610_2308'), ] operations = [ diff --git a/osf/migrations/0163_populate_file_versions_through.py b/osf/migrations/0170_populate_file_versions_through.py similarity index 98% rename from osf/migrations/0163_populate_file_versions_through.py rename to osf/migrations/0170_populate_file_versions_through.py index b60c99bfda3..f138fe9a929 100644 --- a/osf/migrations/0163_populate_file_versions_through.py +++ b/osf/migrations/0170_populate_file_versions_through.py @@ -90,7 +90,7 @@ def populate_fileversion_name(state, schema): class Migration(migrations.Migration): dependencies = [ - ('osf', 
'0162_add_custom_file_versions_through'), + ('osf', '0169_add_custom_file_versions_through'), ] operations = [ diff --git a/osf/migrations/0164_remove_basefilenode_versions.py b/osf/migrations/0171_remove_basefilenode_versions.py similarity index 86% rename from osf/migrations/0164_remove_basefilenode_versions.py rename to osf/migrations/0171_remove_basefilenode_versions.py index f296eb87d7c..9fb9044e32b 100644 --- a/osf/migrations/0164_remove_basefilenode_versions.py +++ b/osf/migrations/0171_remove_basefilenode_versions.py @@ -8,7 +8,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0163_populate_file_versions_through'), + ('osf', '0170_populate_file_versions_through'), ] operations = [ diff --git a/osf/migrations/0165_basefilenode_versions.py b/osf/migrations/0172_basefilenode_versions.py similarity index 89% rename from osf/migrations/0165_basefilenode_versions.py rename to osf/migrations/0172_basefilenode_versions.py index 9ae89b10f85..cbe8cc02825 100644 --- a/osf/migrations/0165_basefilenode_versions.py +++ b/osf/migrations/0172_basefilenode_versions.py @@ -8,7 +8,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0164_remove_basefilenode_versions'), + ('osf', '0171_remove_basefilenode_versions'), ] operations = [ From 2b48bac66644b27d4663c80c4742f0dbd1f92587 Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Sun, 23 Jun 2019 11:52:00 -0500 Subject: [PATCH 08/17] When bulk copying files, copy the version names as well. --- osf/management/commands/force_archive.py | 2 +- osf/models/files.py | 17 +++++++++++++---- website/files/utils.py | 18 ++++++++++++------ 3 files changed, 26 insertions(+), 11 deletions(-) diff --git a/osf/management/commands/force_archive.py b/osf/management/commands/force_archive.py index ae73cb52663..72dd4b8675a 100644 --- a/osf/management/commands/force_archive.py +++ b/osf/management/commands/force_archive.py @@ -195,7 +195,7 @@ def manually_archive(tree, reg, node_settings, parent=None): if file_obj.versions.exists() and filenode['version']: # Min version identifier is 1 if not cloned.versions.filter(identifier=filenode['version']).exists(): - attach_versions(cloned, file_obj.versions.filter(identifier__lte=filenode['version'])) + attach_versions(cloned, file_obj.versions.filter(identifier__lte=filenode['version']), file_obj) if filenode.get('children'): manually_archive(filenode['children'], reg, node_settings, parent=cloned) diff --git a/osf/models/files.py b/osf/models/files.py index e31b23fcdc3..2f143cf109b 100644 --- a/osf/models/files.py +++ b/osf/models/files.py @@ -234,9 +234,17 @@ def to_storage(self, **kwargs): storage.pop(key) return storage - def add_version(self, version): - BaseFileVersionsThrough.objects.create(fileversion=version, basefilenode=self, version_name=self.name) - return + def add_version(self, version, name=None): + """ + Relates the file object to the version object. + :param version: Version object + :param name: Name, optional. 
Pass in if this version needs to have + a different name than the file + :return: Returns version that was passed in + """ + version_name = name or self.name + BaseFileVersionsThrough.objects.create(fileversion=version, basefilenode=self, version_name=version_name) + return version @classmethod def files_checked_out(cls, user): @@ -371,6 +379,7 @@ def move_under(self, destination_parent, name=None): if renaming and self.is_file and self.versions.exists(): newest_version = self.versions.first() node_file_version = newest_version.get_basefilenode_version(self) + # Rename version in through table node_file_version.version_name = self.name node_file_version.save() @@ -766,7 +775,7 @@ def is_duplicate(self, other): def get_basefilenode_version(self, file): # Returns the throughtable object - the record that links this version # to the given file. - return self.basefileversionsthrough_set.get(basefilenode=file) + return self.basefileversionsthrough_set.filter(basefilenode=file).first() def update_metadata(self, metadata, save=True): self.metadata.update(metadata) diff --git a/website/files/utils.py b/website/files/utils.py index a75d1180311..cff82293458 100644 --- a/website/files/utils.py +++ b/website/files/utils.py @@ -15,24 +15,24 @@ def copy_files(src, target_node, parent=None, name=None): cloned.copied_from = src cloned.save() - if src.is_file and src.versions.exists(): fileversions = src.versions.select_related('region').order_by('-created') most_recent_fileversion = fileversions.first() if most_recent_fileversion.region and most_recent_fileversion.region != target_node.osfstorage_region: # add all original version except the most recent - attach_versions(cloned, fileversions[1:]) + attach_versions(cloned, fileversions[1:], src) # create a new most recent version and update the region before adding new_fileversion = most_recent_fileversion.clone() new_fileversion.region = target_node.osfstorage_region new_fileversion.save() - attach_versions(cloned, [new_fileversion]) + attach_versions(cloned, [new_fileversion], src) else: - attach_versions(cloned, src.versions.all()) + attach_versions(cloned, src.versions.all(), src) if renaming: latest_version = cloned.versions.first() node_file_version = latest_version.get_basefilenode_version(cloned) + # If this is a copy and a rename, update the name on the through table node_file_version.version_name = cloned.name node_file_version.save() @@ -48,6 +48,12 @@ def copy_files(src, target_node, parent=None, name=None): return cloned -def attach_versions(file, versions_list): +def attach_versions(file, versions_list, src=None): + """ + Loops through all versions in the versions list and attaches them to the file. + Fetches the name associated with the versions on the original src, if copying. + """ for version in versions_list: - file.add_version(version) + original_version = version.get_basefilenode_version(src) + name = original_version.version_name if original_version else None + file.add_version(version, name) From eb20a81703970bdec7e4e23ade6413747707db99 Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Sun, 23 Jun 2019 23:31:39 -0500 Subject: [PATCH 09/17] Modify osfstorage_download view so the name in the payload is dependent on the version. 
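
A hypothetical sketch of the behaviour this enables, mirroring the helpers used
in the tests below (create_test_file, FileVersionFactory, move_under); the
filenames and the surrounding test-case attributes (self.node, self.user,
self.node_settings) are illustrative:

    from api_tests.utils import create_test_file
    from addons.osfstorage.tests import factories

    file = create_test_file(target=self.node, user=self.user, filename='old_name.mp3')
    file.add_version(factories.FileVersionFactory())
    file.move_under(self.node_settings.get_root(), name='new_name.mp3')

    newest, oldest = file.versions.first(), file.versions.last()  # ordered by -created
    # Only the newest version's through-table row is renamed, so downloading an
    # older version reports the name it had when that version was uploaded.
    assert newest.get_basefilenode_version(file).version_name == 'new_name.mp3'
    assert oldest.get_basefilenode_version(file).version_name == 'old_name.mp3'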
--- addons/osfstorage/tests/test_views.py | 20 +++++++++++++++++++- addons/osfstorage/views.py | 5 ++++- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/addons/osfstorage/tests/test_views.py b/addons/osfstorage/tests/test_views.py index 1a016a35029..c228c4c2484 100644 --- a/addons/osfstorage/tests/test_views.py +++ b/addons/osfstorage/tests/test_views.py @@ -1571,7 +1571,6 @@ def test_file_remove_tag_fail_doesnt_create_log(self, mock_log): @pytest.mark.django_db @pytest.mark.enable_bookmark_creation class TestFileViews(StorageTestCase): - def test_file_views(self): file = create_test_file(target=self.node, user=self.user) url = self.node.web_url_for('addon_view_or_download_file', path=file._id, provider=file.provider) @@ -1617,6 +1616,25 @@ def test_download_file(self): redirect = self.app.get(url, auth=self.user.auth, expect_errors=True) assert redirect.status_code == 400 + def test_osfstorage_download_view(self): + file = create_test_file(target=self.node, user=self.user) + version = factories.FileVersionFactory() + file.add_version(version) + file.move_under(self.node_settings.get_root(), name='new_name') + file.save() + + res = self.app.get( + api_url_for( + 'osfstorage_download', + fid=file._id, + guid=self.node._id, + **signing.sign_data(signing.default_signer, {}) + ), + auth=self.user.auth, + ) + assert res.status_code == 200 + assert res.json['data']['name'] == 'new_name' + @responses.activate @mock.patch('framework.auth.cas.get_client') def test_download_file_with_token(self, mock_get_client): diff --git a/addons/osfstorage/views.py b/addons/osfstorage/views.py index 357698ff89b..8c5593946c9 100644 --- a/addons/osfstorage/views.py +++ b/addons/osfstorage/views.py @@ -404,9 +404,12 @@ def osfstorage_download(file_node, payload, **kwargs): raise make_error(httplib.BAD_REQUEST, message_short='Version must be an integer if not specified') version = file_node.get_version(version_id, required=True) + file_version_thru = version.get_basefilenode_version(file_node) + name = file_version_thru.version_name if file_version_thru else file_node.name + return { 'data': { - 'name': file_node.name, + 'name': name, 'path': version.location_hash, }, 'settings': { From bbb2ee7ee4793d83e51a4a826bfdf356307bc4c3 Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Mon, 1 Jul 2019 10:32:26 -0500 Subject: [PATCH 10/17] Rebase migrations and update Brian's data storage usage management command to use new custom through table for files and versions. --- osf/management/commands/data_storage_usage.py | 22 +++++++++---------- ... 
0170_add_custom_file_versions_through.py} | 2 +- ...=> 0171_populate_file_versions_through.py} | 2 +- ...y => 0172_remove_basefilenode_versions.py} | 2 +- ...sions.py => 0173_basefilenode_versions.py} | 2 +- 5 files changed, 15 insertions(+), 15 deletions(-) rename osf/migrations/{0169_add_custom_file_versions_through.py => 0170_add_custom_file_versions_through.py} (96%) rename osf/migrations/{0170_populate_file_versions_through.py => 0171_populate_file_versions_through.py} (98%) rename osf/migrations/{0171_remove_basefilenode_versions.py => 0172_remove_basefilenode_versions.py} (86%) rename osf/migrations/{0172_basefilenode_versions.py => 0173_basefilenode_versions.py} (89%) diff --git a/osf/management/commands/data_storage_usage.py b/osf/management/commands/data_storage_usage.py index 4bd9134c819..b115d2615e6 100644 --- a/osf/management/commands/data_storage_usage.py +++ b/osf/management/commands/data_storage_usage.py @@ -48,7 +48,7 @@ LAST_ROW_SQL = """ SELECT obfnv.id AS fileversion_id - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv ORDER BY obfnv.id DESC LIMIT 1 """ @@ -73,7 +73,7 @@ node.is_deleted AS node_deleted, node.spam_status, preprint.id IS NOT NULL AS is_supplementary_node - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN addons_osfstorage_region region ON version.region_id = region.id @@ -89,7 +89,7 @@ TOTAL_FILE_SIZE_SUM_SQL = """ SELECT 'total', sum(size) AS deleted_size_sum - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id WHERE file.provider = 'osfstorage' @@ -100,7 +100,7 @@ DELETED_FILE_SIZE_SUM_SQL = """ SELECT 'deleted', sum(size) AS deleted_size_sum - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id WHERE file.provider = 'osfstorage' @@ -112,7 +112,7 @@ REGIONAL_NODE_SIZE_SUM_SQL = """ SELECT region.name, sum(size) - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN addons_osfstorage_region region ON version.region_id = region.id @@ -128,7 +128,7 @@ node.type = 'osf.node' AND NOT node.is_public ) THEN 'osf.private-node' ELSE node.type END AS type, sum(size) - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN osf_abstractnode node ON file.target_object_id = node.id @@ -144,7 +144,7 @@ ND_QUICK_FILE_SIZE_SUM_SQL = """ SELECT node.type, sum(size) - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN osf_abstractnode node ON file.target_object_id = node.id @@ -161,7 +161,7 @@ ND_PREPRINT_SUPPLEMENT_SIZE_SUM_SQL = """ SELECT 'nd_supplement', sum(size) AS supplementary_node_size - 
FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN osf_abstractnode node ON node.id = file.target_object_id @@ -193,7 +193,7 @@ preprint.deleted IS NOT NULL AS preprint_deleted, preprint.spam_status, FALSE AS is_supplementary_node - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN addons_osfstorage_region region ON version.region_id = region.id @@ -207,7 +207,7 @@ ND_PREPRINT_SIZE_SUM_SQL = """ SELECT 'nd_preprints', sum(size) AS nd_preprint_size_sum - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN osf_preprint preprint ON preprint.id = file.target_object_id @@ -220,7 +220,7 @@ REGIONAL_PREPRINT_SIZE_SUM_SQL = """ SELECT region.name, sum(size) - FROM osf_basefilenode_versions AS obfnv + FROM osf_basefileversionsthrough AS obfnv LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id LEFT JOIN addons_osfstorage_region region ON version.region_id = region.id diff --git a/osf/migrations/0169_add_custom_file_versions_through.py b/osf/migrations/0170_add_custom_file_versions_through.py similarity index 96% rename from osf/migrations/0169_add_custom_file_versions_through.py rename to osf/migrations/0170_add_custom_file_versions_through.py index a537f0c4e25..b0f3eef41dc 100644 --- a/osf/migrations/0169_add_custom_file_versions_through.py +++ b/osf/migrations/0170_add_custom_file_versions_through.py @@ -9,7 +9,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0168_merge_20190610_2308'), + ('osf', '0169_merge_20190618_1429'), ] operations = [ diff --git a/osf/migrations/0170_populate_file_versions_through.py b/osf/migrations/0171_populate_file_versions_through.py similarity index 98% rename from osf/migrations/0170_populate_file_versions_through.py rename to osf/migrations/0171_populate_file_versions_through.py index f138fe9a929..ec84add6924 100644 --- a/osf/migrations/0170_populate_file_versions_through.py +++ b/osf/migrations/0171_populate_file_versions_through.py @@ -90,7 +90,7 @@ def populate_fileversion_name(state, schema): class Migration(migrations.Migration): dependencies = [ - ('osf', '0169_add_custom_file_versions_through'), + ('osf', '0170_add_custom_file_versions_through'), ] operations = [ diff --git a/osf/migrations/0171_remove_basefilenode_versions.py b/osf/migrations/0172_remove_basefilenode_versions.py similarity index 86% rename from osf/migrations/0171_remove_basefilenode_versions.py rename to osf/migrations/0172_remove_basefilenode_versions.py index 9fb9044e32b..44039eb6056 100644 --- a/osf/migrations/0171_remove_basefilenode_versions.py +++ b/osf/migrations/0172_remove_basefilenode_versions.py @@ -8,7 +8,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0170_populate_file_versions_through'), + ('osf', '0171_populate_file_versions_through'), ] operations = [ diff --git a/osf/migrations/0172_basefilenode_versions.py b/osf/migrations/0173_basefilenode_versions.py similarity index 89% rename from 
osf/migrations/0172_basefilenode_versions.py rename to osf/migrations/0173_basefilenode_versions.py index cbe8cc02825..4ae671ccf5e 100644 --- a/osf/migrations/0172_basefilenode_versions.py +++ b/osf/migrations/0173_basefilenode_versions.py @@ -8,7 +8,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0171_remove_basefilenode_versions'), + ('osf', '0172_remove_basefilenode_versions'), ] operations = [ From 5390560b4086a8071900daa39b4a21d69b962ff9 Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Mon, 1 Jul 2019 13:28:25 -0500 Subject: [PATCH 11/17] Returning `version_names` under `addon_view_file` for use in properly building the name of the downloaded file on the legacy file revisions page. Page is using a WB call to get revision names, but we need to get the names of older versions that are now stored separately on a custom file-version through table. --- addons/base/views.py | 9 +++++++++ addons/osfstorage/tests/test_views.py | 17 ++++++++++++++++- website/static/js/filepage/revisions.js | 7 ++++++- website/templates/project/view_file.mako | 1 + 4 files changed, 32 insertions(+), 2 deletions(-) diff --git a/addons/base/views.py b/addons/base/views.py index 42711255011..61112a96dcf 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -869,6 +869,14 @@ def addon_view_file(auth, node, file_node, version): args={'url': download_url.url} ) + version_names = [] + versions = file_node.versions.order_by('-created') + if versions.count(): + for vers in versions: + file_version_thru = vers.get_basefilenode_version(file_node) + name = file_version_thru.version_name if file_version_thru else file_node.name + version_names.append(name) + ret.update({ 'urls': { 'render': render_url.url, @@ -896,6 +904,7 @@ def addon_view_file(auth, node, file_node, version): 'allow_comments': file_node.provider in settings.ADDONS_COMMENTABLE, 'checkout_user': file_node.checkout._id if file_node.checkout else None, 'pre_reg_checkout': is_pre_reg_checkout(node, file_node), + 'version_names': version_names }) ret.update(rubeus.collect_addon_assets(node)) diff --git a/addons/osfstorage/tests/test_views.py b/addons/osfstorage/tests/test_views.py index c228c4c2484..396d5aff6d3 100644 --- a/addons/osfstorage/tests/test_views.py +++ b/addons/osfstorage/tests/test_views.py @@ -31,7 +31,7 @@ from osf.models import files as models from addons.osfstorage.apps import osf_storage_root from addons.osfstorage import utils -from addons.base.views import make_auth +from addons.base.views import make_auth, addon_view_file from addons.osfstorage import settings as storage_settings from api_tests.utils import create_test_file, create_test_preprint_file from api.caching.settings import STORAGE_USAGE_KEY @@ -1616,6 +1616,21 @@ def test_download_file(self): redirect = self.app.get(url, auth=self.user.auth, expect_errors=True) assert redirect.status_code == 400 + def test_addon_view_file(self): + file = create_test_file(target=self.node, user=self.user, filename='first_name') + version = factories.FileVersionFactory() + file.add_version(version) + file.move_under(self.node_settings.get_root(), name='second_name') + file.save() + + version = factories.FileVersionFactory() + file.add_version(version) + file.move_under(self.node_settings.get_root(), name='third_name') + file.save() + + ret = addon_view_file(Auth(self.user), self.node, file, version) + assert ret['version_names'] == ['third_name', 'second_name', 'first_name'] + def test_osfstorage_download_view(self): file = create_test_file(target=self.node, 
user=self.user) version = factories.FileVersionFactory() diff --git a/website/static/js/filepage/revisions.js b/website/static/js/filepage/revisions.js index 0e99d81f625..c938bb68bc0 100644 --- a/website/static/js/filepage/revisions.js +++ b/website/static/js/filepage/revisions.js @@ -218,7 +218,12 @@ var FileRevisionsTable = { } if (file.provider === 'osfstorage' && file.name && index !== 0) { - var parts = file.name.split('.'); + var parts; + if (file.versionNames && file.versionNames.length) { + parts = file.versionNames[index].split('.'); + } else { + parts = file.name.split('.'); + } if (parts.length === 1) { options.displayName = parts[0] + '-' + revision.modified; } else { diff --git a/website/templates/project/view_file.mako b/website/templates/project/view_file.mako index b2876bce2ce..98b9804d648 100644 --- a/website/templates/project/view_file.mako +++ b/website/templates/project/view_file.mako @@ -197,6 +197,7 @@ id: ${file_id | sjson, n}, checkoutUser: ${checkout_user if checkout_user else None | sjson, n}, isPreregCheckout: ${pre_reg_checkout if pre_reg_checkout else False | sjson, n}, + versionNames: ${version_names if version_names else [] | sjson, n}, urls: { %if error is None: render: ${ urls['render'] | sjson, n }, From 32203ff63c18bd23381f426853715a1a95641182 Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Sun, 21 Jul 2019 00:33:18 -0500 Subject: [PATCH 12/17] Pull version_names from BaseFileVersionsThrough table. --- addons/base/views.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/addons/base/views.py b/addons/base/views.py index 934bd38c2f3..763aec065a8 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -37,7 +37,7 @@ from addons.base import signals as file_signals from addons.base.utils import format_last_known_metadata, get_mfr_url from osf import features -from osf.models import (BaseFileNode, TrashedFileNode, +from osf.models import (BaseFileNode, TrashedFileNode, BaseFileVersionsThrough, OSFUser, AbstractNode, Preprint, NodeLog, DraftRegistration, RegistrationSchema, Guid, FileVersionUserMetadata, FileVersion) @@ -875,13 +875,9 @@ def addon_view_file(auth, node, file_node, version): args={'url': download_url.url} ) - version_names = [] - versions = file_node.versions.order_by('-created') - if versions.count(): - for vers in versions: - file_version_thru = vers.get_basefilenode_version(file_node) - name = file_version_thru.version_name if file_version_thru else file_node.name - version_names.append(name) + version_names = BaseFileVersionsThrough.objects.filter( + basefilenode_id=file_node.id + ).order_by('-fileversion_id').values_list('version_name', flat=True) ret.update({ 'urls': { @@ -910,7 +906,7 @@ def addon_view_file(auth, node, file_node, version): 'allow_comments': file_node.provider in settings.ADDONS_COMMENTABLE, 'checkout_user': file_node.checkout._id if file_node.checkout else None, 'pre_reg_checkout': is_pre_reg_checkout(node, file_node), - 'version_names': version_names + 'version_names': list(version_names) }) ret.update(rubeus.collect_addon_assets(node)) From c5c8e0afbb973631580d4dac519998b5a98cb2f7 Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Wed, 28 Aug 2019 14:24:23 -0500 Subject: [PATCH 13/17] Rebase migrations. 
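
For reference, the bulk version_names lookup settled on in the previous patch assumes that FileVersion primary keys increase in creation order, so ordering the through rows by -fileversion_id reproduces the old newest-first walk over versions, and that every attached version has a through row (which the populate migration is meant to ensure). A hedged, illustrative way to state that equivalence, where file_node stands in for any osfstorage BaseFileNode:

    # Illustrative only: the single through-table query should agree with
    # walking the versions newest-first, under the assumptions stated above.
    from osf.models import BaseFileVersionsThrough

    bulk = list(
        BaseFileVersionsThrough.objects
        .filter(basefilenode_id=file_node.id)
        .order_by('-fileversion_id')
        .values_list('version_name', flat=True)
    )

    per_version = []
    for version in file_node.versions.order_by('-created'):
        link = version.get_basefilenode_version(file_node)
        per_version.append(link.version_name if link else file_node.name)

    assert bulk == per_version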
--- ...ions_through.py => 0181_add_custom_file_versions_through.py} | 2 +- ...rsions_through.py => 0182_populate_file_versions_through.py} | 2 +- ...ilenode_versions.py => 0183_remove_basefilenode_versions.py} | 2 +- ...7_basefilenode_versions.py => 0184_basefilenode_versions.py} | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) rename osf/migrations/{0174_add_custom_file_versions_through.py => 0181_add_custom_file_versions_through.py} (95%) rename osf/migrations/{0175_populate_file_versions_through.py => 0182_populate_file_versions_through.py} (98%) rename osf/migrations/{0176_remove_basefilenode_versions.py => 0183_remove_basefilenode_versions.py} (86%) rename osf/migrations/{0177_basefilenode_versions.py => 0184_basefilenode_versions.py} (89%) diff --git a/osf/migrations/0174_add_custom_file_versions_through.py b/osf/migrations/0181_add_custom_file_versions_through.py similarity index 95% rename from osf/migrations/0174_add_custom_file_versions_through.py rename to osf/migrations/0181_add_custom_file_versions_through.py index 3183e3527ae..f48b69e0d0a 100644 --- a/osf/migrations/0174_add_custom_file_versions_through.py +++ b/osf/migrations/0181_add_custom_file_versions_through.py @@ -9,7 +9,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0173_ensure_schemas'), + ('osf', '0180_finalize_token_scopes_mig'), ] operations = [ diff --git a/osf/migrations/0175_populate_file_versions_through.py b/osf/migrations/0182_populate_file_versions_through.py similarity index 98% rename from osf/migrations/0175_populate_file_versions_through.py rename to osf/migrations/0182_populate_file_versions_through.py index dca8af3bde6..4832f8cc7da 100644 --- a/osf/migrations/0175_populate_file_versions_through.py +++ b/osf/migrations/0182_populate_file_versions_through.py @@ -90,7 +90,7 @@ def populate_fileversion_name(state, schema): class Migration(migrations.Migration): dependencies = [ - ('osf', '0174_add_custom_file_versions_through'), + ('osf', '0181_add_custom_file_versions_through'), ] operations = [ diff --git a/osf/migrations/0176_remove_basefilenode_versions.py b/osf/migrations/0183_remove_basefilenode_versions.py similarity index 86% rename from osf/migrations/0176_remove_basefilenode_versions.py rename to osf/migrations/0183_remove_basefilenode_versions.py index 6e27ab68b23..ea63971db89 100644 --- a/osf/migrations/0176_remove_basefilenode_versions.py +++ b/osf/migrations/0183_remove_basefilenode_versions.py @@ -8,7 +8,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0175_populate_file_versions_through'), + ('osf', '0182_populate_file_versions_through'), ] operations = [ diff --git a/osf/migrations/0177_basefilenode_versions.py b/osf/migrations/0184_basefilenode_versions.py similarity index 89% rename from osf/migrations/0177_basefilenode_versions.py rename to osf/migrations/0184_basefilenode_versions.py index d9e2329738c..5500d800644 100644 --- a/osf/migrations/0177_basefilenode_versions.py +++ b/osf/migrations/0184_basefilenode_versions.py @@ -8,7 +8,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0176_remove_basefilenode_versions'), + ('osf', '0183_remove_basefilenode_versions'), ] operations = [ From b35c775c0976e7596e98cce87feb64bccb9c6950 Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Wed, 28 Aug 2019 22:31:27 -0500 Subject: [PATCH 14/17] Update tests to use new add_version method as you can't use add() on a ManyToManyField which specifies an intermediary model. 
--- addons/osfstorage/tests/test_models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/addons/osfstorage/tests/test_models.py b/addons/osfstorage/tests/test_models.py index 0e8df8a65a4..42147bf5768 100644 --- a/addons/osfstorage/tests/test_models.py +++ b/addons/osfstorage/tests/test_models.py @@ -360,7 +360,7 @@ def test_move_nested_between_regions(self): for _ in range(2): version = factories.FileVersionFactory(region=self.node_settings.region) - child.versions.add(version) + child.add_version(version) child.save() moved = to_move.move_under(move_to) From 228a84bbacaa33dd496d7da668b7b8bb7c7eec6e Mon Sep 17 00:00:00 2001 From: longze chen Date: Fri, 6 Sep 2019 12:11:09 -0400 Subject: [PATCH 15/17] Update docker-compose readme on populating institutions --- README-docker-compose.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README-docker-compose.md b/README-docker-compose.md index dc0742940e1..d84a2670014 100644 --- a/README-docker-compose.md +++ b/README-docker-compose.md @@ -147,7 +147,7 @@ - `docker-compose run --rm web python manage.py migrate` - Populate institutions: - After resetting your database or with a new install you will need to populate the table of institutions. **You must have run migrations first.** - - `docker-compose run --rm web python -m scripts.populate_institutions test` + - `docker-compose run --rm web python -m scripts.populate_institutions -e test -a` - Populate preprint, registration, and collection providers: - After resetting your database or with a new install, the required providers and subjects will be created automatically **when you run migrations.** To create more: - `docker-compose run --rm web python manage.py populate_fake_providers` From 89332ba3352dbfe256455d8395f31f92ab1a6c2b Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Fri, 6 Sep 2019 11:49:53 -0500 Subject: [PATCH 16/17] Rebasing migrations. 
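
Concretely, a rebase here only renumbers the migration files and repoints each one's dependencies entry at the new tip of the osf app's migration graph; the operations bodies are untouched. An illustrative sketch of the shape, using the dependency names visible in the diff below:

    # Illustrative only - a rebased migration: body unchanged, pointer moved.
    from django.db import migrations

    class Migration(migrations.Migration):

        # before this rebase: [('osf', '0180_finalize_token_scopes_mig')]
        dependencies = [
            ('osf', '0181_osfuser_contacted_deactivation'),
        ]

        operations = [
            # unchanged: the operations defined when the migration was first written
        ]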
--- ...ions_through.py => 0182_add_custom_file_versions_through.py} | 2 +- ...rsions_through.py => 0183_populate_file_versions_through.py} | 2 +- ...ilenode_versions.py => 0184_remove_basefilenode_versions.py} | 2 +- ...4_basefilenode_versions.py => 0185_basefilenode_versions.py} | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) rename osf/migrations/{0181_add_custom_file_versions_through.py => 0182_add_custom_file_versions_through.py} (95%) rename osf/migrations/{0182_populate_file_versions_through.py => 0183_populate_file_versions_through.py} (98%) rename osf/migrations/{0183_remove_basefilenode_versions.py => 0184_remove_basefilenode_versions.py} (86%) rename osf/migrations/{0184_basefilenode_versions.py => 0185_basefilenode_versions.py} (89%) diff --git a/osf/migrations/0181_add_custom_file_versions_through.py b/osf/migrations/0182_add_custom_file_versions_through.py similarity index 95% rename from osf/migrations/0181_add_custom_file_versions_through.py rename to osf/migrations/0182_add_custom_file_versions_through.py index f48b69e0d0a..e4d5f1b19b6 100644 --- a/osf/migrations/0181_add_custom_file_versions_through.py +++ b/osf/migrations/0182_add_custom_file_versions_through.py @@ -9,7 +9,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0180_finalize_token_scopes_mig'), + ('osf', '0181_osfuser_contacted_deactivation'), ] operations = [ diff --git a/osf/migrations/0182_populate_file_versions_through.py b/osf/migrations/0183_populate_file_versions_through.py similarity index 98% rename from osf/migrations/0182_populate_file_versions_through.py rename to osf/migrations/0183_populate_file_versions_through.py index 4832f8cc7da..703c2767588 100644 --- a/osf/migrations/0182_populate_file_versions_through.py +++ b/osf/migrations/0183_populate_file_versions_through.py @@ -90,7 +90,7 @@ def populate_fileversion_name(state, schema): class Migration(migrations.Migration): dependencies = [ - ('osf', '0181_add_custom_file_versions_through'), + ('osf', '0182_add_custom_file_versions_through'), ] operations = [ diff --git a/osf/migrations/0183_remove_basefilenode_versions.py b/osf/migrations/0184_remove_basefilenode_versions.py similarity index 86% rename from osf/migrations/0183_remove_basefilenode_versions.py rename to osf/migrations/0184_remove_basefilenode_versions.py index ea63971db89..7218a83c71a 100644 --- a/osf/migrations/0183_remove_basefilenode_versions.py +++ b/osf/migrations/0184_remove_basefilenode_versions.py @@ -8,7 +8,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0182_populate_file_versions_through'), + ('osf', '0183_populate_file_versions_through'), ] operations = [ diff --git a/osf/migrations/0184_basefilenode_versions.py b/osf/migrations/0185_basefilenode_versions.py similarity index 89% rename from osf/migrations/0184_basefilenode_versions.py rename to osf/migrations/0185_basefilenode_versions.py index 5500d800644..c910787a7c4 100644 --- a/osf/migrations/0184_basefilenode_versions.py +++ b/osf/migrations/0185_basefilenode_versions.py @@ -8,7 +8,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0183_remove_basefilenode_versions'), + ('osf', '0184_remove_basefilenode_versions'), ] operations = [ From cefe84c400c83a290e313ce262849c77f2a85bba Mon Sep 17 00:00:00 2001 From: Dawn Pattison Date: Wed, 11 Sep 2019 17:00:50 -0500 Subject: [PATCH 17/17] Bump version and update changelog. 
--- CHANGELOG | 8 ++++++++ package.json | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG b/CHANGELOG index c0eadf5b56f..ae86734434f 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,14 @@ We follow the CalVer (https://calver.org/) versioning scheme: YY.MINOR.MICRO. +19.26.0 (2019-09-11) +=================== +- Create a custom through table for linking files and versions + for storing version names. Supports different versions of the same + file having different names. +- Update README for populating institutions. + + 19.25.0 (2019-09-05) =================== - Automate account deactivation if users have no content diff --git a/package.json b/package.json index c5f42c3ccf2..bc9777b8ec4 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "OSF", - "version": "19.25.0", + "version": "19.26.0", "description": "Facilitating Open Science", "repository": "https://github.com/CenterForOpenScience/osf.io", "author": "Center for Open Science",