From b3feaffc8d685b2d354c94e0a15c9ae9d727eaa3 Mon Sep 17 00:00:00 2001 From: TOMONORI ENDOU Date: Sat, 7 Feb 2026 11:17:07 +0900 Subject: [PATCH 1/3] feat(provider): add s3compatsigv4 provider for S3 signature V4 fix admin test case and issues according to feedback --- Dockerfile | 2 + addons.json | 8 + addons/s3compatsigv4/README.md | 33 ++ addons/s3compatsigv4/__init__.py | 1 + addons/s3compatsigv4/apps.py | 68 ++++ .../s3compatsigv4/migrations/0001_initial.py | 68 ++++ addons/s3compatsigv4/migrations/__init__.py | 0 addons/s3compatsigv4/models.py | 177 +++++++++ addons/s3compatsigv4/provider.py | 20 ++ addons/s3compatsigv4/requirements.txt | 1 + addons/s3compatsigv4/routes.py | 89 +++++ addons/s3compatsigv4/serializer.py | 45 +++ addons/s3compatsigv4/settings/__init__.py | 8 + addons/s3compatsigv4/settings/defaults.py | 53 +++ addons/s3compatsigv4/static/comicon.png | Bin 0 -> 1673 bytes addons/s3compatsigv4/static/node-cfg.js | 7 + .../static/s3compatsigv4-rubeus-cfg.js | 46 +++ .../s3compatsigv4AnonymousLogActionList.json | 11 + .../static/s3compatsigv4LogActionList.json | 11 + .../static/s3compatsigv4NodeConfig.js | 323 +++++++++++++++++ .../static/s3compatsigv4UserConfig.js | 178 +++++++++ addons/s3compatsigv4/static/settings.json | 82 +++++ addons/s3compatsigv4/static/user-cfg.js | 6 + .../s3compatsigv4_credentials_modal.mako | 51 +++ .../s3compatsigv4_node_settings.mako | 93 +++++ .../s3compatsigv4_user_settings.mako | 46 +++ addons/s3compatsigv4/tests/__init__.py | 0 addons/s3compatsigv4/tests/conftest.py | 1 + addons/s3compatsigv4/tests/factories.py | 32 ++ addons/s3compatsigv4/tests/test_model.py | 251 +++++++++++++ addons/s3compatsigv4/tests/test_serializer.py | 31 ++ addons/s3compatsigv4/tests/test_view.py | 308 ++++++++++++++++ addons/s3compatsigv4/tests/utils.py | 43 +++ addons/s3compatsigv4/utils.py | 337 ++++++++++++++++++ addons/s3compatsigv4/views.py | 180 ++++++++++ admin/base/settings/defaults.py | 3 + .../export_data/utils.py | 30 ++ 
.../export_data/views/location.py | 22 +- admin/rdm_custom_storage_location/utils.py | 103 ++++++ admin/rdm_custom_storage_location/views.py | 17 + .../rdm-institutional-storage-page.js | 8 + .../providers/s3compatsigv4_modal.html | 60 ++++ .../export_data/test_utils.py | 58 +++ .../export_data/views/test_location.py | 24 ++ .../test_s3compatsigv4.py | 256 +++++++++++++ .../rdm_custom_storage_location/test_utils.py | 41 ++- .../rdm_custom_storage_location/test_views.py | 1 + api/base/settings/defaults.py | 3 + framework/addons/data/addons.json | 30 ++ .../0121_remove_wiki_fields_from_node.py | 1 + .../0237_ensure_schema_and_mappings.py | 2 +- osf/migrations/0261_auto_20260112_1402.py | 50 +++ osf/models/export_data_location.py | 11 + osf_tests/test_export_data_location.py | 9 + scripts/translations/messages_addonsJson.js | 2 + website/notifications/constants.py | 1 + website/static/js/logActionsList_extract.js | 9 + website/static/js/osfLanguage.js | 5 + .../static/js/translations/osfLanguage_en.js | 5 + .../static/js/translations/osfLanguage_ja.js | 5 + website/static/storageAddons.json | 4 + 61 files changed, 3365 insertions(+), 5 deletions(-) create mode 100644 addons/s3compatsigv4/README.md create mode 100644 addons/s3compatsigv4/__init__.py create mode 100644 addons/s3compatsigv4/apps.py create mode 100644 addons/s3compatsigv4/migrations/0001_initial.py create mode 100644 addons/s3compatsigv4/migrations/__init__.py create mode 100644 addons/s3compatsigv4/models.py create mode 100644 addons/s3compatsigv4/provider.py create mode 100644 addons/s3compatsigv4/requirements.txt create mode 100644 addons/s3compatsigv4/routes.py create mode 100644 addons/s3compatsigv4/serializer.py create mode 100644 addons/s3compatsigv4/settings/__init__.py create mode 100644 addons/s3compatsigv4/settings/defaults.py create mode 100644 addons/s3compatsigv4/static/comicon.png create mode 100644 addons/s3compatsigv4/static/node-cfg.js create mode 100644 
addons/s3compatsigv4/static/s3compatsigv4-rubeus-cfg.js create mode 100644 addons/s3compatsigv4/static/s3compatsigv4AnonymousLogActionList.json create mode 100644 addons/s3compatsigv4/static/s3compatsigv4LogActionList.json create mode 100644 addons/s3compatsigv4/static/s3compatsigv4NodeConfig.js create mode 100644 addons/s3compatsigv4/static/s3compatsigv4UserConfig.js create mode 100644 addons/s3compatsigv4/static/settings.json create mode 100644 addons/s3compatsigv4/static/user-cfg.js create mode 100644 addons/s3compatsigv4/templates/s3compatsigv4_credentials_modal.mako create mode 100644 addons/s3compatsigv4/templates/s3compatsigv4_node_settings.mako create mode 100644 addons/s3compatsigv4/templates/s3compatsigv4_user_settings.mako create mode 100644 addons/s3compatsigv4/tests/__init__.py create mode 100644 addons/s3compatsigv4/tests/conftest.py create mode 100644 addons/s3compatsigv4/tests/factories.py create mode 100644 addons/s3compatsigv4/tests/test_model.py create mode 100644 addons/s3compatsigv4/tests/test_serializer.py create mode 100644 addons/s3compatsigv4/tests/test_view.py create mode 100644 addons/s3compatsigv4/tests/utils.py create mode 100644 addons/s3compatsigv4/utils.py create mode 100644 addons/s3compatsigv4/views.py create mode 100644 admin/templates/rdm_custom_storage_location/providers/s3compatsigv4_modal.html create mode 100644 admin_tests/rdm_custom_storage_location/test_s3compatsigv4.py create mode 100644 osf/migrations/0261_auto_20260112_1402.py diff --git a/Dockerfile b/Dockerfile index 12297a0d3c8..b50de0042a9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -61,6 +61,7 @@ COPY ./addons/swift/requirements.txt ./addons/swift/ COPY ./addons/azureblobstorage/requirements.txt ./addons/azureblobstorage/ COPY ./addons/weko/requirements.txt ./addons/weko/ COPY ./addons/s3compat/requirements.txt ./addons/s3compat/ +COPY ./addons/s3compatsigv4/requirements.txt ./addons/s3compatsigv4/ COPY ./addons/s3compatinstitutions/requirements.txt 
./addons/s3compatinstitutions/ COPY ./addons/s3compatb3/requirements.txt ./addons/s3compatb3/ COPY ./addons/ociinstitutions/requirements.txt ./addons/ociinstitutions/ @@ -167,6 +168,7 @@ COPY ./addons/azureblobstorage/static/ ./addons/azureblobstorage/static/ COPY ./addons/weko/static/ ./addons/weko/static/ COPY ./addons/jupyterhub/static/ ./addons/jupyterhub/static/ COPY ./addons/s3compat/static/ ./addons/s3compat/static/ +COPY ./addons/s3compatsigv4/static/ ./addons/s3compatsigv4/static/ COPY ./addons/s3compatinstitutions/static/ ./addons/s3compatinstitutions/static/ COPY ./addons/s3compatb3/static/ ./addons/s3compatb3/static/ COPY ./addons/ociinstitutions/requirements.txt ./addons/ociinstitutions/ diff --git a/addons.json b/addons.json index 98ce5044863..aa177aeb6b4 100644 --- a/addons.json +++ b/addons.json @@ -22,6 +22,7 @@ "weko", "jupyterhub", "s3compat", + "s3compatsigv4", "s3compatinstitutions", "s3compatb3", "ociinstitutions", @@ -61,6 +62,7 @@ "azureblobstorage": "partial", "weko": "partial", "s3compat": "partial", + "s3compatsigv4": "partial", "s3compatinstitutions": "partial", "s3compatb3": "partial", "ociinstitutions": "partial", @@ -88,6 +90,7 @@ "azureblobstorage", "weko", "s3compat", + "s3compatsigv4", "s3compatinstitutions", "s3compatb3", "ociinstitutions", @@ -128,6 +131,7 @@ "weko": "JAIRO Cloud is an application server to share, archive data.", "jupyterhub": "Jupyter is a web-based interactive computational environment. Files on a GakuNin RDM project can be imported to/exported from Jupyter", "s3compat": "S3 Compatible Storage is a file storage add-on. Connect your S3 Compatible Storage account to a GakuNin RDM project to interact with files hosted on S3 Compatible Storage via the GakuNin RDM.", + "s3compatsigv4": "S3 Compatible Storage (SigV4) is a file storage add-on. 
Connect your S3 Compatible Storage (SigV4) account to a GakuNin RDM project to interact with files hosted on S3 Compatible Storage (SigV4) via the GakuNin RDM.", "s3compatinstitutions": "S3 Compatible Storage for Institutions is a file storage add-on. Connect your S3 Compatible Storage account to a GakuNin RDM project to interact with files hosted on S3 Compatible Storage via the GakuNin RDM.", "s3compatb3": "S3 Compatible Storage is a file storage add-on. Connect your S3 Compatible Storage account to a GakuNin RDM project to interact with files hosted on S3 Compatible Storage via the GakuNin RDM.", "ociinstitutions": "Oracle Cloud Infrastructure for Institutions is a file storage add-on. Connect your Oracle Cloud Infrastructure Object Storage account to a GakuNin RDM project to interact with files hosted on Oracle Cloud Infrastructure Object Storage via the GakuNin RDM.", @@ -159,6 +163,7 @@ "weko": "https://weko.at.nii.ac.jp/", "jupyterhub": "https://jupyterhub.readthedocs.io/", "s3compat": "https://aws.amazon.com/s3/", + "s3compatsigv4": "https://aws.amazon.com/s3/", "s3compatb3": "https://aws.amazon.com/s3/", "nextcloud": "https://nextcloud.com/", "iqbrims": "https://drive.google.com", @@ -184,11 +189,13 @@ "onedrive", "s3", "s3compat", + "s3compatsigv4", "owncloud" ], "addons_has_max_keys": [ "s3", "s3compat", + "s3compatsigv4", "s3compatinstitutions", "s3compatb3", "ociinstitutions" @@ -196,6 +203,7 @@ "addons_folder_field": { "s3": "folder_name", "s3compat": "folder_name", + "s3compatsigv4": "folder_name", "s3compatb3": "folder_name", "azureblobstorage": "folder_name", "box": "folder_name", diff --git a/addons/s3compatsigv4/README.md b/addons/s3compatsigv4/README.md new file mode 100644 index 00000000000..81e768efd37 --- /dev/null +++ b/addons/s3compatsigv4/README.md @@ -0,0 +1,33 @@ +# RDM S3 Compatible Storage (SigV4) Addon + +S3 Compatible Storage (SigV4) Addon enables to mount Cloud Storage which supports Amazon S3-like API on the project. 
+ +## Configuring the addon + +Users can select storage from the S3 Compatible Storage (SigV4) List, +which is defined in `addons/s3compatsigv4/static/settings.json`. + +``` +{ + "availableServices": [{"name": "Wasabi", + "host": "s3.wasabisys.com", + "bucketLocations": { + "us-east": {"name": "us-east", "host": "s3.wasabisys.com"}, + "us-west-1": {"name": "us-west-1", "host": "s3.us-west-1.wasabisys.com"}, + "eu-central": {"name": "eu-central", "host": "s3.eu-central-1.wasabisys.com"}, + "": {"name": "Virginia"}}}, + {"name": "My Private Storage", + "host": "my-private-storage-address:80"} + ], + "encryptUploads": true +} +``` + +## Enabling the addon + +### Enable on RDM +1. On RDM, enable S3 Compatible Storage (SigV4) as a provider +2. Scroll down to Configure Add-ons +3. Choose desired storage service +4. Connect your account and enter your ID and secret +5. Select a bucket to work from, or create a new one. diff --git a/addons/s3compatsigv4/__init__.py b/addons/s3compatsigv4/__init__.py new file mode 100644 index 00000000000..765a0235a95 --- /dev/null +++ b/addons/s3compatsigv4/__init__.py @@ -0,0 +1 @@ +default_app_config = 'addons.s3compatsigv4.apps.S3CompatSigV4AddonAppConfig' diff --git a/addons/s3compatsigv4/apps.py b/addons/s3compatsigv4/apps.py new file mode 100644 index 00000000000..eddfb64a76b --- /dev/null +++ b/addons/s3compatsigv4/apps.py @@ -0,0 +1,68 @@ +import os +from addons.base.apps import BaseAddonAppConfig, generic_root_folder +from addons.s3compatsigv4.settings import MAX_UPLOAD_SIZE + +s3compatsigv4_root_folder = generic_root_folder('s3compatsigv4') + +HERE = os.path.dirname(os.path.abspath(__file__)) +TEMPLATE_PATH = os.path.join( + HERE, + 'templates' +) + +class S3CompatSigV4AddonAppConfig(BaseAddonAppConfig): + + name = 'addons.s3compatsigv4' + label = 'addons_s3compatsigv4' + owners = ['user', 'node'] + configs = ['accounts', 'node'] + categories = ['storage'] + has_hgrid_files = True + max_file_size = MAX_UPLOAD_SIZE + 
node_settings_template = os.path.join(TEMPLATE_PATH, 's3compatsigv4_node_settings.mako') + user_settings_template = os.path.join(TEMPLATE_PATH, 's3compatsigv4_user_settings.mako') + + @property + def full_name(self): + return 'S3 Compatible Storage (SigV4)' + + @property + def short_name(self): + return 's3compatsigv4' + + @property + def get_hgrid_data(self): + return s3compatsigv4_root_folder + + BUCKET_LINKED = 's3compatsigv4_bucket_linked' + BUCKET_UNLINKED = 's3compatsigv4_bucket_unlinked' + FILE_ADDED = 's3compatsigv4_file_added' + FILE_REMOVED = 's3compatsigv4_file_removed' + FILE_UPDATED = 's3compatsigv4_file_updated' + FOLDER_CREATED = 's3compatsigv4_folder_created' + NODE_AUTHORIZED = 's3compatsigv4_node_authorized' + NODE_DEAUTHORIZED = 's3compatsigv4_node_deauthorized' + NODE_DEAUTHORIZED_NO_USER = 's3compatsigv4_node_deauthorized_no_user' + + actions = (BUCKET_LINKED, + BUCKET_UNLINKED, + FILE_ADDED, + FILE_REMOVED, + FILE_UPDATED, + FOLDER_CREATED, + NODE_AUTHORIZED, + NODE_DEAUTHORIZED, + NODE_DEAUTHORIZED_NO_USER) + + @property + def routes(self): + from . 
import routes + return [routes.api_routes] + + @property + def user_settings(self): + return self.get_model('UserSettings') + + @property + def node_settings(self): + return self.get_model('NodeSettings') diff --git a/addons/s3compatsigv4/migrations/0001_initial.py b/addons/s3compatsigv4/migrations/0001_initial.py new file mode 100644 index 00000000000..d6986eabaed --- /dev/null +++ b/addons/s3compatsigv4/migrations/0001_initial.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.28 on 2026-01-12 14:02 +from __future__ import unicode_literals + +import addons.base.models +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import django_extensions.db.fields +import osf.models.base +import osf.utils.datetime_aware_jsonfield +import osf.utils.fields + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('osf', '0261_auto_20260112_1402'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='NodeSettings', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')), + ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')), + ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)), + ('is_deleted', models.BooleanField(default=False)), + ('deleted', osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True)), + ('folder_id', models.TextField(blank=True, null=True)), + ('folder_name', models.TextField(blank=True, null=True)), + ('folder_location', models.TextField(blank=True, null=True)), + ('encrypt_uploads', models.BooleanField(default=True)), + ('external_account', models.ForeignKey(blank=True, null=True, 
on_delete=django.db.models.deletion.CASCADE, related_name='addons_s3compatsigv4_node_settings', to='osf.ExternalAccount')), + ('owner', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='addons_s3compatsigv4_node_settings', to='osf.AbstractNode')), + ], + options={ + 'abstract': False, + }, + bases=(models.Model, osf.models.base.QuerySetExplainMixin, addons.base.models.BaseStorageAddon), + ), + migrations.CreateModel( + name='UserSettings', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')), + ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')), + ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)), + ('is_deleted', models.BooleanField(default=False)), + ('deleted', osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True)), + ('oauth_grants', osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONField(blank=True, default=dict, encoder=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONEncoder)), + ('owner', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='addons_s3compatsigv4_user_settings', to=settings.AUTH_USER_MODEL)), + ], + options={ + 'abstract': False, + }, + bases=(models.Model, osf.models.base.QuerySetExplainMixin), + ), + migrations.AddField( + model_name='nodesettings', + name='user_settings', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='addons_s3compatsigv4.UserSettings'), + ), + ] diff --git a/addons/s3compatsigv4/migrations/__init__.py b/addons/s3compatsigv4/migrations/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/addons/s3compatsigv4/models.py b/addons/s3compatsigv4/models.py new 
file mode 100644 index 00000000000..2f129437820 --- /dev/null +++ b/addons/s3compatsigv4/models.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- + +from addons.base.models import (BaseOAuthNodeSettings, BaseOAuthUserSettings, + BaseStorageAddon) +from django.db import models +from framework.auth.core import Auth +from osf.models.files import File, Folder, BaseFileNode +from addons.base import exceptions +from addons.s3compatsigv4.provider import S3CompatSigV4Provider +from addons.s3compatsigv4.serializer import S3CompatSigV4Serializer +from addons.s3compatsigv4.settings import ENCRYPT_UPLOADS_DEFAULT +from addons.s3compatsigv4.utils import (bucket_exists, + get_bucket_location_or_error, + get_bucket_names, + find_service_by_host) + +class S3CompatSigV4FileNode(BaseFileNode): + _provider = 's3compatsigv4' + + +class S3CompatSigV4Folder(S3CompatSigV4FileNode, Folder): + pass + + +class S3CompatSigV4File(S3CompatSigV4FileNode, File): + version_identifier = 'version' + + +class UserSettings(BaseOAuthUserSettings): + oauth_provider = S3CompatSigV4Provider + serializer = S3CompatSigV4Serializer + + +class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon): + oauth_provider = S3CompatSigV4Provider + serializer = S3CompatSigV4Serializer + + folder_id = models.TextField(blank=True, null=True) + folder_name = models.TextField(blank=True, null=True) + folder_location = models.TextField(blank=True, null=True) + encrypt_uploads = models.BooleanField(default=ENCRYPT_UPLOADS_DEFAULT) + user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE) + + @property + def folder_path(self): + return self.folder_name + + @property + def display_name(self): + return u'{0}: {1}'.format(self.config.full_name, self.folder_id) + + def set_folder(self, folder_id, auth): + if not bucket_exists(self.external_account.provider_id.split('\t')[0], + self.external_account.oauth_key, + self.external_account.oauth_secret, folder_id): + error_message = ('We are having 
trouble connecting to that bucket. ' + 'Try a different one.') + raise exceptions.InvalidFolderError(error_message) + + self.folder_id = str(folder_id) + host = self.external_account.provider_id.split('\t')[0] + + bucket_location = get_bucket_location_or_error( + host, + self.external_account.oauth_key, + self.external_account.oauth_secret, + folder_id + ) + self.folder_location = bucket_location + try: + service = find_service_by_host(host) + bucket_location = service['bucketLocations'][bucket_location]['name'] + except KeyError: + # Unlisted location, Default to the key. + pass + if bucket_location is None or bucket_location == '': + bucket_location = 'Default' + + self.folder_name = '{} ({})'.format(folder_id, bucket_location) + self.encrypt_uploads = service.get('serverSideEncryption', True) + self.save() + + self.nodelogger.log(action='bucket_linked', extra={'bucket': str(folder_id)}, save=True) + + def get_folders(self, **kwargs): + # This really gets only buckets, not subfolders, + # as that's all we want to be linkable on a node. 
+ try: + buckets = get_bucket_names(self) + except Exception: + raise exceptions.InvalidAuthError() + + return [ + { + 'addon': 's3compatsigv4', + 'kind': 'folder', + 'id': bucket, + 'name': bucket, + 'path': bucket, + 'urls': { + 'folders': '' + } + } + for bucket in buckets + ] + + @property + def complete(self): + return self.has_auth and self.folder_id is not None + + def authorize(self, user_settings, save=False): + self.user_settings = user_settings + self.nodelogger.log(action='node_authorized', save=save) + + def clear_settings(self): + self.folder_id = None + self.folder_name = None + self.folder_location = None + + def deauthorize(self, auth=None, log=True): + """Remove user authorization from this node and log the event.""" + self.clear_settings() + self.clear_auth() # Also performs a save + + if log: + self.nodelogger.log(action='node_deauthorized', save=True) + + def delete(self, save=True): + self.deauthorize(log=False) + super(NodeSettings, self).delete(save=save) + + def serialize_waterbutler_credentials(self): + if not self.has_auth: + raise exceptions.AddonError('Cannot serialize credentials for S3 Compatible Storage (SigV4) addon') + host = self.external_account.provider_id.split('\t')[0] + if self.folder_location is not None and len(self.folder_location) > 0: + try: + service = find_service_by_host(host) + host = service['bucketLocations'][self.folder_location]['host'] + except KeyError: + # Unlisted location, use default host + pass + return { + 'host': host, + 'access_key': self.external_account.oauth_key, + 'secret_key': self.external_account.oauth_secret, + } + + def serialize_waterbutler_settings(self): + if not self.folder_id: + raise exceptions.AddonError('Cannot serialize settings for S3 Compatible Storage (SigV4) addon') + return { + 'bucket': self.folder_id, + 'region': self.folder_location, + 'encrypt_uploads': self.encrypt_uploads + } + + def create_waterbutler_log(self, auth, action, metadata): + url = 
self.owner.web_url_for('addon_view_or_download_file', path=metadata['path'], provider='s3compatsigv4') + + self.owner.add_log( + 's3compatsigv4_{0}'.format(action), + auth=auth, + params={ + 'project': self.owner.parent_id, + 'node': self.owner._id, + 'path': metadata['materialized'], + 'bucket': self.folder_id, + 'urls': { + 'view': url, + 'download': url + '?action=download' + } + }, + ) + + def after_delete(self, user): + self.deauthorize(Auth(user=user), log=True) diff --git a/addons/s3compatsigv4/provider.py b/addons/s3compatsigv4/provider.py new file mode 100644 index 00000000000..9e5e01be952 --- /dev/null +++ b/addons/s3compatsigv4/provider.py @@ -0,0 +1,20 @@ +from addons.s3compatsigv4.serializer import S3CompatSigV4Serializer + +class S3CompatSigV4Provider(object): + """An alternative to `ExternalProvider` not tied to OAuth""" + + name = 'S3 Compatible Storage (SigV4)' + short_name = 's3compatsigv4' + serializer = S3CompatSigV4Serializer + + def __init__(self, account=None): + super(S3CompatSigV4Provider, self).__init__() + + # provide an unauthenticated session by default + self.account = account + + def __repr__(self): + return '<{name}: {status}>'.format( + name=self.__class__.__name__, + status=self.account.provider_id if self.account else 'anonymous' + ) diff --git a/addons/s3compatsigv4/requirements.txt b/addons/s3compatsigv4/requirements.txt new file mode 100644 index 00000000000..7b1e3f456fa --- /dev/null +++ b/addons/s3compatsigv4/requirements.txt @@ -0,0 +1 @@ +boto3==1.4.7 \ No newline at end of file diff --git a/addons/s3compatsigv4/routes.py b/addons/s3compatsigv4/routes.py new file mode 100644 index 00000000000..67be5627446 --- /dev/null +++ b/addons/s3compatsigv4/routes.py @@ -0,0 +1,89 @@ +from framework.routing import Rule, json_renderer + +from addons.s3compatsigv4 import views + + +api_routes = { + 'rules': [ + Rule( + [ + '/settings/s3compatsigv4/accounts/', + ], + 'post', + views.s3compatsigv4_add_user_account, + json_renderer, + ), + 
Rule( + [ + '/settings/s3compatsigv4/accounts/', + ], + 'get', + views.s3compatsigv4_account_list, + json_renderer, + ), + Rule( + [ + '/project//s3compatsigv4/settings/', + '/project//node//s3compatsigv4/settings/', + ], + 'put', + views.s3compatsigv4_set_config, + json_renderer, + ), + Rule( + [ + '/project//s3compatsigv4/settings/', + '/project//node//s3compatsigv4/settings/', + ], + 'get', + views.s3compatsigv4_get_config, + json_renderer, + ), + Rule( + [ + '/project//s3compatsigv4/user-auth/', + '/project//node//s3compatsigv4/user-auth/', + ], + 'put', + views.s3compatsigv4_import_auth, + json_renderer, + ), + Rule( + [ + '/project//s3compatsigv4/user-auth/', + '/project//node//s3compatsigv4/user-auth/', + ], + 'delete', + views.s3compatsigv4_deauthorize_node, + json_renderer, + ), + Rule( + [ + '/project//s3compatsigv4/buckets/', + '/project//node//s3compatsigv4/buckets/', + ], + 'get', + views.s3compatsigv4_folder_list, + json_renderer, + ), + Rule( + [ + '/project//s3compatsigv4/attached/', + '/project//node//s3compatsigv4/attached/', + ], + 'get', + views.s3compatsigv4_attached_service, + json_renderer, + ), + Rule( + [ + '/project//s3compatsigv4/newbucket/', + '/project//node//s3compatsigv4/newbucket/', + ], + 'post', + views.s3compatsigv4_create_bucket, + json_renderer + ), + ], + 'prefix': '/api/v1', +} diff --git a/addons/s3compatsigv4/serializer.py b/addons/s3compatsigv4/serializer.py new file mode 100644 index 00000000000..d6bb0c18869 --- /dev/null +++ b/addons/s3compatsigv4/serializer.py @@ -0,0 +1,45 @@ +from website.util import web_url_for +from addons.base.serializer import StorageAddonSerializer +from addons.s3compatsigv4 import utils + +class S3CompatSigV4Serializer(StorageAddonSerializer): + @property + def addon_short_name(self): + return 's3compatsigv4' + + REQUIRED_URLS = [] + + @property + def addon_serialized_urls(self): + node = self.node_settings.owner + user_settings = self.node_settings.user_settings or self.user_settings + + result 
= { + 'accounts': node.api_url_for('s3compatsigv4_account_list'), + 'createBucket': node.api_url_for('s3compatsigv4_create_bucket'), + 'importAuth': node.api_url_for('s3compatsigv4_import_auth'), + 'create': node.api_url_for('s3compatsigv4_add_user_account'), + 'deauthorize': node.api_url_for('s3compatsigv4_deauthorize_node'), + 'folders': node.api_url_for('s3compatsigv4_folder_list'), + 'config': node.api_url_for('s3compatsigv4_set_config'), + 'files': node.web_url_for('collect_file_trees'), + 'attachedService': node.api_url_for('s3compatsigv4_attached_service'), + } + if user_settings: + result['owner'] = web_url_for('profile_view_id', + uid=user_settings.owner._id) + return result + + def serialized_folder(self, node_settings): + return { + 'path': node_settings.folder_id, + 'name': node_settings.folder_name + } + + def credentials_are_valid(self, user_settings, client=None): + if user_settings: + for account in user_settings.external_accounts.all(): + if utils.can_list(account.provider_id.split('\t')[0], + account.oauth_key, account.oauth_secret): + return True + return False diff --git a/addons/s3compatsigv4/settings/__init__.py b/addons/s3compatsigv4/settings/__init__.py new file mode 100644 index 00000000000..4d3fcfa3d4f --- /dev/null +++ b/addons/s3compatsigv4/settings/__init__.py @@ -0,0 +1,8 @@ +import logging +from .defaults import * # noqa + +logger = logging.getLogger(__name__) +try: + from .local import * # noqa +except ImportError: + logger.warn('No local.py settings file found') diff --git a/addons/s3compatsigv4/settings/defaults.py b/addons/s3compatsigv4/settings/defaults.py new file mode 100644 index 00000000000..63e9df1901f --- /dev/null +++ b/addons/s3compatsigv4/settings/defaults.py @@ -0,0 +1,53 @@ +import json +import os + +from website.settings import parent_dir + +HERE = os.path.dirname(os.path.abspath(__file__)) +STATIC_PATH = os.path.join(parent_dir(HERE), 'static') + +MAX_RENDER_SIZE = (1024 ** 2) * 3 + +# Max file size permitted by 
frontend in megabytes +MAX_UPLOAD_SIZE = 50 * 1024 # 50 GB + +ALLOWED_ORIGIN = '*' + +ENCRYPT_UPLOADS_DEFAULT = True +# Load S3 settings used in both front and back end +with open(os.path.join(STATIC_PATH, 'settings.json')) as fp: + settings = json.load(fp) + AVAILABLE_SERVICES = settings.get('availableServices', []) + ENCRYPT_UPLOADS_DEFAULT = settings.get('encryptUploads', True) + +OSF_USER = 'osf-user{0}' +OSF_USER_POLICY_NAME = 'osf-user-policy' +OSF_USER_POLICY = json.dumps( + { + 'Version': '2012-10-17', + 'Statement': [ + { + 'Sid': 'Stmt1392138408000', + 'Effect': 'Allow', + 'Action': [ + 's3:*' + ], + 'Resource': [ + '*' + ] + }, + { + 'Sid': 'Stmt1392138440000', + 'Effect': 'Allow', + 'Action': [ + 'iam:DeleteAccessKey', + 'iam:DeleteUser', + 'iam:DeleteUserPolicy' + ], + 'Resource': [ + '*' + ] + } + ] + } +) diff --git a/addons/s3compatsigv4/static/comicon.png b/addons/s3compatsigv4/static/comicon.png new file mode 100644 index 0000000000000000000000000000000000000000..7acb4dcdd2318771002d42e4c2143c4371220bab GIT binary patch literal 1673 zcmeAS@N?(olHy`uVBq!ia0vp^0wB!61|;P_|4#%`Ea{HEjtmSN`?>!lvI6;>1s;*b z3=G`DAk4@xYmNj^jY?)nL`j6Nk5zJhu3lnFep0GlMQ#C5H3Nf9g%yyQn_7~nP?4LH zS8P>bs{~eI1!RMS^_3LBN=mYAl_Got6rA&mQWZ?~O!N$t?6?#Z6l{u8(yW49+@RWl zJX@uVl9B=|ef{$Ca=mh6z5JqdeM3u2OML?)eIp}XpbFjM%Dj@q3f;V7Wr!g#b6ir3 zlZ!G7N;32F6hP)CCgqow*eWT3EP?}wJ4-Ut5H{r%L%jv`pgu@O-%!s$ADgz+icB2Z zKr%SBr6j|BRZv=#1NKu&vVLk#YHn&?Nik5LAy(^vVGGxY;>e1`0*GbcK!o_s2IO+9 zpw#00oKjE_gyvGs1{@~boCJkYwXNnb|4F) zYw$111o|`)x zocyBTg2d!hki)=Qkc7}xgRP0gWet)9lE#!|tK!n6tkmQZq}T$+8940+rxxlX3ESvn zRc!-I30A)ODVb@NE{P?nc18w-|45X9fnw-<~dx zAr^vXgKYgn97Sx)@9Df@-|eLsuPBn0v`|nU(eiL8dbgb9A$Ct{s*A#djC)*mP!|>}#)j`KAriKNh=8t-W~lkJ&-dBft7$XQUR} z1wUstd7^u1Qu>|XS`Fjnsk(=P;@(WHniRM0sZ~?Mt0YP1>E8^0_N-4onbc!>_l<|X zR^ch;jC1-YpZs~ll4ZG8J!6;knw^~P{66s@5BKnB0@-yekLv1#{1h*Zu+j^MiVA#9O$9-omo|68iVCqCmqv(ug z-oTU2vKeeV@3xj))K+em@i(7%RMk(-kHP!oEwww5hyHu$3fFe-icxFs_ 1 && !parent.isAddonRoot) { + name = parent.name + '/' + 
name; + parent = self.getByID(parent.parentID); + } + file.destination = name; + file.signedUrlFrom = parent.urls.upload; + }, + + uploadSending: function(file, formData, xhr) { + xhr.setRequestHeader('Content-Type', file.type || 'application/octet-stream'); + xhr.setRequestHeader('x-amz-acl', 'private'); + }, + + uploadSuccess: function(file, row) { + var self = this; + var parent = this.getByID(row.parentID); + row.urls = { + 'delete': parent.nodeApiUrl + 's3compatsigv4/' + file.destination + '/', + 'download': parent.nodeUrl + 's3compatsigv4/' + file.destination + '/download/', + 'view': parent.nodeUrl + 's3compatsigv4/' + file.destination + '/' + }; + row.permissions = parent.permissions; + this.updateItem(row); + var updated = Rubeus.Utils.itemUpdated(row, parent); + if (updated) { + self.changeStatus(row, Rubeus.Status.UPDATED); + self.delayRemoveRow(row); + } else { + self.changeStatus(row, Rubeus.Status.UPLOAD_SUCCESS, null, 2000, + function(row) { + self.showButtons(row); + }); + } + } + }; diff --git a/addons/s3compatsigv4/static/s3compatsigv4AnonymousLogActionList.json b/addons/s3compatsigv4/static/s3compatsigv4AnonymousLogActionList.json new file mode 100644 index 00000000000..d510c19eb0b --- /dev/null +++ b/addons/s3compatsigv4/static/s3compatsigv4AnonymousLogActionList.json @@ -0,0 +1,11 @@ +{ + "s3compatsigv4_bucket_linked" : "A user linked an S3 Compatible Storage (SigV4) bucket to a project", + "s3compatsigv4_bucket_unlinked" : "A user unselected an S3 Compatible Storage (SigV4) bucket in a project", + "s3compatsigv4_file_added" : "A user added a file to an S3 Compatible Storage (SigV4) bucket in a project", + "s3compatsigv4_file_removed" : "A user removed a file in an S3 Compatible Storage (SigV4) bucket in a project", + "s3compatsigv4_file_updated" : "A user updated a file in an S3 Compatible Storage (SigV4) bucket in a project", + "s3compatsigv4_folder_created" : "A user created a folder in an S3 Compatible Storage (SigV4) in a project", + 
"s3compatsigv4_node_authorized" : "A user authorized the S3 Compatible Storage (SigV4) addon for a project", + "s3compatsigv4_node_deauthorized" : "A user deauthorized the S3 Compatible Storage (SigV4) addon for a project", + "s3compatsigv4_node_deauthorized_no_user" : "S3 Compatible Storage (SigV4) addon for a project deauthorized" +} diff --git a/addons/s3compatsigv4/static/s3compatsigv4LogActionList.json b/addons/s3compatsigv4/static/s3compatsigv4LogActionList.json new file mode 100644 index 00000000000..ac955705ea2 --- /dev/null +++ b/addons/s3compatsigv4/static/s3compatsigv4LogActionList.json @@ -0,0 +1,11 @@ +{ + "s3compatsigv4_bucket_linked" : "${user} linked the S3 Compatible Storage (SigV4) bucket ${bucket} to ${node}", + "s3compatsigv4_bucket_unlinked" : "${user} unselected the S3 Compatible Storage (SigV4) bucket ${bucket} in ${node}", + "s3compatsigv4_file_added" : "${user} added file ${path} to S3 Compatible Storage (SigV4) bucket ${bucket} in ${node}", + "s3compatsigv4_file_removed" : "${user} removed ${path} in S3 Compatible Storage (SigV4) bucket ${bucket} in ${node}", + "s3compatsigv4_file_updated" : "${user} updated file ${path} in S3 Compatible Storage (SigV4) bucket ${bucket} in ${node}", + "s3compatsigv4_folder_created" : "${user} created folder ${path} in S3 Compatible Storage (SigV4) bucket ${bucket} in ${node}", + "s3compatsigv4_node_authorized" : "${user} authorized the S3 Compatible Storage (SigV4) addon for ${node}", + "s3compatsigv4_node_deauthorized" : "${user} deauthorized the S3 Compatible Storage (SigV4) addon for ${node}", + "s3compatsigv4_node_deauthorized_no_user" : "S3 Compatible Storage (SigV4) addon for ${node} deauthorized" +} diff --git a/addons/s3compatsigv4/static/s3compatsigv4NodeConfig.js b/addons/s3compatsigv4/static/s3compatsigv4NodeConfig.js new file mode 100644 index 00000000000..0e0b76dcf31 --- /dev/null +++ b/addons/s3compatsigv4/static/s3compatsigv4NodeConfig.js @@ -0,0 +1,323 @@ +'use strict'; + +var $ = 
require('jquery'); +var ko = require('knockout'); +var m = require('mithril'); +var bootbox = require('bootbox'); +var Raven = require('raven-js'); +var $osf = require('js/osfHelpers'); +var oop = require('js/oop'); + +var s3compatsigv4Settings = require('json-loader!./settings.json'); + +var OauthAddonFolderPicker = require('js/oauthAddonNodeConfig')._OauthAddonNodeConfigViewModel; + +var s3compatsigv4FolderPickerViewModel = oop.extend(OauthAddonFolderPicker, { + constructor: function(addonName, url, selector, folderPicker, opts, tbOpts) { + var self = this; + // TODO: [OSF-7069] + self.super.super.constructor.call(self, addonName, url, selector, folderPicker, tbOpts); + self.super.construct.call(self, addonName, url, selector, folderPicker, opts, tbOpts); + // Non-OAuth fields + self.availableServices = ko.observableArray(s3compatsigv4Settings['availableServices']); + self.selectedService = ko.observable(s3compatsigv4Settings['availableServices'][0]); + self.accessKey = ko.observable(''); + self.secretKey = ko.observable(''); + // Treebeard config + self.treebeardOptions = $.extend( + {}, + self.treebeardOptions, + { // TreeBeard Options + columnTitles: function() { + return [{ + title: 'Buckets', + width: '75%', + sort: false + }, { + title: 'Select', + width: '25%', + sort: false + }]; + }, + resolveToggle: function(item) { + return ''; + }, + resolveIcon: function(item) { + return m('i.fa.fa-folder-o', ' '); + }, + }, + tbOpts + ); + + // Description about an attached service + self.attachedService = null; + self.nodeHasAuth.subscribe(function(newValue) { + if (newValue && self.urls().length > 0) { + self.fetchAttachedService(self); + } + }); + self.urls.subscribe(function(newValue) { + if (self.nodeHasAuth()) { + self.fetchAttachedService(self); + } + }); + }, + + connectAccount: function() { + var self = this; + if( !self.accessKey() && !self.secretKey() ){ + self.changeMessage('Please enter both an API access key and secret key.', 'text-danger'); + return; 
+ } + + if (!self.accessKey() ){ + self.changeMessage('Please enter an API access key.', 'text-danger'); + return; + } + + if (!self.secretKey() ){ + self.changeMessage('Please enter an API secret key.', 'text-danger'); + return; + } + $osf.block(); + + return $osf.postJSON( + self.urls().create, { + host: self.selectedService()['host'], + secret_key: self.secretKey(), + access_key: self.accessKey() + } + ).done(function(response) { + $osf.unblock(); + self.clearModal(); + $('#s3compatsigv4InputCredentials').modal('hide'); + self.changeMessage('Successfully added S3 Compatible Storage (SigV4) credentials.', 'text-success', null, true); + self.updateFromData(response); + self.importAuth(); + }).fail(function(xhr, status, error) { + $osf.unblock(); + var message = ''; + var response = JSON.parse(xhr.responseText); + if (response && response.message) { + message = response.message; + } + self.changeMessage(message, 'text-danger'); + Raven.captureMessage('Could not add S3 Compatible Storage (SigV4) credentials', { + extra: { + url: self.urls().importAuth, + textStatus: status, + error: error + } + }); + }); + }, + /** + * Tests if the given string is a valid S3 Compatible Storage (SigV4) bucket name. Supports two modes: strict and lax. + * Strict is for bucket creation and follows the guidelines at: + * + * http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html#bucketnamingrules + * + * However, the US East (N. Virginia) region currently permits much laxer naming rules. The S3 + * docs claim this will be changed at some point, but to support our user's already existing + * buckets, we provide the lax mode checking. + * + * Strict checking is the default. 
+ * + * @param {String} bucketName user-provided name of bucket to validate + * @param {Boolean} laxChecking whether to use the more permissive validation + */ + isValidBucketName: function(bucketName, laxChecking) { + if (laxChecking === true) { + return /^[a-zA-Z0-9.\-_]{1,255}$/.test(bucketName); + } + var label = '[a-z0-9]+(?:[a-z0-9\-]*[a-z0-9])?'; + var strictBucketName = new RegExp('^' + label + '(?:\\.' + label + ')*$'); + var isIpAddress = /^[0-9]+(?:\.[0-9]+){3}$/; + return bucketName.length >= 3 && bucketName.length <= 63 && + strictBucketName.test(bucketName) && !isIpAddress.test(bucketName); + }, + + /** Reset all fields from S3 Compatible Storage (SigV4) credentials input modal */ + clearModal: function() { + var self = this; + self.message(''); + self.messageClass('text-info'); + self.selectedService(s3compatsigv4Settings['availableServices'][0]); + self.secretKey(null); + self.accessKey(null); + }, + + createBucket: function(self, bucketName, bucketLocation) { + $osf.block(); + bucketName = bucketName.toLowerCase(); + return $osf.postJSON( + self.urls().createBucket, { + bucket_name: bucketName, + bucket_location: bucketLocation + } + ).done(function(response) { + $osf.unblock(); + self.loadedFolders(false); + self.activatePicker(); + var msg = 'Successfully created bucket "' + $osf.htmlEscape(bucketName) + '". You can now select it from the list.'; + var msgType = 'text-success'; + self.changeMessage(msg, msgType, null, true); + }).fail(function(xhr) { + var resp = JSON.parse(xhr.responseText); + var message = resp.message; + var title = resp.title || 'Problem creating bucket'; + $osf.unblock(); + if (!message) { + message = 'Looks like that name is taken. 
Try another name?'; + } + bootbox.confirm({ + title: $osf.htmlEscape(title), + message: $osf.htmlEscape(message), + callback: function(result) { + if (result) { + self.openCreateBucket(); + } + }, + buttons:{ + confirm:{ + label:'Try again' + } + } + }); + }); + }, + + fetchAttachedService: function(self) { + var url = self.urls().attachedService; + $.ajax({ + url: url, + type: 'GET', + dataType: 'json' + }).done(function (data) { + var targetServices = self.availableServices().filter(function(service) { + return service.host == data.host; + }); + self.attachedService = targetServices[0]; + }).fail(function(xhr, status, error) { + Raven.captureMessage('Error while retrieving addon info', { + extra: { + url: url, + status: status, + error: error + } + }); + }); + }, + + openCreateBucket: function() { + var self = this; + + // Generates html options for key-value pairs in BUCKET_LOCATION_MAP + function generateBucketOptions() { + if (self.attachedService == null || (! self.attachedService.bucketLocations)) { + return ''; + } + var options = ''; + var locations = self.attachedService.bucketLocations; + var names = new Array(); + for (var location in locations) { + if (locations.hasOwnProperty(location)) { + var name = locations[location]['name']; + if (names.indexOf(name) < 0) { + options = options + ['', '\n'].join(''); + names.push(name); + } + } + } + return options; + } + + function generateBucketSelector() { + return ''; + } + + bootbox.dialog({ + title: 'Create a new bucket', + message: + '
' + + '
' + + '
' + + '
' + + ' ' + + '
' + + ' ' + + '
' + + '' + + '
'+ + '
' + + '
' + + '
' + + ' ' + + '
' + + generateBucketSelector() + + '
' + + '
' + + '
' + + '
' + + '
', + buttons: { + cancel: { + label: 'Cancel', + className: 'btn-default' + }, + confirm: { + label: 'Create', + className: 'btn-success', + callback: function () { + var bucketName = $('#bucketName').val(); + var bucketLocation = $('#bucketLocation').val(); + + if (!bucketName) { + var errorMessage = $('#bucketModalErrorMessage'); + errorMessage.text('Bucket name cannot be empty'); + errorMessage[0].classList.add('text-danger'); + return false; + } else if (!self.isValidBucketName(bucketName, false)) { + bootbox.confirm({ + title: 'Invalid bucket name', + message: 'S3 Compatible Storage (SigV4) buckets can contain lowercase letters, numbers, and hyphens separated by' + + ' periods. Please try another name.', + callback: function (result) { + if (result) { + self.openCreateBucket(); + } + }, + buttons: { + confirm: { + label: 'Try again' + } + } + }); + } else { + self.createBucket(self, bucketName, bucketLocation); + } + } + } + } + }); + } +}); + +// Public API +function s3compatsigv4NodeConfig(addonName, selector, url, folderPicker, opts, tbOpts) { + var self = this; + self.url = url; + self.folderPicker = folderPicker; + opts = opts || {}; + tbOpts = tbOpts || {}; + self.viewModel = new s3compatsigv4FolderPickerViewModel(addonName, url, selector, folderPicker, opts, tbOpts); + self.viewModel.updateFromData(); + $osf.applyBindings(self.viewModel, selector); +} + +module.exports = { + s3compatsigv4NodeConfig: s3compatsigv4NodeConfig, + _s3compatsigv4NodeConfigViewModel: s3compatsigv4FolderPickerViewModel +}; diff --git a/addons/s3compatsigv4/static/s3compatsigv4UserConfig.js b/addons/s3compatsigv4/static/s3compatsigv4UserConfig.js new file mode 100644 index 00000000000..0d5a218b8e6 --- /dev/null +++ b/addons/s3compatsigv4/static/s3compatsigv4UserConfig.js @@ -0,0 +1,178 @@ +/** +* Module that controls the S3 Compatible Storage (SigV4) user settings. Includes Knockout view-model +* for syncing data. 
+*/ + +var ko = require('knockout'); +var $ = require('jquery'); +var Raven = require('raven-js'); +var bootbox = require('bootbox'); +require('js/osfToggleHeight'); + +var language = require('js/osfLanguage').Addons.s3compatsigv4; +var osfHelpers = require('js/osfHelpers'); +var addonSettings = require('js/addonSettings'); +var ChangeMessageMixin = require('js/changeMessage'); + +var s3compatsigv4Settings = require('json-loader!./settings.json'); + +var ExternalAccount = addonSettings.ExternalAccount; + +var $modal = $('#s3compatsigv4InputCredentials'); + + +function ViewModel(url) { + var self = this; + + self.properName = 'S3 Compatible Storage (SigV4)'; + self.availableServices = ko.observableArray(s3compatsigv4Settings['availableServices']); + self.selectedService = ko.observable(s3compatsigv4Settings['availableServices'][0]); + self.accessKey = ko.observable(); + self.secretKey = ko.observable(); + self.account_url = '/api/v1/settings/s3compatsigv4/accounts/'; + self.accounts = ko.observableArray(); + + ChangeMessageMixin.call(self); + + /** Reset all fields from S3 Compatible Storage (SigV4) credentials input modal */ + self.clearModal = function() { + self.message(''); + self.messageClass('text-info'); + self.selectedService(s3compatsigv4Settings['availableServices'][0]); + self.accessKey(null); + self.secretKey(null); + }; + /** Send POST request to authorize S3 Compatible Storage (SigV4) */ + self.connectAccount = function() { + // Selection should not be empty + if( !self.accessKey() && !self.secretKey() ){ + self.changeMessage('Please enter both an API access key and secret key.', 'text-danger'); + return; + } + + if (!self.accessKey() ){ + self.changeMessage('Please enter an API access key.', 'text-danger'); + return; + } + + if (!self.secretKey() ){ + self.changeMessage('Please enter an API secret key.', 'text-danger'); + return; + } + return osfHelpers.postJSON( + self.account_url, + ko.toJS({ + host: self.selectedService()['host'], + access_key: 
self.accessKey, + secret_key: self.secretKey, + }) + ).done(function() { + self.clearModal(); + $modal.modal('hide'); + self.updateAccounts(); + + }).fail(function(xhr, textStatus, error) { + var errorMessage = (xhr.status === 400 && xhr.responseJSON.message !== undefined) ? xhr.responseJSON.message : language.authError; + self.changeMessage(errorMessage, 'text-danger'); + Raven.captureMessage('Could not authenticate with S3 Compatible Storage (SigV4)', { + extra: { + url: self.account_url, + textStatus: textStatus, + error: error + } + }); + }); + }; + + self.updateAccounts = function() { + return $.ajax({ + url: url, + type: 'GET', + dataType: 'json' + }).done(function (data) { + self.accounts($.map(data.accounts, function(account) { + var externalAccount = new ExternalAccount(account); + externalAccount.accessKey = account.oauth_key; + externalAccount.secretKey = account.oauth_secret; + return externalAccount; + })); + $('#s3compatsigv4-header').osfToggleHeight({height: 160}); + }).fail(function(xhr, status, error) { + self.changeMessage(language.userSettingsError, 'text-danger'); + Raven.captureMessage('Error while updating addon account', { + extra: { + url: url, + status: status, + error: error + } + }); + }); + }; + + self.askDisconnect = function(account) { + var self = this; + bootbox.confirm({ + title: 'Disconnect S3 Compatible Storage (SigV4) Account?', + message: '

' + + 'Are you sure you want to disconnect the S3 Compatible Storage (SigV4) account ' + + osfHelpers.htmlEscape(account.name) + '? This will revoke access to S3 Compatible Storage (SigV4) for all projects associated with this account.' + + '

', + callback: function (confirm) { + if (confirm) { + self.disconnectAccount(account); + } + }, + buttons:{ + confirm:{ + label:'Disconnect', + className:'btn-danger' + } + } + }); + }; + + self.disconnectAccount = function(account) { + var self = this; + var url = '/api/v1/oauth/accounts/' + account.id + '/'; + var request = $.ajax({ + url: url, + type: 'DELETE' + }); + request.done(function(data) { + self.updateAccounts(); + }); + request.fail(function(xhr, status, error) { + Raven.captureMessage('Error while removing addon authorization for ' + account.id, { + extra: { + url: url, + status: status, + error: error + } + }); + }); + return request; + }; + + self.selectionChanged = function() { + self.changeMessage('',''); + }; + + self.updateAccounts(); +} + +$.extend(ViewModel.prototype, ChangeMessageMixin.prototype); + +function s3compatsigv4UserConfig(selector, url) { + // Initialization code + var self = this; + self.selector = selector; + self.url = url; + // On success, instantiate and bind the ViewModel + self.viewModel = new ViewModel(url); + osfHelpers.applyBindings(self.viewModel, self.selector); +} + +module.exports = { + s3compatsigv4ViewModel: ViewModel, + s3compatsigv4UserConfig: s3compatsigv4UserConfig +}; diff --git a/addons/s3compatsigv4/static/settings.json b/addons/s3compatsigv4/static/settings.json new file mode 100644 index 00000000000..ede46617bde --- /dev/null +++ b/addons/s3compatsigv4/static/settings.json @@ -0,0 +1,82 @@ +{ + "availableServices": [{"name": "IDCF Cloud", + "host": "ds.jp-east.idcfcloud.com", + "bucketLocations": {"us-east-1": {"name": "Fukushima"}}}, + {"name": "SAKURA Cloud", + "host": "s3.isk01.sakurastorage.jp:443"}, + {"name": "Wasabi", + "host": "s3.wasabisys.com", + "bucketLocations": { + "us-east-1": {"name": "Northern Virginia 1", "host": "s3.wasabisys.com"}, + "us-east-2": {"name": "Northern Virginia 2", "host": "s3.us-east-2.wasabisys.com"}, + "us-west-1": {"name": "Oregon", "host": 
"s3.us-west-1.wasabisys.com"}, + "us-central-1": {"name": "Plano, Texas", "host": "s3.us-central-1.wasabisys.com"}, + "ca-central-1": {"name": "Toronto, Canada", "host": "s3.ca-central-1.wasabisys.com"}, + "ap-northeast-1": {"name": "Tokyo", "host": "s3.ap-northeast-1.wasabisys.com"}, + "ap-northeast-2": {"name": "Osaka", "host": "s3.ap-northeast-2.wasabisys.com"}, + "ap-southeast-1": {"name": "Singapore", "host": "s3.ap-southeast-1.wasabisys.com"}, + "ap-southeast-2": {"name": "Sydney", "host": "s3.ap-southeast-2.wasabisys.com"}, + "eu-central-1": {"name": "Amsterdam", "host": "s3.eu-central-1.wasabisys.com"}, + "eu-central-2": {"name": "Frankfurt", "host": "s3.eu-central-2.wasabisys.com"}, + "eu-west-1": {"name": "London 1", "host": "s3.eu-west-1.wasabisys.com"}, + "eu-west-2": {"name": "Paris", "host": "s3.eu-west-2.wasabisys.com"}, + "eu-west-3": {"name": "London 2", "host": "s3.eu-west-3.wasabisys.com"}, + "eu-south-1": {"name": "Milan", "host": "s3.eu-south-1.wasabisys.com"}, + "": {"name": "Northern Virginia 1"}}}, + {"name": "Kanazawa University Cloud(HyperStore)", + "host": "s3-kakuma.rdm.kanazawa-u.ac.jp", + "serverSideEncryption": false}, + {"name": "Rakuten Cloud", + "host": "s3.jp3.objectstorage.rakuten-cloud.net"}, + {"name": "mdx S3DS", + "host": "s3ds.mdx.jp", + "bucketLocations": { + "us-east-1": {"name": "us-east-1", "host": "s3ds.mdx.jp"}, + "": {"name": "us-east-1"}}, + "serverSideEncryption": false}, + {"name": "RIKEN HSS S3 Service", + "host": "hssgws3.riken.jp:443", + "bucketLocations": { + "sailingship": {"name": "sailingship", "host": "hssgws3.riken.jp:443"}, + "": {"name": "sailingship"}}, + "serverSideEncryption": false}, + {"name": "SPring-8 Proto S3 Service", + "host": "sp8-s3gw.spring8.or.jp:443", + "bucketLocations": { + "us-east-1": {"name": "us-east-1", "host": "sp8-s3gw.spring8.or.jp:443"}, + "": {"name": "us-east-1"}}, + "serverSideEncryption": false}, + {"name": "ONION-object @ D3C The University of Osaka", + "host": 
"s3-osakau.oniongw.hpc.cmc.osaka-u.ac.jp:443", + "bucketLocations": { + "osakau": {"name": "osakau", "host": "s3-osakau.oniongw.hpc.cmc.osaka-u.ac.jp:443"}, + "": {"name": "osakau"}}, + "serverSideEncryption": false}, + {"name": "MinIO@Kanazawa University", + "host": "minio.rdm.kanazawa-u.ac.jp:443", + "bucketLocations": { + "ku-kakuma-1": {"name": "ku-kakuma-1", "host": "minio.rdm.kanazawa-u.ac.jp:443"}, + "": {"name": "ku-kakuma-1"}}, + "serverSideEncryption": false}, + {"name": "MinIO-Dev@Gunma University", + "host": "minio-dev.media.gunma-u.ac.jp:443"}, + {"name": "Kagoshima University RDM Storage (Cloudian)", + "host": "s3-rdms.cc.kagoshima-u.ac.jp:443"}, + {"name": "Hirosaki University S3", + "host": "file-oa-s3.oa.hirosaki-u.ac.jp:443", + "serverSideEncryption": false}, + {"name": "University of the Ryukyus ActiveScale", + "host": "grdm.lab.u-ryukyu.ac.jp:443"}, + {"name": "RDM Storage @ Toyohashi University of Technology", + "host": "ys3ds.edu.tut.ac.jp:443", + "serverSideEncryption": false}, + {"name": "Kyoto University, IIMC - Object Storage (StorageGRID)", + "host": "s3.rdm.kyoto-u.ac.jp:443"}, + {"name": "IZUMI - Tohoku University Research Data Lake", + "host": "s3.rdx.tohoku.ac.jp:443", + "serverSideEncryption": false}, + {"name": "KEK S3 Service", + "host": "oas.kek.jp:443"} + ], + "encryptUploads": true +} diff --git a/addons/s3compatsigv4/static/user-cfg.js b/addons/s3compatsigv4/static/user-cfg.js new file mode 100644 index 00000000000..c02639228cb --- /dev/null +++ b/addons/s3compatsigv4/static/user-cfg.js @@ -0,0 +1,6 @@ +var s3compatsigv4UserConfig = require('./s3compatsigv4UserConfig.js').s3compatsigv4UserConfig; + +// Endpoint for S3 Compatible Storage (SigV4) user settings +var url = '/api/v1/settings/s3compatsigv4/accounts/'; + +var s3compatsigv4UserConfig = new s3compatsigv4UserConfig('#s3compatsigv4AddonScope', url); diff --git a/addons/s3compatsigv4/templates/s3compatsigv4_credentials_modal.mako 
b/addons/s3compatsigv4/templates/s3compatsigv4_credentials_modal.mako new file mode 100644 index 00000000000..2110c1f4102 --- /dev/null +++ b/addons/s3compatsigv4/templates/s3compatsigv4_credentials_modal.mako @@ -0,0 +1,51 @@ + diff --git a/addons/s3compatsigv4/templates/s3compatsigv4_node_settings.mako b/addons/s3compatsigv4/templates/s3compatsigv4_node_settings.mako new file mode 100644 index 00000000000..e53e179a116 --- /dev/null +++ b/addons/s3compatsigv4/templates/s3compatsigv4_node_settings.mako @@ -0,0 +1,93 @@ +
+ + + <%include file="s3compatsigv4_credentials_modal.mako"/> + +

+ + ${addon_full_name} + + + authorized by + % if not is_registration: + ${_("Disconnect Account")} + % endif + + + + + + ${_("Import Account from Profile")} + + + + + +

+ ${_("Loading ...")} +

+
+ + + + + ${_("Connect Account")} + + +
+

+ +
+
+
+

+ ${_("Current Bucket:")} + + + + + ${_("None")} + +

+ +
+ + +
+ +
+

+ Loading buckets...

+
+
+
+ +
+
+
+
+ ${_("Connect %(folderName)s?") % dict(folderName='') | n} +
+
+
+ + +
+
+
+
+
+ +
+ +
+ +
+

+
+
diff --git a/addons/s3compatsigv4/templates/s3compatsigv4_user_settings.mako b/addons/s3compatsigv4/templates/s3compatsigv4_user_settings.mako new file mode 100644 index 00000000000..d918f83316c --- /dev/null +++ b/addons/s3compatsigv4/templates/s3compatsigv4_user_settings.mako @@ -0,0 +1,46 @@ + +
+ + <%include file="s3compatsigv4_credentials_modal.mako"/> + +

+ + + + ${_("Connect or Reauthorize Account")} + +

+ +
+ + ${_("Disconnect Account")} + +
+ + + + + + + + + + + + + + +
+ + ${_("Private project")} + + + + +
+
+ +
+
diff --git a/addons/s3compatsigv4/tests/__init__.py b/addons/s3compatsigv4/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/addons/s3compatsigv4/tests/conftest.py b/addons/s3compatsigv4/tests/conftest.py new file mode 100644 index 00000000000..da9f243685b --- /dev/null +++ b/addons/s3compatsigv4/tests/conftest.py @@ -0,0 +1 @@ +from osf_tests.conftest import * # noqa diff --git a/addons/s3compatsigv4/tests/factories.py b/addons/s3compatsigv4/tests/factories.py new file mode 100644 index 00000000000..6e037e4669f --- /dev/null +++ b/addons/s3compatsigv4/tests/factories.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +"""Factories for the S3 Compatible Storage (SigV4) addon.""" +import factory +from factory.django import DjangoModelFactory +from osf_tests.factories import UserFactory, ProjectFactory, ExternalAccountFactory + +from addons.s3compatsigv4.models import ( + UserSettings, + NodeSettings +) + +class S3CompatSigV4AccountFactory(ExternalAccountFactory): + provider = 's3compatsigv4' + provider_id = factory.Sequence(lambda n: 'id-{0}'.format(n)) + oauth_key = factory.Sequence(lambda n: 'key-{0}'.format(n)) + oauth_secret = factory.Sequence(lambda n: 'secret-{0}'.format(n)) + display_name = 'S3 Compatible Storage (SigV4) Fake User' + + +class S3CompatSigV4UserSettingsFactory(DjangoModelFactory): + class Meta: + model = UserSettings + + owner = factory.SubFactory(UserFactory) + + +class S3CompatSigV4NodeSettingsFactory(DjangoModelFactory): + class Meta: + model = NodeSettings + + owner = factory.SubFactory(ProjectFactory) + user_settings = factory.SubFactory(S3CompatSigV4UserSettingsFactory) diff --git a/addons/s3compatsigv4/tests/test_model.py b/addons/s3compatsigv4/tests/test_model.py new file mode 100644 index 00000000000..29e11e9d004 --- /dev/null +++ b/addons/s3compatsigv4/tests/test_model.py @@ -0,0 +1,251 @@ +# from nose.tools import * # noqa +import mock +from nose.tools import (assert_false, assert_true, + assert_equal, 
assert_is_none) +import pytest +import unittest + +from framework.auth import Auth + +from osf_tests.factories import ProjectFactory, DraftRegistrationFactory +from tests.base import get_default_metaschema + +from addons.base.tests.models import ( + OAuthAddonNodeSettingsTestSuiteMixin, + OAuthAddonUserSettingTestSuiteMixin +) +from addons.s3compatsigv4.models import NodeSettings +from addons.s3compatsigv4.tests.factories import ( + S3CompatSigV4UserSettingsFactory, + S3CompatSigV4NodeSettingsFactory, + S3CompatSigV4AccountFactory +) + +pytestmark = pytest.mark.django_db + +class TestUserSettings(OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase): + + short_name = 's3compatsigv4' + full_name = 'S3 Compatible Storage (SigV4)' + ExternalAccountFactory = S3CompatSigV4AccountFactory + +class TestNodeSettings(OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase): + + short_name = 's3compatsigv4' + full_name = 'S3 Compatible Storage (SigV4)' + ExternalAccountFactory = S3CompatSigV4AccountFactory + NodeSettingsFactory = S3CompatSigV4NodeSettingsFactory + NodeSettingsClass = NodeSettings + UserSettingsFactory = S3CompatSigV4UserSettingsFactory + + def test_registration_settings(self): + registration = ProjectFactory() + clone, message = self.node_settings.after_register( + self.node, registration, self.user, + ) + assert_is_none(clone) + + def test_before_register_no_settings(self): + self.node_settings.user_settings = None + message = self.node_settings.before_register(self.node, self.user) + assert_false(message) + + def test_before_register_no_auth(self): + self.node_settings.external_account = None + message = self.node_settings.before_register(self.node, self.user) + assert_false(message) + + def test_before_register_settings_and_auth(self): + message = self.node_settings.before_register(self.node, self.user) + assert_true(message) + + @mock.patch('website.archiver.tasks.archive') + def test_does_not_get_copied_to_registrations(self, mock_archive): + 
registration = self.node.register_node( + schema=get_default_metaschema(), + auth=Auth(user=self.user), + draft_registration=DraftRegistrationFactory(branched_from=self.node), + ) + assert_false(registration.has_addon('s3compatsigv4')) + + ## Overrides ## + + def test_serialize_credentials(self): + self.user_settings.external_accounts[0].provider_id = 'host-11\tuser-11' + self.user_settings.external_accounts[0].oauth_key = 'key-11' + self.user_settings.external_accounts[0].oauth_secret = 'secret-15' + self.user_settings.save() + credentials = self.node_settings.serialize_waterbutler_credentials() + + expected = {'host': self.node_settings.external_account.provider_id.split('\t')[0], + 'access_key': self.node_settings.external_account.oauth_key, + 'secret_key': self.node_settings.external_account.oauth_secret} + assert_equal(credentials, expected) + + @mock.patch('addons.s3compatsigv4.models.bucket_exists') + @mock.patch('addons.s3compatsigv4.models.get_bucket_location_or_error') + @mock.patch('addons.s3compatsigv4.models.find_service_by_host') + def test_serialize_credentials_undefined_location(self, mock_service, mock_location, mock_exists): + mock_exists.return_value = True + mock_location.return_value = 'dummy-1' + mock_service.return_value = {'name': 'Dummy', 'host': 'dummy.example.com'} + self.user_settings.external_accounts[0].provider_id = 'host-11\tuser-11' + self.user_settings.external_accounts[0].oauth_key = 'key-11' + self.user_settings.external_accounts[0].oauth_secret = 'secret-15' + self.user_settings.save() + folder_id = '1234567890' + self.node_settings.set_folder(folder_id, auth=Auth(self.user)) + self.node_settings.save() + credentials = self.node_settings.serialize_waterbutler_credentials() + + expected = {'host': self.node_settings.external_account.provider_id.split('\t')[0], + 'access_key': self.node_settings.external_account.oauth_key, + 'secret_key': self.node_settings.external_account.oauth_secret} + assert_equal(credentials, expected) + + 
@mock.patch('addons.s3compatsigv4.models.bucket_exists') + @mock.patch('addons.s3compatsigv4.models.get_bucket_location_or_error') + @mock.patch('addons.s3compatsigv4.models.find_service_by_host') + def test_serialize_credentials_defined_location(self, mock_service, mock_location, mock_exists): + mock_exists.return_value = True + mock_location.return_value = 'dummy-2' + mock_service.return_value = {'name': 'Dummy', + 'host': 'dummy.example.com', + 'bucketLocations': {'dummy-1': {'name': 'Location1'}, + 'dummy-2': {'name': 'Location2', + 'host': 'host-location2'}}} + self.user_settings.external_accounts[0].provider_id = 'host-11\tuser-11' + self.user_settings.external_accounts[0].oauth_key = 'key-11' + self.user_settings.external_accounts[0].oauth_secret = 'secret-15' + self.user_settings.save() + folder_id = '1234567890' + self.node_settings.set_folder(folder_id, auth=Auth(self.user)) + self.node_settings.save() + credentials = self.node_settings.serialize_waterbutler_credentials() + + expected = {'host': 'host-location2', + 'access_key': self.node_settings.external_account.oauth_key, + 'secret_key': self.node_settings.external_account.oauth_secret} + assert_equal(credentials, expected) + + mock_location.return_value = 'dummy-1' + self.node_settings.set_folder(folder_id, auth=Auth(self.user)) + self.node_settings.save() + credentials = self.node_settings.serialize_waterbutler_credentials() + + expected = {'host': self.node_settings.external_account.provider_id.split('\t')[0], + 'access_key': self.node_settings.external_account.oauth_key, + 'secret_key': self.node_settings.external_account.oauth_secret} + assert_equal(credentials, expected) + + @mock.patch('addons.s3compatsigv4.models.bucket_exists') + @mock.patch('addons.s3compatsigv4.models.get_bucket_location_or_error') + @mock.patch('addons.s3compatsigv4.models.find_service_by_host') + def test_set_folder(self, mock_service, mock_location, mock_exists): + mock_exists.return_value = True + 
mock_location.return_value = '' + mock_service.return_value = {'name': 'Dummy', 'host': 'dummy.example.com'} + folder_id = '1234567890' + self.node_settings.set_folder(folder_id, auth=Auth(self.user)) + self.node_settings.save() + # Bucket was set + assert_equal(self.node_settings.folder_id, folder_id) + assert_equal(self.node_settings.folder_name, '{} (Default)'.format(folder_id)) + assert_equal(self.node_settings.folder_location, '') + # Log was saved + last_log = self.node.logs.latest() + assert_equal(last_log.action, '{0}_bucket_linked'.format(self.short_name)) + + @mock.patch('addons.s3compatsigv4.models.bucket_exists') + @mock.patch('addons.s3compatsigv4.models.get_bucket_location_or_error') + @mock.patch('addons.s3compatsigv4.models.find_service_by_host') + def test_set_folder_undefined_location(self, mock_service, mock_location, mock_exists): + mock_exists.return_value = True + mock_location.return_value = 'dummy-1' + mock_service.return_value = {'name': 'Dummy', 'host': 'dummy.example.com'} + folder_id = '1234567890' + self.node_settings.set_folder(folder_id, auth=Auth(self.user)) + self.node_settings.save() + # Bucket was set + assert_equal(self.node_settings.folder_id, folder_id) + assert_equal(self.node_settings.folder_name, '{} (dummy-1)'.format(folder_id)) + assert_equal(self.node_settings.folder_location, 'dummy-1') + # Log was saved + last_log = self.node.logs.latest() + assert_equal(last_log.action, '{0}_bucket_linked'.format(self.short_name)) + + @mock.patch('addons.s3compatsigv4.models.bucket_exists') + @mock.patch('addons.s3compatsigv4.models.get_bucket_location_or_error') + @mock.patch('addons.s3compatsigv4.models.find_service_by_host') + def test_set_folder_defined_location(self, mock_service, mock_location, mock_exists): + mock_exists.return_value = True + mock_location.return_value = 'dummy-2' + mock_service.return_value = {'name': 'Dummy', + 'host': 'dummy.example.com', + 'bucketLocations': {'dummy-1': {'name': 'Location1'}, + 'dummy-2': 
{'name': 'Location2'}}} + folder_id = '1234567890' + self.node_settings.set_folder(folder_id, auth=Auth(self.user)) + self.node_settings.save() + # Bucket was set + assert_equal(self.node_settings.folder_id, folder_id) + assert_equal(self.node_settings.folder_name, '{} (Location2)'.format(folder_id)) + assert_equal(self.node_settings.folder_location, 'dummy-2') + # Log was saved + last_log = self.node.logs.latest() + assert_equal(last_log.action, '{0}_bucket_linked'.format(self.short_name)) + + @mock.patch('addons.s3compatsigv4.models.bucket_exists') + @mock.patch('addons.s3compatsigv4.models.get_bucket_location_or_error') + @mock.patch('addons.s3compatsigv4.models.find_service_by_host') + def test_set_folder_encrypt_uploads_with_encryption_setting(self, mock_service, mock_location, mock_exists): + mock_exists.return_value = True + mock_location.return_value = 'dummy-3' + mock_service.return_value = {'name': 'Dummy', + 'host': 'dummy.example.com', + 'serverSideEncryption': False} + folder_id = '1234567890' + self.node_settings.set_folder(folder_id, auth=Auth(self.user)) + self.node_settings.save() + # encrypt_uploads set + assert_equal(self.node_settings.encrypt_uploads, False) + + # Log was saved + last_log = self.node.logs.latest() + assert_equal(last_log.action, '{0}_bucket_linked'.format(self.short_name)) + + @mock.patch('addons.s3compatsigv4.models.bucket_exists') + @mock.patch('addons.s3compatsigv4.models.get_bucket_location_or_error') + @mock.patch('addons.s3compatsigv4.models.find_service_by_host') + def test_set_folder_encrypt_uploads_without_encryption_setting(self, mock_service, mock_location, mock_exists): + mock_exists.return_value = True + mock_location.return_value = 'dummy-3' + mock_service.return_value = {'name': 'Dummy', + 'host': 'dummy.example.com',} + folder_id = '1234567890' + self.node_settings.set_folder(folder_id, auth=Auth(self.user)) + self.node_settings.save() + # encrypt_uploads set + assert_equal(self.node_settings.encrypt_uploads, 
class Tests3compatsigv4Serializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
    """Run the shared storage-addon serializer suite against the
    s3compatsigv4 serializer.

    The suite from ``StorageAddonSerializerTestSuiteMixin`` drives all
    assertions; this class only wires in the addon-specific pieces and
    stubs out the network-touching ``can_list`` check.
    """
    addon_short_name = 's3compatsigv4'
    Serializer = S3CompatSigV4Serializer
    ExternalAccountFactory = S3CompatSigV4AccountFactory
    # No real client object is needed; the serializer is exercised with
    # credential checks patched out.
    client = None

    def set_provider_id(self, pid):
        # Hook used by the mixin to point node settings at a bucket id.
        self.node_settings.folder_id = pid

    def setUp(self):
        # Patch utils.can_list as imported by the serializer module so no
        # test ever reaches a real S3-compatible endpoint.
        #
        # BUG FIX: the original code set ``return_value`` on the *patcher*
        # object returned by ``mock.patch(...)`` instead of on the
        # MagicMock returned by ``start()``. The configured ``True`` was
        # therefore ignored and the patched can_list returned a bare
        # MagicMock (truthy only by accident).
        self.mock_can_list = mock.patch('addons.s3compatsigv4.serializer.utils.can_list')
        patched_can_list = self.mock_can_list.start()
        patched_can_list.return_value = True
        super(Tests3compatsigv4Serializer, self).setUp()

    def tearDown(self):
        self.mock_can_list.stop()
        super(Tests3compatsigv4Serializer, self).tearDown()
self.mock_exists.start() + super(Tests3compatsigv4Views, self).setUp() + + def tearDown(self): + self.mock_can_list.stop() + self.mock_uid.stop() + self.mock_exists.stop() + super(Tests3compatsigv4Views, self).tearDown() + + def test_s3compatsigv4_settings_input_empty_keys(self): + url = self.project.api_url_for('s3compatsigv4_add_user_account') + rv = self.app.post_json(url, { + 'host': '', + 'access_key': '', + 'secret_key': '' + }, auth=self.user.auth, expect_errors=True) + assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) + assert_in('All the fields above are required.', rv.body.decode()) + + def test_s3compatsigv4_settings_input_empty_host(self): + url = self.project.api_url_for('s3compatsigv4_add_user_account') + rv = self.app.post_json(url, { + 'host': '', + 'access_key': 'Non-empty-access-key', + 'secret_key': 'Non-empty-secret-key' + }, auth=self.user.auth, expect_errors=True) + assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) + assert_in('All the fields above are required.', rv.body.decode()) + + def test_s3compatsigv4_settings_input_empty_access_key(self): + url = self.project.api_url_for('s3compatsigv4_add_user_account') + rv = self.app.post_json(url, { + 'host': 'Non-empty-host', + 'access_key': '', + 'secret_key': 'Non-empty-secret-key' + }, auth=self.user.auth, expect_errors=True) + assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) + assert_in('All the fields above are required.', rv.body.decode()) + + def test_s3compatsigv4_settings_input_empty_secret_key(self): + url = self.project.api_url_for('s3compatsigv4_add_user_account') + rv = self.app.post_json(url, { + 'host': 'Non-empty-host', + 'access_key': 'Non-empty-access-key', + 'secret_key': '' + }, auth=self.user.auth, expect_errors=True) + assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) + assert_in('All the fields above are required.', rv.body.decode()) + + def test_s3compatsigv4_settings_input_unknown_host(self): + url = 
self.project.api_url_for('s3compatsigv4_add_user_account') + rv = self.app.post_json(url, { + 'host': 'Non-empty-host', + 'access_key': 'Non-empty-access-key', + 'secret_key': 'Non-empty-secret-key' + }, auth=self.user.auth, expect_errors=True) + assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) + assert_in('The host is not available.', rv.body.decode()) + + def test_s3compatsigv4_settings_rdm_addons_denied(self): + institution = InstitutionFactory() + self.user.affiliated_institutions.add(institution) + self.user.save() + rdm_addon_option = get_rdm_addon_option(institution.id, self.ADDON_SHORT_NAME) + rdm_addon_option.is_allowed = False + rdm_addon_option.save() + url = self.project.api_url_for('s3compatsigv4_add_user_account') + rv = self.app.post_json(url,{ + 'access_key': 'aldkjf', + 'secret_key': 'las' + }, auth=self.user.auth, expect_errors=True) + assert_equal(rv.status_int, http_status.HTTP_403_FORBIDDEN) + assert_in('You are prohibited from using this add-on.', rv.body.decode()) + + def test_s3compatsigv4_set_bucket_no_settings(self): + user = AuthUserFactory() + self.project.add_contributor(user, save=True) + url = self.project.api_url_for('s3compatsigv4_set_config') + res = self.app.put_json( + url, {'s3compatsigv4_bucket': 'hammertofall'}, auth=user.auth, + expect_errors=True + ) + assert_equal(res.status_code, http_status.HTTP_400_BAD_REQUEST) + + def test_s3compatsigv4_set_bucket_no_auth(self): + + user = AuthUserFactory() + user.add_addon('s3compatsigv4') + self.project.add_contributor(user, save=True) + url = self.project.api_url_for('s3compatsigv4_set_config') + res = self.app.put_json( + url, {'s3compatsigv4_bucket': 'hammertofall'}, auth=user.auth, + expect_errors=True + ) + assert_equal(res.status_code, http_status.HTTP_403_FORBIDDEN) + + def test_s3compatsigv4_set_bucket_registered(self): + registration = self.project.register_node( + get_default_metaschema(), Auth(self.user), + DraftRegistrationFactory(branched_from=self.project), 
'' + ) + + url = registration.api_url_for('s3compatsigv4_set_config') + res = self.app.put_json( + url, {'s3compatsigv4_bucket': 'hammertofall'}, auth=self.user.auth, + expect_errors=True, + ) + + assert_equal(res.status_code, http_status.HTTP_400_BAD_REQUEST) + + @mock.patch('addons.s3compatsigv4.views.utils.can_list', return_value=False) + def test_user_settings_cant_list(self, mock_can_list): + url = api_url_for('s3compatsigv4_add_user_account') + rv = self.app.post_json(url, { + 'host': s3compatsigv4_settings.AVAILABLE_SERVICES[0]['host'], + 'access_key': 'aldkjf', + 'secret_key': 'las' + }, auth=self.user.auth, expect_errors=True) + + assert_in('Unable to list buckets.', rv.body.decode()) + assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) + + def test_s3compatsigv4_remove_node_settings_owner(self): + url = self.node_settings.owner.api_url_for('s3compatsigv4_deauthorize_node') + self.app.delete(url, auth=self.user.auth) + result = self.Serializer().serialize_settings(node_settings=self.node_settings, current_user=self.user) + assert_equal(result['nodeHasAuth'], False) + + def test_s3compatsigv4_remove_node_settings_unauthorized(self): + url = self.node_settings.owner.api_url_for('s3compatsigv4_deauthorize_node') + ret = self.app.delete(url, auth=None, expect_errors=True) + + assert_equal(ret.status_code, 401) + + def test_s3compatsigv4_get_node_settings_owner(self): + self.node_settings.set_auth(self.external_account, self.user) + self.node_settings.folder_id = 'bucket' + self.node_settings.save() + url = self.node_settings.owner.api_url_for('s3compatsigv4_get_config') + res = self.app.get(url, auth=self.user.auth) + + result = res.json['result'] + assert_equal(result['nodeHasAuth'], True) + assert_equal(result['userIsOwner'], True) + assert_equal(result['folder']['path'], self.node_settings.folder_id) + + def test_s3compatsigv4_get_node_settings_unauthorized(self): + url = self.node_settings.owner.api_url_for('s3compatsigv4_get_config') + 
unauthorized = AuthUserFactory() + ret = self.app.get(url, auth=unauthorized.auth, expect_errors=True) + + assert_equal(ret.status_code, 403) + + ## Overrides ## + + @mock.patch('addons.s3compatsigv4.models.get_bucket_names') + def test_folder_list(self, mock_names): + mock_names.return_value = ['bucket1', 'bucket2'] + super(Tests3compatsigv4Views, self).test_folder_list() + + @mock.patch('addons.s3compatsigv4.models.bucket_exists') + @mock.patch('addons.s3compatsigv4.models.get_bucket_location_or_error') + @mock.patch('addons.s3compatsigv4.models.find_service_by_host') + def test_set_config(self, mock_service, mock_location, mock_exists): + mock_exists.return_value = True + mock_location.return_value = '' + mock_service.return_value = {'name': 'Dummy', 'host': 'dummy.example.com'} + self.node_settings.set_auth(self.external_account, self.user) + url = self.project.api_url_for('{0}_set_config'.format(self.ADDON_SHORT_NAME)) + res = self.app.put_json(url, { + 'selected': self.folder + }, auth=self.user.auth) + assert_equal(res.status_code, http_status.HTTP_200_OK) + self.project.reload() + self.node_settings.reload() + assert_equal( + self.project.logs.latest().action, + '{0}_bucket_linked'.format(self.ADDON_SHORT_NAME) + ) + assert_equal(res.json['result']['folder']['name'], self.node_settings.folder_name) + + +class TestCreateBucket(S3CompatSigV4AddonTestCase, OsfTestCase): + + def setUp(self): + + super(TestCreateBucket, self).setUp() + + self.user = AuthUserFactory() + self.consolidated_auth = Auth(user=self.user) + self.auth = self.user.auth + self.project = ProjectFactory(creator=self.user) + + self.project.add_addon('s3compatsigv4', auth=self.consolidated_auth) + self.project.creator.add_addon('s3compatsigv4') + + self.user_settings = self.user.get_addon('s3compatsigv4') + self.user_settings.access_key = 'We-Will-Rock-You' + self.user_settings.secret_key = 'Idontknowanyqueensongs' + self.user_settings.save() + + self.node_settings = 
self.project.get_addon('s3compatsigv4') + self.node_settings.bucket = 'Sheer-Heart-Attack' + self.node_settings.user_settings = self.project.creator.get_addon('s3compatsigv4') + + self.node_settings.save() + + def test_bad_names(self): + assert_false(validate_bucket_name('')) + assert_false(validate_bucket_name('no')) + assert_false(validate_bucket_name('a' * 64)) + assert_false(validate_bucket_name(' leadingspace')) + assert_false(validate_bucket_name('trailingspace ')) + assert_false(validate_bucket_name('bogus naMe')) + assert_false(validate_bucket_name('.cantstartwithp')) + assert_false(validate_bucket_name('or.endwith.')) + assert_false(validate_bucket_name('..nodoubles')) + assert_false(validate_bucket_name('no_unders_in')) + assert_false(validate_bucket_name('-leadinghyphen')) + assert_false(validate_bucket_name('trailinghyphen-')) + assert_false(validate_bucket_name('Mixedcase')) + assert_false(validate_bucket_name('empty..label')) + assert_false(validate_bucket_name('label-.trailinghyphen')) + assert_false(validate_bucket_name('label.-leadinghyphen')) + assert_false(validate_bucket_name('8.8.8.8')) + assert_false(validate_bucket_name('600.9000.0.28')) + assert_false(validate_bucket_name('no_underscore')) + assert_false(validate_bucket_name('_nounderscoreinfront')) + assert_false(validate_bucket_name('no-underscore-in-back_')) + assert_false(validate_bucket_name('no-underscore-in_the_middle_either')) + + def test_names(self): + assert_true(validate_bucket_name('imagoodname')) + assert_true(validate_bucket_name('still.passing')) + assert_true(validate_bucket_name('can-have-dashes')) + assert_true(validate_bucket_name('kinda.name.spaced')) + assert_true(validate_bucket_name('a-o.valid')) + assert_true(validate_bucket_name('11.12.m')) + assert_true(validate_bucket_name('a--------a')) + assert_true(validate_bucket_name('a' * 63)) + + @mock.patch('addons.s3compatsigv4.views.utils.create_bucket') + @mock.patch('addons.s3compatsigv4.views.utils.get_bucket_names') 
+ def test_create_bucket_pass(self, mock_names, mock_make): + mock_make.return_value = True + mock_names.return_value = [ + 'butintheend', + 'it', + 'doesntevenmatter' + ] + url = self.project.api_url_for('s3compatsigv4_create_bucket') + ret = self.app.post_json( + url, + { + 'bucket_name': 'doesntevenmatter' + }, + auth=self.user.auth + ) + + assert_equal(ret.status_int, http_status.HTTP_200_OK) + assert_equal(ret.json, {}) + + @mock.patch('addons.s3compatsigv4.views.utils.create_bucket') + def test_create_bucket_fail(self, mock_make): + error = ClientError( + {'Error': {'Code': '418', 'Message': 'This should work'}}, + 'CreateBucket' + ) + mock_make.side_effect = error + + url = '/api/v1/project/{0}/s3compatsigv4/newbucket/'.format(self.project._id) + ret = self.app.post_json(url, {'bucket_name': 'doesntevenmatter'}, auth=self.user.auth, expect_errors=True) + + assert_equals(ret.body.decode(), '{"message": "An error occurred (418) when calling the CreateBucket operation: This should work", "title": "Problem connecting to S3 Compatible Storage (SigV4)"}') diff --git a/addons/s3compatsigv4/tests/utils.py b/addons/s3compatsigv4/tests/utils.py new file mode 100644 index 00000000000..07c94f27182 --- /dev/null +++ b/addons/s3compatsigv4/tests/utils.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +from nose.tools import (assert_equals, assert_true, assert_false) + +from addons.base.tests.base import OAuthAddonTestCaseMixin, AddonTestCase +from addons.s3compatsigv4.tests.factories import S3CompatSigV4AccountFactory +from addons.s3compatsigv4.provider import S3CompatSigV4Provider +from addons.s3compatsigv4.serializer import S3CompatSigV4Serializer +from addons.s3compatsigv4 import utils + +class S3CompatSigV4AddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase): + + ADDON_SHORT_NAME = 's3compatsigv4' + ExternalAccountFactory = S3CompatSigV4AccountFactory + Provider = S3CompatSigV4Provider + Serializer = S3CompatSigV4Serializer + client = None + folder = { + 'path': 'bucket', 
logger = logging.getLogger(__name__)


def connect_s3compatsigv4(host=None, access_key=None, secret_key=None, node_settings=None):
    """Build a boto3 S3 client that signs requests with Signature V4.

    Args:
        host: S3-compatible storage host, optionally with a port
            (e.g. ``'s3.amazonaws.com'`` or ``'storage.example.com:8080'``)
        access_key: access key ID
        secret_key: secret access key
        node_settings: optional NodeSettings; when it carries an external
            account, that account's host/key/secret override the arguments

    Returns:
        boto3.client: S3 client configured for Signature V4

    Raises:
        ValueError: when any of host/access_key/secret_key is missing
        InvalidAuthError: when the client cannot be constructed
    """
    # Credentials stored on the node settings take precedence over the
    # explicitly supplied arguments.
    if node_settings is not None and node_settings.external_account is not None:
        account = node_settings.external_account
        host = account.provider_id.split('\t')[0]
        access_key = account.oauth_key
        secret_key = account.oauth_secret

    if not all([host, access_key, secret_key]):
        raise ValueError('Host, access_key, and secret_key are required')

    # Split an optional ':port' suffix off the host. Plain HTTP is assumed
    # for any explicit port other than 443; a bare host defaults to HTTPS.
    endpoint_host = host
    port = 443
    use_ssl = True
    parsed = re.match(r'^(.+):([0-9]+)$', host)
    if parsed is not None:
        endpoint_host = parsed.group(1)
        port = int(parsed.group(2))
        use_ssl = (port == 443)

    scheme = 'https' if use_ssl else 'http'
    endpoint_url = f'{scheme}://{endpoint_host}:{port}'

    # Signature V4 with automatic path/virtual-host addressing for broad
    # compatibility with non-AWS S3 implementations.
    sigv4_config = Config(
        signature_version='s3v4',
        s3={'addressing_style': 'auto'},
    )

    try:
        return boto3.client(
            's3',
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            endpoint_url=endpoint_url,
            config=sigv4_config,
            use_ssl=use_ssl,
            verify=use_ssl,  # verify certificates whenever TLS is in use
        )
    except Exception as e:
        logger.error(f'Failed to create S3 client: {e}')
        raise InvalidAuthError(f'Failed to connect to S3: {str(e)}')


def get_bucket_names(node_settings):
    """Return the names of every bucket the stored credentials can list.

    Args:
        node_settings: NodeSettings carrying the external account

    Returns:
        list: bucket names

    Raises:
        HTTPError: 403 on missing credentials, the service's own status on
            a ClientError, 500 on anything unexpected
    """
    try:
        client = connect_s3compatsigv4(node_settings=node_settings)
        listing = client.list_buckets()
        return [entry['Name'] for entry in listing.get('Buckets', [])]
    except NoCredentialsError:
        raise HTTPError(http_status.HTTP_403_FORBIDDEN)
    except ClientError as e:
        error_code = e.response.get('Error', {}).get('Code', 'Unknown')
        status_code = e.response.get('ResponseMetadata', {}).get('HTTPStatusCode', 500)
        logger.error(f'Failed to list buckets: {error_code}')
        raise HTTPError(status_code)
    except Exception as e:
        logger.error(f'Unexpected error listing buckets: {e}')
        raise HTTPError(http_status.HTTP_500_INTERNAL_SERVER_ERROR)
def find_service_by_host(host):
    """Return the configured service entry whose ``host`` matches.

    Args:
        host: host name to look up in ``settings.AVAILABLE_SERVICES``

    Returns:
        dict: the first matching service configuration

    Raises:
        KeyError: when no configured service has that host
    """
    for service in settings.AVAILABLE_SERVICES:
        if service['host'] == host:
            return service
    raise KeyError(f'Service not found for host: {host}')


def validate_bucket_location(node_settings, location):
    """Check that *location* is a region supported by the linked service.

    The empty string means "service default" and is always accepted.

    Args:
        node_settings: NodeSettings whose external account names the host
        location: bucket region identifier

    Returns:
        bool: True when the location is valid for the service
    """
    if location == '':
        return True

    try:
        host = node_settings.external_account.provider_id.split('\t')[0]
        service = find_service_by_host(host)
    except (KeyError, AttributeError) as e:
        logger.warning(f'Failed to validate bucket location: {e}')
        return False
    # bucketLocations maps region id -> metadata; membership tests keys.
    return location in service.get('bucketLocations', [])


def validate_bucket_name(name):
    """Validate *name* against S3 bucket-naming rules.

    Rules enforced: 3-63 characters; dot-separated labels of lowercase
    letters, digits and hyphens that start and end with a letter or digit;
    not formatted like an IPv4 address.

    Args:
        name: candidate bucket name

    Returns:
        bool: True when the name is acceptable
    """
    if not name or not isinstance(name, str):
        return False
    if not 3 <= len(name) <= 63:
        return False

    label = r'[a-z0-9]+(?:[a-z0-9\-]*[a-z0-9])?'
    dotted_labels = re.compile('^' + label + r'(?:\.' + label + ')*$')
    ipv4_like = re.compile(r'^[0-9]+(?:\.[0-9]+){3}$')

    if ipv4_like.match(name):
        return False
    return dotted_labels.match(name) is not None


def create_bucket(node_settings, bucket_name, location=''):
    """Create a new bucket on the linked service.

    Args:
        node_settings: NodeSettings carrying the credentials
        bucket_name: name for the new bucket
        location: optional region; '' uses the service default

    Returns:
        dict: raw response from the CreateBucket call

    Raises:
        ClientError: re-raised after logging, for the caller to surface
    """
    try:
        client = connect_s3compatsigv4(node_settings=node_settings)
        if location:
            return client.create_bucket(
                Bucket=bucket_name,
                CreateBucketConfiguration={'LocationConstraint': location},
            )
        return client.create_bucket(Bucket=bucket_name)
    except ClientError as e:
        error_code = e.response.get('Error', {}).get('Code', 'Unknown')
        logger.error(f'Failed to create bucket {bucket_name}: {error_code}')
        raise
    except Exception as e:
        logger.error(f'Unexpected error creating bucket: {e}')
        raise


def bucket_exists(host, access_key, secret_key, bucket_name):
    """Return True when *bucket_name* exists and is reachable.

    Args:
        host: S3-compatible storage host
        access_key: access key ID
        secret_key: secret access key
        bucket_name: bucket to probe with HeadBucket

    Returns:
        bool: True on success or on a redirect (bucket in another region);
            False for an empty name or any other failure
    """
    if not bucket_name:
        return False

    try:
        client = connect_s3compatsigv4(host, access_key, secret_key)
        client.head_bucket(Bucket=bucket_name)
    except ClientError as e:
        error_code = e.response.get('Error', {}).get('Code', '')
        # Redirect codes mean the bucket exists, just in a different region.
        if error_code in ('301', '302', 'PermanentRedirect', 'TemporaryRedirect'):
            return True
        logger.debug(f'Bucket {bucket_name} not accessible: {error_code}')
        return False
    except Exception as e:
        logger.error(f'Error checking bucket existence: {e}')
        return False
    return True
def can_list(host, access_key, secret_key):
    """Return True when the credentials can run ListBuckets.

    Args:
        host: S3-compatible storage host
        access_key: access key ID
        secret_key: secret access key

    Returns:
        bool: True on a successful ListBuckets call, False otherwise
            (including when any argument is empty)
    """
    if not all([host, access_key, secret_key]):
        return False

    try:
        client = connect_s3compatsigv4(host, access_key, secret_key)
        client.list_buckets()
    except (ClientError, NoCredentialsError, BotoCoreError) as e:
        logger.debug(f'Cannot list buckets: {e}')
        return False
    except Exception as e:
        logger.error(f'Unexpected error checking list permission: {e}')
        return False
    return True


def get_user_info(host, access_key, secret_key):
    """Return the credential owner's identity as reported by ListBuckets.

    Args:
        host: S3-compatible storage host
        access_key: access key ID
        secret_key: secret access key

    Returns:
        dict with 'display_name' and 'id' keys, or None when the keys are
        missing, the call fails, or no Owner is reported
    """
    if not all([access_key, secret_key]):
        return None

    try:
        client = connect_s3compatsigv4(host, access_key, secret_key)
        listing = client.list_buckets()
        owner = listing.get('Owner', {})
        if not owner:
            return None
        return {
            'display_name': owner.get('DisplayName', ''),
            'id': owner.get('ID', ''),
        }
    except (ClientError, NoCredentialsError) as e:
        logger.error(f'Cannot get user info: {e}')
        return None
    except Exception as e:
        logger.error(f'Unexpected error getting user info: {e}')
        return None


def get_bucket_location_or_error(host, access_key, secret_key, bucket_name):
    """Return the bucket's region, or raise a descriptive error.

    Args:
        host: S3-compatible storage host
        access_key: access key ID
        secret_key: secret access key
        bucket_name: bucket to inspect

    Returns:
        str: region name; '' for the service's default region or when the
            region cannot be determined

    Raises:
        InvalidAuthError: when the client cannot be constructed
        InvalidFolderError: when the bucket is missing or inaccessible
    """
    try:
        s3_client = connect_s3compatsigv4(host, access_key, secret_key)
    except Exception as e:
        logger.error(f'Failed to connect: {e}')
        raise InvalidAuthError('Invalid credentials or connection failed')

    # Existence/reachability probe before asking for the location.
    try:
        s3_client.head_bucket(Bucket=bucket_name)
    except ClientError as e:
        error_code = e.response.get('Error', {}).get('Code', '')
        logger.error(f'Bucket {bucket_name} not accessible: {error_code}')
        raise InvalidFolderError(f'Bucket not found or not accessible: {bucket_name}')

    try:
        response = s3_client.get_bucket_location(Bucket=bucket_name)
        bucket_location = response.get('LocationConstraint', '')
        # AWS reports the default region (us-east-1) as None; normalize.
        return '' if bucket_location is None else bucket_location
    except ClientError as e:
        error_code = e.response.get('Error', {}).get('Code', '')
        logger.warning(f'Could not get bucket location for {bucket_name}: {error_code}')
        # Fall back to the default location when it cannot be determined.
        return ''
    except Exception as e:
        logger.error(f'Unexpected error getting bucket location: {e}')
        return ''
must_have_permission, + must_be_addon_authorizer, +) + +from admin.rdm_addons.decorators import must_be_rdm_addons_allowed + + +SHORT_NAME = 's3compatsigv4' +FULL_NAME = 'S3 Compatible Storage (SigV4)' + +s3compatsigv4_account_list = generic_views.account_list( + SHORT_NAME, + S3CompatSigV4Serializer +) + +s3compatsigv4_import_auth = generic_views.import_auth( + SHORT_NAME, + S3CompatSigV4Serializer +) + +s3compatsigv4_deauthorize_node = generic_views.deauthorize_node( + SHORT_NAME +) + +s3compatsigv4_get_config = generic_views.get_config( + SHORT_NAME, + S3CompatSigV4Serializer +) + +def _set_folder(node_addon, folder, auth): + folder_id = folder['id'] + node_addon.set_folder(folder_id, auth=auth) + node_addon.save() + +s3compatsigv4_set_config = generic_views.set_config( + SHORT_NAME, + FULL_NAME, + S3CompatSigV4Serializer, + _set_folder +) + +@must_have_addon(SHORT_NAME, 'node') +@must_be_addon_authorizer(SHORT_NAME) +def s3compatsigv4_folder_list(node_addon, **kwargs): + """ Returns all the subsequent folders under the folder id passed. + """ + return node_addon.get_folders() + +@must_have_addon(SHORT_NAME, 'node') +@must_be_addon_authorizer(SHORT_NAME) +def s3compatsigv4_attached_service(node_addon, **kwargs): + """ Returns the description about the attached S3 service. + """ + result = {} + if node_addon.external_account is not None: + host = node_addon.external_account.provider_id.split('\t')[0] + result['host'] = host + return result + +@must_be_logged_in +@must_be_rdm_addons_allowed(SHORT_NAME) +def s3compatsigv4_add_user_account(auth, **kwargs): + """Verifies new external account credentials and adds to user's list""" + try: + host = request.json['host'] + access_key = request.json['access_key'] + secret_key = request.json['secret_key'] + except KeyError: + raise HTTPError(http_status.HTTP_400_BAD_REQUEST) + + if not (host and access_key and secret_key): + return { + 'message': 'All the fields above are required.' 
+ }, http_status.HTTP_400_BAD_REQUEST + if host not in [s['host'] for s in settings.AVAILABLE_SERVICES]: + return { + 'message': 'The host is not available.' + }, http_status.HTTP_400_BAD_REQUEST + + user_info = utils.get_user_info(host, access_key, secret_key) + if not user_info: + return { + 'message': ('Unable to access account.\n' + 'Check to make sure that the above credentials are valid, ' + 'and that they have permission to list buckets.') + }, http_status.HTTP_400_BAD_REQUEST + + if not utils.can_list(host, access_key, secret_key): + return { + 'message': ('Unable to list buckets.\n' + 'Listing buckets is required permission that can be changed via IAM') + }, http_status.HTTP_400_BAD_REQUEST + + account = None + # GRDM-53044 Identify S3 Compatible Storage (SigV4) authentication information + # using both the AWS Account and Access Key + provider_id = '{}\t{}\t{}'.format(host, user_info['id'], access_key) + masked_access_key = f'****{access_key[-4:]}' if len(access_key) > 4 else '****' + display_name = '{} ({}, {})'.format(user_info['display_name'], host, masked_access_key) + try: + account = ExternalAccount( + provider=SHORT_NAME, + provider_name=FULL_NAME, + oauth_key=access_key, + oauth_secret=secret_key, + provider_id=provider_id, + display_name=display_name, + ) + account.save() + except ValidationError: + # ... or get the old one + account = ExternalAccount.objects.get( + provider=SHORT_NAME, + provider_id=provider_id, + ) + if account.oauth_key != access_key or account.oauth_secret != secret_key: + account.oauth_key = access_key + account.oauth_secret = secret_key + account.save() + assert account is not None + + if not auth.user.external_accounts.filter(id=account.id).exists(): + auth.user.external_accounts.add(account) + + # Ensure S3 Compatible Storage (SigV4) is enabled. 
+ auth.user.get_or_add_addon('s3compatsigv4', auth=auth) + auth.user.save() + + return {} + + +@must_be_addon_authorizer(SHORT_NAME) +@must_have_addon('s3compatsigv4', 'node') +@must_have_permission('write') +def s3compatsigv4_create_bucket(auth, node_addon, **kwargs): + bucket_name = request.json.get('bucket_name', '') + bucket_location = request.json.get('bucket_location', '') + + if not utils.validate_bucket_name(bucket_name): + return { + 'message': 'That bucket name is not valid.', + 'title': 'Invalid bucket name', + }, http_status.HTTP_400_BAD_REQUEST + + # Get location and verify it is valid + if not utils.validate_bucket_location(node_addon, bucket_location): + return { + 'message': 'That bucket location is not valid.', + 'title': 'Invalid bucket location', + }, http_status.HTTP_400_BAD_REQUEST + + try: + utils.create_bucket(node_addon, bucket_name, bucket_location) + except ClientError as e: + return { + 'message': str(e), + 'title': 'Problem connecting to S3 Compatible Storage (SigV4)', + }, http_status.HTTP_400_BAD_REQUEST + except BotoCoreError as e: # Base class catchall + return { + 'message': str(e), + 'title': 'Error connecting to S3 Compatible Storage (SigV4)', + }, http_status.HTTP_400_BAD_REQUEST + + return {} diff --git a/admin/base/settings/defaults.py b/admin/base/settings/defaults.py index 1338d43f750..20f076916f0 100644 --- a/admin/base/settings/defaults.py +++ b/admin/base/settings/defaults.py @@ -128,6 +128,7 @@ 'addons.azureblobstorage', 'addons.weko', 'addons.s3compat', + 'addons.s3compatsigv4', 'addons.s3compatb3', 'addons.nextcloud', 'addons.gitlab', @@ -164,6 +165,7 @@ 'addons_azureblobstorage': None, 'addons_weko': None, 'addons_s3compat': None, + 'addons_s3compatsigv4': None, 'addons_s3compatb3': None, 'addons_nextcloud': None, 'addons_gitlab': None, @@ -181,6 +183,7 @@ 'swift', 'weko', 's3compat', + 's3compatsigv4', 's3compatb3', 'nextcloud', 'gitlab', diff --git a/admin/rdm_custom_storage_location/export_data/utils.py 
b/admin/rdm_custom_storage_location/export_data/utils.py index fe32cc9742a..368a77a9997 100644 --- a/admin/rdm_custom_storage_location/export_data/utils.py +++ b/admin/rdm_custom_storage_location/export_data/utils.py @@ -22,6 +22,7 @@ test_owncloud_connection, test_s3_connection, test_s3compat_connection, + test_s3compatsigv4_connection, wd_info_for_institutions, ) from api.base.utils import waterbutler_api_url_for @@ -177,6 +178,35 @@ def save_s3compat_credentials(institution_guid, storage_name, host_url, access_k return {'message': 'Saved credentials successfully!!'}, http_status.HTTP_200_OK +def save_s3compatsigv4_credentials(institution_guid, storage_name, host_url, access_key, secret_key, bucket): + test_connection_result = test_s3compatsigv4_connection(host_url, access_key, secret_key, bucket) + if test_connection_result[1] != http_status.HTTP_200_OK: + return test_connection_result + + host = host_url.rstrip('/').replace('https://', '').replace('http://', '') + + wb_credentials = { + 'storage': { + 'access_key': access_key, + 'secret_key': secret_key, + 'host': host, + } + } + wb_settings = { + 'storage': { + 'folder': { + 'encrypt_uploads': True, + }, + 'bucket': bucket, + 'provider': 's3compatsigv4', + } + } + + update_storage_location(institution_guid, storage_name, wb_credentials, wb_settings) + + return {'message': 'Saved credentials successfully!!'}, http_status.HTTP_200_OK + + def save_dropboxbusiness_credentials(institution, storage_name, provider_name): test_connection_result = test_dropboxbusiness_connection(institution) if test_connection_result[1] != http_status.HTTP_200_OK: diff --git a/admin/rdm_custom_storage_location/export_data/views/location.py b/admin/rdm_custom_storage_location/export_data/views/location.py index cf83072db6e..76fd3811d8a 100644 --- a/admin/rdm_custom_storage_location/export_data/views/location.py +++ b/admin/rdm_custom_storage_location/export_data/views/location.py @@ -30,7 +30,7 @@ class 
ExportStorageLocationViewBaseView(RdmPermissionMixin, UserPassesTestMixin): """ Base class for all the Institutional Storage Views """ - PROVIDERS_AVAILABLE = ['s3', 's3compat', 'nextcloudinstitutions'] + PROVIDERS_AVAILABLE = ['s3', 's3compat', 's3compatsigv4', 'nextcloudinstitutions'] INSTITUTION_DEFAULT = Institution.INSTITUTION_DEFAULT institution_guid = INSTITUTION_DEFAULT institution = None @@ -51,7 +51,7 @@ def test_func(self): user = self.request.user institution_id = self.kwargs.get('institution_id', None) if user.is_institutional_admin or (institution_id and user.is_super_admin): - self.PROVIDERS_AVAILABLE = ['s3', 's3compat', + self.PROVIDERS_AVAILABLE = ['s3', 's3compat', 's3compatsigv4', 'dropboxbusiness', 'nextcloudinstitutions'] return user.is_super_admin or user.is_institutional_admin @@ -191,6 +191,13 @@ def post(self, request, *args, **kwargs): data.get('s3compat_secret_key'), data.get('s3compat_bucket'), ) + elif provider_short_name == 's3compatsigv4': + result = utils.test_s3compatsigv4_connection( + data.get('s3compatsigv4_endpoint_url'), + data.get('s3compatsigv4_access_key'), + data.get('s3compatsigv4_secret_key'), + data.get('s3compatsigv4_bucket'), + ) elif provider_short_name == 'nextcloudinstitutions': result = utils.test_owncloud_connection( data.get('nextcloudinstitutions_host'), @@ -272,6 +279,15 @@ def post(self, request, *args, **kwargs): data.get('s3compat_secret_key'), data.get('s3compat_bucket'), ) + elif provider_short_name == 's3compatsigv4': + result = export_data_utils.save_s3compatsigv4_credentials( + institution_guid, + storage_name, + data.get('s3compatsigv4_endpoint_url'), + data.get('s3compatsigv4_access_key'), + data.get('s3compatsigv4_secret_key'), + data.get('s3compatsigv4_bucket'), + ) elif provider_short_name == 'nextcloudinstitutions': result = export_data_utils.save_nextcloudinstitutions_credentials( institution, @@ -313,7 +329,7 @@ def test_func(self): return False if user.is_institutional_admin: - 
self.PROVIDERS_AVAILABLE = ['s3', 's3compat', + self.PROVIDERS_AVAILABLE = ['s3', 's3compat', 's3compatsigv4', 'dropboxbusiness', 'nextcloudinstitutions'] location_id = self.kwargs.get('location_id') institution_id = self.kwargs.get('institution_id') diff --git a/admin/rdm_custom_storage_location/utils.py b/admin/rdm_custom_storage_location/utils.py index 80448aa7a8e..b86eeed3e95 100644 --- a/admin/rdm_custom_storage_location/utils.py +++ b/admin/rdm_custom_storage_location/utils.py @@ -25,6 +25,7 @@ from addons.s3 import utils as s3_utils from addons.s3compat import utils as s3compat_utils from addons.s3compatb3 import utils as s3compatb3_utils +from addons.s3compatsigv4 import utils as s3compatsigv4_utils from addons.swift import settings as swift_settings, utils as swift_utils from addons.swift.provider import SwiftProvider from addons.dropboxbusiness import utils as dropboxbusiness_utils @@ -68,6 +69,7 @@ enabled_providers_list = [ 's3', 'osfstorage', 'swift', 's3compat', + 's3compatsigv4', ] enabled_providers_list.extend(enabled_providers_forinstitutions_list) @@ -321,6 +323,60 @@ def test_s3compat_connection(host_url, access_key, secret_key, bucket): } }, http_status.HTTP_200_OK) +def test_s3compatsigv4_connection(host_url, access_key, secret_key, bucket): + host = host_url.rstrip('/').replace('https://', '').replace('http://', '') + if not (host and access_key and secret_key and bucket): + return ({ + 'message': 'All the fields above are required.' 
+ }, http_status.HTTP_400_BAD_REQUEST) + + try: + user_info = s3compatsigv4_utils.get_user_info(host, access_key, secret_key) + e_message = '' + except Exception as e: + user_info = None + e_message = traceback.format_exception_only(type(e), e)[0].rstrip('\n') + if not user_info: + return ({ + 'message': 'Unable to access account.\n' + 'Check to make sure that the above credentials are valid, ' + 'and that they have permission to list buckets.', + 'e_message': e_message + }, http_status.HTTP_400_BAD_REQUEST) + + try: + res = s3compatsigv4_utils.can_list(host, access_key, secret_key) + e_message = '' + except Exception as e: + res = False + e_message = traceback.format_exception_only(type(e), e)[0].rstrip('\n') + if not res: + return ({ + 'message': 'Unable to list buckets.\n' + 'Listing buckets is required permission that can be changed via IAM', + 'e_message': e_message + }, http_status.HTTP_400_BAD_REQUEST) + + try: + res = s3compatsigv4_utils.bucket_exists(host, access_key, secret_key, bucket) + e_message = '' + except Exception as e: + res = False + e_message = traceback.format_exception_only(type(e), e)[0].rstrip('\n') + if not res: + return ({ + 'message': 'Invalid bucket.', + 'e_message': e_message + }, http_status.HTTP_400_BAD_REQUEST) + + return ({ + 'message': 'Credentials are valid', + 'data': { + 'id': user_info['id'], + 'display_name': user_info['display_name'], + } + }, http_status.HTTP_200_OK) + def test_s3compatb3_connection(host_url, access_key, secret_key, bucket): host = host_url.rstrip('/').replace('https://', '').replace('http://', '') if not (host and access_key and secret_key and bucket): @@ -667,6 +723,39 @@ def save_s3compat_credentials(institution_id, storage_name, host_url, access_key 'message': 'Saved credentials successfully!!' 
}, http_status.HTTP_200_OK) +def save_s3compatsigv4_credentials(institution_id, storage_name, host_url, access_key, secret_key, + bucket, server_side_encryption=False): + + test_connection_result = test_s3compatsigv4_connection(host_url, access_key, secret_key, bucket) + if test_connection_result[1] != http_status.HTTP_200_OK: + return test_connection_result + + host = host_url.rstrip('/').replace('https://', '').replace('http://', '') + + wb_credentials = { + 'storage': { + 'access_key': access_key, + 'secret_key': secret_key, + 'host': host, + } + } + wb_settings = { + 'storage': { + 'folder': '', + 'encrypt_uploads': server_side_encryption, + 'bucket': bucket, + 'provider': 's3compatsigv4', + 'type': Region.INSTITUTIONS, + } + } + + region = update_storage(institution_id, storage_name, wb_credentials, wb_settings) + external_util.remove_region_external_account(region) + + return ({ + 'message': 'Saved credentials successfully!!' + }, http_status.HTTP_200_OK) + def save_s3compatb3_credentials(institution_id, storage_name, host_url, access_key, secret_key, bucket): @@ -1198,6 +1287,19 @@ def get_s3compat_info(waterbutler_credentials_storage, waterbutler_settings_stor } +def get_s3compatsigv4_info(waterbutler_credentials_storage, waterbutler_settings_storage): + """Get storage information for S3 Compatible (SigV4) Storage.""" + return { + 'host': create_storage_info_template('Endpoint URL', waterbutler_credentials_storage.get('host')), + 'access_key': create_storage_info_template('Access Key', waterbutler_credentials_storage.get('access_key')), + 'bucket': create_storage_info_template('Bucket', waterbutler_settings_storage.get('bucket')), + 'encrypt_uploads': create_storage_info_template( + 'Enable Server Side Encryption', + waterbutler_settings_storage.get('encrypt_uploads', False) + ) + } + + def get_s3compatinstitutions_info(institution, provider_name, region): """Get storage information for S3 Compatible Storage for Institutions.""" rdm_addon_option, 
external_account = get_institution_addon_info(institution.id, provider_name) @@ -1253,6 +1355,7 @@ def get_institutional_storage_information(provider_name, region, institution): 'osfstorage': lambda: get_osfstorage_info(waterbutler_settings_storage), 's3': lambda: get_s3_info(waterbutler_credentials_storage, waterbutler_settings_storage), 's3compat': lambda: get_s3compat_info(waterbutler_credentials_storage, waterbutler_settings_storage), + 's3compatsigv4': lambda: get_s3compatsigv4_info(waterbutler_credentials_storage, waterbutler_settings_storage), 's3compatinstitutions': lambda: get_s3compatinstitutions_info(institution, provider_name, region), 'ociinstitutions': lambda: get_ociinstitutions_info(institution, provider_name), 'nextcloudinstitutions': lambda: get_nextcloudinstitutions_info(institution, provider_name), diff --git a/admin/rdm_custom_storage_location/views.py b/admin/rdm_custom_storage_location/views.py index 33cbb46fb07..7203b358287 100644 --- a/admin/rdm_custom_storage_location/views.py +++ b/admin/rdm_custom_storage_location/views.py @@ -192,6 +192,13 @@ def post(self, request, *args, **kwargs): data.get('s3compat_secret_key'), data.get('s3compat_bucket'), ) + elif provider_short_name == 's3compatsigv4': + result = utils.test_s3compatsigv4_connection( + data.get('s3compatsigv4_endpoint_url'), + data.get('s3compatsigv4_access_key'), + data.get('s3compatsigv4_secret_key'), + data.get('s3compatsigv4_bucket'), + ) elif provider_short_name == 's3compatb3': result = utils.test_s3compatb3_connection( data.get('s3compatb3_endpoint_url'), @@ -305,6 +312,16 @@ def post(self, request, *args, **kwargs): data.get('s3compat_bucket'), bool(strtobool(data.get('s3compat_server_side_encryption'))), ) + elif provider_short_name == 's3compatsigv4': + result = utils.save_s3compatsigv4_credentials( + institution_id, + storage_name, + data.get('s3compatsigv4_endpoint_url'), + data.get('s3compatsigv4_access_key'), + data.get('s3compatsigv4_secret_key'), + 
data.get('s3compatsigv4_bucket'), + bool(strtobool(data.get('s3compatsigv4_server_side_encryption'))), + ) elif provider_short_name == 's3compatb3': result = utils.save_s3compatb3_credentials( institution_id, diff --git a/admin/static/js/rdm_custom_storage_location/rdm-institutional-storage-page.js b/admin/static/js/rdm_custom_storage_location/rdm-institutional-storage-page.js index 6f0c1b8876f..3af119bd14d 100644 --- a/admin/static/js/rdm_custom_storage_location/rdm-institutional-storage-page.js +++ b/admin/static/js/rdm_custom_storage_location/rdm-institutional-storage-page.js @@ -146,6 +146,14 @@ $('#s3compat_modal input').on('paste', function(e) { validateRequiredFields('s3compat'); }); +$('#s3compatsigv4_modal input').keyup(function () { + validateRequiredFields('s3compatsigv4'); +}); + +$('#s3compatsigv4_modal input').on('paste', function (e) { + validateRequiredFields('s3compatsigv4'); +}); + $('#s3compatinstitutions_modal input').keyup(function () { validateRequiredFields('s3compatinstitutions'); }); diff --git a/admin/templates/rdm_custom_storage_location/providers/s3compatsigv4_modal.html b/admin/templates/rdm_custom_storage_location/providers/s3compatsigv4_modal.html new file mode 100644 index 00000000000..a681a83be0d --- /dev/null +++ b/admin/templates/rdm_custom_storage_location/providers/s3compatsigv4_modal.html @@ -0,0 +1,60 @@ +{% load i18n %} + + + diff --git a/admin_tests/rdm_custom_storage_location/export_data/test_utils.py b/admin_tests/rdm_custom_storage_location/export_data/test_utils.py index a2eacb30d7c..d8f6bc28045 100644 --- a/admin_tests/rdm_custom_storage_location/export_data/test_utils.py +++ b/admin_tests/rdm_custom_storage_location/export_data/test_utils.py @@ -987,6 +987,64 @@ def test_save_s3compat_credentials__successfully(self): mock_test_s3compat_connection_patcher.stop() mock_update_storage_location_patcher.stop() + def test_save_s3compatsigv4_credentials__error_connection(self): + mock_test_s3compatsigv4_connection_patcher = 
mock.patch( + f'{EXPORT_DATA_UTIL_PATH}.test_s3compatsigv4_connection', + return_value=({'message': 'test'}, 400) + ) + mock_update_storage_location_patcher = mock.patch( + f'{EXPORT_DATA_UTIL_PATH}.update_storage_location', + return_value=None + ) + + mock_test_s3compatsigv4_connection = mock_test_s3compatsigv4_connection_patcher.start() + mock_update_storage_location = mock_update_storage_location_patcher.start() + + data, status_code = utils.save_s3compatsigv4_credentials( + institution_guid=self.institution.guid, + storage_name='testname', + host_url='http://host_url/', + access_key=self.access_key, + secret_key=self.secret_key, + bucket=self.bucket + ) + mock_test_s3compatsigv4_connection.assert_called() + mock_update_storage_location.assert_not_called() + nt.assert_equal(data, {'message': 'test'}) + nt.assert_equal(status_code, 400) + + mock_test_s3compatsigv4_connection_patcher.stop() + mock_update_storage_location_patcher.stop() + + def test_save_s3compatsigv4_credentials__successfully(self): + mock_test_s3compatsigv4_connection_patcher = mock.patch( + f'{EXPORT_DATA_UTIL_PATH}.test_s3compatsigv4_connection', + return_value=({'message': 'test'}, 200) + ) + mock_update_storage_location_patcher = mock.patch( + f'{EXPORT_DATA_UTIL_PATH}.update_storage_location', + return_value=None + ) + + mock_test_s3compatsigv4_connection = mock_test_s3compatsigv4_connection_patcher.start() + mock_update_storage_location = mock_update_storage_location_patcher.start() + + data, status_code = utils.save_s3compatsigv4_credentials( + institution_guid=self.institution.guid, + storage_name='testname', + host_url='http://host_url/', + access_key=self.access_key, + secret_key=self.secret_key, + bucket=self.bucket + ) + mock_test_s3compatsigv4_connection.assert_called() + mock_update_storage_location.assert_called() + nt.assert_equal(data, {'message': 'Saved credentials successfully!!'}) + nt.assert_equal(status_code, 200) + + mock_test_s3compatsigv4_connection_patcher.stop() + 
mock_update_storage_location_patcher.stop() + def test_save_dropboxbusiness_credentials__error_connection(self): mock_test_dropboxbusiness_connection_patcher = mock.patch( f'{EXPORT_DATA_UTIL_PATH}.test_dropboxbusiness_connection', diff --git a/admin_tests/rdm_custom_storage_location/export_data/views/test_location.py b/admin_tests/rdm_custom_storage_location/export_data/views/test_location.py index e1b28e7f377..6882e6692ee 100644 --- a/admin_tests/rdm_custom_storage_location/export_data/views/test_location.py +++ b/admin_tests/rdm_custom_storage_location/export_data/views/test_location.py @@ -257,6 +257,17 @@ def test_view_post_s3compat(self): request_post_response = self.view_post(params) nt.assert_equals(request_post_response.status_code, http_status.HTTP_400_BAD_REQUEST) + def test_view_post_s3compatsigv4(self): + params = { + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': 'Non-empty-secret-key', + 's3compatsigv4_secret_key': 'Non-empty-secret-key', + 's3compatsigv4_bucket': 'Water bucket', + 'provider_short_name': 's3compatsigv4', + } + request_post_response = self.view_post(params) + nt.assert_equals(request_post_response.status_code, http_status.HTTP_400_BAD_REQUEST) + def test_view_post_nextcloudinstitutions(self): params = { 'nextcloudinstitutions_host': 's3.compat.co.jp', @@ -374,6 +385,19 @@ def test_view_post_s3compat(self): request_post_response = self.view_post(params) nt.assert_equals(request_post_response.status_code, http_status.HTTP_400_BAD_REQUEST) + def test_view_post_s3compatsigv4(self): + params = { + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': 'Non-empty-secret-key', + 's3compatsigv4_secret_key': 'Non-empty-secret-key', + 's3compatsigv4_bucket': 'Water bucket', + 'provider_short_name': 's3compatsigv4', + 'storage_name': 'test storage_name', + + } + request_post_response = self.view_post(params) + nt.assert_equals(request_post_response.status_code, 
http_status.HTTP_400_BAD_REQUEST) + def test_view_post_nextcloudinstitutions(self): params = { 'nextcloudinstitutions_host': 's3.compat.co.jp', diff --git a/admin_tests/rdm_custom_storage_location/test_s3compatsigv4.py b/admin_tests/rdm_custom_storage_location/test_s3compatsigv4.py new file mode 100644 index 00000000000..e228a994f3c --- /dev/null +++ b/admin_tests/rdm_custom_storage_location/test_s3compatsigv4.py @@ -0,0 +1,256 @@ +from django.test import RequestFactory +from rest_framework import status as http_status +import json +import mock +from nose import tools as nt + +from addons.osfstorage.models import Region +from admin.rdm_custom_storage_location import views +from osf_tests.factories import ( + AuthUserFactory, + InstitutionFactory, +) +from tests.base import AdminTestCase + + +class TestConnection(AdminTestCase): + + def setUp(self): + super(TestConnection, self).setUp() + self.institution = InstitutionFactory() + self.user = AuthUserFactory() + self.user.affiliated_institutions.add(self.institution) + self.user.is_staff = True + self.user.save() + + def view_post(self, params): + request = RequestFactory().post( + 'fake_path', + json.dumps(params), + content_type='application/json' + ) + request.is_ajax() + request.user = self.user + return views.TestConnectionView.as_view()(request, institution_id=self.institution.id) + + def test_empty_keys_with_provider(self): + params = { + 's3compatsigv4_endpoint_url': '', + 's3compatsigv4_access_key': '', + 's3compatsigv4_secret_key': '', + 's3compatsigv4_bucket': 'Water bucket', + 'provider_short_name': 's3compatsigv4', + } + request_post_response = self.view_post(params) + nt.assert_equals(request_post_response.status_code, http_status.HTTP_400_BAD_REQUEST) + nt.assert_in('All the fields above are required.', request_post_response.content.decode()) + + def test_empty_access_key(self): + params = { + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': '', + 
's3compatsigv4_secret_key': 'Non-empty-secret-key', + 's3compatsigv4_bucket': 'Water bucket', + 'provider_short_name': 's3compatsigv4', + } + request_post_response = self.view_post(params) + nt.assert_equals(request_post_response.status_code, http_status.HTTP_400_BAD_REQUEST) + nt.assert_in('All the fields above are required.', request_post_response.content.decode()) + + def test_empty_secret_key(self): + params = { + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': 'Non-empty-secret-key', + 's3compatsigv4_secret_key': '', + 's3compatsigv4_bucket': 'Water bucket', + 'provider_short_name': 's3compatsigv4', + } + request_post_response = self.view_post(params) + nt.assert_equals(request_post_response.status_code, http_status.HTTP_400_BAD_REQUEST) + nt.assert_in('All the fields above are required.', request_post_response.content.decode()) + + @mock.patch('addons.s3compatsigv4.views.utils.can_list', return_value=False) + @mock.patch('addons.s3compatsigv4.views.utils.get_user_info', return_value=True) + def test_user_settings_cant_list(self, mock_get_user_info, mock_can_list): + params = { + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': 'Non-empty-secret-key', + 's3compatsigv4_secret_key': 'Non-empty-secret-key', + 's3compatsigv4_bucket': 'Water bucket', + 'provider_short_name': 's3compatsigv4', + } + request_post_response = self.view_post(params) + nt.assert_equals(request_post_response.status_code, http_status.HTTP_400_BAD_REQUEST) + nt.assert_in('Unable to list buckets.', request_post_response.content.decode()) + + @mock.patch('addons.s3compatsigv4.views.utils.bucket_exists', return_value=False) + @mock.patch('addons.s3compatsigv4.views.utils.can_list', return_value=True) + @mock.patch('addons.s3compatsigv4.views.utils.get_user_info') + def test_invalid_bucket(self, mock_get_user_info, mock_can_list, mock_bucket_exists): + params = { + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 
's3compatsigv4_access_key': 'Non-empty-secret-key', + 's3compatsigv4_secret_key': 'Non-empty-secret-key', + 's3compatsigv4_bucket': 'Water bucket', + 'provider_short_name': 's3compatsigv4', + } + request_post_response = self.view_post(params) + nt.assert_equals(request_post_response.status_code, http_status.HTTP_400_BAD_REQUEST) + nt.assert_in('Invalid bucket.', request_post_response.content.decode()) + + @mock.patch('addons.s3compatsigv4.views.utils.bucket_exists', return_value=True) + @mock.patch('addons.s3compatsigv4.views.utils.can_list', return_value=True) + @mock.patch('addons.s3compatsigv4.views.utils.get_user_info') + def test_success(self, mock_get_user_info, mock_can_list, mock_bucket_exists): + mock_get_user_info.return_value = {'id': '12346789', 'display_name': 's3.user'} + + params = { + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': 'Non-empty-secret-key', + 's3compatsigv4_secret_key': 'Non-empty-secret-key', + 's3compatsigv4_bucket': 'Water bucket', + 'provider_short_name': 's3compatsigv4', + } + request_post_response = self.view_post(params) + nt.assert_equals(request_post_response.status_code, http_status.HTTP_200_OK) + nt.assert_in('Credentials are valid', request_post_response.content.decode()) + + @mock.patch('addons.s3compatsigv4.views.utils.get_user_info', return_value=None) + def test_invalid_credentials(self, mock_uid): + params = { + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': 'Non-empty-secret-key', + 's3compatsigv4_secret_key': 'Non-empty-secret-key', + 's3compatsigv4_bucket': 'Water bucket', + 'provider_short_name': 's3compatsigv4', + } + request_post_response = self.view_post(params) + nt.assert_equals(request_post_response.status_code, http_status.HTTP_400_BAD_REQUEST) + nt.assert_in('Unable to access account.\\n' + 'Check to make sure that the above credentials are valid, ' + 'and that they have permission to list buckets.', request_post_response.content.decode()) + + 
+class TestSaveCredentials(AdminTestCase): + + def setUp(self): + super(TestSaveCredentials, self).setUp() + self.institution = InstitutionFactory() + self.user = AuthUserFactory() + self.user.affiliated_institutions.add(self.institution) + self.user.is_staff = True + self.user.save() + + def view_post(self, params): + request = RequestFactory().post( + 'fake_path', + json.dumps(params), + content_type='application/json' + ) + request.is_ajax() + request.user = self.user + return views.SaveCredentialsView.as_view()(request, institution_id=self.institution.id) + + def test_provider_missing(self): + response = self.view_post({ + 'storage_name': 'My storage', + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': 'Non-empty-access-key', + 's3compatsigv4_secret_key': 'Non-empty-secret-key', + 's3compatsigv4_bucket': 'Cute bucket', + 's3compatsigv4_server_side_encryption': 'False', + }) + + nt.assert_equals(response.status_code, http_status.HTTP_400_BAD_REQUEST) + nt.assert_in('Provider is missing.', response.content.decode()) + + def test_invalid_provider(self): + response = self.view_post({ + 'storage_name': 'My storage', + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': 'Non-empty-access-key', + 's3compatsigv4_secret_key': 'Non-empty-secret-key', + 's3compatsigv4_bucket': 'Cute bucket', + 's3compatsigv4_server_side_encryption': 'False', + 'provider_short_name': 'invalidprovider', + }) + + nt.assert_equals(response.status_code, http_status.HTTP_400_BAD_REQUEST) + nt.assert_in('Invalid provider.', response.content.decode()) + + @mock.patch('admin.rdm_custom_storage_location.utils.test_s3compatsigv4_connection') + def test_success(self, mock_testconnection): + mock_testconnection.return_value = {'message': 'Nice'}, http_status.HTTP_200_OK + response = self.view_post({ + 'storage_name': 'My storage', + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': 'Non-empty-access-key', + 
's3compatsigv4_secret_key': 'Non-empty-secret-key', + 's3compatsigv4_bucket': 'Cute bucket', + 's3compatsigv4_server_side_encryption': 'False', + 'provider_short_name': 's3compatsigv4', + }) + + nt.assert_equals(response.status_code, http_status.HTTP_200_OK) + nt.assert_in('Saved credentials successfully!!', response.content.decode()) + + institution_storage = Region.objects.filter(_id=self.institution._id).first() + nt.assert_is_not_none(institution_storage) + nt.assert_equals(institution_storage.name, 'My storage') + + wb_credentials = institution_storage.waterbutler_credentials + nt.assert_equals(wb_credentials['storage']['access_key'], 'Non-empty-access-key') + nt.assert_equals(wb_credentials['storage']['secret_key'], 'Non-empty-secret-key') + + wb_settings = institution_storage.waterbutler_settings + nt.assert_equals(wb_settings['storage']['provider'], 's3compatsigv4') + nt.assert_equals(wb_settings['storage']['bucket'], 'Cute bucket') + + @mock.patch('admin.rdm_custom_storage_location.utils.test_s3compatsigv4_connection') + def test_invalid_credentials(self, mock_testconnection): + mock_testconnection.return_value = {'message': 'NG'}, http_status.HTTP_400_BAD_REQUEST + + response = self.view_post({ + 'storage_name': 'My storage', + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': 'Wrong-access-key', + 's3compatsigv4_secret_key': 'Wrong-secret-key', + 's3compatsigv4_bucket': 'Cute bucket', + 's3compatsigv4_server_side_encryption': 'False', + 'provider_short_name': 's3compatsigv4', + }) + + nt.assert_equals(response.status_code, http_status.HTTP_400_BAD_REQUEST) + nt.assert_in('NG', response.content.decode()) + nt.assert_false(Region.objects.filter(_id=self.institution._id).exists()) + + @mock.patch('admin.rdm_custom_storage_location.utils.test_s3compatsigv4_connection') + def test_success_superuser(self, mock_testconnection): + self.user.affiliated_institutions.clear() + self.user.is_superuser = True + self.user.save() + 
mock_testconnection.return_value = {'message': 'Nice'}, http_status.HTTP_200_OK + response = self.view_post({ + 'storage_name': 'My storage', + 's3compatsigv4_endpoint_url': 's3.compat.co.jp', + 's3compatsigv4_access_key': 'Non-empty-access-key', + 's3compatsigv4_secret_key': 'Non-empty-secret-key', + 's3compatsigv4_bucket': 'Cute bucket', + 's3compatsigv4_server_side_encryption': 'False', + 'provider_short_name': 's3compatsigv4', + }) + + nt.assert_equals(response.status_code, http_status.HTTP_200_OK) + nt.assert_in('Saved credentials successfully!!', response.content.decode()) + + institution_storage = Region.objects.filter(_id=self.institution._id).first() + nt.assert_is_not_none(institution_storage) + nt.assert_equals(institution_storage.name, 'My storage') + + wb_credentials = institution_storage.waterbutler_credentials + nt.assert_equals(wb_credentials['storage']['access_key'], 'Non-empty-access-key') + nt.assert_equals(wb_credentials['storage']['secret_key'], 'Non-empty-secret-key') + + wb_settings = institution_storage.waterbutler_settings + nt.assert_equals(wb_settings['storage']['provider'], 's3compatsigv4') + nt.assert_equals(wb_settings['storage']['bucket'], 'Cute bucket') diff --git a/admin_tests/rdm_custom_storage_location/test_utils.py b/admin_tests/rdm_custom_storage_location/test_utils.py index 96f1d049336..d8dcef54b35 100644 --- a/admin_tests/rdm_custom_storage_location/test_utils.py +++ b/admin_tests/rdm_custom_storage_location/test_utils.py @@ -7,12 +7,14 @@ get_providers, add_node_settings_to_projects, save_s3compatb3_credentials, + save_s3compatsigv4_credentials, wd_info_for_institutions, create_storage_info_template, get_osfstorage_info, get_institution_addon_info, get_s3_info, get_s3compat_info, + get_s3compatsigv4_info, get_s3compatinstitutions_info, get_ociinstitutions_info, get_nextcloudinstitutions_info, @@ -47,12 +49,13 @@ def test_get_providers(self): nt.assert_in('swift', provider_list_short_name, 'swift') 
nt.assert_in('ociinstitutions', provider_list_short_name, 'ociinstitutions') nt.assert_in('s3compat', provider_list_short_name, 's3compat') + nt.assert_in('s3compatsigv4', provider_list_short_name, 's3compatsigv4') nt.assert_in('s3compatinstitutions', provider_list_short_name, 's3compatinstitutions') provider_list = get_providers(available_list=[]) nt.assert_equal(len(provider_list), 0) - available_list = ['s3', 's3compat'] + available_list = ['s3', 's3compat', 's3compatsigv4'] provider_list = get_providers(available_list=available_list) provider_list_short_name = [p.short_name for p in provider_list] nt.assert_list_equal(provider_list_short_name, available_list) @@ -71,6 +74,20 @@ def test_save_s3compatb3_credentials(self, nt.assert_equal(response, {'message': 'Saved credentials successfully!!'}) nt.assert_equal(status, http_status.HTTP_200_OK) + @patch('osf.utils.external_util.remove_region_external_account') + @patch('admin.rdm_custom_storage_location.utils.update_storage') + @patch('admin.rdm_custom_storage_location.utils.test_s3compatsigv4_connection') + def test_save_s3compatsigv4_credentials(self, + mock_testconnection, mock_update_storage, + mock_remove_region_external_account): + mock_testconnection.return_value = {'message': 'Nice'}, http_status.HTTP_200_OK + mock_update_storage.return_value = {} + mock_remove_region_external_account.return_value = None + response, status = save_s3compatsigv4_credentials('guid_test', 'My storage', 's3.compat.co.jp', + 'Non-empty-access-key', 'Non-empty-secret-key', 'Cute bucket') + nt.assert_equal(response, {'message': 'Saved credentials successfully!!'}) + nt.assert_equal(status, http_status.HTTP_200_OK) + def test_wd_info_for_institutions(self): for_institution_providers = [ 's3compatinstitutions', @@ -268,6 +285,28 @@ def test_get_s3compat_info(self): } nt.assert_equal(result, expected) + def test_get_s3compatsigv4_info(self): + """Test get_s3compatsigv4_info function""" + wb_credentials = { + 'host': 'test_host', + 
'access_key': 'test_key', + 'secret_key': 'test_secret' + } + wb_settings = { + 'bucket': 'test_bucket', + 'encrypt_uploads': True + } + + result = get_s3compatsigv4_info(wb_credentials, wb_settings) + + expected = { + 'host': {'field_name': 'Endpoint URL', 'value': 'test_host'}, + 'access_key': {'field_name': 'Access Key', 'value': 'test_key'}, + 'bucket': {'field_name': 'Bucket', 'value': 'test_bucket'}, + 'encrypt_uploads': {'field_name': 'Enable Server Side Encryption', 'value': True} + } + nt.assert_equal(result, expected) + @mock.patch('admin.rdm_custom_storage_location.utils.get_institution_addon_info') def test_get_s3compatinstitutions_info(self, mock_get_institution_addon_info): """Test get_s3compatinstitutions_info function""" diff --git a/admin_tests/rdm_custom_storage_location/test_views.py b/admin_tests/rdm_custom_storage_location/test_views.py index e5f468483f0..fac5695ae67 100644 --- a/admin_tests/rdm_custom_storage_location/test_views.py +++ b/admin_tests/rdm_custom_storage_location/test_views.py @@ -41,6 +41,7 @@ def setUp(self): 'GoogleDriveAddonConfig', 'SwiftAddonAppConfig', 'S3CompatAddonAppConfig', + 'S3CompatSigV4AddonAppConfig', 'NextcloudAddonAppConfig', 'DropboxBusinessAddonAppConfig', 'NextcloudInstitutionsAddonAppConfig', diff --git a/api/base/settings/defaults.py b/api/base/settings/defaults.py index 781dbd6fb82..a43a21d223d 100644 --- a/api/base/settings/defaults.py +++ b/api/base/settings/defaults.py @@ -370,6 +370,9 @@ INSTALLED_APPS += ('addons.s3compat',) ADDONS_FOLDER_CONFIGURABLE.append('s3compat') ADDONS_OAUTH.append('s3compat') +INSTALLED_APPS += ('addons.s3compatsigv4',) +ADDONS_FOLDER_CONFIGURABLE.append('s3compatsigv4') +ADDONS_OAUTH.append('s3compatsigv4') INSTALLED_APPS += ('addons.s3compatb3',) ADDONS_FOLDER_CONFIGURABLE.append('s3compatb3') ADDONS_OAUTH.append('s3compatb3') diff --git a/framework/addons/data/addons.json b/framework/addons/data/addons.json index 1394d294d3a..8f5558bfaef 100644 --- 
a/framework/addons/data/addons.json +++ b/framework/addons/data/addons.json @@ -499,6 +499,36 @@ "text": "Making a GakuNin RDM project public or private is independent of making an S3 Compatible Storage bucket public or private. The GakuNin RDM does not alter the permissions of linked S3 Compatible Storage buckets." } }, + "S3 Compatible Storage (SigV4)": { + "Forking": { + "status": "partial", + "text": "Forking a project or component does not copy S3 Compatible Storage (SigV4) authorization unless the user forking the project is the same user who authorized the S3 Compatible Storage (SigV4) add-on in the source project being forked." + }, + "Logs": { + "status": "partial", + "text": "The GakuNin RDM keeps track of changes you make to your S3 Compatible Storage (SigV4) buckets through the GakuNin RDM, but not for changes made using S3 Compatible Storage (SigV4) directly." + }, + "Registering": { + "status": "partial", + "text": "S3 Compatible Storage (SigV4) content will be registered, but version history will not be copied to the registration." + }, + "Add / update files": { + "status": "full", + "text": "Adding/updating files in the project via GakuNin RDM will be reflected in S3 Compatible Storage (SigV4)." + }, + "View / download file versions": { + "status": "partial", + "text": "The S3 Compatible Storage (SigV4) add-on supports file versions if versioning is enabled for your S3 Compatible Storage (SigV4) buckets." + }, + "Delete files": { + "status": "full", + "text": "Files deleted via GakuNin RDM will be deleted in S3 Compatible Storage (SigV4)." + }, + "Permissions": { + "status": "partial", + "text": "Making a GakuNin RDM project public or private is independent of making an S3 Compatible Storage (SigV4) bucket public or private. The GakuNin RDM does not alter the permissions of linked S3 Compatible Storage (SigV4) buckets." 
+ } + }, "Oracle Cloud Infrastructure Object Storage": { "Forking": { "status": "partial", diff --git a/osf/migrations/0121_remove_wiki_fields_from_node.py b/osf/migrations/0121_remove_wiki_fields_from_node.py index ffc432420db..d21071cf9ab 100644 --- a/osf/migrations/0121_remove_wiki_fields_from_node.py +++ b/osf/migrations/0121_remove_wiki_fields_from_node.py @@ -8,6 +8,7 @@ class Migration(migrations.Migration): dependencies = [ + ('addons_wiki', '0010_migrate_node_wiki_pages'), ('osf', '0120_merge_20180716_1457'), ] diff --git a/osf/migrations/0237_ensure_schema_and_mappings.py b/osf/migrations/0237_ensure_schema_and_mappings.py index 0af8640c726..282237810c6 100644 --- a/osf/migrations/0237_ensure_schema_and_mappings.py +++ b/osf/migrations/0237_ensure_schema_and_mappings.py @@ -29,7 +29,7 @@ def ensure_registration_mappings(*args): class Migration(migrations.Migration): dependencies = [ - ('addons_weko', '0005_registrationmetadatamapping'), + ('addons_weko', '0006_registrationmetadatamapping_filename'), ('osf', '0236_add_columns_to_registration_schema_block'), ] diff --git a/osf/migrations/0261_auto_20260112_1402.py b/osf/migrations/0261_auto_20260112_1402.py new file mode 100644 index 00000000000..d5956276fa0 --- /dev/null +++ b/osf/migrations/0261_auto_20260112_1402.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.28 on 2026-01-12 14:02 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0260_merge_20251126_1230'), + ] + + operations = [ + migrations.CreateModel( + name='S3CompatSigV4FileNode', + fields=[ + ], + options={ + 'proxy': True, + 'indexes': [], + }, + bases=('osf.basefilenode',), + ), + migrations.AlterField( + model_name='basefilenode', + name='type', + field=models.CharField(choices=[('osf.trashedfilenode', 'trashed file node'), ('osf.trashedfile', 'trashed file'), ('osf.trashedfolder', 'trashed folder'), 
('osf.osfstoragefilenode', 'osf storage file node'), ('osf.osfstoragefile', 'osf storage file'), ('osf.osfstoragefolder', 'osf storage folder'), ('osf.bitbucketfilenode', 'bitbucket file node'), ('osf.bitbucketfolder', 'bitbucket folder'), ('osf.bitbucketfile', 'bitbucket file'), ('osf.boxfilenode', 'box file node'), ('osf.boxfolder', 'box folder'), ('osf.boxfile', 'box file'), ('osf.dataversefilenode', 'dataverse file node'), ('osf.dataversefolder', 'dataverse folder'), ('osf.dataversefile', 'dataverse file'), ('osf.dropboxfilenode', 'dropbox file node'), ('osf.dropboxfolder', 'dropbox folder'), ('osf.dropboxfile', 'dropbox file'), ('osf.figsharefilenode', 'figshare file node'), ('osf.figsharefolder', 'figshare folder'), ('osf.figsharefile', 'figshare file'), ('osf.githubfilenode', 'github file node'), ('osf.githubfolder', 'github folder'), ('osf.githubfile', 'github file'), ('osf.gitlabfilenode', 'git lab file node'), ('osf.gitlabfolder', 'git lab folder'), ('osf.gitlabfile', 'git lab file'), ('osf.googledrivefilenode', 'google drive file node'), ('osf.googledrivefolder', 'google drive folder'), ('osf.googledrivefile', 'google drive file'), ('osf.onedrivefilenode', 'one drive file node'), ('osf.onedrivefolder', 'one drive folder'), ('osf.onedrivefile', 'one drive file'), ('osf.owncloudfilenode', 'owncloud file node'), ('osf.owncloudfolder', 'owncloud folder'), ('osf.owncloudfile', 'owncloud file'), ('osf.s3filenode', 's3 file node'), ('osf.s3folder', 's3 folder'), ('osf.s3file', 's3 file'), ('osf.swiftfilenode', 'swift file node'), ('osf.swiftfolder', 'swift folder'), ('osf.swiftfile', 'swift file'), ('osf.azureblobstoragefilenode', 'azure blob storage file node'), ('osf.azureblobstoragefolder', 'azure blob storage folder'), ('osf.azureblobstoragefile', 'azure blob storage file'), ('osf.wekofilenode', 'weko file node'), ('osf.wekofolder', 'weko folder'), ('osf.wekofile', 'weko file'), ('osf.iqbrimsfilenode', 'iqbrims file node'), ('osf.iqbrimsfolder', 'iqbrims 
folder'), ('osf.iqbrimsfile', 'iqbrims file'), ('osf.dropboxbusinessfilenode', 'dropbox business file node'), ('osf.dropboxbusinessfolder', 'dropbox business folder'), ('osf.dropboxbusinessfile', 'dropbox business file'), ('osf.nextcloudfilenode', 'nextcloud file node'), ('osf.nextcloudfolder', 'nextcloud folder'), ('osf.nextcloudfile', 'nextcloud file'), ('osf.nextcloudinstitutionsfilenode', 'nextcloud institutions file node'), ('osf.nextcloudinstitutionsfolder', 'nextcloud institutions folder'), ('osf.nextcloudinstitutionsfile', 'nextcloud institutions file'), ('osf.s3compatinstitutionsfilenode', 's3 compat institutions file node'), ('osf.s3compatinstitutionsfolder', 's3 compat institutions folder'), ('osf.s3compatinstitutionsfile', 's3 compat institutions file'), ('osf.ociinstitutionsfilenode', 'oci institutions file node'), ('osf.ociinstitutionsfolder', 'oci institutions folder'), ('osf.ociinstitutionsfile', 'oci institutions file'), ('osf.onedrivebusinessfilenode', 'one drive business file node'), ('osf.onedrivebusinessfolder', 'one drive business folder'), ('osf.onedrivebusinessfile', 'one drive business file'), ('osf.s3compatfilenode', 's3 compat file node'), ('osf.s3compatfolder', 's3 compat folder'), ('osf.s3compatfile', 's3 compat file'), ('osf.s3compatsigv4filenode', 's3 compat sig v4 file node'), ('osf.s3compatsigv4folder', 's3 compat sig v4 folder'), ('osf.s3compatsigv4file', 's3 compat sig v4 file'), ('osf.s3compatb3filenode', 's3 compat b3 file node'), ('osf.s3compatb3folder', 's3 compat b3 folder'), ('osf.s3compatb3file', 's3 compat b3 file')], db_index=True, max_length=255), + ), + migrations.CreateModel( + name='S3CompatSigV4File', + fields=[ + ], + options={ + 'proxy': True, + 'indexes': [], + }, + bases=('osf.s3compatsigv4filenode', models.Model), + ), + migrations.CreateModel( + name='S3CompatSigV4Folder', + fields=[ + ], + options={ + 'proxy': True, + 'indexes': [], + }, + bases=('osf.s3compatsigv4filenode', models.Model), + ), + ] diff --git 
a/osf/models/export_data_location.py b/osf/models/export_data_location.py index 58bb933d062..5c89b3424c1 100644 --- a/osf/models/export_data_location.py +++ b/osf/models/export_data_location.py @@ -78,6 +78,12 @@ def serialize_waterbutler_credentials(self, provider_name): 'secret_key': storage_credentials['secret_key'], 'host': storage_credentials['host'], } + elif provider_name == 's3compatsigv4': + result = { + 'access_key': storage_credentials['access_key'], + 'secret_key': storage_credentials['secret_key'], + 'host': storage_credentials['host'], + } elif provider_name == 'nextcloudinstitutions': """ /storage @@ -117,6 +123,11 @@ def serialize_waterbutler_settings(self, provider_name): 'bucket': storage_settings['bucket'], 'encrypt_uploads': storage_settings['folder']['encrypt_uploads'], } + elif provider_name == 's3compatsigv4': + result = { + 'bucket': storage_settings['bucket'], + 'encrypt_uploads': storage_settings['folder']['encrypt_uploads'], + } elif provider_name == 'nextcloudinstitutions': """ /storage diff --git a/osf_tests/test_export_data_location.py b/osf_tests/test_export_data_location.py index 7ff582574de..badb8f3acaf 100644 --- a/osf_tests/test_export_data_location.py +++ b/osf_tests/test_export_data_location.py @@ -90,6 +90,11 @@ def test_serialize_waterbutler_credentials_s3compat(self): expect_value = dict(self.export_data_location.waterbutler_credentials['storage']).copy() nt.assert_equal(res['secret_key'], expect_value['secret_key']) + def test_serialize_waterbutler_credentials_s3compatsigv4(self): + res = self.export_data_location.serialize_waterbutler_credentials('s3compatsigv4') + expect_value = dict(self.export_data_location.waterbutler_credentials['storage']).copy() + nt.assert_equal(res['secret_key'], expect_value['secret_key']) + def test_serialize_waterbutler_credentials_nextcloudinstitutions(self): res = self.export_data_location.serialize_waterbutler_credentials('nextcloudinstitutions') nt.assert_equal(res['host'], 
self.export_data_location.waterbutler_credentials['external_account']['oauth_secret']) @@ -106,6 +111,10 @@ def test_serialize_waterbutler_settings_s3compat(self): res = self.export_data_location.serialize_waterbutler_settings('s3compat') nt.assert_equal(res['bucket'], self.export_data_location.waterbutler_settings['storage']['bucket']) + def test_serialize_waterbutler_settings_s3compatsigv4(self): + res = self.export_data_location.serialize_waterbutler_settings('s3compatsigv4') + nt.assert_equal(res['bucket'], self.export_data_location.waterbutler_settings['storage']['bucket']) + def test_serialize_waterbutler_settings_nextcloudinstitutions(self): res = self.export_data_location.serialize_waterbutler_settings('nextcloudinstitutions') nt.assert_not_equal(res, None) diff --git a/scripts/translations/messages_addonsJson.js b/scripts/translations/messages_addonsJson.js index 63e6ddd49e8..2c060180391 100644 --- a/scripts/translations/messages_addonsJson.js +++ b/scripts/translations/messages_addonsJson.js @@ -36,6 +36,8 @@ var addonTerms_jupyterhub = _('\n\n

JupyterHub Add-on Terms

\n\nS3 Compatible Storage Add-on Terms\n\n
\n\n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n
FunctionStatus
PermissionsMaking a GakuNin RDM project public or private is independent of making an S3 Compatible Storage bucket public or private. The GakuNin RDM does not alter the permissions of linked S3 Compatible Storage buckets.
View / download file versionsThe S3 add-on supports file versions if versioning is enabled for your S3 Compatible Storage buckets.
Add / update filesAdding/updating files in the project via GakuNin RDM will be reflected in S3 Compatible Storage.
Delete filesFiles deleted via GakuNin RDM will be deleted in S3 Compatible Storage.
LogsThe GakuNin RDM keeps track of changes you make to your S3 Compatible Storage buckets through the GakuNin RDM, but not for changes made using S3 Compatible Storage directly.
ForkingForking a project or component does not copy S3 Compatible Storage authorization unless the user forking the project is the same user who authorized the S3 Compatible Storage add-on in the source project being forked.
\n\n
    \n
  • This add-on connects your GakuNin RDM project to an external service. Use of this service is bound by its terms and conditions. The GakuNin RDM is not responsible for the service or for your use thereof.
  • \n
  • This add-on allows you to store files using an external service. Files added to this add-on are not stored within the GakuNin RDM.
  • \n
\n'); +var addonTerms_s3compatsigv4 = _('\n\n

S3 Compatible Storage (SigV4) Add-on Terms

\n\n\n\n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n
FunctionStatus
PermissionsMaking a GakuNin RDM project public or private is independent of making an S3 Compatible Storage (SigV4) bucket public or private. The GakuNin RDM does not alter the permissions of linked S3 Compatible Storage (SigV4) buckets.
View / download file versionsThe S3 Compatible Storage (SigV4) add-on supports file versions if versioning is enabled for your S3 Compatible Storage (SigV4) buckets.
Add / update filesAdding/updating files in the project via GakuNin RDM will be reflected in S3 Compatible Storage (SigV4).
Delete filesFiles deleted via GakuNin RDM will be deleted in S3 Compatible Storage (SigV4).
LogsThe GakuNin RDM keeps track of changes you make to your S3 Compatible Storage (SigV4) buckets through the GakuNin RDM, but not for changes made using S3 Compatible Storage (SigV4) directly.
ForkingForking a project or component does not copy S3 Compatible Storage (SigV4) authorization unless the user forking the project is the same user who authorized the S3 Compatible Storage (SigV4) add-on in the source project being forked.
\n\n
    \n
  • This add-on connects your GakuNin RDM project to an external service. Use of this service is bound by its terms and conditions. The GakuNin RDM is not responsible for the service or for your use thereof.
  • \n
  • This add-on allows you to store files using an external service. Files added to this add-on are not stored within the GakuNin RDM.
  • \n
\n'); + var addonTerms_s3compatb3 = _('\n\n

Oracle Cloud Infrastructure Object Storage Add-on Terms

\n\n\n\n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n
FunctionStatus
PermissionsMaking a GakuNin RDM project public or private is independent of making an Oracle Cloud Infrastructure Object Storage bucket public or private. The GakuNin RDM does not alter the permissions of linked Oracle Cloud Infrastructure Object Storage buckets.
View / download file versionsThe Oracle Cloud Infrastructure Object Storage add-on does not support file versions.
Add / update filesAdding/updating files in the project via GakuNin RDM will be reflected in Oracle Cloud Infrastructure Object Storage.
Delete filesFiles deleted via GakuNin RDM will be deleted in Oracle Cloud Infrastructure Object Storage.
LogsThe GakuNin RDM keeps track of changes you make to your Oracle Cloud Infrastructure Object Storage buckets through the GakuNin RDM, but not for changes made using Oracle Cloud Infrastructure Object Storage directly.
ForkingForking a project or component does not copy Oracle Cloud Infrastructure Object Storage authorization unless the user forking the project is the same user who authorized the Oracle Cloud Infrastructure Object Storage add-on in the source project being forked.
\n\n
    \n
  • This add-on connects your GakuNin RDM project to an external service. Use of this service is bound by its terms and conditions. The GakuNin RDM is not responsible for the service or for your use thereof.
  • \n
  • This add-on allows you to store files using an external service. Files added to this add-on are not stored within the GakuNin RDM.
  • \n
\n'); var addonTerms_nextcloud = _('\n\n

Nextcloud Add-on Terms

\n\n\n\n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n
FunctionStatus
PermissionsMaking a GakuNin RDM project public or private is independent of Nextcloud privacy. The GakuNin RDM does not alter the permissions of linked Nextcloud folders.
View / download file versionsNextcloud files can be viewed/downloaded via GakuNin RDM, but version history is not supported by the Nextcloud WebDAV API.
Add / update filesAdding/updating files in the project via GakuNin RDM will be reflected in Nextcloud.
Delete filesFiles deleted via GakuNin RDM will be deleted in Nextcloud.
LogsGakuNin RDM keeps track of changes you make to your Nextcloud content through GakuNin RDM, but not for changes made using Nextcloud directly.
ForkingForking a project or component does not copy Nextcloud authorization unless the user forking the project is the same user who authorized the Nextcloud add-on in the source project being forked.
\n\n
    \n
  • This add-on connects your GakuNin RDM project to an external service. Use of this service is bound by its terms and conditions. The GakuNin RDM is not responsible for the service or for your use thereof.
  • \n
  • This add-on allows you to store files using an external service. Files added to this add-on are not stored within the GakuNin RDM.
  • \n
\n'); diff --git a/website/notifications/constants.py b/website/notifications/constants.py index 532a7a6b87a..fb2d82a6fef 100644 --- a/website/notifications/constants.py +++ b/website/notifications/constants.py @@ -46,6 +46,7 @@ 'nextcloudinstitutions': 'Nextcloud for Institutions', 's3compatinstitutions': 'S3 Compatible Storage for Institutions', 's3compatb3': 'S3 Compatible Storage', + 's3compatsigv4': 'S3 Compatible Storage (SigV4)', 'ociinstitutions': 'Oracle Cloud Infrastructure for Institutions', 'iqbrims': 'IQB-RIMS', 'onedrivebusiness': 'OneDrive for Office365', diff --git a/website/static/js/logActionsList_extract.js b/website/static/js/logActionsList_extract.js index 30f10c090e6..1dbd0d1ee36 100644 --- a/website/static/js/logActionsList_extract.js +++ b/website/static/js/logActionsList_extract.js @@ -243,6 +243,15 @@ var s3compat_folder_created = _('${user} created folder ${path} in S3 Compatible var s3compat_node_authorized = _('${user} authorized the S3 Compatible Storage addon for ${node}'); var s3compat_node_deauthorized = _('${user} deauthorized the S3 Compatible Storage addon for ${node}'); var s3compat_node_deauthorized_no_user = _('S3 Compatible Storage addon for ${node} deauthorized'); +var s3compatsigv4_bucket_linked = _('${user} linked the S3 Compatible Storage (SigV4) bucket ${bucket} to ${node}'); +var s3compatsigv4_bucket_unlinked = _('${user} unselected the S3 Compatible Storage (SigV4) bucket ${bucket} in ${node}'); +var s3compatsigv4_file_added = _('${user} added file ${path} to S3 Compatible Storage (SigV4) bucket ${bucket} in ${node}'); +var s3compatsigv4_file_removed = _('${user} removed ${path} in S3 Compatible Storage (SigV4) bucket ${bucket} in ${node}'); +var s3compatsigv4_file_updated = _('${user} updated file ${path} in S3 Compatible Storage (SigV4) bucket ${bucket} in ${node}'); +var s3compatsigv4_folder_created = _('${user} created folder ${path} in S3 Compatible Storage (SigV4) bucket ${bucket} in ${node}'); +var 
s3compatsigv4_node_authorized = _('${user} authorized the S3 Compatible Storage (SigV4) addon for ${node}'); +var s3compatsigv4_node_deauthorized = _('${user} deauthorized the S3 Compatible Storage (SigV4) addon for ${node}'); +var s3compatsigv4_node_deauthorized_no_user = _('S3 Compatible Storage (SigV4) addon for ${node} deauthorized'); var s3compatinstitutions_file_added = _('${user} added file ${path} to S3 Compatible Storage for Institutions in ${node}'); var s3compatinstitutions_file_removed = _('${user} removed ${path_type} ${path} from S3 Compatible Storage for Institutions in ${node}'); var s3compatinstitutions_file_updated = _('${user} updated file ${path} in S3 Compatible Storage for Institutions in ${node}'); diff --git a/website/static/js/osfLanguage.js b/website/static/js/osfLanguage.js index 0dfffda75ba..b54701019d4 100644 --- a/website/static/js/osfLanguage.js +++ b/website/static/js/osfLanguage.js @@ -124,6 +124,11 @@ module.exports = { userSettingsError: 'Could not retrieve settings. Please refresh the page or ' + 'contact ' + $osf.osfSupportLink() + ' if the problem persists.', }, + s3compatsigv4: { + authError: 'Could not connect to S3 Compatible Storage (SigV4) at this time. Please try again later.', + userSettingsError: 'Could not retrieve settings. Please refresh the page or ' + + 'contact ' + $osf.osfSupportLink() + ' if the problem persists.', + }, nextcloud: { authError: 'Invalid Nextcloud server', authInvalid: 'Invalid credentials. Please enter a valid username and password.', diff --git a/website/static/js/translations/osfLanguage_en.js b/website/static/js/translations/osfLanguage_en.js index d3b2ad1e408..0a389c238ed 100644 --- a/website/static/js/translations/osfLanguage_en.js +++ b/website/static/js/translations/osfLanguage_en.js @@ -125,6 +125,11 @@ module.exports = { userSettingsError: 'Could not retrieve settings. 
Please refresh the page or ' + 'contact ' + $osf.osfSupportLink() + ' if the problem persists.', }, + s3compatsigv4: { + authError: 'Could not connect to S3 Compatible Storage (SigV4) at this time. Please try again later.', + userSettingsError: 'Could not retrieve settings. Please refresh the page or ' + + 'contact ' + $osf.osfSupportLink() + ' if the problem persists.', + }, nextcloud: { authError: 'Invalid Nextcloud server', authInvalid: 'Invalid credentials. Please enter a valid username and password.', diff --git a/website/static/js/translations/osfLanguage_ja.js b/website/static/js/translations/osfLanguage_ja.js index 1d7ec577e19..695eda80162 100644 --- a/website/static/js/translations/osfLanguage_ja.js +++ b/website/static/js/translations/osfLanguage_ja.js @@ -124,6 +124,11 @@ module.exports = { userSettingsError: '設定を取得できませんでした。 ページを更新するか、' + '問題が解決しない場合は' + $osf.osfSupportLink() + 'へお問い合わせください。', }, + s3compatsigv4: { + authError: '現在、S3互換ストレージ(SigV4)に接続できませんでした。 後でもう一度やり直してください。', + userSettingsError: '設定を取得できませんでした。 ページを更新するか、' + + '問題が解決しない場合は' + $osf.osfSupportLink() + 'へお問い合わせください。', + }, nextcloud: { authError: '無効なNextcloudサーバー', authInvalid: '無効な資格情報。 有効なユーザー名とパスワードを入力してください。', diff --git a/website/static/storageAddons.json b/website/static/storageAddons.json index 88761556420..e4eb8e80714 100644 --- a/website/static/storageAddons.json +++ b/website/static/storageAddons.json @@ -59,6 +59,10 @@ "fullName": "S3 Compatible Storage", "externalView": false }, + "s3compatsigv4": { + "fullName": "S3 Compatible Storage (SigV4)", + "externalView": false + }, "s3compatb3": { "fullName": "S3 Compatible Storage", "externalView": false From bf3bef440c3cd4af195406e528a68f820bf89644 Mon Sep 17 00:00:00 2001 From: TOMONORI ENDOU Date: Fri, 20 Feb 2026 10:25:27 +0900 Subject: [PATCH 2/3] fix(migration): add merge migration for s3compatsigv4 Resolve conflicting migrations: multiple leaf nodes (0263_merge_20260130_1152, 0261_auto_20260112_1402) in osf. 
--- osf/migrations/0264_merge_s3compatsigv4.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 osf/migrations/0264_merge_s3compatsigv4.py diff --git a/osf/migrations/0264_merge_s3compatsigv4.py b/osf/migrations/0264_merge_s3compatsigv4.py new file mode 100644 index 00000000000..441633f70f0 --- /dev/null +++ b/osf/migrations/0264_merge_s3compatsigv4.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0263_merge_20260130_1152'), + ('osf', '0261_auto_20260112_1402'), + ] + + operations = [ + ] From 4d9d450d6eb9f7a94bff8de464aa01129f1d1641 Mon Sep 17 00:00:00 2001 From: TOMONORI ENDOU Date: Fri, 20 Feb 2026 13:10:20 +0900 Subject: [PATCH 3/3] fix(s3compatsigv4): rename duplicate HTML id to avoid conflict with s3compat The selected_service id was duplicated between s3compat and s3compatsigv4 credentials modals, causing Playwright strict mode violations when both addons are enabled. --- .../templates/s3compatsigv4_credentials_modal.mako | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/addons/s3compatsigv4/templates/s3compatsigv4_credentials_modal.mako b/addons/s3compatsigv4/templates/s3compatsigv4_credentials_modal.mako index 2110c1f4102..f9c2d7af6be 100644 --- a/addons/s3compatsigv4/templates/s3compatsigv4_credentials_modal.mako +++ b/addons/s3compatsigv4/templates/s3compatsigv4_credentials_modal.mako @@ -15,7 +15,7 @@
- +